Adding support for timelapse capture using the still camera's takePicture().
Also moving the entire implementation into a new class, CameraSourceTimeLapse,
which inherits from CameraSource.

For timelapse capture using the still camera, we start a thread that runs a
loop: it calls Camera::takePicture() and then sleeps until the next frame
should be captured.
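
In outline (an abridged sketch of the loop added in CameraSourceTimeLapse.cpp
below; the full version is in this change):

    void CameraSourceTimeLapse::threadTimeLapseEntry() {
        while (mStarted) {
            // Sleep for the configured capture interval, then grab the next still.
            sleep(mTimeBetweenTimeLapseFrameCaptureUs / 1E6);
            CHECK_EQ(OK, mCamera->takePicture());
        }
    }
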
The function dataCallback() handles the callback from the camera with the
raw image data. It copies the data and creates an artificial timestamp that is
one frame duration ahead of the last encoded frame's timestamp. It then calls
dataCallbackTimestamp() of the base class, which proceeds as usual, as if the
frame had been received from a video camera.
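
Roughly (abridged from the dataCallback() added below, omitting logging and
sanity checks):

    void CameraSourceTimeLapse::dataCallback(int32_t msgType, const sp<IMemory> &data) {
        if (msgType != CAMERA_MSG_RAW_IMAGE) {
            return;
        }
        // The first frame starts at the recording start time; every later frame
        // is placed exactly one video-frame duration after the previous one.
        int64_t timestampUs = (mNumFramesReceived == 0)
                ? mStartTimeUs
                : mLastFrameTimestampUs + mTimeBetweenTimeLapseVideoFramesUs;
        // Copy the still image out of the camera's buffer before handing it on.
        sp<IMemory> dataCopy = createIMemoryCopy(data);
        dataCallbackTimestamp(timestampUs, msgType, dataCopy);
    }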

To move the implementation into the subclass CameraSourceTimeLapse, a few
virtual functions were added to CameraSource. They keep the existing behavior
in the base class and are overridden with specialized behavior in
CameraSourceTimeLapse. E.g. startCameraRecording() in the base class just calls
mCamera->startRecording(), while the CameraSourceTimeLapse override may start a
capture thread in the still camera case.
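
The pattern, abridged from the two implementations in this change (the real
override also sets the still picture size and uses explicit pthread attributes):

    // CameraSource.cpp: default behavior, unchanged for the video camera case.
    void CameraSource::startCameraRecording() {
        CHECK_EQ(OK, mCamera->startRecording());
    }

    // CameraSourceTimeLapse.cpp: override chooses the still camera path when needed.
    void CameraSourceTimeLapse::startCameraRecording() {
        if (mUseStillCameraForTimeLapse) {
            CHECK_EQ(OK, mCamera->takePicture());    // first frame now,
            pthread_create(&mThreadTimeLapse, NULL,  // then one per interval
                           ThreadTimeLapseWrapper, this);
        } else {
            CHECK_EQ(OK, mCamera->startRecording());
        }
    }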

Change-Id: Ib787f24bd2e1f41681513f0257e1c4ca10a2b4de
diff --git a/media/libmediaplayerservice/StagefrightRecorder.cpp b/media/libmediaplayerservice/StagefrightRecorder.cpp
index 24b0e7b..6ac29d8 100644
--- a/media/libmediaplayerservice/StagefrightRecorder.cpp
+++ b/media/libmediaplayerservice/StagefrightRecorder.cpp
@@ -24,6 +24,7 @@
 #include <media/stagefright/AudioSource.h>
 #include <media/stagefright/AMRWriter.h>
 #include <media/stagefright/CameraSource.h>
+#include <media/stagefright/CameraSourceTimeLapse.h>
 #include <media/stagefright/MPEG4Writer.h>
 #include <media/stagefright/MediaDebug.h>
 #include <media/stagefright/MediaDefs.h>
@@ -895,11 +896,10 @@
     status_t err = setupCameraSource();
     if (err != OK) return err;
 
-    sp<CameraSource> cameraSource = CameraSource::CreateFromCamera(mCamera);
+    sp<CameraSource> cameraSource = (mCaptureTimeLapse) ?
+        CameraSourceTimeLapse::CreateFromCamera(mCamera, true, 3E6, mFrameRate):
+        CameraSource::CreateFromCamera(mCamera);
     CHECK(cameraSource != NULL);
-    if(mCaptureTimeLapse) {
-        cameraSource->enableTimeLapseMode(1E6, mFrameRate);
-    }
 
     sp<MetaData> enc_meta = new MetaData;
     enc_meta->setInt32(kKeyBitRate, mVideoBitRate);
diff --git a/media/libstagefright/Android.mk b/media/libstagefright/Android.mk
index 89bfc1f..bf5643d 100644
--- a/media/libstagefright/Android.mk
+++ b/media/libstagefright/Android.mk
@@ -10,6 +10,7 @@
         AudioSource.cpp                   \
         AwesomePlayer.cpp                 \
         CameraSource.cpp                  \
+        CameraSourceTimeLapse.cpp                  \
         DataSource.cpp                    \
         ESDS.cpp                          \
         FileSource.cpp                    \
diff --git a/media/libstagefright/CameraSource.cpp b/media/libstagefright/CameraSource.cpp
index bb53d97..aa0893c 100644
--- a/media/libstagefright/CameraSource.cpp
+++ b/media/libstagefright/CameraSource.cpp
@@ -65,6 +65,11 @@
 void CameraSourceListener::postData(int32_t msgType, const sp<IMemory> &dataPtr) {
     LOGV("postData(%d, ptr:%p, size:%d)",
          msgType, dataPtr->pointer(), dataPtr->size());
+
+    sp<CameraSource> source = mSource.promote();
+    if (source.get() != NULL) {
+        source->dataCallback(msgType, dataPtr);
+    }
 }
 
 void CameraSourceListener::postDataTimestamp(
@@ -116,33 +121,17 @@
     return new CameraSource(camera);
 }
 
-void CameraSource::enableTimeLapseMode(
-        int64_t timeBetweenTimeLapseFrameCaptureUs, int32_t videoFrameRate) {
-    LOGV("starting time lapse mode");
-    mTimeBetweenTimeLapseFrameCaptureUs = timeBetweenTimeLapseFrameCaptureUs;
-    mTimeBetweenTimeLapseVideoFramesUs = (1E6/videoFrameRate);
-}
-
-void CameraSource::disableTimeLapseMode() {
-    LOGV("stopping time lapse mode");
-    mTimeBetweenTimeLapseFrameCaptureUs = -1;
-    mTimeBetweenTimeLapseVideoFramesUs = 0;
-}
-
 CameraSource::CameraSource(const sp<Camera> &camera)
     : mCamera(camera),
-      mFirstFrameTimeUs(0),
-      mLastFrameTimestampUs(0),
       mNumFramesReceived(0),
+      mLastFrameTimestampUs(0),
+      mStarted(false),
+      mFirstFrameTimeUs(0),
       mNumFramesEncoded(0),
       mNumFramesDropped(0),
       mNumGlitches(0),
       mGlitchDurationThresholdUs(200000),
-      mCollectStats(false),
-      mStarted(false),
-      mTimeBetweenTimeLapseFrameCaptureUs(-1),
-      mTimeBetweenTimeLapseVideoFramesUs(0),
-      mLastTimeLapseFrameRealTimestampUs(0) {
+      mCollectStats(false) {
 
     int64_t token = IPCThreadState::self()->clearCallingIdentity();
     String8 s = mCamera->getParameters();
@@ -177,7 +166,6 @@
     mMeta->setInt32(kKeyHeight, height);
     mMeta->setInt32(kKeyStride, stride);
     mMeta->setInt32(kKeySliceHeight, sliceHeight);
-
 }
 
 CameraSource::~CameraSource() {
@@ -186,6 +174,10 @@
     }
 }
 
+void CameraSource::startCameraRecording() {
+    CHECK_EQ(OK, mCamera->startRecording());
+}
+
 status_t CameraSource::start(MetaData *meta) {
     CHECK(!mStarted);
 
@@ -203,13 +195,17 @@
 
     int64_t token = IPCThreadState::self()->clearCallingIdentity();
     mCamera->setListener(new CameraSourceListener(this));
-    CHECK_EQ(OK, mCamera->startRecording());
+    startCameraRecording();
     IPCThreadState::self()->restoreCallingIdentity(token);
 
     mStarted = true;
     return OK;
 }
 
+void CameraSource::stopCameraRecording() {
+    mCamera->stopRecording();
+}
+
 status_t CameraSource::stop() {
     LOGV("stop");
     Mutex::Autolock autoLock(mLock);
@@ -218,7 +214,7 @@
 
     int64_t token = IPCThreadState::self()->clearCallingIdentity();
     mCamera->setListener(NULL);
-    mCamera->stopRecording();
+    stopCameraRecording();
     releaseQueuedFrames();
     while (!mFramesBeingEncoded.empty()) {
         LOGI("Waiting for outstanding frames being encoded: %d",
@@ -238,11 +234,15 @@
     return OK;
 }
 
+void CameraSource::releaseRecordingFrame(const sp<IMemory>& frame) {
+    mCamera->releaseRecordingFrame(frame);
+}
+
 void CameraSource::releaseQueuedFrames() {
     List<sp<IMemory> >::iterator it;
     while (!mFramesReceived.empty()) {
         it = mFramesReceived.begin();
-        mCamera->releaseRecordingFrame(*it);
+        releaseRecordingFrame(*it);
         mFramesReceived.erase(it);
         ++mNumFramesDropped;
     }
@@ -254,7 +254,7 @@
 
 void CameraSource::releaseOneRecordingFrame(const sp<IMemory>& frame) {
     int64_t token = IPCThreadState::self()->clearCallingIdentity();
-    mCamera->releaseRecordingFrame(frame);
+    releaseRecordingFrame(frame);
     IPCThreadState::self()->restoreCallingIdentity(token);
 }
 
@@ -263,7 +263,6 @@
     for (List<sp<IMemory> >::iterator it = mFramesBeingEncoded.begin();
          it != mFramesBeingEncoded.end(); ++it) {
         if ((*it)->pointer() ==  buffer->data()) {
-
             releaseOneRecordingFrame((*it));
             mFramesBeingEncoded.erase(it);
             ++mNumFramesEncoded;
@@ -332,33 +331,11 @@
         ++mNumGlitches;
     }
 
-    // time lapse
-    if(mTimeBetweenTimeLapseFrameCaptureUs >= 0) {
-        if(mLastTimeLapseFrameRealTimestampUs == 0) {
-            // First time lapse frame. Initialize mLastTimeLapseFrameRealTimestampUs
-            // to current time (timestampUs) and save frame data.
-            LOGV("dataCallbackTimestamp timelapse: initial frame");
-
-            mLastTimeLapseFrameRealTimestampUs = timestampUs;
-        } else if (timestampUs <
-                (mLastTimeLapseFrameRealTimestampUs + mTimeBetweenTimeLapseFrameCaptureUs)) {
-            // Skip all frames from last encoded frame until
-            // sufficient time (mTimeBetweenTimeLapseFrameCaptureUs) has passed.
-            // Tell the camera to release its recording frame and return.
-            LOGV("dataCallbackTimestamp timelapse: skipping intermediate frame");
-
-            releaseOneRecordingFrame(data);
-            return;
-        } else {
-            // Desired frame has arrived after mTimeBetweenTimeLapseFrameCaptureUs time:
-            // - Reset mLastTimeLapseFrameRealTimestampUs to current time.
-            // - Artificially modify timestampUs to be one frame time (1/framerate) ahead
-            // of the last encoded frame's time stamp.
-            LOGV("dataCallbackTimestamp timelapse: got timelapse frame");
-
-            mLastTimeLapseFrameRealTimestampUs = timestampUs;
-            timestampUs = mLastFrameTimestampUs + mTimeBetweenTimeLapseVideoFramesUs;
-        }
+    // May need to skip frame or modify timestamp. Currently implemented
+    // by the subclass CameraSourceTimeLapse.
+    if(skipCurrentFrame(timestampUs)) {
+        releaseOneRecordingFrame(data);
+        return;
     }
 
     mLastFrameTimestampUs = timestampUs;
diff --git a/media/libstagefright/CameraSourceTimeLapse.cpp b/media/libstagefright/CameraSourceTimeLapse.cpp
new file mode 100644
index 0000000..0c7ffa3
--- /dev/null
+++ b/media/libstagefright/CameraSourceTimeLapse.cpp
@@ -0,0 +1,223 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "CameraSourceTimeLapse"
+
+#include <binder/IPCThreadState.h>
+#include <binder/MemoryBase.h>
+#include <binder/MemoryHeapBase.h>
+#include <media/stagefright/CameraSource.h>
+#include <media/stagefright/CameraSourceTimeLapse.h>
+#include <media/stagefright/MediaDebug.h>
+#include <media/stagefright/MetaData.h>
+#include <camera/Camera.h>
+#include <camera/CameraParameters.h>
+#include <utils/String8.h>
+
+namespace android {
+
+// static
+CameraSourceTimeLapse *CameraSourceTimeLapse::Create(bool useStillCameraForTimeLapse,
+        int64_t timeBetweenTimeLapseFrameCaptureUs,
+        int32_t videoFrameRate) {
+    sp<Camera> camera = Camera::connect(0);
+
+    if (camera.get() == NULL) {
+        return NULL;
+    }
+
+    return new CameraSourceTimeLapse(camera, useStillCameraForTimeLapse,
+            timeBetweenTimeLapseFrameCaptureUs, videoFrameRate);
+}
+
+// static
+CameraSourceTimeLapse *CameraSourceTimeLapse::CreateFromCamera(const sp<Camera> &camera,
+        bool useStillCameraForTimeLapse,
+        int64_t timeBetweenTimeLapseFrameCaptureUs,
+        int32_t videoFrameRate) {
+    if (camera.get() == NULL) {
+        return NULL;
+    }
+
+    return new CameraSourceTimeLapse(camera, useStillCameraForTimeLapse,
+            timeBetweenTimeLapseFrameCaptureUs, videoFrameRate);
+}
+
+CameraSourceTimeLapse::CameraSourceTimeLapse(const sp<Camera> &camera,
+        bool useStillCameraForTimeLapse,
+        int64_t timeBetweenTimeLapseFrameCaptureUs,
+        int32_t videoFrameRate)
+    : CameraSource(camera),
+      mUseStillCameraForTimeLapse(useStillCameraForTimeLapse),
+      mTimeBetweenTimeLapseFrameCaptureUs(timeBetweenTimeLapseFrameCaptureUs),
+      mTimeBetweenTimeLapseVideoFramesUs(1E6/videoFrameRate),
+      mLastTimeLapseFrameRealTimestampUs(0),
+      mSkipCurrentFrame(false) {
+
+    LOGV("starting time lapse mode");
+    if(mUseStillCameraForTimeLapse) {
+        // The picture size is currently hardcoded. It will need to be chosen
+        // automatically or passed in from the app.
+        int32_t width, height;
+        width = 1024;
+        height = 768;
+        mMeta->setInt32(kKeyWidth, width);
+        mMeta->setInt32(kKeyHeight, height);
+    }
+}
+
+CameraSourceTimeLapse::~CameraSourceTimeLapse() {
+}
+
+// static
+void *CameraSourceTimeLapse::ThreadTimeLapseWrapper(void *me) {
+    CameraSourceTimeLapse *source = static_cast<CameraSourceTimeLapse *>(me);
+    source->threadTimeLapseEntry();
+    return NULL;
+}
+
+void CameraSourceTimeLapse::threadTimeLapseEntry() {
+    while(mStarted) {
+        LOGV("threadTimeLapseEntry loop");
+        sleep(mTimeBetweenTimeLapseFrameCaptureUs/1E6);
+        CHECK_EQ(OK, mCamera->takePicture());
+    }
+}
+
+void CameraSourceTimeLapse::startCameraRecording() {
+    if(mUseStillCameraForTimeLapse) {
+        LOGV("start time lapse recording using still camera");
+
+        int32_t width;
+        int32_t height;
+        mMeta->findInt32(kKeyWidth, &width);
+        mMeta->findInt32(kKeyHeight, &height);
+
+        int64_t token = IPCThreadState::self()->clearCallingIdentity();
+        String8 s = mCamera->getParameters();
+        IPCThreadState::self()->restoreCallingIdentity(token);
+
+        CameraParameters params(s);
+
+        params.setPictureSize(width, height);
+        mCamera->setParameters(params.flatten());
+
+        CHECK_EQ(OK, mCamera->takePicture());
+
+        // create a thread which takes pictures in a loop
+        pthread_attr_t attr;
+        pthread_attr_init(&attr);
+        pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_JOINABLE);
+
+        pthread_create(&mThreadTimeLapse, &attr, ThreadTimeLapseWrapper, this);
+        pthread_attr_destroy(&attr);
+    } else {
+        LOGV("start time lapse recording using video camera");
+        CHECK_EQ(OK, mCamera->startRecording());
+    }
+}
+
+void CameraSourceTimeLapse::stopCameraRecording() {
+    if(mUseStillCameraForTimeLapse) {
+        void *dummy;
+        pthread_join(mThreadTimeLapse, &dummy);
+    } else {
+        mCamera->stopRecording();
+    }
+}
+
+void CameraSourceTimeLapse::releaseRecordingFrame(const sp<IMemory>& frame) {
+    if(!mUseStillCameraForTimeLapse) {
+        mCamera->releaseRecordingFrame(frame);
+    }
+}
+
+sp<IMemory> CameraSourceTimeLapse::createIMemoryCopy(const sp<IMemory> &source_data) {
+    size_t source_size = source_data->size();
+    void* source_pointer = source_data->pointer();
+
+    sp<MemoryHeapBase> newMemoryHeap = new MemoryHeapBase(source_size);
+    sp<MemoryBase> newMemory = new MemoryBase(newMemoryHeap, 0, source_size);
+    memcpy(newMemory->pointer(), source_pointer, source_size);
+    return newMemory;
+}
+
+void CameraSourceTimeLapse::dataCallback(int32_t msgType, const sp<IMemory> &data) {
+    if(msgType != CAMERA_MSG_RAW_IMAGE) {
+        return;
+    }
+
+    LOGV("dataCallback for timelapse still frame");
+    CHECK_EQ(true, mUseStillCameraForTimeLapse);
+
+    int64_t timestampUs;
+    if (mNumFramesReceived == 0) {
+        timestampUs = mStartTimeUs;
+    } else {
+        timestampUs = mLastFrameTimestampUs + mTimeBetweenTimeLapseVideoFramesUs;
+    }
+    sp<IMemory> dataCopy = createIMemoryCopy(data);
+    dataCallbackTimestamp(timestampUs, msgType, dataCopy);
+}
+
+bool CameraSourceTimeLapse::skipCurrentFrame(int64_t timestampUs) {
+    if(mSkipCurrentFrame) {
+        mSkipCurrentFrame = false;
+        return true;
+    } else {
+        return false;
+    }
+}
+
+bool CameraSourceTimeLapse::skipFrameAndModifyTimeStamp(int64_t *timestampUs) {
+    if(!mUseStillCameraForTimeLapse) {
+        if(mLastTimeLapseFrameRealTimestampUs == 0) {
+            // First time lapse frame. Initialize mLastTimeLapseFrameRealTimestampUs
+            // to current time (timestampUs) and save frame data.
+            LOGV("dataCallbackTimestamp timelapse: initial frame");
+
+            mLastTimeLapseFrameRealTimestampUs = *timestampUs;
+        } else if (*timestampUs <
+                (mLastTimeLapseFrameRealTimestampUs + mTimeBetweenTimeLapseFrameCaptureUs)) {
+            // Skip all frames from last encoded frame until
+            // sufficient time (mTimeBetweenTimeLapseFrameCaptureUs) has passed.
+            // Tell the camera to release its recording frame and return.
+            LOGV("dataCallbackTimestamp timelapse: skipping intermediate frame");
+            return true;
+        } else {
+            // Desired frame has arrived after mTimeBetweenTimeLapseFrameCaptureUs time:
+            // - Reset mLastTimeLapseFrameRealTimestampUs to current time.
+            // - Artificially modify timestampUs to be one frame time (1/framerate) ahead
+            // of the last encoded frame's time stamp.
+            LOGV("dataCallbackTimestamp timelapse: got timelapse frame");
+
+            mLastTimeLapseFrameRealTimestampUs = *timestampUs;
+            *timestampUs = mLastFrameTimestampUs + mTimeBetweenTimeLapseVideoFramesUs;
+        }
+    }
+    return false;
+}
+
+void CameraSourceTimeLapse::dataCallbackTimestamp(int64_t timestampUs, int32_t msgType,
+            const sp<IMemory> &data) {
+    if(!mUseStillCameraForTimeLapse) {
+        mSkipCurrentFrame = skipFrameAndModifyTimeStamp(&timestampUs);
+    }
+    CameraSource::dataCallbackTimestamp(timestampUs, msgType, data);
+}
+
+}  // namespace android