am 9e0bb6d5: am 0544d059: Merge "DRM Framework bug fixes."

* commit '9e0bb6d5eef5dfe2da60c50f3aaa39512307d11d':
  DRM Framework bug fixes.
diff --git a/camera/Android.mk b/camera/Android.mk
index 03ff229..2f16923 100644
--- a/camera/Android.mk
+++ b/camera/Android.mk
@@ -14,7 +14,8 @@
 	libbinder \
 	libhardware \
 	libsurfaceflinger_client \
-	libui
+	libui \
+	libgui
 
 LOCAL_MODULE:= libcamera_client
 
diff --git a/camera/Camera.cpp b/camera/Camera.cpp
index 743fbb2..907f119 100644
--- a/camera/Camera.cpp
+++ b/camera/Camera.cpp
@@ -80,8 +80,9 @@
         c->mStatus = NO_ERROR;
         c->mCamera = camera;
         camera->asBinder()->linkToDeath(c);
+        return c;
     }
-    return c;
+    return 0;
 }
 
 void Camera::init()
@@ -167,32 +168,34 @@
     return c->unlock();
 }
 
-// pass the buffered ISurface to the camera service
+// pass the buffered Surface to the camera service
 status_t Camera::setPreviewDisplay(const sp<Surface>& surface)
 {
-    LOGV("setPreviewDisplay");
+    LOGV("setPreviewDisplay(%p)", surface.get());
     sp <ICamera> c = mCamera;
     if (c == 0) return NO_INIT;
     if (surface != 0) {
-        return c->setPreviewDisplay(surface->getISurface());
+        return c->setPreviewDisplay(surface);
     } else {
         LOGD("app passed NULL surface");
         return c->setPreviewDisplay(0);
     }
 }
 
-status_t Camera::setPreviewDisplay(const sp<ISurface>& surface)
+// pass the buffered ISurfaceTexture to the camera service
+status_t Camera::setPreviewTexture(const sp<ISurfaceTexture>& surfaceTexture)
 {
-    LOGV("setPreviewDisplay");
-    if (surface == 0) {
-        LOGD("app passed NULL surface");
-    }
+    LOGV("setPreviewTexture(%p)", surfaceTexture.get());
     sp <ICamera> c = mCamera;
     if (c == 0) return NO_INIT;
-    return c->setPreviewDisplay(surface);
+    if (surfaceTexture != 0) {
+        return c->setPreviewTexture(surfaceTexture);
+    } else {
+        LOGD("app passed NULL surface");
+        return c->setPreviewTexture(0);
+    }
 }
 
-
 // start preview mode
 status_t Camera::startPreview()
 {
@@ -202,6 +205,31 @@
     return c->startPreview();
 }
 
+int32_t Camera::getNumberOfVideoBuffers() const
+{
+    LOGV("getNumberOfVideoBuffers");
+    sp <ICamera> c = mCamera;
+    if (c == 0) return 0;
+    return c->getNumberOfVideoBuffers();
+}
+
+sp<IMemory> Camera::getVideoBuffer(int32_t index) const
+{
+    LOGV("getVideoBuffer: %d", index);
+    sp <ICamera> c = mCamera;
+    if (c == 0) return 0;
+    return c->getVideoBuffer(index);
+}
+
+status_t Camera::storeMetaDataInBuffers(bool enabled)
+{
+    LOGV("storeMetaDataInBuffers: %s",
+            enabled? "true": "false");
+    sp <ICamera> c = mCamera;
+    if (c == 0) return NO_INIT;
+    return c->storeMetaDataInBuffers(enabled);
+}
+
 // start recording mode, must call setPreviewDisplay first
 status_t Camera::startRecording()
 {
@@ -378,4 +406,3 @@
 }
 
 }; // namespace android
-
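For context, a minimal usage sketch of the new Camera entry points introduced above — assuming a connected android::Camera (obtained via Camera::connect(), whose exact signature may differ in this tree) and an sp<ISurfaceTexture> named texture created elsewhere; this is an illustration, not part of the change:

    sp<Camera> camera = Camera::connect(0 /* cameraId, assumed */);
    if (camera != 0) {
        // A SurfaceTexture can now back the preview stream instead of a Surface.
        if (camera->setPreviewTexture(texture) == NO_ERROR) {
            // Ask the camera HAL to pass metadata handles rather than full
            // YUV frames in recording buffers, where supported.
            camera->storeMetaDataInBuffers(true);
            camera->startPreview();
        }
    }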
diff --git a/camera/CameraParameters.cpp b/camera/CameraParameters.cpp
index 83e5e57..e9a5f8c 100644
--- a/camera/CameraParameters.cpp
+++ b/camera/CameraParameters.cpp
@@ -73,6 +73,9 @@
 const char CameraParameters::KEY_SMOOTH_ZOOM_SUPPORTED[] = "smooth-zoom-supported";
 const char CameraParameters::KEY_FOCUS_DISTANCES[] = "focus-distances";
 const char CameraParameters::KEY_VIDEO_FRAME_FORMAT[] = "video-frame-format";
+const char CameraParameters::KEY_VIDEO_SIZE[] = "video-size";
+const char CameraParameters::KEY_SUPPORTED_VIDEO_SIZES[] = "video-size-values";
+const char CameraParameters::KEY_PREFERRED_PREVIEW_SIZE_FOR_VIDEO[] = "preferred-preview-size-for-video";
 
 const char CameraParameters::TRUE[] = "true";
 const char CameraParameters::FOCUS_DISTANCE_INFINITY[] = "Infinity";
@@ -129,7 +132,7 @@
 const char CameraParameters::SCENE_MODE_CANDLELIGHT[] = "candlelight";
 const char CameraParameters::SCENE_MODE_BARCODE[] = "barcode";
 
-// Formats for setPreviewFormat and setPictureFormat.
+const char CameraParameters::PIXEL_FORMAT_YUV420P[]  = "yuv420p";
 const char CameraParameters::PIXEL_FORMAT_YUV422SP[] = "yuv422sp";
 const char CameraParameters::PIXEL_FORMAT_YUV420SP[] = "yuv420sp";
 const char CameraParameters::PIXEL_FORMAT_YUV422I[] = "yuv422i-yuyv";
@@ -331,12 +334,41 @@
     parse_pair(p, width, height, 'x');
 }
 
+void CameraParameters::getPreferredPreviewSizeForVideo(int *width, int *height) const
+{
+    *width = *height = -1;
+    const char *p = get(KEY_PREFERRED_PREVIEW_SIZE_FOR_VIDEO);
+    if (p == 0)  return;
+    parse_pair(p, width, height, 'x');
+}
+
 void CameraParameters::getSupportedPreviewSizes(Vector<Size> &sizes) const
 {
     const char *previewSizesStr = get(KEY_SUPPORTED_PREVIEW_SIZES);
     parseSizesList(previewSizesStr, sizes);
 }
 
+void CameraParameters::setVideoSize(int width, int height)
+{
+    char str[32];
+    sprintf(str, "%dx%d", width, height);
+    set(KEY_VIDEO_SIZE, str);
+}
+
+void CameraParameters::getVideoSize(int *width, int *height) const
+{
+    *width = *height = -1;
+    const char *p = get(KEY_VIDEO_SIZE);
+    if (p == 0) return;
+    parse_pair(p, width, height, 'x');
+}
+
+void CameraParameters::getSupportedVideoSizes(Vector<Size> &sizes) const
+{
+    const char *videoSizesStr = get(KEY_SUPPORTED_VIDEO_SIZES);
+    parseSizesList(videoSizesStr, sizes);
+}
+
 void CameraParameters::setPreviewFrameRate(int fps)
 {
     set(KEY_PREVIEW_FRAME_RATE, fps);
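For context, a sketch of how an application might exercise the new video-size parameters above; camera is assumed to be a connected android::Camera from the previous sketch, and Size is the existing CameraParameters::Size struct:

    CameraParameters params;
    params.unflatten(camera->getParameters());

    Vector<Size> videoSizes;
    params.getSupportedVideoSizes(videoSizes);    // parsed from "video-size-values"

    int w = -1, h = -1;
    params.getPreferredPreviewSizeForVideo(&w, &h);

    params.setVideoSize(640, 480);                // writes "video-size=640x480"
    camera->setParameters(params.flatten());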
diff --git a/camera/ICamera.cpp b/camera/ICamera.cpp
index 13673b5..0881d65 100644
--- a/camera/ICamera.cpp
+++ b/camera/ICamera.cpp
@@ -28,6 +28,7 @@
 enum {
     DISCONNECT = IBinder::FIRST_CALL_TRANSACTION,
     SET_PREVIEW_DISPLAY,
+    SET_PREVIEW_TEXTURE,
     SET_PREVIEW_CALLBACK_FLAG,
     START_PREVIEW,
     STOP_PREVIEW,
@@ -45,6 +46,9 @@
     STOP_RECORDING,
     RECORDING_ENABLED,
     RELEASE_RECORDING_FRAME,
+    GET_NUM_VIDEO_BUFFERS,
+    GET_VIDEO_BUFFER,
+    STORE_META_DATA_IN_BUFFERS,
 };
 
 class BpCamera: public BpInterface<ICamera>
@@ -64,17 +68,29 @@
         remote()->transact(DISCONNECT, data, &reply);
     }
 
-    // pass the buffered ISurface to the camera service
-    status_t setPreviewDisplay(const sp<ISurface>& surface)
+    // pass the buffered Surface to the camera service
+    status_t setPreviewDisplay(const sp<Surface>& surface)
     {
         LOGV("setPreviewDisplay");
         Parcel data, reply;
         data.writeInterfaceToken(ICamera::getInterfaceDescriptor());
-        data.writeStrongBinder(surface->asBinder());
+        Surface::writeToParcel(surface, &data);
         remote()->transact(SET_PREVIEW_DISPLAY, data, &reply);
         return reply.readInt32();
     }
 
+    // pass the buffered SurfaceTexture to the camera service
+    status_t setPreviewTexture(const sp<ISurfaceTexture>& surfaceTexture)
+    {
+        LOGV("setPreviewTexture");
+        Parcel data, reply;
+        data.writeInterfaceToken(ICamera::getInterfaceDescriptor());
+        sp<IBinder> b(surfaceTexture->asBinder());
+        data.writeStrongBinder(b);
+        remote()->transact(SET_PREVIEW_TEXTURE, data, &reply);
+        return reply.readInt32();
+    }
+
     // set the preview callback flag to affect how the received frames from
     // preview are handled. See Camera.h for details.
     void setPreviewCallbackFlag(int flag)
@@ -133,6 +149,37 @@
         remote()->transact(RELEASE_RECORDING_FRAME, data, &reply);
     }
 
+    int32_t getNumberOfVideoBuffers() const
+    {
+        LOGV("getNumberOfVideoBuffers");
+        Parcel data, reply;
+        data.writeInterfaceToken(ICamera::getInterfaceDescriptor());
+        remote()->transact(GET_NUM_VIDEO_BUFFERS, data, &reply);
+        return reply.readInt32();
+    }
+
+    sp<IMemory> getVideoBuffer(int32_t index) const
+    {
+        LOGV("getVideoBuffer: %d", index);
+        Parcel data, reply;
+        data.writeInterfaceToken(ICamera::getInterfaceDescriptor());
+        data.writeInt32(index);
+        remote()->transact(GET_VIDEO_BUFFER, data, &reply);
+        sp<IMemory> mem = interface_cast<IMemory>(
+                            reply.readStrongBinder());
+        return mem;
+    }
+
+    status_t storeMetaDataInBuffers(bool enabled)
+    {
+        LOGV("storeMetaDataInBuffers: %s", enabled? "true": "false");
+        Parcel data, reply;
+        data.writeInterfaceToken(ICamera::getInterfaceDescriptor());
+        data.writeInt32(enabled);
+        remote()->transact(STORE_META_DATA_IN_BUFFERS, data, &reply);
+        return reply.readInt32();
+    }
+
     // check preview state
     bool previewEnabled()
     {
@@ -258,10 +305,17 @@
         case SET_PREVIEW_DISPLAY: {
             LOGV("SET_PREVIEW_DISPLAY");
             CHECK_INTERFACE(ICamera, data, reply);
-            sp<ISurface> surface = interface_cast<ISurface>(data.readStrongBinder());
+            sp<Surface> surface = Surface::readFromParcel(data);
             reply->writeInt32(setPreviewDisplay(surface));
             return NO_ERROR;
         } break;
+        case SET_PREVIEW_TEXTURE: {
+            LOGV("SET_PREVIEW_TEXTURE");
+            CHECK_INTERFACE(ICamera, data, reply);
+            sp<ISurfaceTexture> st = interface_cast<ISurfaceTexture>(data.readStrongBinder());
+            reply->writeInt32(setPreviewTexture(st));
+            return NO_ERROR;
+        } break;
         case SET_PREVIEW_CALLBACK_FLAG: {
             LOGV("SET_PREVIEW_CALLBACK_TYPE");
             CHECK_INTERFACE(ICamera, data, reply);
@@ -300,6 +354,26 @@
             releaseRecordingFrame(mem);
             return NO_ERROR;
         } break;
+        case GET_NUM_VIDEO_BUFFERS: {
+            LOGV("GET_NUM_VIDEO_BUFFERS");
+            CHECK_INTERFACE(ICamera, data, reply);
+            reply->writeInt32(getNumberOfVideoBuffers());
+            return NO_ERROR;
+        } break;
+        case GET_VIDEO_BUFFER: {
+            LOGV("GET_VIDEO_BUFFER");
+            CHECK_INTERFACE(ICamera, data, reply);
+            int32_t index = data.readInt32();
+            reply->writeStrongBinder(getVideoBuffer(index)->asBinder());
+            return NO_ERROR;
+        } break;
+        case STORE_META_DATA_IN_BUFFERS: {
+            LOGV("STORE_META_DATA_IN_BUFFERS");
+            CHECK_INTERFACE(ICamera, data, reply);
+            bool enabled = data.readInt32();
+            reply->writeInt32(storeMetaDataInBuffers(enabled));
+            return NO_ERROR;
+        } break;
         case PREVIEW_ENABLED: {
             LOGV("PREVIEW_ENABLED");
             CHECK_INTERFACE(ICamera, data, reply);
@@ -376,4 +450,3 @@
 // ----------------------------------------------------------------------------
 
 }; // namespace android
-
diff --git a/cmds/stagefright/Android.mk b/cmds/stagefright/Android.mk
index 5b74007..178032d 100644
--- a/cmds/stagefright/Android.mk
+++ b/cmds/stagefright/Android.mk
@@ -7,13 +7,16 @@
 	SineSource.cpp
 
 LOCAL_SHARED_LIBRARIES := \
-	libstagefright libmedia libutils libbinder libstagefright_foundation
+	libstagefright libmedia libutils libbinder libstagefright_foundation \
+        libskia
 
 LOCAL_C_INCLUDES:= \
 	$(JNI_H_INCLUDE) \
 	frameworks/base/media/libstagefright \
 	frameworks/base/media/libstagefright/include \
-	$(TOP)/frameworks/base/include/media/stagefright/openmax
+	$(TOP)/frameworks/base/include/media/stagefright/openmax \
+        external/skia/include/core \
+        external/skia/include/images \
 
 LOCAL_CFLAGS += -Wno-multichar
 
@@ -53,6 +56,31 @@
 
 LOCAL_SRC_FILES:=         \
         SineSource.cpp    \
+        recordvideo.cpp
+
+LOCAL_SHARED_LIBRARIES := \
+	libstagefright liblog libutils libbinder
+
+LOCAL_C_INCLUDES:= \
+	$(JNI_H_INCLUDE) \
+	frameworks/base/media/libstagefright \
+	$(TOP)/frameworks/base/include/media/stagefright/openmax
+
+LOCAL_CFLAGS += -Wno-multichar
+
+LOCAL_MODULE_TAGS := debug
+
+LOCAL_MODULE:= recordvideo
+
+include $(BUILD_EXECUTABLE)
+
+
+################################################################################
+
+include $(CLEAR_VARS)
+
+LOCAL_SRC_FILES:=         \
+        SineSource.cpp    \
         audioloop.cpp
 
 LOCAL_SHARED_LIBRARIES := \
@@ -70,3 +98,53 @@
 LOCAL_MODULE:= audioloop
 
 include $(BUILD_EXECUTABLE)
+
+################################################################################
+
+include $(CLEAR_VARS)
+
+LOCAL_SRC_FILES:=         \
+        stream.cpp    \
+
+LOCAL_SHARED_LIBRARIES := \
+	libstagefright liblog libutils libbinder libsurfaceflinger_client \
+        libstagefright_foundation libmedia
+
+LOCAL_C_INCLUDES:= \
+	$(JNI_H_INCLUDE) \
+	frameworks/base/media/libstagefright \
+	$(TOP)/frameworks/base/include/media/stagefright/openmax
+
+LOCAL_CFLAGS += -Wno-multichar
+
+LOCAL_MODULE_TAGS := debug
+
+LOCAL_MODULE:= stream
+
+include $(BUILD_EXECUTABLE)
+
+################################################################################
+
+include $(CLEAR_VARS)
+
+LOCAL_SRC_FILES:=         \
+        sf2.cpp    \
+
+LOCAL_SHARED_LIBRARIES := \
+	libstagefright liblog libutils libbinder libstagefright_foundation \
+        libmedia libsurfaceflinger_client libcutils libui
+
+LOCAL_C_INCLUDES:= \
+	$(JNI_H_INCLUDE) \
+	frameworks/base/media/libstagefright \
+	$(TOP)/frameworks/base/include/media/stagefright/openmax
+
+LOCAL_CFLAGS += -Wno-multichar
+
+LOCAL_MODULE_TAGS := debug
+
+LOCAL_MODULE:= sf2
+
+include $(BUILD_EXECUTABLE)
+
+
diff --git a/cmds/stagefright/recordvideo.cpp b/cmds/stagefright/recordvideo.cpp
new file mode 100644
index 0000000..1264215
--- /dev/null
+++ b/cmds/stagefright/recordvideo.cpp
@@ -0,0 +1,303 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "SineSource.h"
+
+#include <binder/ProcessState.h>
+#include <media/stagefright/AudioPlayer.h>
+#include <media/stagefright/MediaBufferGroup.h>
+#include <media/stagefright/MediaDebug.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MetaData.h>
+#include <media/stagefright/MPEG4Writer.h>
+#include <media/stagefright/OMXClient.h>
+#include <media/stagefright/OMXCodec.h>
+#include <media/MediaPlayerInterface.h>
+
+using namespace android;
+
+// Print usage showing how to use this utility to record videos
+static void usage(const char *me) {
+    fprintf(stderr, "usage: %s\n", me);
+    fprintf(stderr, "       -h(elp)\n");
+    fprintf(stderr, "       -b bit rate in bits per second (default: 300000)\n");
+    fprintf(stderr, "       -c YUV420 color format: [0] semi planar or [1] planar (default: 1)\n");
+    fprintf(stderr, "       -f frame rate in frames per second (default: 30)\n");
+    fprintf(stderr, "       -i I frame interval in seconds (default: 1)\n");
+    fprintf(stderr, "       -n number of frames to be recorded (default: 300)\n");
+    fprintf(stderr, "       -w width in pixels (default: 176)\n");
+    fprintf(stderr, "       -t height in pixels (default: 144)\n");
+    fprintf(stderr, "       -l encoder level. see omx il header (default: encoder specific)\n");
+    fprintf(stderr, "       -p encoder profile. see omx il header (default: encoder specific)\n");
+    fprintf(stderr, "       -v video codec: [0] AVC [1] M4V [2] H263 (default: 0)\n");
+    fprintf(stderr, "The output file is /sdcard/output.mp4\n");
+    exit(1);
+}
+
+class DummySource : public MediaSource {
+
+public:
+    DummySource(int width, int height, int nFrames, int fps, int colorFormat)
+        : mWidth(width),
+          mHeight(height),
+          mMaxNumFrames(nFrames),
+          mFrameRate(fps),
+          mColorFormat(colorFormat),
+          mSize((width * height * 3) / 2) {
+
+        mGroup.add_buffer(new MediaBuffer(mSize));
+
+        // Check the color format to make sure
+        // that the buffer size mSize is set correctly above.
+        CHECK(colorFormat == OMX_COLOR_FormatYUV420SemiPlanar ||
+              colorFormat == OMX_COLOR_FormatYUV420Planar);
+    }
+
+    virtual sp<MetaData> getFormat() {
+        sp<MetaData> meta = new MetaData;
+        meta->setInt32(kKeyWidth, mWidth);
+        meta->setInt32(kKeyHeight, mHeight);
+        meta->setInt32(kKeyColorFormat, mColorFormat);
+        meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_RAW);
+
+        return meta;
+    }
+
+    virtual status_t start(MetaData *params) {
+        mNumFramesOutput = 0;
+        return OK;
+    }
+
+    virtual status_t stop() {
+        return OK;
+    }
+
+    virtual status_t read(
+            MediaBuffer **buffer, const MediaSource::ReadOptions *options) {
+
+        if (mNumFramesOutput % 10 == 0) {
+            fprintf(stderr, ".");
+        }
+        if (mNumFramesOutput == mMaxNumFrames) {
+            return ERROR_END_OF_STREAM;
+        }
+
+        status_t err = mGroup.acquire_buffer(buffer);
+        if (err != OK) {
+            return err;
+        }
+
+        // We don't care about the contents; we just test the video encoder.
+        // Also, by skipping the content generation, we can return from
+        // read() much faster.
+        //char x = (char)((double)rand() / RAND_MAX * 255);
+        //memset((*buffer)->data(), x, mSize);
+        (*buffer)->set_range(0, mSize);
+        (*buffer)->meta_data()->clear();
+        (*buffer)->meta_data()->setInt64(
+                kKeyTime, (mNumFramesOutput * 1000000) / mFrameRate);
+        ++mNumFramesOutput;
+
+        return OK;
+    }
+
+protected:
+    virtual ~DummySource() {}
+
+private:
+    MediaBufferGroup mGroup;
+    int mWidth, mHeight;
+    int mMaxNumFrames;
+    int mFrameRate;
+    int mColorFormat;
+    size_t mSize;
+    int64_t mNumFramesOutput;
+
+    DummySource(const DummySource &);
+    DummySource &operator=(const DummySource &);
+};
+
+enum {
+    kYUV420SP = 0,
+    kYUV420P  = 1,
+};
+
+// returns -1 if mapping of the given color is unsuccessful
+// returns an omx color enum value otherwise
+static int translateColorToOmxEnumValue(int color) {
+    switch (color) {
+        case kYUV420SP:
+            return OMX_COLOR_FormatYUV420SemiPlanar;
+        case kYUV420P:
+            return OMX_COLOR_FormatYUV420Planar;
+        default:
+            fprintf(stderr, "Unsupported color: %d\n", color);
+            return -1;
+    }
+}
+
+int main(int argc, char **argv) {
+
+    // Default values for the program if not overwritten
+    int frameRateFps = 30;
+    int width = 176;
+    int height = 144;
+    int bitRateBps = 300000;
+    int iFramesIntervalSeconds = 1;
+    int colorFormat = OMX_COLOR_FormatYUV420Planar;
+    int nFrames = 300;
+    int level = -1;        // Encoder specific default
+    int profile = -1;      // Encoder specific default
+    int codec = 0;
+    const char *fileName = "/sdcard/output.mp4";
+
+    android::ProcessState::self()->startThreadPool();
+    int res;
+    while ((res = getopt(argc, argv, "b:c:f:i:n:w:t:l:p:v:h")) >= 0) {
+        switch (res) {
+            case 'b':
+            {
+                bitRateBps = atoi(optarg);
+                break;
+            }
+
+            case 'c':
+            {
+                colorFormat = translateColorToOmxEnumValue(atoi(optarg));
+                if (colorFormat == -1) {
+                    usage(argv[0]);
+                }
+                break;
+            }
+
+            case 'f':
+            {
+                frameRateFps = atoi(optarg);
+                break;
+            }
+
+            case 'i':
+            {
+                iFramesIntervalSeconds = atoi(optarg);
+                break;
+            }
+
+            case 'n':
+            {
+                nFrames = atoi(optarg);
+                break;
+            }
+
+            case 'w':
+            {
+                width = atoi(optarg);
+                break;
+            }
+
+            case 't':
+            {
+                height = atoi(optarg);
+                break;
+            }
+
+            case 'l':
+            {
+                level = atoi(optarg);
+                break;
+            }
+
+            case 'p':
+            {
+                profile = atoi(optarg);
+                break;
+            }
+
+            case 'v':
+            {
+                codec = atoi(optarg);
+                if (codec < 0 || codec > 2) {
+                    usage(argv[0]);
+                }
+                break;
+            }
+
+            case 'h':
+            default:
+            {
+                usage(argv[0]);
+                break;
+            }
+        }
+    }
+
+    OMXClient client;
+    CHECK_EQ(client.connect(), OK);
+
+    status_t err = OK;
+    sp<MediaSource> source =
+        new DummySource(width, height, nFrames, frameRateFps, colorFormat);
+
+    sp<MetaData> enc_meta = new MetaData;
+    switch (codec) {
+        case 1:
+            enc_meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_MPEG4);
+            break;
+        case 2:
+            enc_meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_H263);
+            break;
+        default:
+            enc_meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_AVC);
+            break;
+    }
+    enc_meta->setInt32(kKeyWidth, width);
+    enc_meta->setInt32(kKeyHeight, height);
+    enc_meta->setInt32(kKeyFrameRate, frameRateFps);
+    enc_meta->setInt32(kKeyBitRate, bitRateBps);
+    enc_meta->setInt32(kKeyStride, width);
+    enc_meta->setInt32(kKeySliceHeight, height);
+    enc_meta->setInt32(kKeyIFramesInterval, iFramesIntervalSeconds);
+    enc_meta->setInt32(kKeyColorFormat, colorFormat);
+    if (level != -1) {
+        enc_meta->setInt32(kKeyVideoLevel, level);
+    }
+    if (profile != -1) {
+        enc_meta->setInt32(kKeyVideoProfile, profile);
+    }
+
+    sp<MediaSource> encoder =
+        OMXCodec::Create(
+                client.interface(), enc_meta, true /* createEncoder */, source);
+
+    sp<MPEG4Writer> writer = new MPEG4Writer(fileName);
+    writer->addSource(encoder);
+    int64_t start = systemTime();
+    CHECK_EQ(OK, writer->start());
+    while (!writer->reachedEOS()) {
+    }
+    err = writer->stop();
+    int64_t end = systemTime();
+
+    fprintf(stderr, "$\n");
+    client.disconnect();
+
+    if (err != OK && err != ERROR_END_OF_STREAM) {
+        fprintf(stderr, "record failed: %d\n", err);
+        return 1;
+    }
+    fprintf(stderr, "encoding %d frames in %lld us\n", nFrames, (end-start)/1000);
+    fprintf(stderr, "encoding speed is: %.2f fps\n", (nFrames * 1E9) / (end-start));
+    return 0;
+}
diff --git a/cmds/stagefright/sf2.cpp b/cmds/stagefright/sf2.cpp
new file mode 100644
index 0000000..1dc08ea
--- /dev/null
+++ b/cmds/stagefright/sf2.cpp
@@ -0,0 +1,564 @@
+#include <binder/ProcessState.h>
+
+#include <media/stagefright/foundation/hexdump.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/ALooper.h>
+#include <media/stagefright/foundation/AMessage.h>
+
+#include <media/stagefright/ACodec.h>
+#include <media/stagefright/DataSource.h>
+#include <media/stagefright/MediaBuffer.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MediaExtractor.h>
+#include <media/stagefright/MediaSource.h>
+#include <media/stagefright/MetaData.h>
+#include <media/stagefright/Utils.h>
+
+#include <surfaceflinger/ISurfaceComposer.h>
+#include <surfaceflinger/SurfaceComposerClient.h>
+
+#include "include/ESDS.h"
+
+using namespace android;
+
+struct Controller : public AHandler {
+    Controller(const char *uri, bool decodeAudio, const sp<Surface> &surface)
+        : mURI(uri),
+          mDecodeAudio(decodeAudio),
+          mSurface(surface),
+          mCodec(new ACodec) {
+        CHECK(!mDecodeAudio || mSurface == NULL);
+    }
+
+    void startAsync() {
+        (new AMessage(kWhatStart, id()))->post();
+    }
+
+protected:
+    virtual ~Controller() {
+    }
+
+    virtual void onMessageReceived(const sp<AMessage> &msg) {
+        switch (msg->what()) {
+            case kWhatStart:
+            {
+#if 1
+                mDecodeLooper = looper();
+#else
+                mDecodeLooper = new ALooper;
+                mDecodeLooper->setName("sf2 decode looper");
+                mDecodeLooper->start();
+#endif
+
+                sp<DataSource> dataSource =
+                    DataSource::CreateFromURI(mURI.c_str());
+
+                sp<MediaExtractor> extractor =
+                    MediaExtractor::Create(dataSource);
+
+                for (size_t i = 0; i < extractor->countTracks(); ++i) {
+                    sp<MetaData> meta = extractor->getTrackMetaData(i);
+
+                    const char *mime;
+                    CHECK(meta->findCString(kKeyMIMEType, &mime));
+
+                    if (!strncasecmp(mDecodeAudio ? "audio/" : "video/",
+                                     mime, 6)) {
+                        mSource = extractor->getTrack(i);
+                        break;
+                    }
+                }
+                CHECK(mSource != NULL);
+
+                CHECK_EQ(mSource->start(), (status_t)OK);
+
+                mDecodeLooper->registerHandler(mCodec);
+
+                mCodec->setNotificationMessage(
+                        new AMessage(kWhatCodecNotify, id()));
+
+                sp<AMessage> format = makeFormat(mSource->getFormat());
+
+                if (mSurface != NULL) {
+                    format->setObject("surface", mSurface);
+                }
+
+                mCodec->initiateSetup(format);
+
+                mCSDIndex = 0;
+                mStartTimeUs = ALooper::GetNowUs();
+                mNumOutputBuffersReceived = 0;
+                mTotalBytesReceived = 0;
+                mLeftOverBuffer = NULL;
+                mFinalResult = OK;
+                mSeekState = SEEK_NONE;
+
+                // (new AMessage(kWhatSeek, id()))->post(5000000ll);
+                break;
+            }
+
+            case kWhatSeek:
+            {
+                printf("+");
+                fflush(stdout);
+
+                CHECK(mSeekState == SEEK_NONE
+                        || mSeekState == SEEK_FLUSH_COMPLETED);
+
+                if (mLeftOverBuffer != NULL) {
+                    mLeftOverBuffer->release();
+                    mLeftOverBuffer = NULL;
+                }
+
+                mSeekState = SEEK_FLUSHING;
+                mSeekTimeUs = 30000000ll;
+
+                mCodec->signalFlush();
+                break;
+            }
+
+            case kWhatStop:
+            {
+                if (mLeftOverBuffer != NULL) {
+                    mLeftOverBuffer->release();
+                    mLeftOverBuffer = NULL;
+                }
+
+                CHECK_EQ(mSource->stop(), (status_t)OK);
+                mSource.clear();
+
+                mCodec->initiateShutdown();
+                break;
+            }
+
+            case kWhatCodecNotify:
+            {
+                int32_t what;
+                CHECK(msg->findInt32("what", &what));
+
+                if (what == ACodec::kWhatFillThisBuffer) {
+                    onFillThisBuffer(msg);
+                } else if (what == ACodec::kWhatDrainThisBuffer) {
+                    if ((mNumOutputBuffersReceived++ % 16) == 0) {
+                        printf(".");
+                        fflush(stdout);
+                    }
+
+                    onDrainThisBuffer(msg);
+                } else if (what == ACodec::kWhatEOS) {
+                    printf("$\n");
+
+                    int64_t delayUs = ALooper::GetNowUs() - mStartTimeUs;
+
+                    if (mDecodeAudio) {
+                        printf("%lld bytes received. %.2f KB/sec\n",
+                               mTotalBytesReceived,
+                               mTotalBytesReceived * 1E6 / 1024 / delayUs);
+                    } else {
+                        printf("%d frames decoded, %.2f fps. %lld bytes "
+                               "received. %.2f KB/sec\n",
+                               mNumOutputBuffersReceived,
+                               mNumOutputBuffersReceived * 1E6 / delayUs,
+                               mTotalBytesReceived,
+                               mTotalBytesReceived * 1E6 / 1024 / delayUs);
+                    }
+
+                    (new AMessage(kWhatStop, id()))->post();
+                } else if (what == ACodec::kWhatFlushCompleted) {
+                    mSeekState = SEEK_FLUSH_COMPLETED;
+                    mCodec->signalResume();
+
+                    (new AMessage(kWhatSeek, id()))->post(5000000ll);
+                } else {
+                    CHECK_EQ(what, (int32_t)ACodec::kWhatShutdownCompleted);
+
+                    mDecodeLooper->unregisterHandler(mCodec->id());
+
+                    if (mDecodeLooper != looper()) {
+                        mDecodeLooper->stop();
+                    }
+
+                    looper()->stop();
+                }
+                break;
+            }
+
+            default:
+                TRESPASS();
+                break;
+        }
+    }
+
+private:
+    enum {
+        kWhatStart             = 'strt',
+        kWhatStop              = 'stop',
+        kWhatCodecNotify       = 'noti',
+        kWhatSeek              = 'seek',
+    };
+
+    sp<ALooper> mDecodeLooper;
+
+    AString mURI;
+    bool mDecodeAudio;
+    sp<Surface> mSurface;
+    sp<ACodec> mCodec;
+    sp<MediaSource> mSource;
+
+    Vector<sp<ABuffer> > mCSD;
+    size_t mCSDIndex;
+
+    MediaBuffer *mLeftOverBuffer;
+    status_t mFinalResult;
+
+    int64_t mStartTimeUs;
+    int32_t mNumOutputBuffersReceived;
+    int64_t mTotalBytesReceived;
+
+    enum SeekState {
+        SEEK_NONE,
+        SEEK_FLUSHING,
+        SEEK_FLUSH_COMPLETED,
+    };
+    SeekState mSeekState;
+    int64_t mSeekTimeUs;
+
+    sp<AMessage> makeFormat(const sp<MetaData> &meta) {
+        CHECK(mCSD.isEmpty());
+
+        const char *mime;
+        CHECK(meta->findCString(kKeyMIMEType, &mime));
+
+        sp<AMessage> msg = new AMessage;
+        msg->setString("mime", mime);
+
+        if (!strncasecmp("video/", mime, 6)) {
+            int32_t width, height;
+            CHECK(meta->findInt32(kKeyWidth, &width));
+            CHECK(meta->findInt32(kKeyHeight, &height));
+
+            msg->setInt32("width", width);
+            msg->setInt32("height", height);
+        } else {
+            CHECK(!strncasecmp("audio/", mime, 6));
+
+            int32_t numChannels, sampleRate;
+            CHECK(meta->findInt32(kKeyChannelCount, &numChannels));
+            CHECK(meta->findInt32(kKeySampleRate, &sampleRate));
+
+            msg->setInt32("channel-count", numChannels);
+            msg->setInt32("sample-rate", sampleRate);
+        }
+
+        uint32_t type;
+        const void *data;
+        size_t size;
+        if (meta->findData(kKeyAVCC, &type, &data, &size)) {
+            // Parse the AVCDecoderConfigurationRecord
+
+            const uint8_t *ptr = (const uint8_t *)data;
+
+            CHECK(size >= 7);
+            CHECK_EQ((unsigned)ptr[0], 1u);  // configurationVersion == 1
+            uint8_t profile = ptr[1];
+            uint8_t level = ptr[3];
+
+            // There is decodable content out there that fails the following
+            // assertion, let's be lenient for now...
+            // CHECK((ptr[4] >> 2) == 0x3f);  // reserved
+
+            size_t lengthSize = 1 + (ptr[4] & 3);
+
+            // commented out check below as H264_QVGA_500_NO_AUDIO.3gp
+            // violates it...
+            // CHECK((ptr[5] >> 5) == 7);  // reserved
+
+            size_t numSeqParameterSets = ptr[5] & 31;
+
+            ptr += 6;
+            size -= 6;
+
+            sp<ABuffer> buffer = new ABuffer(1024);
+            buffer->setRange(0, 0);
+
+            for (size_t i = 0; i < numSeqParameterSets; ++i) {
+                CHECK(size >= 2);
+                size_t length = U16_AT(ptr);
+
+                ptr += 2;
+                size -= 2;
+
+                CHECK(size >= length);
+
+                memcpy(buffer->data() + buffer->size(), "\x00\x00\x00\x01", 4);
+                memcpy(buffer->data() + buffer->size() + 4, ptr, length);
+                buffer->setRange(0, buffer->size() + 4 + length);
+
+                ptr += length;
+                size -= length;
+            }
+
+            buffer->meta()->setInt32("csd", true);
+            mCSD.push(buffer);
+
+            buffer = new ABuffer(1024);
+            buffer->setRange(0, 0);
+
+            CHECK(size >= 1);
+            size_t numPictureParameterSets = *ptr;
+            ++ptr;
+            --size;
+
+            for (size_t i = 0; i < numPictureParameterSets; ++i) {
+                CHECK(size >= 2);
+                size_t length = U16_AT(ptr);
+
+                ptr += 2;
+                size -= 2;
+
+                CHECK(size >= length);
+
+                memcpy(buffer->data() + buffer->size(), "\x00\x00\x00\x01", 4);
+                memcpy(buffer->data() + buffer->size() + 4, ptr, length);
+                buffer->setRange(0, buffer->size() + 4 + length);
+
+                ptr += length;
+                size -= length;
+            }
+
+            buffer->meta()->setInt32("csd", true);
+            mCSD.push(buffer);
+
+            msg->setObject("csd", buffer);
+        } else if (meta->findData(kKeyESDS, &type, &data, &size)) {
+            ESDS esds((const char *)data, size);
+            CHECK_EQ(esds.InitCheck(), (status_t)OK);
+
+            const void *codec_specific_data;
+            size_t codec_specific_data_size;
+            esds.getCodecSpecificInfo(
+                    &codec_specific_data, &codec_specific_data_size);
+
+            sp<ABuffer> buffer = new ABuffer(codec_specific_data_size);
+
+            memcpy(buffer->data(), codec_specific_data,
+                   codec_specific_data_size);
+
+            buffer->meta()->setInt32("csd", true);
+            mCSD.push(buffer);
+        }
+
+        int32_t maxInputSize;
+        if (meta->findInt32(kKeyMaxInputSize, &maxInputSize)) {
+            msg->setInt32("max-input-size", maxInputSize);
+        }
+
+        return msg;
+    }
+
+    void onFillThisBuffer(const sp<AMessage> &msg) {
+        sp<AMessage> reply;
+        CHECK(msg->findMessage("reply", &reply));
+
+        if (mSeekState == SEEK_FLUSHING) {
+            reply->post();
+            return;
+        }
+
+        sp<RefBase> obj;
+        CHECK(msg->findObject("buffer", &obj));
+        sp<ABuffer> outBuffer = static_cast<ABuffer *>(obj.get());
+
+        if (mCSDIndex < mCSD.size()) {
+            outBuffer = mCSD.editItemAt(mCSDIndex++);
+            outBuffer->meta()->setInt64("timeUs", 0);
+        } else {
+            size_t sizeLeft = outBuffer->capacity();
+            outBuffer->setRange(0, 0);
+
+            int32_t n = 0;
+
+            for (;;) {
+                MediaBuffer *inBuffer;
+
+                if (mLeftOverBuffer != NULL) {
+                    inBuffer = mLeftOverBuffer;
+                    mLeftOverBuffer = NULL;
+                } else if (mFinalResult != OK) {
+                    break;
+                } else {
+                    MediaSource::ReadOptions options;
+                    if (mSeekState == SEEK_FLUSH_COMPLETED) {
+                        options.setSeekTo(mSeekTimeUs);
+                        mSeekState = SEEK_NONE;
+                    }
+                    status_t err = mSource->read(&inBuffer, &options);
+
+                    if (err != OK) {
+                        mFinalResult = err;
+                        break;
+                    }
+                }
+
+                if (inBuffer->range_length() > sizeLeft) {
+                    if (outBuffer->size() == 0) {
+                        LOGE("Unable to fit even a single input buffer of size %d.",
+                             inBuffer->range_length());
+                    }
+                    CHECK_GT(outBuffer->size(), 0u);
+
+                    mLeftOverBuffer = inBuffer;
+                    break;
+                }
+
+                ++n;
+
+                if (outBuffer->size() == 0) {
+                    int64_t timeUs;
+                    CHECK(inBuffer->meta_data()->findInt64(kKeyTime, &timeUs));
+
+                    outBuffer->meta()->setInt64("timeUs", timeUs);
+                }
+
+                memcpy(outBuffer->data() + outBuffer->size(),
+                       (const uint8_t *)inBuffer->data()
+                        + inBuffer->range_offset(),
+                       inBuffer->range_length());
+
+                outBuffer->setRange(
+                        0, outBuffer->size() + inBuffer->range_length());
+
+                sizeLeft -= inBuffer->range_length();
+
+                inBuffer->release();
+                inBuffer = NULL;
+
+                // break;  // Don't coalesce
+            }
+
+            LOGV("coalesced %d input buffers", n);
+
+            if (outBuffer->size() == 0) {
+                CHECK_NE(mFinalResult, (status_t)OK);
+
+                reply->setInt32("err", mFinalResult);
+                reply->post();
+                return;
+            }
+        }
+
+        reply->setObject("buffer", outBuffer);
+        reply->post();
+    }
+
+    void onDrainThisBuffer(const sp<AMessage> &msg) {
+        sp<RefBase> obj;
+        CHECK(msg->findObject("buffer", &obj));
+
+        sp<ABuffer> buffer = static_cast<ABuffer *>(obj.get());
+        mTotalBytesReceived += buffer->size();
+
+        sp<AMessage> reply;
+        CHECK(msg->findMessage("reply", &reply));
+
+        reply->post();
+    }
+
+    DISALLOW_EVIL_CONSTRUCTORS(Controller);
+};
+
+static void usage(const char *me) {
+    fprintf(stderr, "usage: %s\n", me);
+    fprintf(stderr, "       -h(elp)\n");
+    fprintf(stderr, "       -a(udio)\n");
+
+    fprintf(stderr,
+            "       -s(surface) Allocate output buffers on a surface.\n");
+}
+
+int main(int argc, char **argv) {
+    android::ProcessState::self()->startThreadPool();
+
+    bool decodeAudio = false;
+    bool useSurface = false;
+
+    int res;
+    while ((res = getopt(argc, argv, "has")) >= 0) {
+        switch (res) {
+            case 'a':
+                decodeAudio = true;
+                break;
+
+            case 's':
+                useSurface = true;
+                break;
+
+            case '?':
+            case 'h':
+            default:
+            {
+                usage(argv[0]);
+                return 1;
+            }
+        }
+    }
+
+    argc -= optind;
+    argv += optind;
+
+    if (argc != 1) {
+        usage(argv[-optind]);
+        return 1;
+    }
+
+    DataSource::RegisterDefaultSniffers();
+
+    sp<ALooper> looper = new ALooper;
+    looper->setName("sf2");
+
+    sp<SurfaceComposerClient> composerClient;
+    sp<SurfaceControl> control;
+    sp<Surface> surface;
+
+    if (!decodeAudio && useSurface) {
+        composerClient = new SurfaceComposerClient;
+        CHECK_EQ(composerClient->initCheck(), (status_t)OK);
+
+        control = composerClient->createSurface(
+                getpid(),
+                String8("A Surface"),
+                0,
+                1280,
+                800,
+                PIXEL_FORMAT_RGB_565,
+                0);
+
+        CHECK(control != NULL);
+        CHECK(control->isValid());
+
+        CHECK_EQ(composerClient->openTransaction(), (status_t)OK);
+        CHECK_EQ(control->setLayer(30000), (status_t)OK);
+        CHECK_EQ(control->show(), (status_t)OK);
+        CHECK_EQ(composerClient->closeTransaction(), (status_t)OK);
+
+        surface = control->getSurface();
+        CHECK(surface != NULL);
+    }
+
+    sp<Controller> controller = new Controller(argv[0], decodeAudio, surface);
+    looper->registerHandler(controller);
+
+    controller->startAsync();
+
+    CHECK_EQ(looper->start(true /* runOnCallingThread */), (status_t)OK);
+
+    looper->unregisterHandler(controller->id());
+
+    if (!decodeAudio && useSurface) {
+        composerClient->dispose();
+    }
+
+    return 0;
+}
+
diff --git a/cmds/stagefright/stagefright.cpp b/cmds/stagefright/stagefright.cpp
index f55b746..7ba5291 100644
--- a/cmds/stagefright/stagefright.cpp
+++ b/cmds/stagefright/stagefright.cpp
@@ -31,7 +31,7 @@
 #include <media/IMediaPlayerService.h>
 #include <media/stagefright/foundation/ALooper.h>
 #include "include/ARTSPController.h"
-#include "include/LiveSource.h"
+#include "include/LiveSession.h"
 #include "include/NuCachedSource2.h"
 #include <media/stagefright/AudioPlayer.h>
 #include <media/stagefright/DataSource.h>
@@ -49,6 +49,10 @@
 #include <media/stagefright/MPEG2TSWriter.h>
 #include <media/stagefright/MPEG4Writer.h>
 
+#include <private/media/VideoFrame.h>
+#include <SkBitmap.h>
+#include <SkImageEncoder.h>
+
 #include <fcntl.h>
 
 using namespace android;
@@ -59,6 +63,7 @@
 static bool gPreferSoftwareCodec;
 static bool gPlaybackAudio;
 static bool gWriteMP4;
+static bool gDisplayHistogram;
 static String8 gWriteMP4Filename;
 
 static int64_t getNowUs() {
@@ -68,6 +73,58 @@
     return (int64_t)tv.tv_usec + tv.tv_sec * 1000000ll;
 }
 
+static int CompareIncreasing(const int64_t *a, const int64_t *b) {
+    return (*a) < (*b) ? -1 : (*a) > (*b) ? 1 : 0;
+}
+
+static void displayDecodeHistogram(Vector<int64_t> *decodeTimesUs) {
+    printf("decode times:\n");
+
+    decodeTimesUs->sort(CompareIncreasing);
+
+    size_t n = decodeTimesUs->size();
+    int64_t minUs = decodeTimesUs->itemAt(0);
+    int64_t maxUs = decodeTimesUs->itemAt(n - 1);
+
+    printf("min decode time %lld us (%.2f secs)\n", minUs, minUs / 1E6);
+    printf("max decode time %lld us (%.2f secs)\n", maxUs, maxUs / 1E6);
+
+    size_t counts[100];
+    for (size_t i = 0; i < 100; ++i) {
+        counts[i] = 0;
+    }
+
+    for (size_t i = 0; i < n; ++i) {
+        int64_t x = decodeTimesUs->itemAt(i);
+
+        size_t slot = ((x - minUs) * 100) / (maxUs - minUs);
+        if (slot == 100) { slot = 99; }
+
+        ++counts[slot];
+    }
+
+    for (size_t i = 0; i < 100; ++i) {
+        int64_t slotUs = minUs + (i * (maxUs - minUs) / 100);
+
+        double fps = 1E6 / slotUs;
+        printf("[%.2f fps]: %d\n", fps, counts[i]);
+    }
+}
+
+static void displayAVCProfileLevelIfPossible(const sp<MetaData>& meta) {
+    uint32_t type;
+    const void *data;
+    size_t size;
+    if (meta->findData(kKeyAVCC, &type, &data, &size)) {
+        const uint8_t *ptr = (const uint8_t *)data;
+        CHECK(size >= 7);
+        CHECK(ptr[0] == 1);  // configurationVersion == 1
+        uint8_t profile = ptr[1];
+        uint8_t level = ptr[3];
+        fprintf(stderr, "AVC video profile %d and level %d\n", profile, level);
+    }
+}
+
 static void playSource(OMXClient *client, sp<MediaSource> &source) {
     sp<MetaData> meta = source->getFormat();
 
@@ -87,6 +144,7 @@
             fprintf(stderr, "Failed to instantiate decoder for '%s'.\n", mime);
             return;
         }
+        displayAVCProfileLevelIfPossible(meta);
     }
 
     source.clear();
@@ -201,6 +259,8 @@
     int64_t sumDecodeUs = 0;
     int64_t totalBytes = 0;
 
+    Vector<int64_t> decodeTimesUs;
+
     while (numIterationsLeft-- > 0) {
         long numFrames = 0;
 
@@ -224,9 +284,17 @@
                 break;
             }
 
-            if (buffer->range_length() > 0 && (n++ % 16) == 0) {
-                printf(".");
-                fflush(stdout);
+            if (buffer->range_length() > 0) {
+                if (gDisplayHistogram && n > 0) {
+                    // Ignore the first time since it includes some setup
+                    // cost.
+                    decodeTimesUs.push(delayDecodeUs);
+                }
+
+                if ((n++ % 16) == 0) {
+                    printf(".");
+                    fflush(stdout);
+                }
             }
 
             sumDecodeUs += delayDecodeUs;
@@ -266,6 +334,10 @@
                (double)sumDecodeUs / n);
 
         printf("decoded a total of %d frame(s).\n", n);
+
+        if (gDisplayHistogram) {
+            displayDecodeHistogram(&decodeTimesUs);
+        }
     } else if (!strncasecmp("audio/", mime, 6)) {
         // Frame count makes less sense for audio, as the output buffer
         // sizes may be different across decoders.
@@ -466,6 +538,8 @@
     fprintf(stderr, "       -o playback audio\n");
     fprintf(stderr, "       -w(rite) filename (write to .mp4 file)\n");
     fprintf(stderr, "       -k seek test\n");
+    fprintf(stderr, "       -x display a histogram of decoding times/fps "
+                    "(video only)\n");
 }
 
 int main(int argc, char **argv) {
@@ -482,12 +556,14 @@
     gPreferSoftwareCodec = false;
     gPlaybackAudio = false;
     gWriteMP4 = false;
+    gDisplayHistogram = false;
 
     sp<ALooper> looper;
     sp<ARTSPController> rtspController;
+    sp<LiveSession> liveSession;
 
     int res;
-    while ((res = getopt(argc, argv, "han:lm:b:ptsow:k")) >= 0) {
+    while ((res = getopt(argc, argv, "han:lm:b:ptsow:kx")) >= 0) {
         switch (res) {
             case 'a':
             {
@@ -560,6 +636,12 @@
                 break;
             }
 
+            case 'x':
+            {
+                gDisplayHistogram = true;
+                break;
+            }
+
             case '?':
             case 'h':
             default:
@@ -600,17 +682,32 @@
                         METADATA_MODE_FRAME_CAPTURE_AND_METADATA_RETRIEVAL),
                      (status_t)OK);
 
-            sp<IMemory> mem = retriever->captureFrame();
+            sp<IMemory> mem =
+                    retriever->getFrameAtTime(-1,
+                                    MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC);
 
             if (mem != NULL) {
-                printf("captureFrame(%s) => OK\n", filename);
+                printf("getFrameAtTime(%s) => OK\n", filename);
+
+                VideoFrame *frame = (VideoFrame *)mem->pointer();
+
+                SkBitmap bitmap;
+                bitmap.setConfig(
+                        SkBitmap::kRGB_565_Config, frame->mWidth, frame->mHeight);
+
+                bitmap.setPixels((uint8_t *)frame + sizeof(VideoFrame));
+
+                CHECK(SkImageEncoder::EncodeFile(
+                            "/sdcard/out.jpg", bitmap,
+                            SkImageEncoder::kJPEG_Type,
+                            SkImageEncoder::kDefaultQuality));
             } else {
                 mem = retriever->extractAlbumArt();
 
                 if (mem != NULL) {
                     printf("extractAlbumArt(%s) => OK\n", filename);
                 } else {
-                    printf("both captureFrame and extractAlbumArt "
+                    printf("both getFrameAtTime and extractAlbumArt "
                            "failed on file '%s'.\n", filename);
                 }
             }
@@ -756,8 +853,15 @@
                 String8 uri("http://");
                 uri.append(filename + 11);
 
-                dataSource = new LiveSource(uri.string());
-                dataSource = new NuCachedSource2(dataSource);
+                if (looper == NULL) {
+                    looper = new ALooper;
+                    looper->start();
+                }
+                liveSession = new LiveSession;
+                looper->registerHandler(liveSession);
+
+                liveSession->connect(uri.string());
+                dataSource = liveSession->getDataSource();
 
                 extractor =
                     MediaExtractor::Create(
diff --git a/cmds/stagefright/stream.cpp b/cmds/stagefright/stream.cpp
new file mode 100644
index 0000000..9246a00
--- /dev/null
+++ b/cmds/stagefright/stream.cpp
@@ -0,0 +1,190 @@
+#include <binder/ProcessState.h>
+
+#include <media/IStreamSource.h>
+#include <media/mediaplayer.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+
+#include <binder/IServiceManager.h>
+#include <media/IMediaPlayerService.h>
+#include <surfaceflinger/ISurfaceComposer.h>
+#include <surfaceflinger/SurfaceComposerClient.h>
+
+#include <fcntl.h>
+
+using namespace android;
+
+struct MyStreamSource : public BnStreamSource {
+    // Caller retains ownership of fd.
+    MyStreamSource(int fd);
+
+    virtual void setListener(const sp<IStreamListener> &listener);
+    virtual void setBuffers(const Vector<sp<IMemory> > &buffers);
+
+    virtual void onBufferAvailable(size_t index);
+
+protected:
+    virtual ~MyStreamSource();
+
+private:
+    int mFd;
+    off64_t mFileSize;
+    int64_t mNextSeekTimeUs;
+
+    sp<IStreamListener> mListener;
+    Vector<sp<IMemory> > mBuffers;
+
+    DISALLOW_EVIL_CONSTRUCTORS(MyStreamSource);
+};
+
+MyStreamSource::MyStreamSource(int fd)
+    : mFd(fd),
+      mFileSize(0),
+      mNextSeekTimeUs(-1) {  // ALooper::GetNowUs() + 5000000ll) {
+    CHECK_GE(fd, 0);
+
+    mFileSize = lseek64(fd, 0, SEEK_END);
+    lseek64(fd, 0, SEEK_SET);
+}
+
+MyStreamSource::~MyStreamSource() {
+}
+
+void MyStreamSource::setListener(const sp<IStreamListener> &listener) {
+    mListener = listener;
+}
+
+void MyStreamSource::setBuffers(const Vector<sp<IMemory> > &buffers) {
+    mBuffers = buffers;
+}
+
+void MyStreamSource::onBufferAvailable(size_t index) {
+    CHECK_LT(index, mBuffers.size());
+
+    if (mNextSeekTimeUs >= 0 && mNextSeekTimeUs <= ALooper::GetNowUs()) {
+        off64_t offset = (off64_t)(((float)rand() / RAND_MAX) * mFileSize * 0.8);
+        offset = (offset / 188) * 188;
+
+        lseek(mFd, offset, SEEK_SET);
+
+        mListener->issueCommand(
+                IStreamListener::DISCONTINUITY, false /* synchronous */);
+
+        mNextSeekTimeUs = -1;
+        mNextSeekTimeUs = ALooper::GetNowUs() + 5000000ll;
+    }
+
+    sp<IMemory> mem = mBuffers.itemAt(index);
+
+    ssize_t n = read(mFd, mem->pointer(), mem->size());
+    if (n <= 0) {
+        mListener->issueCommand(IStreamListener::EOS, false /* synchronous */);
+    } else {
+        mListener->queueBuffer(index, n);
+    }
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+struct MyClient : public BnMediaPlayerClient {
+    MyClient()
+        : mEOS(false) {
+    }
+
+    virtual void notify(int msg, int ext1, int ext2) {
+        Mutex::Autolock autoLock(mLock);
+
+        if (msg == MEDIA_ERROR || msg == MEDIA_PLAYBACK_COMPLETE) {
+            mEOS = true;
+            mCondition.signal();
+        }
+    }
+
+    void waitForEOS() {
+        Mutex::Autolock autoLock(mLock);
+        while (!mEOS) {
+            mCondition.wait(mLock);
+        }
+    }
+
+protected:
+    virtual ~MyClient() {
+    }
+
+private:
+    Mutex mLock;
+    Condition mCondition;
+
+    bool mEOS;
+
+    DISALLOW_EVIL_CONSTRUCTORS(MyClient);
+};
+
+int main(int argc, char **argv) {
+    android::ProcessState::self()->startThreadPool();
+
+    if (argc != 2) {
+        fprintf(stderr, "Usage: %s filename\n", argv[0]);
+        return 1;
+    }
+
+    sp<SurfaceComposerClient> composerClient = new SurfaceComposerClient;
+    CHECK_EQ(composerClient->initCheck(), (status_t)OK);
+
+    sp<SurfaceControl> control =
+        composerClient->createSurface(
+                getpid(),
+                String8("A Surface"),
+                0,
+                1280,
+                800,
+                PIXEL_FORMAT_RGB_565,
+                0);
+
+    CHECK(control != NULL);
+    CHECK(control->isValid());
+
+    CHECK_EQ(composerClient->openTransaction(), (status_t)OK);
+    CHECK_EQ(control->setLayer(30000), (status_t)OK);
+    CHECK_EQ(control->show(), (status_t)OK);
+    CHECK_EQ(composerClient->closeTransaction(), (status_t)OK);
+
+    sp<Surface> surface = control->getSurface();
+    CHECK(surface != NULL);
+
+    sp<IServiceManager> sm = defaultServiceManager();
+    sp<IBinder> binder = sm->getService(String16("media.player"));
+    sp<IMediaPlayerService> service = interface_cast<IMediaPlayerService>(binder);
+
+    CHECK(service.get() != NULL);
+
+    int fd = open(argv[1], O_RDONLY);
+
+    if (fd < 0) {
+        fprintf(stderr, "Failed to open file '%s'.", argv[1]);
+        return 1;
+    }
+
+    sp<MyClient> client = new MyClient;
+
+    sp<IMediaPlayer> player =
+        service->create(getpid(), client, new MyStreamSource(fd), 0);
+
+    if (player != NULL) {
+        player->setVideoSurface(surface);
+        player->start();
+
+        client->waitForEOS();
+
+        player->stop();
+    } else {
+        fprintf(stderr, "failed to instantiate player.\n");
+    }
+
+    close(fd);
+    fd = -1;
+
+    composerClient->dispose();
+
+    return 0;
+}
diff --git a/drm/common/DrmEngineBase.cpp b/drm/common/DrmEngineBase.cpp
index ac360eb..9b16c36 100644
--- a/drm/common/DrmEngineBase.cpp
+++ b/drm/common/DrmEngineBase.cpp
@@ -84,7 +84,7 @@
 }
 
 status_t DrmEngineBase::setPlaybackStatus(
-    int uniqueId, DecryptHandle* decryptHandle, int playbackStatus, int position) {
+    int uniqueId, DecryptHandle* decryptHandle, int playbackStatus, int64_t position) {
     return onSetPlaybackStatus(uniqueId, decryptHandle, playbackStatus, position);
 }
 
@@ -120,7 +120,7 @@
 }
 
 status_t DrmEngineBase::openDecryptSession(
-    int uniqueId, DecryptHandle* decryptHandle, int fd, int offset, int length) {
+    int uniqueId, DecryptHandle* decryptHandle, int fd, off64_t offset, off64_t length) {
     return onOpenDecryptSession(uniqueId, decryptHandle, fd, offset, length);
 }
 
@@ -150,7 +150,7 @@
 }
 
 ssize_t DrmEngineBase::pread(
-    int uniqueId, DecryptHandle* decryptHandle, void* buffer, ssize_t numBytes, off_t offset) {
+    int uniqueId, DecryptHandle* decryptHandle, void* buffer, ssize_t numBytes, off64_t offset) {
     return onPread(uniqueId, decryptHandle, buffer, numBytes, offset);
 }
 
diff --git a/drm/common/IDrmManagerService.cpp b/drm/common/IDrmManagerService.cpp
index 723b50e..75edac6 100644
--- a/drm/common/IDrmManagerService.cpp
+++ b/drm/common/IDrmManagerService.cpp
@@ -363,7 +363,7 @@
 }
 
 status_t BpDrmManagerService::setPlaybackStatus(
-            int uniqueId, DecryptHandle* decryptHandle, int playbackStatus, int position) {
+            int uniqueId, DecryptHandle* decryptHandle, int playbackStatus, int64_t position) {
     LOGV("setPlaybackStatus");
     Parcel data, reply;
 
@@ -459,7 +459,7 @@
     if (0 != reply.dataAvail()) {
         //Filling DRM Converted Status
         const int statusCode = reply.readInt32();
-        const int offset = reply.readInt32();
+        const off64_t offset = reply.readInt64();
 
         DrmBuffer* convertedData = NULL;
         if (0 != reply.dataAvail()) {
@@ -491,7 +491,7 @@
     if (0 != reply.dataAvail()) {
         //Filling DRM Converted Status
         const int statusCode = reply.readInt32();
-        const int offset = reply.readInt32();
+        const off64_t offset = reply.readInt64();
 
         DrmBuffer* convertedData = NULL;
         if (0 != reply.dataAvail()) {
@@ -545,15 +545,15 @@
 }
 
 DecryptHandle* BpDrmManagerService::openDecryptSession(
-            int uniqueId, int fd, int offset, int length) {
+            int uniqueId, int fd, off64_t offset, off64_t length) {
     LOGV("Entering BpDrmManagerService::openDecryptSession");
     Parcel data, reply;
 
     data.writeInterfaceToken(IDrmManagerService::getInterfaceDescriptor());
     data.writeInt32(uniqueId);
     data.writeFileDescriptor(fd);
-    data.writeInt32(offset);
-    data.writeInt32(length);
+    data.writeInt64(offset);
+    data.writeInt64(length);
 
     remote()->transact(OPEN_DECRYPT_SESSION, data, &reply);
 
@@ -569,8 +569,6 @@
             handle->decryptInfo = new DecryptInfo();
             handle->decryptInfo->decryptBufferLength = reply.readInt32();
         }
-    } else {
-        LOGE("no decryptHandle is generated in service side");
     }
     return handle;
 }
@@ -729,7 +727,7 @@
 
 ssize_t BpDrmManagerService::pread(
             int uniqueId, DecryptHandle* decryptHandle, void* buffer,
-            ssize_t numBytes, off_t offset) {
+            ssize_t numBytes, off64_t offset) {
     LOGV("read");
     Parcel data, reply;
     int result;
@@ -749,7 +747,7 @@
     }
 
     data.writeInt32(numBytes);
-    data.writeInt32(offset);
+    data.writeInt64(offset);
 
     remote()->transact(PREAD, data, &reply);
     result = reply.readInt32();
@@ -1185,7 +1183,7 @@
         if (NULL != drmConvertedStatus) {
             //Filling Drm Converted Ststus
             reply->writeInt32(drmConvertedStatus->statusCode);
-            reply->writeInt32(drmConvertedStatus->offset);
+            reply->writeInt64(drmConvertedStatus->offset);
 
             if (NULL != drmConvertedStatus->convertedData) {
                 const DrmBuffer* convertedData = drmConvertedStatus->convertedData;
@@ -1214,7 +1212,7 @@
         if (NULL != drmConvertedStatus) {
             //Filling Drm Converted Status
             reply->writeInt32(drmConvertedStatus->statusCode);
-            reply->writeInt32(drmConvertedStatus->offset);
+            reply->writeInt64(drmConvertedStatus->offset);
 
             if (NULL != drmConvertedStatus->convertedData) {
                 const DrmBuffer* convertedData = drmConvertedStatus->convertedData;
@@ -1274,7 +1272,7 @@
         const int fd = data.readFileDescriptor();
 
         DecryptHandle* handle
-            = openDecryptSession(uniqueId, fd, data.readInt32(), data.readInt32());
+            = openDecryptSession(uniqueId, fd, data.readInt64(), data.readInt64());
 
         if (NULL != handle) {
             reply->writeInt32(handle->decryptId);
@@ -1285,8 +1283,6 @@
                 reply->writeInt32(handle->decryptInfo->decryptBufferLength);
                 delete handle->decryptInfo; handle->decryptInfo = NULL;
             }
-        } else {
-            LOGE("NULL decryptHandle is returned");
         }
         delete handle; handle = NULL;
         return DRM_NO_ERROR;
@@ -1481,7 +1477,7 @@
         const int numBytes = data.readInt32();
         char* buffer = new char[numBytes];
 
-        const off_t offset = data.readInt32();
+        const off64_t offset = data.readInt64();
 
         ssize_t result = pread(uniqueId, &handle, buffer, numBytes, offset);
         reply->writeInt32(result);
diff --git a/drm/common/ReadWriteUtils.cpp b/drm/common/ReadWriteUtils.cpp
index 7ec4fa2..c16214e 100644
--- a/drm/common/ReadWriteUtils.cpp
+++ b/drm/common/ReadWriteUtils.cpp
@@ -42,7 +42,7 @@
         struct stat sb;
 
         if (fstat(fd, &sb) == 0 && sb.st_size > 0) {
-            int length = sb.st_size;
+            off64_t length = sb.st_size;
             char* bytes = new char[length];
             if (length == read(fd, (void*) bytes, length)) {
                 string.append(bytes, length);
@@ -57,7 +57,7 @@
 int ReadWriteUtils::readBytes(const String8& filePath, char** buffer) {
     FILE* file = NULL;
     file = fopen(filePath.string(), "r");
-    int length = 0;
+    off64_t length = 0;
 
     if (NULL != file) {
         int fd = fileno(file);
diff --git a/drm/drmserver/DrmManager.cpp b/drm/drmserver/DrmManager.cpp
index 49df1c8..b6e0c30 100644
--- a/drm/drmserver/DrmManager.cpp
+++ b/drm/drmserver/DrmManager.cpp
@@ -272,7 +272,7 @@
 }
 
 status_t DrmManager::setPlaybackStatus(
-    int uniqueId, DecryptHandle* decryptHandle, int playbackStatus, int position) {
+    int uniqueId, DecryptHandle* decryptHandle, int playbackStatus, int64_t position) {
     status_t result = DRM_ERROR_UNKNOWN;
     if (mDecryptSessionMap.indexOfKey(decryptHandle->decryptId) != NAME_NOT_FOUND) {
         IDrmEngine* drmEngine = mDecryptSessionMap.valueFor(decryptHandle->decryptId);
@@ -384,7 +384,7 @@
     return DRM_NO_ERROR;
 }
 
-DecryptHandle* DrmManager::openDecryptSession(int uniqueId, int fd, int offset, int length) {
+DecryptHandle* DrmManager::openDecryptSession(int uniqueId, int fd, off64_t offset, off64_t length) {
     Mutex::Autolock _l(mDecryptLock);
     status_t result = DRM_ERROR_CANNOT_HANDLE;
     Vector<String8> plugInIdList = mPlugInManager.getPlugInIdList();
@@ -407,7 +407,6 @@
     }
     if (DRM_NO_ERROR != result) {
         delete handle; handle = NULL;
-        LOGE("DrmManager::openDecryptSession: no capable plug-in found");
     }
     return handle;
 }
@@ -485,7 +484,7 @@
 }
 
 ssize_t DrmManager::pread(int uniqueId, DecryptHandle* decryptHandle,
-            void* buffer, ssize_t numBytes, off_t offset) {
+            void* buffer, ssize_t numBytes, off64_t offset) {
     ssize_t result = DECRYPT_FILE_ERROR;
 
     if (mDecryptSessionMap.indexOfKey(decryptHandle->decryptId) != NAME_NOT_FOUND) {
diff --git a/drm/drmserver/DrmManagerService.cpp b/drm/drmserver/DrmManagerService.cpp
index 4dcfa72..0901a44 100644
--- a/drm/drmserver/DrmManagerService.cpp
+++ b/drm/drmserver/DrmManagerService.cpp
@@ -162,7 +162,7 @@
 }
 
 status_t DrmManagerService::setPlaybackStatus(
-            int uniqueId, DecryptHandle* decryptHandle, int playbackStatus, int position) {
+            int uniqueId, DecryptHandle* decryptHandle, int playbackStatus, int64_t position) {
     LOGV("Entering setPlaybackStatus");
     return mDrmManager->setPlaybackStatus(uniqueId, decryptHandle, playbackStatus, position);
 }
@@ -207,7 +207,7 @@
 }
 
 DecryptHandle* DrmManagerService::openDecryptSession(
-            int uniqueId, int fd, int offset, int length) {
+            int uniqueId, int fd, off64_t offset, off64_t length) {
     LOGV("Entering DrmManagerService::openDecryptSession");
     if (isProtectedCallAllowed()) {
         return mDrmManager->openDecryptSession(uniqueId, fd, offset, length);
@@ -251,7 +251,7 @@
 }
 
 ssize_t DrmManagerService::pread(int uniqueId, DecryptHandle* decryptHandle,
-            void* buffer, ssize_t numBytes, off_t offset) {
+            void* buffer, ssize_t numBytes, off64_t offset) {
     LOGV("Entering pread");
     return mDrmManager->pread(uniqueId, decryptHandle, buffer, numBytes, offset);
 }
diff --git a/drm/libdrmframework/DrmManagerClient.cpp b/drm/libdrmframework/DrmManagerClient.cpp
index 8bb00c3..7b51822 100644
--- a/drm/libdrmframework/DrmManagerClient.cpp
+++ b/drm/libdrmframework/DrmManagerClient.cpp
@@ -83,7 +83,7 @@
 }
 
 status_t DrmManagerClient::setPlaybackStatus(
-            DecryptHandle* decryptHandle, int playbackStatus, int position) {
+            DecryptHandle* decryptHandle, int playbackStatus, int64_t position) {
     return mDrmManagerClientImpl
             ->setPlaybackStatus(mUniqueId, decryptHandle, playbackStatus, position);
 }
@@ -117,7 +117,7 @@
     return mDrmManagerClientImpl->getAllSupportInfo(mUniqueId, length, drmSupportInfoArray);
 }
 
-DecryptHandle* DrmManagerClient::openDecryptSession(int fd, int offset, int length) {
+DecryptHandle* DrmManagerClient::openDecryptSession(int fd, off64_t offset, off64_t length) {
     return mDrmManagerClientImpl->openDecryptSession(mUniqueId, fd, offset, length);
 }
 
@@ -150,7 +150,7 @@
 }
 
 ssize_t DrmManagerClient::pread(
-            DecryptHandle* decryptHandle, void* buffer, ssize_t numBytes, off_t offset) {
+            DecryptHandle* decryptHandle, void* buffer, ssize_t numBytes, off64_t offset) {
     Mutex::Autolock _l(mDecryptLock);
     return mDrmManagerClientImpl->pread(mUniqueId, decryptHandle, buffer, numBytes, offset);
 }
diff --git a/drm/libdrmframework/DrmManagerClientImpl.cpp b/drm/libdrmframework/DrmManagerClientImpl.cpp
index eea312b..d20de92 100644
--- a/drm/libdrmframework/DrmManagerClientImpl.cpp
+++ b/drm/libdrmframework/DrmManagerClientImpl.cpp
@@ -179,7 +179,7 @@
 }
 
 status_t DrmManagerClientImpl::setPlaybackStatus(
-            int uniqueId, DecryptHandle* decryptHandle, int playbackStatus, int position) {
+            int uniqueId, DecryptHandle* decryptHandle, int playbackStatus, int64_t position) {
     status_t status = DRM_ERROR_UNKNOWN;
     if (NULL != decryptHandle) {
         status = getDrmManagerService()->setPlaybackStatus(
@@ -240,7 +240,7 @@
 }
 
 DecryptHandle* DrmManagerClientImpl::openDecryptSession(
-            int uniqueId, int fd, int offset, int length) {
+            int uniqueId, int fd, off64_t offset, off64_t length) {
     return getDrmManagerService()->openDecryptSession(uniqueId, fd, offset, length);
 }
 
@@ -292,7 +292,7 @@
 }
 
 ssize_t DrmManagerClientImpl::pread(int uniqueId, DecryptHandle* decryptHandle,
-            void* buffer, ssize_t numBytes, off_t offset) {
+            void* buffer, ssize_t numBytes, off64_t offset) {
     ssize_t retCode = INVALID_VALUE;
     if ((NULL != decryptHandle) && (NULL != buffer) && (0 < numBytes)) {
         retCode = getDrmManagerService()->pread(uniqueId, decryptHandle, buffer, numBytes, offset);
diff --git a/drm/libdrmframework/include/DrmManager.h b/drm/libdrmframework/include/DrmManager.h
index bc462c2..e05366d 100644
--- a/drm/libdrmframework/include/DrmManager.h
+++ b/drm/libdrmframework/include/DrmManager.h
@@ -95,7 +95,7 @@
     status_t consumeRights(int uniqueId, DecryptHandle* decryptHandle, int action, bool reserve);
 
     status_t setPlaybackStatus(
-            int uniqueId, DecryptHandle* decryptHandle, int playbackStatus, int position);
+            int uniqueId, DecryptHandle* decryptHandle, int playbackStatus, int64_t position);
 
     bool validateAction(
             int uniqueId, const String8& path, int action, const ActionDescription& description);
@@ -112,7 +112,7 @@
 
     status_t getAllSupportInfo(int uniqueId, int* length, DrmSupportInfo** drmSupportInfoArray);
 
-    DecryptHandle* openDecryptSession(int uniqueId, int fd, int offset, int length);
+    DecryptHandle* openDecryptSession(int uniqueId, int fd, off64_t offset, off64_t length);
 
     DecryptHandle* openDecryptSession(int uniqueId, const char* uri);
 
@@ -127,7 +127,7 @@
     status_t finalizeDecryptUnit(int uniqueId, DecryptHandle* decryptHandle, int decryptUnitId);
 
     ssize_t pread(int uniqueId, DecryptHandle* decryptHandle,
-            void* buffer, ssize_t numBytes, off_t offset);
+            void* buffer, ssize_t numBytes, off64_t offset);
 
     void onInfo(const DrmInfoEvent& event);
 
diff --git a/drm/libdrmframework/include/DrmManagerClientImpl.h b/drm/libdrmframework/include/DrmManagerClientImpl.h
index ff84fc7..0a7fcd1 100644
--- a/drm/libdrmframework/include/DrmManagerClientImpl.h
+++ b/drm/libdrmframework/include/DrmManagerClientImpl.h
@@ -203,7 +203,7 @@
      *     Returns DRM_NO_ERROR for success, DRM_ERROR_UNKNOWN for failure
      */
     status_t setPlaybackStatus(
-            int uniqueId, DecryptHandle* decryptHandle, int playbackStatus, int position);
+            int uniqueId, DecryptHandle* decryptHandle, int playbackStatus, int64_t position);
 
     /**
      * Validates whether an action on the DRM content is allowed or not.
@@ -303,7 +303,7 @@
      * @return
      *     Handle for the decryption session
      */
-    DecryptHandle* openDecryptSession(int uniqueId, int fd, int offset, int length);
+    DecryptHandle* openDecryptSession(int uniqueId, int fd, off64_t offset, off64_t length);
 
     /**
      * Open the decrypt session to decrypt the given protected content
@@ -381,7 +381,7 @@
      * @return Number of bytes read. Returns -1 for Failure.
      */
     ssize_t pread(int uniqueId, DecryptHandle* decryptHandle,
-            void* buffer, ssize_t numBytes, off_t offset);
+            void* buffer, ssize_t numBytes, off64_t offset);
 
     /**
      * Notify the event to the registered listener
diff --git a/drm/libdrmframework/include/DrmManagerService.h b/drm/libdrmframework/include/DrmManagerService.h
index f346356..d0a0db7 100644
--- a/drm/libdrmframework/include/DrmManagerService.h
+++ b/drm/libdrmframework/include/DrmManagerService.h
@@ -81,7 +81,7 @@
     status_t consumeRights(int uniqueId, DecryptHandle* decryptHandle, int action, bool reserve);
 
     status_t setPlaybackStatus(
-            int uniqueId, DecryptHandle* decryptHandle, int playbackStatus, int position);
+            int uniqueId, DecryptHandle* decryptHandle, int playbackStatus, int64_t position);
 
     bool validateAction(int uniqueId, const String8& path,
             int action, const ActionDescription& description);
@@ -98,7 +98,7 @@
 
     status_t getAllSupportInfo(int uniqueId, int* length, DrmSupportInfo** drmSupportInfoArray);
 
-    DecryptHandle* openDecryptSession(int uniqueId, int fd, int offset, int length);
+    DecryptHandle* openDecryptSession(int uniqueId, int fd, off64_t offset, off64_t length);
 
     DecryptHandle* openDecryptSession(int uniqueId, const char* uri);
 
@@ -113,7 +113,7 @@
     status_t finalizeDecryptUnit(int uniqueId, DecryptHandle* decryptHandle, int decryptUnitId);
 
     ssize_t pread(int uniqueId, DecryptHandle* decryptHandle,
-            void* buffer, ssize_t numBytes, off_t offset);
+            void* buffer, ssize_t numBytes, off64_t offset);
 
 private:
     DrmManager* mDrmManager;
diff --git a/drm/libdrmframework/include/IDrmManagerService.h b/drm/libdrmframework/include/IDrmManagerService.h
index f1dabd3..2424ea5 100644
--- a/drm/libdrmframework/include/IDrmManagerService.h
+++ b/drm/libdrmframework/include/IDrmManagerService.h
@@ -120,7 +120,7 @@
             int uniqueId, DecryptHandle* decryptHandle, int action, bool reserve) = 0;
 
     virtual status_t setPlaybackStatus(
-            int uniqueId, DecryptHandle* decryptHandle, int playbackStatus, int position) = 0;
+            int uniqueId, DecryptHandle* decryptHandle, int playbackStatus, int64_t position) = 0;
 
     virtual bool validateAction(
             int uniqueId, const String8& path,
@@ -140,7 +140,7 @@
     virtual status_t getAllSupportInfo(
             int uniqueId, int* length, DrmSupportInfo** drmSupportInfoArray) = 0;
 
-    virtual DecryptHandle* openDecryptSession(int uniqueId, int fd, int offset, int length) = 0;
+    virtual DecryptHandle* openDecryptSession(int uniqueId, int fd, off64_t offset, off64_t length) = 0;
 
     virtual DecryptHandle* openDecryptSession(int uniqueId, const char* uri) = 0;
 
@@ -156,7 +156,7 @@
             int uniqueId, DecryptHandle* decryptHandle, int decryptUnitId) = 0;
 
     virtual ssize_t pread(int uniqueId, DecryptHandle* decryptHandle,
-            void* buffer, ssize_t numBytes,off_t offset) = 0;
+            void* buffer, ssize_t numBytes,off64_t offset) = 0;
 };
 
 /**
@@ -204,7 +204,7 @@
             int uniqueId, DecryptHandle* decryptHandle, int action, bool reserve);
 
     virtual status_t setPlaybackStatus(
-            int uniqueId, DecryptHandle* decryptHandle, int playbackStatus, int position);
+            int uniqueId, DecryptHandle* decryptHandle, int playbackStatus, int64_t position);
 
     virtual bool validateAction(
             int uniqueId, const String8& path, int action, const ActionDescription& description);
@@ -223,7 +223,7 @@
     virtual status_t getAllSupportInfo(
             int uniqueId, int* length, DrmSupportInfo** drmSupportInfoArray);
 
-    virtual DecryptHandle* openDecryptSession(int uniqueId, int fd, int offset, int length);
+    virtual DecryptHandle* openDecryptSession(int uniqueId, int fd, off64_t offset, off64_t length);
 
     virtual DecryptHandle* openDecryptSession(int uniqueId, const char* uri);
 
@@ -239,7 +239,7 @@
             int uniqueId, DecryptHandle* decryptHandle, int decryptUnitId);
 
     virtual ssize_t pread(int uniqueId, DecryptHandle* decryptHandle,
-            void* buffer, ssize_t numBytes, off_t offset);
+            void* buffer, ssize_t numBytes, off64_t offset);
 };
 
 /**
diff --git a/drm/libdrmframework/plugins/common/include/DrmEngineBase.h b/drm/libdrmframework/plugins/common/include/DrmEngineBase.h
index 67b6355..b61e3d3 100644
--- a/drm/libdrmframework/plugins/common/include/DrmEngineBase.h
+++ b/drm/libdrmframework/plugins/common/include/DrmEngineBase.h
@@ -62,7 +62,7 @@
     status_t consumeRights(int uniqueId, DecryptHandle* decryptHandle, int action, bool reserve);
 
     status_t setPlaybackStatus(
-            int uniqueId, DecryptHandle* decryptHandle, int playbackStatus, int position);
+            int uniqueId, DecryptHandle* decryptHandle, int playbackStatus, int64_t position);
 
     bool validateAction(
             int uniqueId, const String8& path, int action, const ActionDescription& description);
@@ -80,7 +80,7 @@
     DrmSupportInfo* getSupportInfo(int uniqueId);
 
     status_t openDecryptSession(
-            int uniqueId, DecryptHandle* decryptHandle, int fd, int offset, int length);
+            int uniqueId, DecryptHandle* decryptHandle, int fd, off64_t offset, off64_t length);
 
     status_t openDecryptSession(
             int uniqueId, DecryptHandle* decryptHandle, const char* uri);
@@ -96,7 +96,7 @@
     status_t finalizeDecryptUnit(int uniqueId, DecryptHandle* decryptHandle, int decryptUnitId);
 
     ssize_t pread(int uniqueId, DecryptHandle* decryptHandle,
-            void* buffer, ssize_t numBytes, off_t offset);
+            void* buffer, ssize_t numBytes, off64_t offset);
 
 protected:
     /////////////////////////////////////////////////////
@@ -268,7 +268,7 @@
      *     Returns DRM_NO_ERROR for success, DRM_ERROR_UNKNOWN for failure
      */
     virtual status_t onSetPlaybackStatus(
-            int uniqueId, DecryptHandle* decryptHandle, int playbackStatus, int position) = 0;
+            int uniqueId, DecryptHandle* decryptHandle, int playbackStatus, int64_t position) = 0;
 
     /**
      * Validates whether an action on the DRM content is allowed or not.
@@ -369,7 +369,7 @@
      *     DRM_ERROR_CANNOT_HANDLE for failure and DRM_NO_ERROR for success
      */
     virtual status_t onOpenDecryptSession(
-            int uniqueId, DecryptHandle* decryptHandle, int fd, int offset, int length) = 0;
+            int uniqueId, DecryptHandle* decryptHandle, int fd, off64_t offset, off64_t length) = 0;
 
     /**
      * Open the decrypt session to decrypt the given protected content
@@ -450,7 +450,7 @@
      * @return Number of bytes read. Returns -1 for Failure.
      */
     virtual ssize_t onPread(int uniqueId, DecryptHandle* decryptHandle,
-            void* buffer, ssize_t numBytes, off_t offset) = 0;
+            void* buffer, ssize_t numBytes, off64_t offset) = 0;
 };
 
 };
diff --git a/drm/libdrmframework/plugins/common/include/IDrmEngine.h b/drm/libdrmframework/plugins/common/include/IDrmEngine.h
index f839070..d05c24f 100644
--- a/drm/libdrmframework/plugins/common/include/IDrmEngine.h
+++ b/drm/libdrmframework/plugins/common/include/IDrmEngine.h
@@ -224,7 +224,7 @@
      *     Returns DRM_NO_ERROR for success, DRM_ERROR_UNKNOWN for failure
      */
     virtual status_t setPlaybackStatus(int uniqueId, DecryptHandle* decryptHandle,
-            int playbackStatus, int position) = 0;
+            int playbackStatus, int64_t position) = 0;
 
     /**
      * Validates whether an action on the DRM content is allowed or not.
@@ -325,7 +325,7 @@
      *     DRM_ERROR_CANNOT_HANDLE for failure and DRM_NO_ERROR for success
      */
     virtual status_t openDecryptSession(
-        int uniqueId, DecryptHandle* decryptHandle, int fd, int offset, int length) = 0;
+        int uniqueId, DecryptHandle* decryptHandle, int fd, off64_t offset, off64_t length) = 0;
 
     /**
      * Open the decrypt session to decrypt the given protected content
@@ -406,7 +406,7 @@
      * @return Number of bytes read. Returns -1 for Failure.
      */
     virtual ssize_t pread(int uniqueId, DecryptHandle* decryptHandle,
-            void* buffer, ssize_t numBytes, off_t offset) = 0;
+            void* buffer, ssize_t numBytes, off64_t offset) = 0;
 };
 
 };
diff --git a/drm/libdrmframework/plugins/passthru/include/DrmPassthruPlugIn.h b/drm/libdrmframework/plugins/passthru/include/DrmPassthruPlugIn.h
index bbcd9ed..f941f70 100644
--- a/drm/libdrmframework/plugins/passthru/include/DrmPassthruPlugIn.h
+++ b/drm/libdrmframework/plugins/passthru/include/DrmPassthruPlugIn.h
@@ -56,7 +56,7 @@
     status_t onConsumeRights(int uniqueId, DecryptHandle* decryptHandle, int action, bool reserve);
 
     status_t onSetPlaybackStatus(
-            int uniqueId, DecryptHandle* decryptHandle, int playbackStatus, int position);
+            int uniqueId, DecryptHandle* decryptHandle, int playbackStatus, int64_t position);
 
     bool onValidateAction(
             int uniqueId, const String8& path, int action, const ActionDescription& description);
@@ -74,7 +74,7 @@
     DrmSupportInfo* onGetSupportInfo(int uniqueId);
 
     status_t onOpenDecryptSession(
-            int uniqueId, DecryptHandle* decryptHandle, int fd, int offset, int length);
+            int uniqueId, DecryptHandle* decryptHandle, int fd, off64_t offset, off64_t length);
 
     status_t onOpenDecryptSession(
             int uniqueId, DecryptHandle* decryptHandle, const char* uri);
@@ -90,7 +90,7 @@
     status_t onFinalizeDecryptUnit(int uniqueId, DecryptHandle* decryptHandle, int decryptUnitId);
 
     ssize_t onPread(int uniqueId, DecryptHandle* decryptHandle,
-            void* buffer, ssize_t numBytes, off_t offset);
+            void* buffer, ssize_t numBytes, off64_t offset);
 
 private:
     DecryptHandle* openDecryptSessionImpl();
diff --git a/drm/libdrmframework/plugins/passthru/src/DrmPassthruPlugIn.cpp b/drm/libdrmframework/plugins/passthru/src/DrmPassthruPlugIn.cpp
index dee1fdb..976978f 100644
--- a/drm/libdrmframework/plugins/passthru/src/DrmPassthruPlugIn.cpp
+++ b/drm/libdrmframework/plugins/passthru/src/DrmPassthruPlugIn.cpp
@@ -187,7 +187,7 @@
 }
 
 status_t DrmPassthruPlugIn::onSetPlaybackStatus(int uniqueId, DecryptHandle* decryptHandle,
-            int playbackStatus, int position) {
+            int playbackStatus, int64_t position) {
     LOGD("DrmPassthruPlugIn::onSetPlaybackStatus() : %d", uniqueId);
     return DRM_NO_ERROR;
 }
@@ -234,7 +234,7 @@
 }
 
 status_t DrmPassthruPlugIn::onOpenDecryptSession(
-            int uniqueId, DecryptHandle* decryptHandle, int fd, int offset, int length) {
+            int uniqueId, DecryptHandle* decryptHandle, int fd, off64_t offset, off64_t length) {
     LOGD("DrmPassthruPlugIn::onOpenDecryptSession() : %d", uniqueId);
 
 #ifdef ENABLE_PASSTHRU_DECRYPTION
@@ -292,7 +292,7 @@
 }
 
 ssize_t DrmPassthruPlugIn::onPread(int uniqueId, DecryptHandle* decryptHandle,
-            void* buffer, ssize_t numBytes, off_t offset) {
+            void* buffer, ssize_t numBytes, off64_t offset) {
     LOGD("DrmPassthruPlugIn::onPread() : %d", uniqueId);
     return 0;
 }
diff --git a/include/camera/Camera.h b/include/camera/Camera.h
index e734c38..e5f7e62 100644
--- a/include/camera/Camera.h
+++ b/include/camera/Camera.h
@@ -19,11 +19,10 @@
 
 #include <utils/Timers.h>
 #include <camera/ICameraClient.h>
+#include <gui/ISurfaceTexture.h>
 
 namespace android {
 
-class ISurface;
-
 /*
  * A set of bit masks for specifying how the received preview frames are
  * handled before the previewCallback() call.
@@ -96,11 +95,19 @@
     // or CAMERA_MSG_COMPRESSED_IMAGE. This is not allowed to be set during
     // preview.
     CAMERA_CMD_SET_DISPLAY_ORIENTATION = 3,
+
+    // cmdType to disable/enable shutter sound.
+    // In sendCommand, passing arg1 = 0 disables the shutter sound,
+    // while passing arg1 = 1 enables it.
+    CAMERA_CMD_ENABLE_SHUTTER_SOUND = 4,
+
+    // cmdType to play recording sound.
+    CAMERA_CMD_PLAY_RECORDING_SOUND = 5,
 };
 
 // camera fatal errors
 enum {
-    CAMERA_ERROR_UKNOWN  = 1,
+    CAMERA_ERROR_UNKNOWN  = 1,
     CAMERA_ERROR_SERVER_DIED = 100
 };
 
@@ -166,9 +173,11 @@
 
             status_t    getStatus() { return mStatus; }
 
-            // pass the buffered ISurface to the camera service
+            // pass the buffered Surface to the camera service
             status_t    setPreviewDisplay(const sp<Surface>& surface);
-            status_t    setPreviewDisplay(const sp<ISurface>& surface);
+
+            // pass the buffered ISurfaceTexture to the camera service
+            status_t    setPreviewTexture(const sp<ISurfaceTexture>& surfaceTexture);
 
             // start preview mode, must call setPreviewDisplay first
             status_t    startPreview();
@@ -209,6 +218,15 @@
             // send command to camera driver
             status_t    sendCommand(int32_t cmd, int32_t arg1, int32_t arg2);
 
+            // return the total number of available video buffers.
+            int32_t     getNumberOfVideoBuffers() const;
+
+            // return the individual video buffer corresponding to the given index.
+            sp<IMemory> getVideoBuffer(int32_t index) const;
+
+            // tell camera hal to store meta data or real YUV in video buffers.
+            status_t    storeMetaDataInBuffers(bool enabled);
+
             void        setListener(const sp<CameraListener>& listener);
             void        setPreviewCallbackFlags(int preview_callback_flag);
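
A hedged sketch of how a native client might drive the three new calls, assuming an already-connected android::Camera instance; error handling is trimmed and this is illustrative, not the framework's actual recording path:

#include <binder/IMemory.h>
#include <camera/Camera.h>
#include <utils/Errors.h>

using namespace android;

static status_t startMetaDataRecording(const sp<Camera>& camera) {
    // Must be requested before startRecording() to take effect; a HAL that
    // only supports real YUV data in video buffers returns an error here.
    status_t err = camera->storeMetaDataInBuffers(true);
    if (err != NO_ERROR) return err;

    err = camera->startRecording();
    if (err != NO_ERROR) return err;

    // Only meaningful after recording has started; 0 means the HAL does not
    // expose its video buffers this way.
    int32_t count = camera->getNumberOfVideoBuffers();
    for (int32_t i = 0; i < count; ++i) {
        sp<IMemory> buffer = camera->getVideoBuffer(i);
        // Inspect the buffer here; the ownership rules are spelled out in
        // CameraHardwareInterface.h.
    }
    return NO_ERROR;
}
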
 
diff --git a/include/camera/CameraHardwareInterface.h b/include/camera/CameraHardwareInterface.h
index 35c5aa1..86bd849 100644
--- a/include/camera/CameraHardwareInterface.h
+++ b/include/camera/CameraHardwareInterface.h
@@ -18,15 +18,16 @@
 #define ANDROID_HARDWARE_CAMERA_HARDWARE_INTERFACE_H
 
 #include <binder/IMemory.h>
+#include <ui/egl/android_natives.h>
 #include <utils/RefBase.h>
 #include <surfaceflinger/ISurface.h>
+#include <ui/android_native_buffer.h>
+#include <ui/GraphicBuffer.h>
 #include <camera/Camera.h>
 #include <camera/CameraParameters.h>
 
 namespace android {
 
-class Overlay;
-
 /**
  *  The size of image for display.
  */
@@ -86,8 +87,8 @@
 public:
     virtual ~CameraHardwareInterface() { }
 
-    /** Return the IMemoryHeap for the preview image heap */
-    virtual sp<IMemoryHeap>         getPreviewHeap() const = 0;
+    /** Set the ANativeWindow to which preview frames are sent */
+    virtual status_t setPreviewWindow(const sp<ANativeWindow>& buf) = 0;
 
     /** Return the IMemoryHeap for the raw image heap */
     virtual sp<IMemoryHeap>         getRawHeap() const = 0;
@@ -111,6 +112,13 @@
 
     /**
      * Disable a message, or a set of messages.
+     *
+     * Once it has received a call to disableMsgType(CAMERA_MSG_VIDEO_FRAME),
+     * the camera hal should not rely on its client to call
+     * releaseRecordingFrame() to release video recording frames sent out by
+     * the camera hal before and after the disableMsgType(CAMERA_MSG_VIDEO_FRAME)
+     * call. Camera hal clients must not modify/access any video recording
+     * frame after calling disableMsgType(CAMERA_MSG_VIDEO_FRAME).
      */
     virtual void        disableMsgType(int32_t msgType) = 0;
 
@@ -127,12 +135,6 @@
     virtual status_t    startPreview() = 0;
 
     /**
-     * Only used if overlays are used for camera preview.
-     */
-    virtual bool         useOverlay() {return false;}
-    virtual status_t     setOverlay(const sp<Overlay> &overlay) {return BAD_VALUE;}
-
-    /**
      * Stop a previously started preview.
      */
     virtual void        stopPreview() = 0;
@@ -143,9 +145,89 @@
     virtual bool        previewEnabled() = 0;
 
     /**
+     * Retrieve the total number of available buffers from camera hal for passing
+     * video frame data in a recording session. Must be called again if a new
+     * recording session is started.
+     *
+     * This method should be called after startRecording(), since
+     * some camera hals may choose to allocate the video buffers only after
+     * recording has started.
+     *
+     * Some camera hal may not implement this method, and 0 can be returned to
+     * indicate that this feature is not available.
+     *
+     * @return the number of video buffers that camera hal makes available.
+     *      Zero (0) is returned to indicate that camera hal does not support
+     *      this feature.
+     */
+    virtual int32_t     getNumberOfVideoBuffers() const { return 0; }
+
+    /**
+     * Retrieve the video buffer corresponding to the given index in a
+     * recording session. Must be called again if a new recording session
+     * is started.
+     *
+     * It allows a client to retrieve all video buffers that camera hal makes
+     * available for passing video frame data by calling this method with all
+     * valid index values. The valid index value ranges from 0 to n, where
+     * n = getNumberOfVideoBuffers() - 1. With an index outside of the valid
+     * range, 0 must be returned. This method should be called after
+     * startRecording().
+     *
+     * The video buffers should NOT be modified/released by camera hal
+     * until stopRecording() is called and all outstanding video buffers
+     * previously sent out via CAMERA_MSG_VIDEO_FRAME have been released
+     * via releaseVideoBuffer().
+     *
+     * @param index an index to retrieve the corresponding video buffer.
+     *
+     * @return the video buffer corresponding to the given index.
+     */
+    virtual sp<IMemory> getVideoBuffer(int32_t index) const { return 0; }
+
+    /**
+     * Request the camera hal to store meta data or real YUV data in
+     * the video buffers sent out via CAMERA_MSG_VIDEO_FRAME for a
+     * recording session. If it is not called, the default camera
+     * hal behavior is to store real YUV data in the video buffers.
+     *
+     * This method should be called before startRecording() in order
+     * to be effective.
+     *
+     * If meta data is stored in the video buffers, it is up to the
+     * receiver of the video buffers to interpret the contents and
+     * to find the actual frame data with the help of the meta data
+     * in the buffer. How this is done is outside of the scope of
+     * this method.
+     *
+     * Some camera hal may not support storing meta data in the video
+     * buffers, but all camera hal should support storing real YUV data
+     * in the video buffers. If the camera hal does not support storing
+     * the meta data in the video buffers when it is requested to do
+     * so, INVALID_OPERATION must be returned. It is very useful for
+     * the camera hal to pass meta data rather than the actual frame
+     * data directly to the video encoder, since the amount of the
+     * uncompressed frame data can be very large if video size is large.
+     *
+     * @param enable if true, instruct the camera hal to store
+     *      meta data in the video buffers; if false, instruct
+     *      the camera hal to store real YUV data in the video
+     *      buffers.
+     *
+     * @return OK on success.
+     */
+    virtual status_t    storeMetaDataInBuffers(bool enable) {
+                            return enable? INVALID_OPERATION: OK;
+                        }
+
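
A hedged sketch of the overrides a camera HAL might add to honor the contract described above; PartialCameraHal and halSupportsMetaData() are hypothetical, the remaining pure virtual methods of CameraHardwareInterface are omitted for brevity, and nothing here is taken from a real HAL:

#include <camera/CameraHardwareInterface.h>
#include <utils/Errors.h>

namespace android {

class PartialCameraHal : public CameraHardwareInterface {
public:
    PartialCameraHal() : mStoreMetaData(false), mVideoBufferCount(0) {}

    virtual status_t storeMetaDataInBuffers(bool enable) {
        if (enable && !halSupportsMetaData()) {
            // Contract: refuse metadata mode when only YUV data is supported.
            return INVALID_OPERATION;
        }
        mStoreMetaData = enable;   // consulted once recording starts
        return OK;
    }

    virtual int32_t getNumberOfVideoBuffers() const {
        // Only meaningful once recording has started; 0 = feature unsupported.
        return mVideoBufferCount;
    }

    // The pure virtual methods of CameraHardwareInterface still have to be
    // implemented before this class can be instantiated.

private:
    bool halSupportsMetaData() const { return true; }   // placeholder
    bool    mStoreMetaData;
    int32_t mVideoBufferCount;
};

}  // namespace android
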
+    /**
      * Start record mode. When a record image is available a CAMERA_MSG_VIDEO_FRAME
      * message is sent with the corresponding frame. Every record frame must be released
-     * by calling releaseRecordingFrame().
+     * by a camera hal client via releaseRecordingFrame() before the client calls
+     * disableMsgType(CAMERA_MSG_VIDEO_FRAME). After the client calls
+     * disableMsgType(CAMERA_MSG_VIDEO_FRAME), it is camera hal's responsibility
+     * to manage the life-cycle of the video recording frames, and the client must
+     * not modify/access any video recording frames.
      */
     virtual status_t    startRecording() = 0;
 
@@ -161,6 +243,13 @@
 
     /**
      * Release a record frame previously returned by CAMERA_MSG_VIDEO_FRAME.
+     *
+     * It is camera hal client's responsibility to release video recording
+     * frames sent out by the camera hal before the camera hal receives
+     * a call to disableMsgType(CAMERA_MSG_VIDEO_FRAME). After it receives
+     * the call to disableMsgType(CAMERA_MSG_VIDEO_FRAME), it is camera hal's
+     * responsibility to manage the life-cycle of the video recording
+     * frames.
      */
     virtual void        releaseRecordingFrame(const sp<IMemory>& mem) = 0;
 
diff --git a/include/camera/CameraParameters.h b/include/camera/CameraParameters.h
index 4e770fd..431aaa4 100644
--- a/include/camera/CameraParameters.h
+++ b/include/camera/CameraParameters.h
@@ -59,6 +59,35 @@
     void setPreviewSize(int width, int height);
     void getPreviewSize(int *width, int *height) const;
     void getSupportedPreviewSizes(Vector<Size> &sizes) const;
+
+    // Set the dimensions in pixels to the given width and height
+    // for video frames. The given width and height must be one
+    // of the supported dimensions returned from
+    // getSupportedVideoSizes(). Must not be called if
+    // getSupportedVideoSizes() returns an empty Vector of Size.
+    void setVideoSize(int width, int height);
+    // Retrieve the current dimensions (width and height)
+    // in pixels for video frames, which must be one of the
+    // supported dimensions returned from getSupportedVideoSizes().
+    // Must not be called if getSupportedVideoSizes() returns an
+    // empty Vector of Size.
+    void getVideoSize(int *width, int *height) const;
+    // Retrieve a Vector of supported dimensions (width and height)
+    // in pixels for video frames. If the returned Vector of sizes
+    // is empty, the camera does not support calls to setVideoSize()
+    // or getVideoSize(). In addition, it also indicates that
+    // the camera has only a single output, and does not have
+    // separate outputs for video frames and preview frames.
+    void getSupportedVideoSizes(Vector<Size> &sizes) const;
+    // Retrieve the preferred preview size (width and height) in pixels
+    // for video recording. The returned width and height must be one of
+    // the supported preview sizes returned from getSupportedPreviewSizes().
+    // Must not be called if getSupportedVideoSizes() returns an empty
+    // Vector of Size; in that case the width and height returned from
+    // this method are invalid ("-1x-1").
+    void getPreferredPreviewSizeForVideo(int *width, int *height) const;
+
     void setPreviewFrameRate(int fps);
     int getPreviewFrameRate() const;
     void getPreviewFpsRange(int *min_fps, int *max_fps) const;
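
A hedged usage sketch of the new video-size accessors, assuming params was obtained from Camera::getParameters() and using a deliberately trivial size-selection policy:

#include <camera/CameraParameters.h>

using namespace android;

static void configureVideoSize(CameraParameters& params) {
    Vector<Size> videoSizes;
    params.getSupportedVideoSizes(videoSizes);
    if (videoSizes.isEmpty()) {
        // Single-output camera: preview and video share one size, so
        // setVideoSize()/getVideoSize() must not be called.
        return;
    }

    // A real application would pick by policy; just take the first size here.
    params.setVideoSize(videoSizes[0].width, videoSizes[0].height);

    // Follow the HAL's preferred preview size for recording, if advertised.
    int width = -1, height = -1;
    params.getPreferredPreviewSizeForVideo(&width, &height);
    if (width > 0 && height > 0) {
        params.setPreviewSize(width, height);
    }
}
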
@@ -288,6 +317,31 @@
     // Example value: "0.95,1.9,Infinity" or "0.049,0.05,0.051". Read only.
     static const char KEY_FOCUS_DISTANCES[];
 
+    // The current dimensions in pixels (width x height) for video frames.
+    // The width and height must be one of the supported sizes retrieved
+    // via KEY_SUPPORTED_VIDEO_SIZES.
+    // Example value: "1280x720". Read/write.
+    static const char KEY_VIDEO_SIZE[];
+    // A list of the supported dimensions in pixels (width x height)
+    // for video frames. See CAMERA_MSG_VIDEO_FRAME for details in
+    // frameworks/base/include/camera/Camera.h.
+    // Example: "176x144,1280x720". Read only.
+    static const char KEY_SUPPORTED_VIDEO_SIZES[];
+
+    // Preferred preview frame size in pixels for video recording.
+    // The width and height must be one of the supported sizes retrieved
+    // via KEY_SUPPORTED_PREVIEW_SIZES. This key can be used only when
+    // getSupportedVideoSizes() does not return an empty Vector of Size.
+    // Camcorder applications are recommended to set the preview size
+    // to a value that is not larger than the preferred preview size.
+    // In other words, the product of the width and height of the
+    // preview size should not be larger than that of the preferred
+    // preview size. In addition, it is recommended to choose a preview
+    // size that has the same aspect ratio as the resolution of the video
+    // to be recorded.
+    // Example value: "800x600". Read only.
+    static const char KEY_PREFERRED_PREVIEW_SIZE_FOR_VIDEO[];
+
     // The image format for video frames. See CAMERA_MSG_VIDEO_FRAME in
     // frameworks/base/include/camera/Camera.h.
     // Example value: "yuv420sp" or PIXEL_FORMAT_XXX constants. Read only.
@@ -361,7 +415,10 @@
     // for barcode reading.
     static const char SCENE_MODE_BARCODE[];
 
-    // Formats for setPreviewFormat and setPictureFormat.
+    // Pixel color formats for KEY_PREVIEW_FORMAT, KEY_PICTURE_FORMAT,
+    // and KEY_VIDEO_FRAME_FORMAT
+    // Planar variant of the YUV420 color format
+    static const char PIXEL_FORMAT_YUV420P[];
     static const char PIXEL_FORMAT_YUV422SP[];
     static const char PIXEL_FORMAT_YUV420SP[]; // NV21
     static const char PIXEL_FORMAT_YUV422I[]; // YUY2
diff --git a/include/camera/ICamera.h b/include/camera/ICamera.h
index 6fcf9e5..b2310a6 100644
--- a/include/camera/ICamera.h
+++ b/include/camera/ICamera.h
@@ -20,10 +20,11 @@
 #include <utils/RefBase.h>
 #include <binder/IInterface.h>
 #include <binder/Parcel.h>
-#include <surfaceflinger/ISurface.h>
+#include <surfaceflinger/Surface.h>
 #include <binder/IMemory.h>
 #include <utils/String8.h>
 #include <camera/Camera.h>
+#include <gui/ISurfaceTexture.h>
 
 namespace android {
 
@@ -45,8 +46,12 @@
     // allow other processes to use this ICamera interface
     virtual status_t        unlock() = 0;
 
-    // pass the buffered ISurface to the camera service
-    virtual status_t        setPreviewDisplay(const sp<ISurface>& surface) = 0;
+    // pass the buffered Surface to the camera service
+    virtual status_t        setPreviewDisplay(const sp<Surface>& surface) = 0;
+
+    // pass the buffered ISurfaceTexture to the camera service
+    virtual status_t        setPreviewTexture(
+            const sp<ISurfaceTexture>& surfaceTexture) = 0;
 
     // set the preview callback flag to affect how the received frames from
     // preview are handled.
@@ -90,6 +95,15 @@
 
     // send command to camera driver
     virtual status_t        sendCommand(int32_t cmd, int32_t arg1, int32_t arg2) = 0;
+
+    // return the total number of available video buffers
+    virtual int32_t         getNumberOfVideoBuffers() const  = 0;
+
+    // return the individual video buffer corresponding to the given index.
+    virtual sp<IMemory>     getVideoBuffer(int32_t index) const = 0;
+
+    // tell the camera hal to store meta data or real YUV data in video buffers.
+    virtual status_t        storeMetaDataInBuffers(bool enabled) = 0;
 };
 
 // ----------------------------------------------------------------------------
diff --git a/include/drm/DrmManagerClient.h b/include/drm/DrmManagerClient.h
index 004556f..085ebf1 100644
--- a/include/drm/DrmManagerClient.h
+++ b/include/drm/DrmManagerClient.h
@@ -69,7 +69,7 @@
      * @return
      *     Handle for the decryption session
      */
-    DecryptHandle* openDecryptSession(int fd, int offset, int length);
+    DecryptHandle* openDecryptSession(int fd, off64_t offset, off64_t length);
 
     /**
      * Open the decrypt session to decrypt the given protected content
@@ -113,7 +113,7 @@
      * @return status_t
      *     Returns DRM_NO_ERROR for success, DRM_ERROR_UNKNOWN for failure
      */
-    status_t setPlaybackStatus(DecryptHandle* decryptHandle, int playbackStatus, int position);
+    status_t setPlaybackStatus(DecryptHandle* decryptHandle, int playbackStatus, int64_t position);
 
     /**
      * Initialize decryption for the given unit of the protected content
@@ -167,7 +167,7 @@
      *
      * @return Number of bytes read. Returns -1 for Failure.
      */
-    ssize_t pread(DecryptHandle* decryptHandle, void* buffer, ssize_t numBytes, off_t offset);
+    ssize_t pread(DecryptHandle* decryptHandle, void* buffer, ssize_t numBytes, off64_t offset);
 
     /**
      * Validates whether an action on the DRM content is allowed or not.
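
A hedged sketch of what the off_t to off64_t widening buys a client of this header: opening and reading a protected region beyond the 2 GiB boundary. Error handling is trimmed and closeDecryptSession() is assumed to be the matching teardown call:

#include <drm/DrmManagerClient.h>
#include <fcntl.h>
#include <unistd.h>

using namespace android;

static ssize_t readBeyond2GiB(const char* path) {
    DrmManagerClient client;
    int fd = open(path, O_RDONLY);
    if (fd < 0) return -1;

    // offset and length are now 64-bit, so the session can describe a
    // region that starts past 2^31 - 1 bytes.
    off64_t offset = 3LL * 1024 * 1024 * 1024;   // 3 GiB into the file
    DecryptHandle* handle = client.openDecryptSession(fd, offset, 1024);
    if (handle == NULL) {
        close(fd);
        return -1;
    }

    char buffer[1024];
    ssize_t numRead = client.pread(handle, buffer, sizeof(buffer), offset);

    client.closeDecryptSession(handle);
    close(fd);
    return numRead;
}
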
diff --git a/include/drm/drm_framework_common.h b/include/drm/drm_framework_common.h
index c5765a9..1758cdd 100644
--- a/include/drm/drm_framework_common.h
+++ b/include/drm/drm_framework_common.h
@@ -217,6 +217,10 @@
      * POSIX based Decrypt API set for container based DRM
      */
     static const int CONTAINER_BASED = 0x02;
+    /**
+     * Decrypt API for Widevine streams
+     */
+    static const int WV_BASED = 0x3;
 };
 
 /**
diff --git a/include/media/AudioEffect.h b/include/media/AudioEffect.h
index c967efb..cda2be0 100644
--- a/include/media/AudioEffect.h
+++ b/include/media/AudioEffect.h
@@ -403,7 +403,7 @@
      static status_t guidToString(const effect_uuid_t *guid, char *str, size_t maxLen);
 
 protected:
-     volatile int32_t        mEnabled;           // enable state
+     bool                    mEnabled;           // enable state
      int32_t                 mSessionId;         // audio session ID
      int32_t                 mPriority;          // priority for effect control
      status_t                mStatus;            // effect status
@@ -412,6 +412,7 @@
      void*                   mUserData;          // client context for callback function
      effect_descriptor_t     mDescriptor;        // effect descriptor
      int32_t                 mId;                // system wide unique effect engine instance ID
+     Mutex                   mLock;               // Mutex for mEnabled access
 
 private:
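
The switch from a volatile int32_t to a plain bool, together with the new mLock member, suggests the enable flag is now only read and written under the mutex. A minimal sketch of that pattern, not the actual AudioEffect implementation:

#include <utils/threads.h>

using android::Mutex;

class EnabledFlag {
public:
    EnabledFlag() : mEnabled(false) {}

    // Returns true only if the call actually changed the state.
    bool setEnabled(bool enabled) {
        Mutex::Autolock _l(mLock);
        if (mEnabled == enabled) {
            return false;
        }
        mEnabled = enabled;
        return true;
    }

    bool enabled() {
        Mutex::Autolock _l(mLock);
        return mEnabled;
    }

private:
    Mutex mLock;    // guards mEnabled
    bool  mEnabled;
};
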
 
diff --git a/include/media/AudioRecord.h b/include/media/AudioRecord.h
index 38e3d44..5f7cd90 100644
--- a/include/media/AudioRecord.h
+++ b/include/media/AudioRecord.h
@@ -356,7 +356,7 @@
     sp<IAudioRecord>        mAudioRecord;
     sp<IMemory>             mCblkMemory;
     sp<ClientRecordThread>  mClientRecordThread;
-    Mutex                   mRecordThreadLock;
+    Mutex                   mLock;
 
     uint32_t                mFrameCount;
 
diff --git a/include/media/AudioSystem.h b/include/media/AudioSystem.h
index 9fd905f..1e29d82 100644
--- a/include/media/AudioSystem.h
+++ b/include/media/AudioSystem.h
@@ -156,6 +156,7 @@
         MODE_NORMAL = 0,
         MODE_RINGTONE,
         MODE_IN_CALL,
+        MODE_IN_COMMUNICATION,
         NUM_MODES  // not a valid entry, denotes end-of-list
     };
 
@@ -262,11 +263,15 @@
         DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES = 0x100,
         DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER = 0x200,
         DEVICE_OUT_AUX_DIGITAL = 0x400,
+        DEVICE_OUT_ANLG_DOCK_HEADSET = 0x800,
+        DEVICE_OUT_DGTL_DOCK_HEADSET = 0x1000,
         DEVICE_OUT_DEFAULT = 0x8000,
         DEVICE_OUT_ALL = (DEVICE_OUT_EARPIECE | DEVICE_OUT_SPEAKER | DEVICE_OUT_WIRED_HEADSET |
                 DEVICE_OUT_WIRED_HEADPHONE | DEVICE_OUT_BLUETOOTH_SCO | DEVICE_OUT_BLUETOOTH_SCO_HEADSET |
                 DEVICE_OUT_BLUETOOTH_SCO_CARKIT | DEVICE_OUT_BLUETOOTH_A2DP | DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES |
-                DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER | DEVICE_OUT_AUX_DIGITAL | DEVICE_OUT_DEFAULT),
+                DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER | DEVICE_OUT_AUX_DIGITAL |
+                DEVICE_OUT_ANLG_DOCK_HEADSET | DEVICE_OUT_DGTL_DOCK_HEADSET |
+                DEVICE_OUT_DEFAULT),
         DEVICE_OUT_ALL_A2DP = (DEVICE_OUT_BLUETOOTH_A2DP | DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES |
                 DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER),
 
@@ -309,6 +314,8 @@
         FORCE_WIRED_ACCESSORY,
         FORCE_BT_CAR_DOCK,
         FORCE_BT_DESK_DOCK,
+        FORCE_ANALOG_DOCK,
+        FORCE_DIGITAL_DOCK,
         NUM_FORCE_CONFIG,
         FORCE_DEFAULT = FORCE_NONE
     };
@@ -466,7 +473,7 @@
     AudioParameter(const String8& keyValuePairs);
     virtual ~AudioParameter();
 
-    // reserved parameter keys for changeing standard parameters with setParameters() function.
+    // reserved parameter keys for changing standard parameters with setParameters() function.
     // Using these keys is mandatory for AudioFlinger to properly monitor audio output/input
     // configuration changes and act accordingly.
     //  keyRouting: to change audio routing, value is an int in AudioSystem::audio_devices
@@ -474,11 +481,14 @@
     //  keyFormat: to change audio format, value is an int in AudioSystem::audio_format
     //  keyChannels: to change audio channel configuration, value is an int in AudioSystem::audio_channels
     //  keyFrameCount: to change audio output frame count, value is an int
+    //  keyInputSource: to change audio input source, value is an int in audio_source
+    //     (defined in media/mediarecorder.h)
     static const char *keyRouting;
     static const char *keySamplingRate;
     static const char *keyFormat;
     static const char *keyChannels;
     static const char *keyFrameCount;
+    static const char *keyInputSource;
 
     String8 toString();
 
diff --git a/include/media/AudioTrack.h b/include/media/AudioTrack.h
index 4475d4a..813a905 100644
--- a/include/media/AudioTrack.h
+++ b/include/media/AudioTrack.h
@@ -480,6 +480,7 @@
     uint32_t                mFlags;
     int                     mSessionId;
     int                     mAuxEffectId;
+    Mutex                   mLock;
 };
 
 
diff --git a/include/media/EffectApi.h b/include/media/EffectApi.h
index 16fb43c..b97c22e 100644
--- a/include/media/EffectApi.h
+++ b/include/media/EffectApi.h
@@ -602,9 +602,9 @@
 
 // Audio mode
 enum audio_mode_e {
-    AUDIO_MODE_NORMAL,      // phone idle
-    AUDIO_MODE_RINGTONE,    // phone ringing
-    AUDIO_MODE_IN_CALL      // phone call connected
+    AUDIO_MODE_NORMAL,      // device idle
+    AUDIO_MODE_RINGTONE,    // device ringing
+    AUDIO_MODE_IN_CALL      // audio call connected (VoIP or telephony)
 };
 
 // Values for "accessMode" field of buffer_config_t:
diff --git a/include/media/IMediaMetadataRetriever.h b/include/media/IMediaMetadataRetriever.h
index 9baba8e..e517cf0 100644
--- a/include/media/IMediaMetadataRetriever.h
+++ b/include/media/IMediaMetadataRetriever.h
@@ -33,8 +33,7 @@
     virtual status_t        setDataSource(const char* srcUrl) = 0;
     virtual status_t        setDataSource(int fd, int64_t offset, int64_t length) = 0;
     virtual status_t        setMode(int mode) = 0;
-    virtual status_t        getMode(int* mode) const = 0;
-    virtual sp<IMemory>     captureFrame() = 0;
+    virtual sp<IMemory>     getFrameAtTime(int64_t timeUs, int option) = 0;
     virtual sp<IMemory>     extractAlbumArt() = 0;
     virtual const char*     extractMetadata(int keyCode) = 0;
 };
diff --git a/include/media/IMediaPlayer.h b/include/media/IMediaPlayer.h
index af9a7ed..bba7ed7 100644
--- a/include/media/IMediaPlayer.h
+++ b/include/media/IMediaPlayer.h
@@ -25,6 +25,7 @@
 
 class Parcel;
 class ISurface;
+class Surface;
 
 class IMediaPlayer: public IInterface
 {
@@ -33,7 +34,7 @@
 
     virtual void            disconnect() = 0;
 
-    virtual status_t        setVideoSurface(const sp<ISurface>& surface) = 0;
+    virtual status_t        setVideoSurface(const sp<Surface>& surface) = 0;
     virtual status_t        prepareAsync() = 0;
     virtual status_t        start() = 0;
     virtual status_t        stop() = 0;
@@ -46,8 +47,6 @@
     virtual status_t        setAudioStreamType(int type) = 0;
     virtual status_t        setLooping(int loop) = 0;
     virtual status_t        setVolume(float leftVolume, float rightVolume) = 0;
-    virtual status_t        suspend() = 0;
-    virtual status_t        resume() = 0;
     virtual status_t        setAuxEffectSendLevel(float level) = 0;
     virtual status_t        attachAuxEffect(int effectId) = 0;
 
diff --git a/include/media/IMediaPlayerService.h b/include/media/IMediaPlayerService.h
index 9416ca1..0bfb166 100644
--- a/include/media/IMediaPlayerService.h
+++ b/include/media/IMediaPlayerService.h
@@ -32,6 +32,7 @@
 
 class IMediaRecorder;
 class IOMX;
+struct IStreamSource;
 
 class IMediaPlayerService: public IInterface
 {
@@ -45,6 +46,11 @@
             int audioSessionId = 0) = 0;
     virtual sp<IMediaPlayer> create(pid_t pid, const sp<IMediaPlayerClient>& client,
             int fd, int64_t offset, int64_t length, int audioSessionId) = 0;
+
+    virtual sp<IMediaPlayer> create(
+            pid_t pid, const sp<IMediaPlayerClient> &client,
+            const sp<IStreamSource> &source, int audioSessionId) = 0;
+
     virtual sp<IMemory>         decode(const char* url, uint32_t *pSampleRate, int* pNumChannels, int* pFormat) = 0;
     virtual sp<IMemory>         decode(int fd, int64_t offset, int64_t length, uint32_t *pSampleRate, int* pNumChannels, int* pFormat) = 0;
     virtual sp<IOMX>            getOMX() = 0;
diff --git a/include/media/IMediaRecorder.h b/include/media/IMediaRecorder.h
index 54adca8..28be7c1 100644
--- a/include/media/IMediaRecorder.h
+++ b/include/media/IMediaRecorder.h
@@ -22,7 +22,7 @@
 
 namespace android {
 
-class ISurface;
+class Surface;
 class ICamera;
 class IMediaRecorderClient;
 
@@ -32,7 +32,7 @@
     DECLARE_META_INTERFACE(MediaRecorder);
 
     virtual	status_t		setCamera(const sp<ICamera>& camera) = 0;
-    virtual	status_t		setPreviewSurface(const sp<ISurface>& surface) = 0;
+    virtual	status_t		setPreviewSurface(const sp<Surface>& surface) = 0;
     virtual	status_t		setVideoSource(int vs) = 0;
     virtual	status_t		setAudioSource(int as) = 0;
     virtual	status_t		setOutputFormat(int of) = 0;
@@ -40,6 +40,7 @@
     virtual	status_t		setAudioEncoder(int ae) = 0;
     virtual	status_t		setOutputFile(const char* path) = 0;
     virtual	status_t		setOutputFile(int fd, int64_t offset, int64_t length) = 0;
+    virtual	status_t		setOutputFileAuxiliary(int fd) = 0;
     virtual	status_t		setVideoSize(int width, int height) = 0;
     virtual	status_t		setVideoFrameRate(int frames_per_second) = 0;
     virtual     status_t                setParameters(const String8& params) = 0;
@@ -68,4 +69,3 @@
 }; // namespace android
 
 #endif // ANDROID_IMEDIARECORDER_H
-
diff --git a/include/media/IOMX.h b/include/media/IOMX.h
index f794766..cb36bbb 100644
--- a/include/media/IOMX.h
+++ b/include/media/IOMX.h
@@ -19,6 +19,7 @@
 #define ANDROID_IOMX_H_
 
 #include <binder/IInterface.h>
+#include <ui/GraphicBuffer.h>
 #include <utils/List.h>
 #include <utils/String8.h>
 
@@ -78,10 +79,20 @@
             node_id node, OMX_INDEXTYPE index,
             const void *params, size_t size) = 0;
 
+    virtual status_t storeMetaDataInBuffers(
+            node_id node, OMX_U32 port_index, OMX_BOOL enable) = 0;
+
+    virtual status_t enableGraphicBuffers(
+            node_id node, OMX_U32 port_index, OMX_BOOL enable) = 0;
+
     virtual status_t useBuffer(
             node_id node, OMX_U32 port_index, const sp<IMemory> &params,
             buffer_id *buffer) = 0;
 
+    virtual status_t useGraphicBuffer(
+            node_id node, OMX_U32 port_index,
+            const sp<GraphicBuffer> &graphicBuffer, buffer_id *buffer) = 0;
+
     // This API clearly only makes sense if the caller lives in the
     // same process as the callee, i.e. is the media_server, as the
     // returned "buffer_data" pointer is just that, a pointer into local
@@ -109,33 +120,6 @@
             node_id node,
             const char *parameter_name,
             OMX_INDEXTYPE *index) = 0;
-
-    virtual sp<IOMXRenderer> createRenderer(
-            const sp<ISurface> &surface,
-            const char *componentName,
-            OMX_COLOR_FORMATTYPE colorFormat,
-            size_t encodedWidth, size_t encodedHeight,
-            size_t displayWidth, size_t displayHeight,
-            int32_t rotationDegrees) = 0;
-
-    // Note: These methods are _not_ virtual, it exists as a wrapper around
-    // the virtual "createRenderer" method above facilitating extraction
-    // of the ISurface from a regular Surface or a java Surface object.
-    sp<IOMXRenderer> createRenderer(
-            const sp<Surface> &surface,
-            const char *componentName,
-            OMX_COLOR_FORMATTYPE colorFormat,
-            size_t encodedWidth, size_t encodedHeight,
-            size_t displayWidth, size_t displayHeight,
-            int32_t rotationDegrees);
-
-    sp<IOMXRenderer> createRendererFromJavaSurface(
-            JNIEnv *env, jobject javaSurface,
-            const char *componentName,
-            OMX_COLOR_FORMATTYPE colorFormat,
-            size_t encodedWidth, size_t encodedHeight,
-            size_t displayWidth, size_t displayHeight,
-            int32_t rotationDegrees);
 };
 
 struct omx_message {
@@ -182,13 +166,6 @@
     virtual void onMessage(const omx_message &msg) = 0;
 };
 
-class IOMXRenderer : public IInterface {
-public:
-    DECLARE_META_INTERFACE(OMXRenderer);
-
-    virtual void render(IOMX::buffer_id buffer) = 0;
-};
-
 ////////////////////////////////////////////////////////////////////////////////
 
 class BnOMX : public BnInterface<IOMX> {
@@ -205,13 +182,6 @@
             uint32_t flags = 0);
 };
 
-class BnOMXRenderer : public BnInterface<IOMXRenderer> {
-public:
-    virtual status_t onTransact(
-            uint32_t code, const Parcel &data, Parcel *reply,
-            uint32_t flags = 0);
-};
-
 }  // namespace android
 
 #endif  // ANDROID_IOMX_H_
diff --git a/include/media/IStreamSource.h b/include/media/IStreamSource.h
new file mode 100644
index 0000000..4b698e6
--- /dev/null
+++ b/include/media/IStreamSource.h
@@ -0,0 +1,68 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_ISTREAMSOURCE_H_
+
+#define ANDROID_ISTREAMSOURCE_H_
+
+#include <binder/IInterface.h>
+
+namespace android {
+
+struct AMessage;
+struct IMemory;
+struct IStreamListener;
+
+struct IStreamSource : public IInterface {
+    DECLARE_META_INTERFACE(StreamSource);
+
+    virtual void setListener(const sp<IStreamListener> &listener) = 0;
+    virtual void setBuffers(const Vector<sp<IMemory> > &buffers) = 0;
+
+    virtual void onBufferAvailable(size_t index) = 0;
+};
+
+struct IStreamListener : public IInterface {
+    DECLARE_META_INTERFACE(StreamListener);
+
+    enum Command {
+        EOS,
+        DISCONTINUITY,
+    };
+
+    virtual void queueBuffer(size_t index, size_t size) = 0;
+
+    virtual void issueCommand(
+            Command cmd, bool synchronous, const sp<AMessage> &msg = NULL) = 0;
+};
+
+////////////////////////////////////////////////////////////////////////////////
+
+struct BnStreamSource : public BnInterface<IStreamSource> {
+    virtual status_t onTransact(
+            uint32_t code, const Parcel &data, Parcel *reply,
+            uint32_t flags = 0);
+};
+
+struct BnStreamListener : public BnInterface<IStreamListener> {
+    virtual status_t onTransact(
+            uint32_t code, const Parcel &data, Parcel *reply,
+            uint32_t flags = 0);
+};
+
+}  // namespace android
+
+#endif  // ANDROID_ISTREAMSOURCE_H_
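
A hedged skeleton of how the new interface appears intended to be implemented on the application side: the source keeps the listener and the shared buffers it is handed, fills a buffer when told one is free, and returns it through the listener. MyStreamSource is a hypothetical name and the data-filling step is left as a comment:

#include <binder/IMemory.h>
#include <media/IStreamSource.h>
#include <utils/Vector.h>

using namespace android;

struct MyStreamSource : public BnStreamSource {
    sp<IStreamListener> mListener;
    Vector<sp<IMemory> > mBuffers;

    virtual void setListener(const sp<IStreamListener>& listener) {
        mListener = listener;
    }

    virtual void setBuffers(const Vector<sp<IMemory> >& buffers) {
        mBuffers = buffers;
    }

    virtual void onBufferAvailable(size_t index) {
        // Fill mBuffers[index] with stream data here, then hand it back to
        // the player, e.g. mListener->queueBuffer(index, bytesWritten);
    }
};
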
diff --git a/include/media/MediaMetadataRetrieverInterface.h b/include/media/MediaMetadataRetrieverInterface.h
index ff57774..717849d 100644
--- a/include/media/MediaMetadataRetrieverInterface.h
+++ b/include/media/MediaMetadataRetrieverInterface.h
@@ -33,8 +33,7 @@
     virtual status_t    setDataSource(const char *url) = 0;
     virtual status_t    setDataSource(int fd, int64_t offset, int64_t length) = 0;
     virtual status_t    setMode(int mode) = 0;
-    virtual status_t    getMode(int* mode) const = 0;
-    virtual VideoFrame* captureFrame() = 0;
+    virtual VideoFrame* getFrameAtTime(int64_t timeUs, int option) = 0;
     virtual MediaAlbumArt* extractAlbumArt() = 0;
     virtual const char* extractMetadata(int keyCode) = 0;
 };
@@ -67,7 +66,7 @@
                         }
 
     virtual status_t    getMode(int* mode) const { *mode = mMode; return NO_ERROR; }
-    virtual VideoFrame* captureFrame() { return NULL; }
+    virtual VideoFrame* getFrameAtTime(int64_t timeUs, int option) { return NULL; }
     virtual MediaAlbumArt* extractAlbumArt() { return NULL; }
     virtual const char* extractMetadata(int keyCode) { return NULL; }
 
diff --git a/include/media/MediaPlayerInterface.h b/include/media/MediaPlayerInterface.h
index 0521709..c0963a6 100644
--- a/include/media/MediaPlayerInterface.h
+++ b/include/media/MediaPlayerInterface.h
@@ -33,13 +33,15 @@
 
 class Parcel;
 class ISurface;
+class Surface;
 
 template<typename T> class SortedVector;
 
 enum player_type {
     PV_PLAYER = 1,
     SONIVOX_PLAYER = 2,
-    STAGEFRIGHT_PLAYER = 4,
+    STAGEFRIGHT_PLAYER = 3,
+    NU_PLAYER = 4,
     // Test players are available only in the 'test' and 'eng' builds.
     // The shared library with the test player is passed as an
     // argument to the 'test:' url in the setDataSource call.
@@ -105,7 +107,12 @@
             const KeyedVector<String8, String8> *headers = NULL) = 0;
 
     virtual status_t    setDataSource(int fd, int64_t offset, int64_t length) = 0;
-    virtual status_t    setVideoSurface(const sp<ISurface>& surface) = 0;
+
+    virtual status_t    setDataSource(const sp<IStreamSource> &source) {
+        return INVALID_OPERATION;
+    }
+
+    virtual status_t    setVideoSurface(const sp<Surface>& surface) = 0;
     virtual status_t    prepare() = 0;
     virtual status_t    prepareAsync() = 0;
     virtual status_t    start() = 0;
@@ -118,8 +125,6 @@
     virtual status_t    reset() = 0;
     virtual status_t    setLooping(int loop) = 0;
     virtual player_type playerType() = 0;
-    virtual status_t    suspend() { return INVALID_OPERATION; }
-    virtual status_t    resume() { return INVALID_OPERATION; }
 
     virtual void        setNotifyCallback(void* cookie, notify_callback_f notifyFunc) {
                             mCookie = cookie; mNotify = notifyFunc; }
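
The significant behavioral change in this hunk is that setDataSource(const sp<IStreamSource> &) has a non-pure default returning INVALID_OPERATION, so only players that actually understand binder stream input need to override it. A small caller-side sketch; it assumes these declarations sit on MediaPlayerBase, as in other revisions of this header, and the helper name is hypothetical.

    // Sketch only. Tries to feed a player a stream source and reports when the
    // player type keeps the INVALID_OPERATION default declared above.
    #define LOG_TAG "StreamSourceHelper"
    #include <utils/Log.h>
    #include <media/IStreamSource.h>
    #include <media/MediaPlayerInterface.h>

    using namespace android;

    static status_t setStreamSourceIfSupported(
            const sp<MediaPlayerBase> &player, const sp<IStreamSource> &source) {
        status_t err = player->setDataSource(source);
        if (err == INVALID_OPERATION) {
            LOGW("player type %d does not accept IStreamSource input",
                 player->playerType());
        }
        return err;
    }
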
diff --git a/include/media/MediaProfiles.h b/include/media/MediaProfiles.h
index c3cd361..aa97874 100644
--- a/include/media/MediaProfiles.h
+++ b/include/media/MediaProfiles.h
@@ -25,7 +25,20 @@
 
 enum camcorder_quality {
     CAMCORDER_QUALITY_LOW  = 0,
-    CAMCORDER_QUALITY_HIGH = 1
+    CAMCORDER_QUALITY_HIGH = 1,
+    CAMCORDER_QUALITY_QCIF = 2,
+    CAMCORDER_QUALITY_CIF = 3,
+    CAMCORDER_QUALITY_480P = 4,
+    CAMCORDER_QUALITY_720P = 5,
+    CAMCORDER_QUALITY_1080P = 6,
+
+    CAMCORDER_QUALITY_TIME_LAPSE_LOW  = 1000,
+    CAMCORDER_QUALITY_TIME_LAPSE_HIGH = 1001,
+    CAMCORDER_QUALITY_TIME_LAPSE_QCIF = 1002,
+    CAMCORDER_QUALITY_TIME_LAPSE_CIF = 1003,
+    CAMCORDER_QUALITY_TIME_LAPSE_480P = 1004,
+    CAMCORDER_QUALITY_TIME_LAPSE_720P = 1005,
+    CAMCORDER_QUALITY_TIME_LAPSE_1080P = 1006
 };
 
 enum video_decoder {
@@ -68,6 +81,12 @@
                                        camcorder_quality quality) const;
 
     /**
+     * Returns true if a profile for the given camera at the given quality exists,
+     * or false if not.
+     */
+    bool hasCamcorderProfile(int cameraId, camcorder_quality quality) const;
+
+    /**
      * Returns the output file formats supported.
      */
     Vector<output_format> getOutputFileFormats() const;
@@ -252,6 +271,8 @@
         Vector<int> mLevels;
     };
 
+    int getCamcorderProfileIndex(int cameraId, camcorder_quality quality) const;
+
     // Debug
     static void logVideoCodec(const VideoCodec& codec);
     static void logAudioCodec(const AudioCodec& codec);
@@ -281,8 +302,25 @@
 
     // If the xml configuration file does not exist, use hard-coded values
     static MediaProfiles* createDefaultInstance();
-    static CamcorderProfile *createDefaultCamcorderLowProfile();
-    static CamcorderProfile *createDefaultCamcorderHighProfile();
+
+    static CamcorderProfile *createDefaultCamcorderQcifProfile(camcorder_quality quality);
+    static CamcorderProfile *createDefaultCamcorderCifProfile(camcorder_quality quality);
+    static void createDefaultCamcorderLowProfiles(
+            MediaProfiles::CamcorderProfile **lowProfile,
+            MediaProfiles::CamcorderProfile **lowSpecificProfile);
+    static void createDefaultCamcorderHighProfiles(
+            MediaProfiles::CamcorderProfile **highProfile,
+            MediaProfiles::CamcorderProfile **highSpecificProfile);
+
+    static CamcorderProfile *createDefaultCamcorderTimeLapseQcifProfile(camcorder_quality quality);
+    static CamcorderProfile *createDefaultCamcorderTimeLapse480pProfile(camcorder_quality quality);
+    static void createDefaultCamcorderTimeLapseLowProfiles(
+            MediaProfiles::CamcorderProfile **lowTimeLapseProfile,
+            MediaProfiles::CamcorderProfile **lowSpecificTimeLapseProfile);
+    static void createDefaultCamcorderTimeLapseHighProfiles(
+            MediaProfiles::CamcorderProfile **highTimeLapseProfile,
+            MediaProfiles::CamcorderProfile **highSpecificTimeLapseProfile);
+
     static void createDefaultCamcorderProfiles(MediaProfiles *profiles);
     static void createDefaultVideoEncoders(MediaProfiles *profiles);
     static void createDefaultAudioEncoders(MediaProfiles *profiles);
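
hasCamcorderProfile() lets callers probe for the new per-resolution and time-lapse qualities before committing to one. A short sketch, assuming the usual MediaProfiles::getInstance() singleton accessor declared earlier in this header:

    // Sketch only: prefer 720p time lapse if this camera has a profile for it,
    // otherwise fall back to the generic high-quality time-lapse profile.
    #include <media/MediaProfiles.h>

    using namespace android;

    static camcorder_quality chooseTimeLapseQuality(int cameraId) {
        MediaProfiles *profiles = MediaProfiles::getInstance();
        if (profiles->hasCamcorderProfile(
                cameraId, CAMCORDER_QUALITY_TIME_LAPSE_720P)) {
            return CAMCORDER_QUALITY_TIME_LAPSE_720P;
        }
        return CAMCORDER_QUALITY_TIME_LAPSE_HIGH;
    }
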
diff --git a/include/media/MediaRecorderBase.h b/include/media/MediaRecorderBase.h
index 5e9e368..c42346e 100644
--- a/include/media/MediaRecorderBase.h
+++ b/include/media/MediaRecorderBase.h
@@ -22,7 +22,7 @@
 
 namespace android {
 
-class ISurface;
+class Surface;
 
 struct MediaRecorderBase {
     MediaRecorderBase() {}
@@ -37,9 +37,10 @@
     virtual status_t setVideoSize(int width, int height) = 0;
     virtual status_t setVideoFrameRate(int frames_per_second) = 0;
     virtual status_t setCamera(const sp<ICamera>& camera) = 0;
-    virtual status_t setPreviewSurface(const sp<ISurface>& surface) = 0;
+    virtual status_t setPreviewSurface(const sp<Surface>& surface) = 0;
     virtual status_t setOutputFile(const char *path) = 0;
     virtual status_t setOutputFile(int fd, int64_t offset, int64_t length) = 0;
+    virtual status_t setOutputFileAuxiliary(int fd) {return INVALID_OPERATION;}
     virtual status_t setParameters(const String8& params) = 0;
     virtual status_t setListener(const sp<IMediaRecorderClient>& listener) = 0;
     virtual status_t prepare() = 0;
diff --git a/include/media/PVMediaRecorder.h b/include/media/PVMediaRecorder.h
deleted file mode 100644
index c091c39..0000000
--- a/include/media/PVMediaRecorder.h
+++ /dev/null
@@ -1,69 +0,0 @@
-/*
- **
- ** Copyright 2008, The Android Open Source Project
- **
- ** Licensed under the Apache License, Version 2.0 (the "License");
- ** you may not use this file except in compliance with the License.
- ** You may obtain a copy of the License at
- **
- **     http://www.apache.org/licenses/LICENSE-2.0
- **
- ** Unless required by applicable law or agreed to in writing, software
- ** distributed under the License is distributed on an "AS IS" BASIS,
- ** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- ** See the License for the specific language governing permissions and
- ** limitations under the License.
- */
-
-#ifndef ANDROID_PVMEDIARECORDER_H
-#define ANDROID_PVMEDIARECORDER_H
-
-#include <media/IMediaRecorderClient.h>
-#include <media/MediaRecorderBase.h>
-
-namespace android {
-
-class ISurface;
-class ICamera;
-class AuthorDriverWrapper;
-
-class PVMediaRecorder : public MediaRecorderBase {
-public:
-    PVMediaRecorder();
-    virtual ~PVMediaRecorder();
-
-    virtual status_t init();
-    virtual status_t setAudioSource(audio_source as);
-    virtual status_t setVideoSource(video_source vs);
-    virtual status_t setOutputFormat(output_format of);
-    virtual status_t setAudioEncoder(audio_encoder ae);
-    virtual status_t setVideoEncoder(video_encoder ve);
-    virtual status_t setVideoSize(int width, int height);
-    virtual status_t setVideoFrameRate(int frames_per_second);
-    virtual status_t setCamera(const sp<ICamera>& camera);
-    virtual status_t setPreviewSurface(const sp<ISurface>& surface);
-    virtual status_t setOutputFile(const char *path);
-    virtual status_t setOutputFile(int fd, int64_t offset, int64_t length);
-    virtual status_t setParameters(const String8& params);
-    virtual status_t setListener(const sp<IMediaRecorderClient>& listener);
-    virtual status_t prepare();
-    virtual status_t start();
-    virtual status_t stop();
-    virtual status_t close();
-    virtual status_t reset();
-    virtual status_t getMaxAmplitude(int *max);
-    virtual status_t dump(int fd, const Vector<String16>& args) const;
-
-private:
-    status_t doStop();
-
-    AuthorDriverWrapper*            mAuthorDriverWrapper;
-
-    PVMediaRecorder(const PVMediaRecorder &);
-    PVMediaRecorder &operator=(const PVMediaRecorder &);
-};
-
-}; // namespace android
-
-#endif // ANDROID_PVMEDIARECORDER_H
-
diff --git a/include/media/PVMetadataRetriever.h b/include/media/PVMetadataRetriever.h
deleted file mode 100644
index c202dfe..0000000
--- a/include/media/PVMetadataRetriever.h
+++ /dev/null
@@ -1,51 +0,0 @@
-/*
-**
-** Copyright (C) 2008 The Android Open Source Project
-**
-** Licensed under the Apache License, Version 2.0 (the "License");
-** you may not use this file except in compliance with the License.
-** You may obtain a copy of the License at
-**
-**     http://www.apache.org/licenses/LICENSE-2.0
-**
-** Unless required by applicable law or agreed to in writing, software
-** distributed under the License is distributed on an "AS IS" BASIS,
-** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-** See the License for the specific language governing permissions and
-** limitations under the License.
-*/
-
-#ifndef ANDROID_PVMETADATARETRIEVER_H
-#define ANDROID_PVMETADATARETRIEVER_H
-
-#include <utils/Errors.h>
-#include <media/MediaMetadataRetrieverInterface.h>
-#include <private/media/VideoFrame.h>
-
-namespace android {
-
-class MetadataDriver;
-
-class PVMetadataRetriever : public MediaMetadataRetrieverInterface
-{
-public:
-                        PVMetadataRetriever();
-    virtual             ~PVMetadataRetriever();
-
-    virtual status_t    setDataSource(const char *url);
-    virtual status_t    setDataSource(int fd, int64_t offset, int64_t length);
-    virtual status_t    setMode(int mode);
-    virtual status_t    getMode(int* mode) const;
-    virtual VideoFrame* captureFrame();
-    virtual MediaAlbumArt* extractAlbumArt();
-    virtual const char* extractMetadata(int keyCode);
-
-private:
-    mutable Mutex       mLock;
-    MetadataDriver*     mMetadataDriver;
-    char*               mDataSourcePath;
-};
-
-}; // namespace android
-
-#endif // ANDROID_PVMETADATARETRIEVER_H
diff --git a/include/media/PVPlayer.h b/include/media/PVPlayer.h
deleted file mode 100644
index df50981..0000000
--- a/include/media/PVPlayer.h
+++ /dev/null
@@ -1,90 +0,0 @@
-/*
- * Copyright (C) 2008 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANDROID_PVPLAYER_H
-#define ANDROID_PVPLAYER_H
-
-#include <utils/Errors.h>
-#include <media/MediaPlayerInterface.h>
-#include <media/Metadata.h>
-
-#define MAX_OPENCORE_INSTANCES 25
-
-#ifdef MAX_OPENCORE_INSTANCES
-#include <cutils/atomic.h>
-#endif
-
-class PlayerDriver;
-
-namespace android {
-
-class PVPlayer : public MediaPlayerInterface
-{
-public:
-                        PVPlayer();
-    virtual             ~PVPlayer();
-
-    virtual status_t    initCheck();
-
-    virtual status_t    setDataSource(
-            const char *url, const KeyedVector<String8, String8> *headers);
-
-    virtual status_t    setDataSource(int fd, int64_t offset, int64_t length);
-    virtual status_t    setVideoSurface(const sp<ISurface>& surface);
-    virtual status_t    prepare();
-    virtual status_t    prepareAsync();
-    virtual status_t    start();
-    virtual status_t    stop();
-    virtual status_t    pause();
-    virtual bool        isPlaying();
-    virtual status_t    seekTo(int msec);
-    virtual status_t    getCurrentPosition(int *msec);
-    virtual status_t    getDuration(int *msec);
-    virtual status_t    reset();
-    virtual status_t    setLooping(int loop);
-    virtual player_type playerType() { return PV_PLAYER; }
-    virtual status_t    invoke(const Parcel& request, Parcel *reply);
-    virtual status_t    getMetadata(
-        const SortedVector<media::Metadata::Type>& ids,
-        Parcel *records);
-
-    // make available to PlayerDriver
-    void        sendEvent(int msg, int ext1=0, int ext2=0) { MediaPlayerBase::sendEvent(msg, ext1, ext2); }
-
-private:
-    static void         do_nothing(status_t s, void *cookie, bool cancelled) { }
-    static void         run_init(status_t s, void *cookie, bool cancelled);
-    static void         run_set_video_surface(status_t s, void *cookie, bool cancelled);
-    static void         run_set_audio_output(status_t s, void *cookie, bool cancelled);
-    static void         run_prepare(status_t s, void *cookie, bool cancelled);
-    static void         check_for_live_streaming(status_t s, void *cookie, bool cancelled);
-
-    PlayerDriver*               mPlayerDriver;
-    char *                      mDataSourcePath;
-    bool                        mIsDataSourceSet;
-    sp<ISurface>                mSurface;
-    int                         mSharedFd;
-    status_t                    mInit;
-    int                         mDuration;
-
-#ifdef MAX_OPENCORE_INSTANCES
-    static volatile int32_t     sNumInstances;
-#endif
-};
-
-}; // namespace android
-
-#endif // ANDROID_PVPLAYER_H
diff --git a/include/media/mediametadataretriever.h b/include/media/mediametadataretriever.h
index ddc07f6..03dd52d 100644
--- a/include/media/mediametadataretriever.h
+++ b/include/media/mediametadataretriever.h
@@ -82,8 +82,7 @@
     status_t setDataSource(const char* dataSourceUrl);
     status_t setDataSource(int fd, int64_t offset, int64_t length);
     status_t setMode(int mode);
-    status_t getMode(int* mode);
-    sp<IMemory> captureFrame();
+    sp<IMemory> getFrameAtTime(int64_t timeUs, int option);
     sp<IMemory> extractAlbumArt();
     const char* extractMetadata(int keyCode);
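
On the client side, captureFrame() is replaced by getFrameAtTime(), which takes a presentation time in microseconds plus a seek option. A usage sketch; the option value 0 is illustrative, and the real option constants are assumed to be defined alongside this class.

    // Sketch only: grab a thumbnail near the 5 second mark of a file.
    #include <media/mediametadataretriever.h>

    using namespace android;

    static sp<IMemory> grabThumbnail(const char *path) {
        MediaMetadataRetriever retriever;
        if (retriever.setDataSource(path) != NO_ERROR) {
            return NULL;
        }
        return retriever.getFrameAtTime(5000000LL /* timeUs */, 0 /* option */);
    }
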
 
diff --git a/include/media/mediaplayer.h b/include/media/mediaplayer.h
index 207191d..88b0c3e 100644
--- a/include/media/mediaplayer.h
+++ b/include/media/mediaplayer.h
@@ -169,8 +169,6 @@
             status_t        invoke(const Parcel& request, Parcel *reply);
             status_t        setMetadataFilter(const Parcel& filter);
             status_t        getMetadata(bool update_only, bool apply_filter, Parcel *metadata);
-            status_t        suspend();
-            status_t        resume();
             status_t        setAudioSessionId(int sessionId);
             int             getAudioSessionId();
             status_t        setAuxEffectSendLevel(float level);
diff --git a/include/media/mediarecorder.h b/include/media/mediarecorder.h
index 5ab1640..a710546 100644
--- a/include/media/mediarecorder.h
+++ b/include/media/mediarecorder.h
@@ -44,7 +44,8 @@
     AUDIO_SOURCE_VOICE_CALL = 4,
     AUDIO_SOURCE_CAMCORDER = 5,
     AUDIO_SOURCE_VOICE_RECOGNITION = 6,
-    AUDIO_SOURCE_MAX = AUDIO_SOURCE_VOICE_RECOGNITION,
+    AUDIO_SOURCE_VOICE_COMMUNICATION = 7,
+    AUDIO_SOURCE_MAX = AUDIO_SOURCE_VOICE_COMMUNICATION,
 
     AUDIO_SOURCE_LIST_END  // must be last - used to validate audio source type
 };
@@ -173,6 +174,7 @@
     status_t    setAudioEncoder(int ae);
     status_t    setOutputFile(const char* path);
     status_t    setOutputFile(int fd, int64_t offset, int64_t length);
+    status_t    setOutputFileAuxiliary(int fd);
     status_t    setVideoSize(int width, int height);
     status_t    setVideoFrameRate(int frames_per_second);
     status_t    setParameters(const String8& params);
@@ -199,6 +201,7 @@
     bool                        mIsAudioEncoderSet;
     bool                        mIsVideoEncoderSet;
     bool                        mIsOutputFileSet;
+    bool                        mIsAuxiliaryOutputFileSet;
     Mutex                       mLock;
     Mutex                       mNotifyLock;
 };
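
setOutputFileAuxiliary() adds a second output descriptor next to the main one; recorders that cannot produce an auxiliary stream keep the INVALID_OPERATION default stubbed into MediaRecorderBase. A client-side sketch; the source/format/encoder setup calls are the usual ones from this header and are shown only to make the snippet complete.

    // Sketch only: record to mainFd and request an auxiliary output on auxFd.
    #include <media/mediarecorder.h>

    using namespace android;

    static status_t startDualOutputRecording(int mainFd, int auxFd) {
        sp<MediaRecorder> recorder = new MediaRecorder();
        recorder->setVideoSource(VIDEO_SOURCE_CAMERA);
        recorder->setOutputFormat(OUTPUT_FORMAT_MPEG_4);
        recorder->setVideoEncoder(VIDEO_ENCODER_H264);
        recorder->setOutputFile(mainFd, 0 /* offset */, 0 /* length */);

        status_t err = recorder->setOutputFileAuxiliary(auxFd);
        if (err != OK) {
            return err;  // this recorder does not support an auxiliary output
        }
        if ((err = recorder->prepare()) != OK) {
            return err;
        }
        return recorder->start();
    }
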
diff --git a/include/media/mediascanner.h b/include/media/mediascanner.h
index 0d397ac..df5be32 100644
--- a/include/media/mediascanner.h
+++ b/include/media/mediascanner.h
@@ -38,8 +38,7 @@
 
     typedef bool (*ExceptionCheck)(void* env);
     virtual status_t processDirectory(
-            const char *path, const char *extensions,
-            MediaScannerClient &client,
+            const char *path, MediaScannerClient &client,
             ExceptionCheck exceptionCheck, void *exceptionEnv);
 
     void setLocale(const char *locale);
@@ -55,9 +54,8 @@
     char *mLocale;
 
     status_t doProcessDirectory(
-            char *path, int pathRemaining, const char *extensions,
-            MediaScannerClient &client, ExceptionCheck exceptionCheck,
-            void *exceptionEnv);
+            char *path, int pathRemaining, MediaScannerClient &client,
+            ExceptionCheck exceptionCheck, void *exceptionEnv);
 
     MediaScanner(const MediaScanner &);
     MediaScanner &operator=(const MediaScanner &);
@@ -73,7 +71,8 @@
     bool addStringTag(const char* name, const char* value);
     void endFile();
 
-    virtual bool scanFile(const char* path, long long lastModified, long long fileSize) = 0;
+    virtual bool scanFile(const char* path, long long lastModified,
+            long long fileSize, bool isDirectory) = 0;
     virtual bool handleStringTag(const char* name, const char* value) = 0;
     virtual bool setMimeType(const char* mimeType) = 0;
     virtual bool addNoMediaFolder(const char* path) = 0;
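
scanFile() now reports whether the entry is a directory, and processDirectory() no longer takes an extension list, so filtering is entirely the client's job. A minimal client sketch, assuming the pure virtuals visible in this hunk are the only ones that must be overridden:

    // Sketch only: a client that logs everything the scanner reports.
    #define LOG_TAG "LoggingScannerClient"
    #include <utils/Log.h>
    #include <media/mediascanner.h>

    namespace android {

    struct LoggingScannerClient : public MediaScannerClient {
        virtual bool scanFile(const char *path, long long lastModified,
                              long long fileSize, bool isDirectory) {
            LOGI("%s %s (%lld bytes)",
                 isDirectory ? "dir" : "file", path, fileSize);
            return true;  // keep scanning
        }
        virtual bool handleStringTag(const char *name, const char *value) {
            LOGI("tag %s = %s", name, value);
            return true;
        }
        virtual bool setMimeType(const char *mimeType) { return true; }
        virtual bool addNoMediaFolder(const char *path) { return true; }
    };

    }  // namespace android
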
diff --git a/include/media/stagefright/ACodec.h b/include/media/stagefright/ACodec.h
new file mode 100644
index 0000000..4599d70
--- /dev/null
+++ b/include/media/stagefright/ACodec.h
@@ -0,0 +1,157 @@
+#ifndef A_CODEC_H_
+
+#define A_CODEC_H_
+
+#include <stdint.h>
+#include <android/native_window.h>
+#include <media/IOMX.h>
+#include <media/stagefright/foundation/AHierarchicalStateMachine.h>
+
+namespace android {
+
+struct ABuffer;
+struct MemoryDealer;
+
+struct ACodec : public AHierarchicalStateMachine {
+    enum {
+        kWhatFillThisBuffer      = 'fill',
+        kWhatDrainThisBuffer     = 'drai',
+        kWhatEOS                 = 'eos ',
+        kWhatShutdownCompleted   = 'scom',
+        kWhatFlushCompleted      = 'fcom',
+        kWhatOutputFormatChanged = 'outC',
+    };
+
+    ACodec();
+
+    void setNotificationMessage(const sp<AMessage> &msg);
+    void initiateSetup(const sp<AMessage> &msg);
+    void signalFlush();
+    void signalResume();
+    void initiateShutdown();
+
+protected:
+    virtual ~ACodec();
+
+private:
+    struct BaseState;
+    struct UninitializedState;
+    struct LoadedToIdleState;
+    struct IdleToExecutingState;
+    struct ExecutingState;
+    struct OutputPortSettingsChangedState;
+    struct ExecutingToIdleState;
+    struct IdleToLoadedState;
+    struct ErrorState;
+    struct FlushingState;
+
+    enum {
+        kWhatSetup                   = 'setu',
+        kWhatOMXMessage              = 'omx ',
+        kWhatInputBufferFilled       = 'inpF',
+        kWhatOutputBufferDrained     = 'outD',
+        kWhatShutdown                = 'shut',
+        kWhatFlush                   = 'flus',
+        kWhatResume                  = 'resm',
+        kWhatDrainDeferredMessages   = 'drai',
+    };
+
+    enum {
+        kPortIndexInput  = 0,
+        kPortIndexOutput = 1
+    };
+
+    struct BufferInfo {
+        enum Status {
+            OWNED_BY_US,
+            OWNED_BY_COMPONENT,
+            OWNED_BY_UPSTREAM,
+            OWNED_BY_DOWNSTREAM,
+            OWNED_BY_NATIVE_WINDOW,
+        };
+
+        IOMX::buffer_id mBufferID;
+        Status mStatus;
+
+        sp<ABuffer> mData;
+        sp<GraphicBuffer> mGraphicBuffer;
+    };
+
+    sp<AMessage> mNotify;
+
+    sp<UninitializedState> mUninitializedState;
+    sp<LoadedToIdleState> mLoadedToIdleState;
+    sp<IdleToExecutingState> mIdleToExecutingState;
+    sp<ExecutingState> mExecutingState;
+    sp<OutputPortSettingsChangedState> mOutputPortSettingsChangedState;
+    sp<ExecutingToIdleState> mExecutingToIdleState;
+    sp<IdleToLoadedState> mIdleToLoadedState;
+    sp<ErrorState> mErrorState;
+    sp<FlushingState> mFlushingState;
+
+    AString mComponentName;
+    sp<IOMX> mOMX;
+    IOMX::node_id mNode;
+    sp<MemoryDealer> mDealer[2];
+
+    sp<ANativeWindow> mNativeWindow;
+
+    Vector<BufferInfo> mBuffers[2];
+    bool mPortEOS[2];
+
+    List<sp<AMessage> > mDeferredQueue;
+
+    bool mSentFormat;
+
+    status_t allocateBuffersOnPort(OMX_U32 portIndex);
+    status_t freeBuffersOnPort(OMX_U32 portIndex);
+    status_t freeBuffer(OMX_U32 portIndex, size_t i);
+
+    status_t allocateOutputBuffersFromNativeWindow();
+    status_t cancelBufferToNativeWindow(BufferInfo *info);
+    status_t freeOutputBuffersOwnedByNativeWindow();
+    BufferInfo *dequeueBufferFromNativeWindow();
+
+    BufferInfo *findBufferByID(
+            uint32_t portIndex, IOMX::buffer_id bufferID,
+            ssize_t *index = NULL);
+
+    void setComponentRole(bool isEncoder, const char *mime);
+    void configureCodec(const char *mime, const sp<AMessage> &msg);
+
+    status_t setVideoPortFormatType(
+            OMX_U32 portIndex,
+            OMX_VIDEO_CODINGTYPE compressionFormat,
+            OMX_COLOR_FORMATTYPE colorFormat);
+
+    status_t setSupportedOutputFormat();
+
+    status_t setupVideoDecoder(
+            const char *mime, int32_t width, int32_t height);
+
+    status_t setVideoFormatOnPort(
+            OMX_U32 portIndex,
+            int32_t width, int32_t height,
+            OMX_VIDEO_CODINGTYPE compressionFormat);
+
+    status_t setupAACDecoder(int32_t numChannels, int32_t sampleRate);
+    status_t setMinBufferSize(OMX_U32 portIndex, size_t size);
+
+    status_t initNativeWindow();
+
+    // Returns true iff all buffers on the given port have status OWNED_BY_US.
+    bool allYourBuffersAreBelongToUs(OMX_U32 portIndex);
+
+    bool allYourBuffersAreBelongToUs();
+
+    void deferMessage(const sp<AMessage> &msg);
+    void processDeferredMessages();
+
+    void sendFormatChange();
+
+    DISALLOW_EVIL_CONSTRUCTORS(ACodec);
+};
+
+}  // namespace android
+
+#endif  // A_CODEC_H_
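
ACodec drives an OMX node through the hierarchical state machine above and reports everything (buffers to fill, buffers to drain, EOS, format changes) on a single notification message. A rough wiring sketch; the looper registration, the AMessage key names ("mime", "width", "height") and the notify id are assumptions patterned after other foundation-based classes, not guarantees made by this header.

    // Sketch only: hand a new ACodec its notification target and a format.
    #include <media/stagefright/ACodec.h>
    #include <media/stagefright/foundation/AHandler.h>
    #include <media/stagefright/foundation/ALooper.h>
    #include <media/stagefright/foundation/AMessage.h>

    using namespace android;

    enum { kWhatCodecNotify = 'cdcN' };  // our own message id, not ACodec's

    static sp<ACodec> startVideoDecoder(
            const sp<ALooper> &looper, const sp<AHandler> &client) {
        sp<ACodec> codec = new ACodec;
        looper->registerHandler(codec);  // assumes ACodec runs on an ALooper

        // All codec events come back to 'client' on this message.
        codec->setNotificationMessage(new AMessage(kWhatCodecNotify, client->id()));

        // Describe the stream; the key names are illustrative.
        sp<AMessage> format = new AMessage;
        format->setString("mime", "video/avc");
        format->setInt32("width", 1280);
        format->setInt32("height", 720);
        codec->initiateSetup(format);

        return codec;
    }
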
diff --git a/include/media/stagefright/AMRWriter.h b/include/media/stagefright/AMRWriter.h
index aa965e1..62d57b4 100644
--- a/include/media/stagefright/AMRWriter.h
+++ b/include/media/stagefright/AMRWriter.h
@@ -44,7 +44,7 @@
     virtual ~AMRWriter();
 
 private:
-    FILE *mFile;
+    int   mFd;
     status_t mInitCheck;
     sp<MediaSource> mSource;
     bool mStarted;
diff --git a/include/media/stagefright/CameraSource.h b/include/media/stagefright/CameraSource.h
index 3192d03..794355b 100644
--- a/include/media/stagefright/CameraSource.h
+++ b/include/media/stagefright/CameraSource.h
@@ -20,39 +20,167 @@
 
 #include <media/stagefright/MediaBuffer.h>
 #include <media/stagefright/MediaSource.h>
+#include <camera/ICamera.h>
+#include <camera/CameraParameters.h>
 #include <utils/List.h>
 #include <utils/RefBase.h>
-#include <utils/threads.h>
 
 namespace android {
 
-class ICamera;
 class IMemory;
 class Camera;
+class Surface;
 
 class CameraSource : public MediaSource, public MediaBufferObserver {
 public:
+    /**
+     * Factory method to create a new CameraSource using the current
+     * settings (such as video size, frame rate, color format, etc)
+     * from the default camera.
+     *
+     * @return NULL on error.
+     */
     static CameraSource *Create();
-    static CameraSource *CreateFromCamera(const sp<Camera> &camera);
+
+    /**
+     * Factory method to create a new CameraSource.
+     *
+     * @param camera the video input frame data source. If it is NULL,
+     *          we will try to connect to the camera with the given
+     *          cameraId.
+     *
+     * @param cameraId the id of the camera that the source will connect
+     *          to if camera is NULL; otherwise ignored.
+     *
+     * @param videoSize the dimension (in pixels) of the video frame
+     * @param frameRate the target frames per second
+     * @param surface the preview surface for display where preview
+     *          frames are sent to
+     * @param storeMetaDataInVideoBuffers true to request the camera
+     *          source to store meta data in video buffers; false to
+     *          request the camera source to store real YUV frame data
+     *          in the video buffers. The camera source may not support
+     *          storing meta data in video buffers; if so, a request
+     *          to do that will NOT be honored. To find out whether
+     *          meta data is actually being stored in video buffers
+     *          during recording, call isMetaDataStoredInVideoBuffers().
+     *
+     * @return NULL on error.
+     */
+    static CameraSource *CreateFromCamera(const sp<ICamera> &camera,
+                                          int32_t cameraId,
+                                          Size videoSize,
+                                          int32_t frameRate,
+                                          const sp<Surface>& surface,
+                                          bool storeMetaDataInVideoBuffers = false);
 
     virtual ~CameraSource();
 
     virtual status_t start(MetaData *params = NULL);
     virtual status_t stop();
-
-    virtual sp<MetaData> getFormat();
-
     virtual status_t read(
             MediaBuffer **buffer, const ReadOptions *options = NULL);
 
+    /**
+     * Check whether a CameraSource object is properly initialized.
+     * Must call this method before stop().
+     * @return OK if initialization has successfully completed.
+     */
+    virtual status_t initCheck() const;
+
+    /**
+     * Returns the MetaData associated with the CameraSource,
+     * including:
+     * kKeyColorFormat: YUV color format of the video frames
+     * kKeyWidth, kKeyHeight: dimension (in pixels) of the video frames
+     * kKeySampleRate: frame rate in frames per second
+     * kKeyMIMEType: always fixed to be MEDIA_MIMETYPE_VIDEO_RAW
+     */
+    virtual sp<MetaData> getFormat();
+
+    /**
+     * Retrieve the total number of video buffers available from
+     * this source.
+     *
+     * This method is useful if these video buffers are used
+     * for passing video frame data to other media components,
+     * such as OMX video encoders, in order to eliminate the
+     * memcpy of the data.
+     *
+     * @return the total number of video buffers. Returns 0 to
+     *      indicate that this source does not make the video
+     *      buffer information available.
+     */
+    size_t getNumberOfVideoBuffers() const;
+
+    /**
+     * Retrieve the individual video buffer available from
+     * this source.
+     *
+     * @param index the index corresponding to the video buffer.
+     *      Valid range of the index is [0, n], where n =
+     *      getNumberOfVideoBuffers() - 1.
+     *
+     * @return the video buffer corresponding to the given index.
+     *      If index is out of range, 0 should be returned.
+     */
+    sp<IMemory> getVideoBuffer(size_t index) const;
+
+    /**
+     * Tell whether this camera source stores meta data or real YUV
+     * frame data in video buffers.
+     *
+     * @return true if meta data is stored in the video
+     *      buffers; false if real YUV data is stored in
+     *      the video buffers.
+     */
+    bool isMetaDataStoredInVideoBuffers() const;
+
     virtual void signalBufferReturned(MediaBuffer* buffer);
 
+protected:
+    enum CameraFlags {
+        FLAGS_SET_CAMERA = 1L << 0,
+        FLAGS_HOT_CAMERA = 1L << 1,
+    };
+
+    int32_t  mCameraFlags;
+    Size     mVideoSize;
+    int32_t  mVideoFrameRate;
+    int32_t  mColorFormat;
+    status_t mInitCheck;
+
+    sp<Camera>   mCamera;
+    sp<Surface>  mSurface;
+    sp<MetaData> mMeta;
+
+    int64_t mStartTimeUs;
+    int32_t mNumFramesReceived;
+    int64_t mLastFrameTimestampUs;
+    bool mStarted;
+
+    CameraSource(const sp<ICamera>& camera, int32_t cameraId,
+                 Size videoSize, int32_t frameRate,
+                 const sp<Surface>& surface,
+                 bool storeMetaDataInVideoBuffers);
+
+    virtual void startCameraRecording();
+    virtual void stopCameraRecording();
+    virtual void releaseRecordingFrame(const sp<IMemory>& frame);
+
+    // Returns true if need to skip the current frame.
+    // Called from dataCallbackTimestamp.
+    virtual bool skipCurrentFrame(int64_t timestampUs) {return false;}
+
+    // Callback called when still camera raw data is available.
+    virtual void dataCallback(int32_t msgType, const sp<IMemory> &data) {}
+
+    virtual void dataCallbackTimestamp(int64_t timestampUs, int32_t msgType,
+            const sp<IMemory> &data);
+
 private:
     friend class CameraSourceListener;
 
-    sp<Camera> mCamera;
-    sp<MetaData> mMeta;
-
     Mutex mLock;
     Condition mFrameAvailableCondition;
     Condition mFrameCompleteCondition;
@@ -60,25 +188,35 @@
     List<sp<IMemory> > mFramesBeingEncoded;
     List<int64_t> mFrameTimes;
 
-    int64_t mStartTimeUs;
     int64_t mFirstFrameTimeUs;
-    int64_t mLastFrameTimestampUs;
-    int32_t mNumFramesReceived;
     int32_t mNumFramesEncoded;
     int32_t mNumFramesDropped;
     int32_t mNumGlitches;
     int64_t mGlitchDurationThresholdUs;
     bool mCollectStats;
-    bool mStarted;
-
-    CameraSource(const sp<Camera> &camera);
-
-    void dataCallbackTimestamp(
-            int64_t timestampUs, int32_t msgType, const sp<IMemory> &data);
+    bool mIsMetaDataStoredInVideoBuffers;
 
     void releaseQueuedFrames();
     void releaseOneRecordingFrame(const sp<IMemory>& frame);
 
+
+    status_t init(const sp<ICamera>& camera, int32_t cameraId,
+                Size videoSize, int32_t frameRate,
+                bool storeMetaDataInVideoBuffers);
+    status_t isCameraAvailable(const sp<ICamera>& camera, int32_t cameraId);
+    status_t isCameraColorFormatSupported(const CameraParameters& params);
+    status_t configureCamera(CameraParameters* params,
+                    int32_t width, int32_t height,
+                    int32_t frameRate);
+
+    status_t checkVideoSize(const CameraParameters& params,
+                    int32_t width, int32_t height);
+
+    status_t checkFrameRate(const CameraParameters& params,
+                    int32_t frameRate);
+
+    void releaseCamera();
+
     CameraSource(const CameraSource &);
     CameraSource &operator=(const CameraSource &);
 };
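
CreateFromCamera() now takes everything explicitly: an optional ICamera (or a camera id to connect with), the video size and frame rate, the preview surface, and whether to attempt metadata-in-buffer mode. A minimal sketch based on the factory documented above; the 720p/30fps numbers are arbitrary, and Size comes from the CameraParameters.h include added in this hunk.

    // Sketch only: open camera 0 and prefer metadata-in-buffer mode, falling
    // back silently to real YUV buffers if the camera cannot honor it.
    #include <media/stagefright/CameraSource.h>

    using namespace android;

    static sp<CameraSource> makeCameraSource(const sp<Surface> &previewSurface) {
        Size videoSize;
        videoSize.width = 1280;
        videoSize.height = 720;

        sp<CameraSource> source = CameraSource::CreateFromCamera(
                NULL /* camera: connect by id instead */,
                0 /* cameraId */,
                videoSize,
                30 /* frameRate */,
                previewSurface,
                true /* storeMetaDataInVideoBuffers */);

        if (source == NULL || source->initCheck() != OK) {
            return NULL;
        }
        if (!source->isMetaDataStoredInVideoBuffers()) {
            // The request was not honored; buffers will carry real YUV data.
        }
        return source;
    }
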
diff --git a/include/media/stagefright/CameraSourceTimeLapse.h b/include/media/stagefright/CameraSourceTimeLapse.h
new file mode 100644
index 0000000..0e5d534
--- /dev/null
+++ b/include/media/stagefright/CameraSourceTimeLapse.h
@@ -0,0 +1,243 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef CAMERA_SOURCE_TIME_LAPSE_H_
+
+#define CAMERA_SOURCE_TIME_LAPSE_H_
+
+#include <pthread.h>
+
+#include <utils/RefBase.h>
+#include <utils/threads.h>
+
+namespace android {
+
+class ICamera;
+class IMemory;
+class Camera;
+
+class CameraSourceTimeLapse : public CameraSource {
+public:
+    static CameraSourceTimeLapse *CreateFromCamera(
+        const sp<ICamera> &camera,
+        int32_t cameraId,
+        Size videoSize,
+        int32_t videoFrameRate,
+        const sp<Surface>& surface,
+        int64_t timeBetweenTimeLapseFrameCaptureUs);
+
+    virtual ~CameraSourceTimeLapse();
+
+    // If the frame capture interval is large, read will block for a long time.
+    // Due to the way the mediaRecorder framework works, a stop() call from
+    // mediaRecorder waits until the read returns, causing a long wait for
+    // stop() to return. To avoid this, we can make read() return a copy of the
+    // last read frame with the same time stamp frequently. This keeps the
+    // read() call from blocking too long. Calling this function quickly
+    // captures another frame, keeps its copy, and enables this mode of read()
+    // returning quickly.
+    void startQuickReadReturns();
+
+private:
+    // If true, will use still camera takePicture() for time lapse frames.
+    // If false, will use the video camera frames instead.
+    bool mUseStillCameraForTimeLapse;
+
+    // Size of picture taken from still camera. This may be larger than the size
+    // of the video, as the still camera may not support the exact video resolution
+    // demanded. See setPictureSizeToClosestSupported().
+    int32_t mPictureWidth;
+    int32_t mPictureHeight;
+
+    // size of the encoded video.
+    int32_t mVideoWidth;
+    int32_t mVideoHeight;
+
+    // True if we need to crop the still camera image to get the video frame.
+    bool mNeedCropping;
+
+    // Start location of the cropping rectangle.
+    int32_t mCropRectStartX;
+    int32_t mCropRectStartY;
+
+    // Time between capture of two frames during time lapse recording
+    // Negative value indicates that timelapse is disabled.
+    int64_t mTimeBetweenTimeLapseFrameCaptureUs;
+
+    // Time between two frames in final video (1/frameRate)
+    int64_t mTimeBetweenTimeLapseVideoFramesUs;
+
+    // Real timestamp of the last encoded time lapse frame
+    int64_t mLastTimeLapseFrameRealTimestampUs;
+
+    // Thread id of thread which takes still picture and sleeps in a loop.
+    pthread_t mThreadTimeLapse;
+
+    // Variable set in dataCallbackTimestamp() to help skipCurrentFrame()
+    // to know if current frame needs to be skipped.
+    bool mSkipCurrentFrame;
+
+    // Lock for accessing mCameraIdle
+    Mutex mCameraIdleLock;
+
+    // Condition variable to wait on if camera is not yet idle. Once the
+    // camera gets idle, this variable will be signalled.
+    Condition mCameraIdleCondition;
+
+    // True if camera is in preview mode and ready for takePicture().
+    // False after a call to takePicture() but before the final compressed
+    // data callback has been called and preview has been restarted.
+    volatile bool mCameraIdle;
+
+    // True if stop() is waiting for camera to get idle, i.e. for the last
+    // takePicture() to complete. This is needed so that dataCallbackTimestamp()
+    // can return immediately.
+    volatile bool mStopWaitingForIdleCamera;
+
+    // Lock for accessing quick stop variables.
+    Mutex mQuickStopLock;
+
+    // Condition variable to wake up still picture thread.
+    Condition mTakePictureCondition;
+
+    // mQuickStop is set to true if we use quick read() returns, otherwise it is set
+    // to false. Once in this mode read() returns a copy of the last read frame
+    // with the same time stamp. See startQuickReadReturns().
+    volatile bool mQuickStop;
+
+    // Forces the next frame passed to dataCallbackTimestamp() to be read
+    // as a time lapse frame. Used by startQuickReadReturns() so that the next
+    // frame wakes up any blocking read.
+    volatile bool mForceRead;
+
+    // Stores a copy of the MediaBuffer read in the last read() call after
+    // mQuickStop was true.
+    MediaBuffer* mLastReadBufferCopy;
+
+    // Status code for last read.
+    status_t mLastReadStatus;
+
+    CameraSourceTimeLapse(
+        const sp<ICamera> &camera,
+        int32_t cameraId,
+        Size videoSize,
+        int32_t videoFrameRate,
+        const sp<Surface>& surface,
+        int64_t timeBetweenTimeLapseFrameCaptureUs);
+
+    // Wrapper over CameraSource::signalBufferReturned() to implement quick stop.
+    // It only handles the case when mLastReadBufferCopy is signalled. Otherwise
+    // it calls the base class' function.
+    virtual void signalBufferReturned(MediaBuffer* buffer);
+
+    // Wrapper over CameraSource::read() to implement quick stop.
+    virtual status_t read(MediaBuffer **buffer, const ReadOptions *options = NULL);
+
+    // For still camera case starts a thread which calls camera's takePicture()
+    // in a loop. For video camera case, just starts the camera's video recording.
+    virtual void startCameraRecording();
+
+    // For still camera case joins the thread created in startCameraRecording().
+    // For video camera case, just stops the camera's video recording.
+    virtual void stopCameraRecording();
+
+    // For still camera case don't need to do anything as memory is locally
+    // allocated with refcounting.
+    // For video camera case just tell the camera to release the frame.
+    virtual void releaseRecordingFrame(const sp<IMemory>& frame);
+
+    // mSkipCurrentFrame is set to true in dataCallbackTimestamp() if the current
+    // frame needs to be skipped and this function just returns the value of mSkipCurrentFrame.
+    virtual bool skipCurrentFrame(int64_t timestampUs);
+
+    // Handles the callback to handle raw frame data from the still camera.
+    // Creates a copy of the frame data as the camera can reuse the frame memory
+    // once this callback returns. The function also sets a new timestamp corresponding
+    // to one frame time ahead of the last encoded frame's time stamp. It then
+    // calls dataCallbackTimestamp() of the base class with the copied data and the
+    // modified timestamp; the base class will think that it received the frame from
+    // a video camera and proceed as usual.
+    virtual void dataCallback(int32_t msgType, const sp<IMemory> &data);
+
+    // In the video camera case calls skipFrameAndModifyTimeStamp() to modify
+    // timestamp and set mSkipCurrentFrame.
+    // Then it calls the base CameraSource::dataCallbackTimestamp()
+    virtual void dataCallbackTimestamp(int64_t timestampUs, int32_t msgType,
+            const sp<IMemory> &data);
+
+    // Convenience function to fill mLastReadBufferCopy from the just read
+    // buffer.
+    void fillLastReadBufferCopy(MediaBuffer& sourceBuffer);
+
+    // If the passed in size (width x height) is a supported video/preview size,
+    // the function sets the camera's video/preview size to it and returns true.
+    // Otherwise returns false.
+    bool trySettingVideoSize(int32_t width, int32_t height);
+
+    // The still camera may not support the demanded video width and height.
+    // We look for the supported picture sizes from the still camera and
+    // choose the smallest one with both dimensions no smaller than the corresponding
+    // video dimensions. The still picture will be cropped to get the video frame.
+    // The function returns true if the camera supports picture sizes greater than
+    // or equal to the passed in width and height, and false otherwise.
+    bool setPictureSizeToClosestSupported(int32_t width, int32_t height);
+
+    // Computes the offset of the rectangle from where to start cropping the
+    // still image into the video frame. We choose the center of the image to be
+    // cropped. The offset is stored in (mCropRectStartX, mCropRectStartY).
+    bool computeCropRectangleOffset();
+
+    // Crops the source data into a smaller image starting at
+    // (mCropRectStartX, mCropRectStartY) and of the size of the video frame.
+    // The data is returned into a newly allocated IMemory.
+    sp<IMemory> cropYUVImage(const sp<IMemory> &source_data);
+
+    // When video camera is used for time lapse capture, returns true
+    // until enough time has passed for the next time lapse frame. When
+    // the frame needs to be encoded, it returns false and also modifies
+    // the time stamp to be one frame time ahead of the last encoded
+    // frame's time stamp.
+    bool skipFrameAndModifyTimeStamp(int64_t *timestampUs);
+
+    // Wrapper to enter threadTimeLapseEntry()
+    static void *ThreadTimeLapseWrapper(void *me);
+
+    // Runs a loop which sleeps until a still picture is required
+    // and then calls mCamera->takePicture() to take the still picture.
+    // Used only in the case mUseStillCameraForTimeLapse = true.
+    void threadTimeLapseEntry();
+
+    // Wrapper to enter threadStartPreview()
+    static void *ThreadStartPreviewWrapper(void *me);
+
+    // Starts the camera's preview.
+    void threadStartPreview();
+
+    // Starts thread ThreadStartPreviewWrapper() for restarting preview.
+    // Needs to be done in a thread so that dataCallback() which calls this function
+    // can return, and the camera can know that takePicture() is done.
+    void restartPreview();
+
+    // Creates a copy of source_data into a new memory of final type MemoryBase.
+    sp<IMemory> createIMemoryCopy(const sp<IMemory> &source_data);
+
+    CameraSourceTimeLapse(const CameraSourceTimeLapse &);
+    CameraSourceTimeLapse &operator=(const CameraSourceTimeLapse &);
+};
+
+}  // namespace android
+
+#endif  // CAMERA_SOURCE_TIME_LAPSE_H_
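
The core of the video-camera time-lapse path is the skip/re-stamp decision described for skipFrameAndModifyTimeStamp(): frames arriving sooner than the capture interval are dropped, and each kept frame is re-stamped one video-frame duration after the previous kept frame so playback runs at the requested rate. A simplified standalone model of that bookkeeping (illustrative only, not the class's actual implementation):

    // Member names mirror the header above; the logic is a simplified model.
    #include <stdint.h>

    struct TimeLapseClock {
        int64_t mTimeBetweenTimeLapseFrameCaptureUs;  // e.g. 1000000 for 1 fps capture
        int64_t mTimeBetweenTimeLapseVideoFramesUs;   // 1000000 / videoFrameRate
        int64_t mLastTimeLapseFrameRealTimestampUs;
        int64_t mLastOutputTimestampUs;

        // Returns true if the incoming frame should be skipped; otherwise
        // rewrites *timestampUs so the output frames are evenly spaced.
        bool skipFrameAndModifyTimeStamp(int64_t *timestampUs) {
            if (mLastTimeLapseFrameRealTimestampUs != 0 &&
                *timestampUs < mLastTimeLapseFrameRealTimestampUs +
                               mTimeBetweenTimeLapseFrameCaptureUs) {
                return true;  // not enough real time has elapsed yet
            }
            mLastTimeLapseFrameRealTimestampUs = *timestampUs;
            mLastOutputTimestampUs += mTimeBetweenTimeLapseVideoFramesUs;
            *timestampUs = mLastOutputTimestampUs;
            return false;
        }
    };
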
diff --git a/include/media/stagefright/ColorConverter.h b/include/media/stagefright/ColorConverter.h
index bc3f464..2ae8a5b 100644
--- a/include/media/stagefright/ColorConverter.h
+++ b/include/media/stagefright/ColorConverter.h
@@ -21,6 +21,7 @@
 #include <sys/types.h>
 
 #include <stdint.h>
+#include <utils/Errors.h>
 
 #include <OMX_Video.h>
 
@@ -32,36 +33,48 @@
 
     bool isValid() const;
 
-    void convert(
-            size_t width, size_t height,
-            const void *srcBits, size_t srcSkip,
-            void *dstBits, size_t dstSkip);
+    status_t convert(
+            const void *srcBits,
+            size_t srcWidth, size_t srcHeight,
+            size_t srcCropLeft, size_t srcCropTop,
+            size_t srcCropRight, size_t srcCropBottom,
+            void *dstBits,
+            size_t dstWidth, size_t dstHeight,
+            size_t dstCropLeft, size_t dstCropTop,
+            size_t dstCropRight, size_t dstCropBottom);
 
 private:
+    struct BitmapParams {
+        BitmapParams(
+                void *bits,
+                size_t width, size_t height,
+                size_t cropLeft, size_t cropTop,
+                size_t cropRight, size_t cropBottom);
+
+        size_t cropWidth() const;
+        size_t cropHeight() const;
+
+        void *mBits;
+        size_t mWidth, mHeight;
+        size_t mCropLeft, mCropTop, mCropRight, mCropBottom;
+    };
+
     OMX_COLOR_FORMATTYPE mSrcFormat, mDstFormat;
     uint8_t *mClip;
 
     uint8_t *initClip();
 
-    void convertCbYCrY(
-            size_t width, size_t height,
-            const void *srcBits, size_t srcSkip,
-            void *dstBits, size_t dstSkip);
+    status_t convertCbYCrY(
+            const BitmapParams &src, const BitmapParams &dst);
 
-    void convertYUV420Planar(
-            size_t width, size_t height,
-            const void *srcBits, size_t srcSkip,
-            void *dstBits, size_t dstSkip);
+    status_t convertYUV420Planar(
+            const BitmapParams &src, const BitmapParams &dst);
 
-    void convertQCOMYUV420SemiPlanar(
-            size_t width, size_t height,
-            const void *srcBits, size_t srcSkip,
-            void *dstBits, size_t dstSkip);
+    status_t convertQCOMYUV420SemiPlanar(
+            const BitmapParams &src, const BitmapParams &dst);
 
-    void convertYUV420SemiPlanar(
-            size_t width, size_t height,
-            const void *srcBits, size_t srcSkip,
-            void *dstBits, size_t dstSkip);
+    status_t convertYUV420SemiPlanar(
+            const BitmapParams &src, const BitmapParams &dst);
 
     ColorConverter(const ColorConverter &);
     ColorConverter &operator=(const ColorConverter &);
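
convert() now takes explicit, per-side crop rectangles and returns a status_t instead of silently assuming matching geometries. A usage sketch; the two-format constructor and the inclusive crop-coordinate convention used here are assumptions about the existing class, not shown in this hunk.

    // Sketch only: convert a full YUV420 planar frame to RGB565 at the same size.
    #include <media/stagefright/ColorConverter.h>
    #include <media/stagefright/MediaErrors.h>

    using namespace android;

    static status_t toRGB565(const void *yuv, void *rgb,
                             size_t width, size_t height) {
        ColorConverter converter(
                OMX_COLOR_FormatYUV420Planar, OMX_COLOR_Format16bitRGB565);
        if (!converter.isValid()) {
            return ERROR_UNSUPPORTED;
        }
        // Crop rectangles cover the whole frame on both sides (coordinates
        // assumed inclusive, hence width - 1 / height - 1).
        return converter.convert(
                yuv, width, height, 0, 0, width - 1, height - 1,
                rgb, width, height, 0, 0, width - 1, height - 1);
    }
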
diff --git a/include/media/stagefright/DataSource.h b/include/media/stagefright/DataSource.h
index d0b9fcd..d4f1733 100644
--- a/include/media/stagefright/DataSource.h
+++ b/include/media/stagefright/DataSource.h
@@ -48,13 +48,13 @@
 
     virtual status_t initCheck() const = 0;
 
-    virtual ssize_t readAt(off_t offset, void *data, size_t size) = 0;
+    virtual ssize_t readAt(off64_t offset, void *data, size_t size) = 0;
 
     // Convenience methods:
-    bool getUInt16(off_t offset, uint16_t *x);
+    bool getUInt16(off64_t offset, uint16_t *x);
 
     // May return ERROR_UNSUPPORTED.
-    virtual status_t getSize(off_t *size);
+    virtual status_t getSize(off64_t *size);
 
     virtual uint32_t flags() {
         return 0;
@@ -80,6 +80,9 @@
     }
     virtual void getDrmInfo(DecryptHandle **handle, DrmManagerClient **client) {};
 
+    virtual String8 getUri() {
+        return String8();
+    }
 
 protected:
     virtual ~DataSource() {}
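
readAt() and getSize() are now 64-bit clean (off64_t), so sources larger than 2 GB remain addressable on 32-bit builds. A minimal in-memory DataSource showing the updated signatures:

    // Sketch only: serve a caller-provided byte array through the DataSource API.
    #include <stdint.h>
    #include <string.h>
    #include <media/stagefright/DataSource.h>

    namespace android {

    struct MemoryDataSource : public DataSource {
        MemoryDataSource(const void *data, size_t size)
            : mData(data), mSize(size) {}

        virtual status_t initCheck() const { return OK; }

        virtual ssize_t readAt(off64_t offset, void *data, size_t size) {
            if (offset < 0 || (uint64_t)offset >= mSize) {
                return 0;  // end of stream
            }
            size_t avail = mSize - (size_t)offset;
            size_t n = size < avail ? size : avail;
            memcpy(data, (const uint8_t *)mData + offset, n);
            return n;
        }

        virtual status_t getSize(off64_t *size) {
            *size = mSize;
            return OK;
        }

    private:
        const void *mData;
        size_t mSize;
    };

    }  // namespace android
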
diff --git a/include/media/stagefright/FileSource.h b/include/media/stagefright/FileSource.h
index 4307263..72a0403 100644
--- a/include/media/stagefright/FileSource.h
+++ b/include/media/stagefright/FileSource.h
@@ -34,9 +34,9 @@
 
     virtual status_t initCheck() const;
 
-    virtual ssize_t readAt(off_t offset, void *data, size_t size);
+    virtual ssize_t readAt(off64_t offset, void *data, size_t size);
 
-    virtual status_t getSize(off_t *size);
+    virtual status_t getSize(off64_t *size);
 
     virtual DecryptHandle* DrmInitialization(DrmManagerClient *client);
 
@@ -46,7 +46,6 @@
     virtual ~FileSource();
 
 private:
-    FILE *mFile;
     int mFd;
     int64_t mOffset;
     int64_t mLength;
@@ -59,7 +58,7 @@
     int64_t mDrmBufSize;
     unsigned char *mDrmBuf;
 
-    ssize_t readAtDRM(off_t offset, void *data, size_t size);
+    ssize_t readAtDRM(off64_t offset, void *data, size_t size);
 
     FileSource(const FileSource &);
     FileSource &operator=(const FileSource &);
diff --git a/include/media/stagefright/HardwareAPI.h b/include/media/stagefright/HardwareAPI.h
index 63f11d1..17908b4 100644
--- a/include/media/stagefright/HardwareAPI.h
+++ b/include/media/stagefright/HardwareAPI.h
@@ -19,28 +19,76 @@
 #define HARDWARE_API_H_
 
 #include <media/stagefright/OMXPluginBase.h>
-#include <media/stagefright/VideoRenderer.h>
-#include <surfaceflinger/ISurface.h>
+#include <ui/android_native_buffer.h>
 #include <utils/RefBase.h>
 
 #include <OMX_Component.h>
 
-extern android::VideoRenderer *createRenderer(
-        const android::sp<android::ISurface> &surface,
-        const char *componentName,
-        OMX_COLOR_FORMATTYPE colorFormat,
-        size_t displayWidth, size_t displayHeight,
-        size_t decodedWidth, size_t decodedHeight);
+namespace android {
 
-extern android::VideoRenderer *createRendererWithRotation(
-        const android::sp<android::ISurface> &surface,
-        const char *componentName,
-        OMX_COLOR_FORMATTYPE colorFormat,
-        size_t displayWidth, size_t displayHeight,
-        size_t decodedWidth, size_t decodedHeight,
-        int32_t rotationDegrees);
+// A pointer to this struct is passed to the OMX_SetParameter when the extension
+// index for the 'OMX.google.android.index.enableAndroidNativeBuffers' extension
+// is given.
+//
+// When Android native buffer use is disabled for a port (the default state),
+// the OMX node should operate as normal, and expect UseBuffer calls to set its
+// buffers.  This is the mode that will be used when CPU access to the buffer is
+// required.
+//
+// When Android native buffer use has been enabled for a given port, the video
+// color format for the port is to be interpreted as an Android pixel format
+// rather than an OMX color format.  The node should then expect to receive
+// UseAndroidNativeBuffer calls (via OMX_SetParameter) rather than UseBuffer
+// calls for that port.
+struct EnableAndroidNativeBuffersParams {
+    OMX_U32 nSize;
+    OMX_VERSIONTYPE nVersion;
+    OMX_U32 nPortIndex;
+    OMX_BOOL enable;
+};
+
+// A pointer to this struct is passed to OMX_SetParameter() when the extension
+// index "OMX.google.android.index.storeMetaDataInBuffers"
+// is given.
+//
+// When meta data is stored in the video buffers passed between OMX clients
+// and OMX components, interpretation of the buffer data is up to the
+// buffer receiver: the buffer contents may not be the actual video data, but
+// rather information that helps the receiver locate the actual data.
+// The buffer receiver thus needs to know how to interpret what is stored
+// in these buffers, with mechanisms pre-determined externally. How to
+// interpret the meta data is outside of the scope of this method.
+//
+// Currently, this is specifically used to pass meta data from video source
+// (camera component, for instance) to video encoder to avoid memcpying of
+// input video frame data. To do this, bStoreMetaData is set to OMX_TRUE.
+// If bStoreMetaData is set to false, real YUV frame data will be stored
+// in the buffers. In addition, if no OMX_SetParameter() call is made
+// with the corresponding extension index, real YUV data is stored
+// in the buffers.
+struct StoreMetaDataInBuffersParams {
+    OMX_U32 nSize;
+    OMX_VERSIONTYPE nVersion;
+    OMX_U32 nPortIndex;
+    OMX_BOOL bStoreMetaData;
+};
+
+// A pointer to this struct is passed to OMX_SetParameter when the extension
+// index for the 'OMX.google.android.index.useAndroidNativeBuffer' extension is
+// given.  This call will only be performed if a prior call was made with the
+// 'OMX.google.android.index.enableAndroidNativeBuffers' extension index,
+// enabling use of Android native buffers.
+struct UseAndroidNativeBufferParams {
+    OMX_U32 nSize;
+    OMX_VERSIONTYPE nVersion;
+    OMX_U32 nPortIndex;
+    OMX_PTR pAppPrivate;
+    OMX_BUFFERHEADERTYPE **bufferHeader;
+    const sp<android_native_buffer_t>& nativeBuffer;
+};
+
+}  // namespace android
 
 extern android::OMXPluginBase *createOMXPlugin();
 
 #endif  // HARDWARE_API_H_
-
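
Each of these structs is passed to OMX_SetParameter() once the matching extension index has been looked up by name. A sketch of switching a component's input port into metadata mode; OMX handle acquisition, full version filling and error handling are outside the scope of the snippet.

    // Sketch only: enable "store meta data in buffers" on port 0 of a component.
    #include <string.h>
    #include <OMX_Core.h>
    #include <media/stagefright/HardwareAPI.h>

    static OMX_ERRORTYPE enableMetaDataMode(OMX_HANDLETYPE component) {
        OMX_INDEXTYPE index;
        OMX_ERRORTYPE err = OMX_GetExtensionIndex(
                component,
                (OMX_STRING)"OMX.google.android.index.storeMetaDataInBuffers",
                &index);
        if (err != OMX_ErrorNone) {
            return err;  // component does not implement the extension
        }

        android::StoreMetaDataInBuffersParams params;
        memset(&params, 0, sizeof(params));
        params.nSize = sizeof(params);
        params.nVersion.s.nVersionMajor = 1;  // version handling simplified
        params.nPortIndex = 0;                // input port
        params.bStoreMetaData = OMX_TRUE;

        return OMX_SetParameter(component, index, &params);
    }
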
diff --git a/include/media/stagefright/JPEGSource.h b/include/media/stagefright/JPEGSource.h
index 9d0a700..1b7e91b 100644
--- a/include/media/stagefright/JPEGSource.h
+++ b/include/media/stagefright/JPEGSource.h
@@ -42,9 +42,9 @@
     sp<DataSource> mSource;
     MediaBufferGroup *mGroup;
     bool mStarted;
-    off_t mSize;
+    off64_t mSize;
     int32_t mWidth, mHeight;
-    off_t mOffset;
+    off64_t mOffset;
 
     status_t parseJPEG();
 
diff --git a/include/media/stagefright/MPEG4Writer.h b/include/media/stagefright/MPEG4Writer.h
index 7bf07eb..f7618e9 100644
--- a/include/media/stagefright/MPEG4Writer.h
+++ b/include/media/stagefright/MPEG4Writer.h
@@ -61,20 +61,21 @@
 private:
     class Track;
 
-    FILE *mFile;
+    int  mFd;
+    status_t mInitCheck;
     bool mUse4ByteNalLength;
     bool mUse32BitOffset;
     bool mIsFileSizeLimitExplicitlyRequested;
     bool mPaused;
     bool mStarted;
-    off_t mOffset;
+    off64_t mOffset;
     off_t mMdatOffset;
     uint8_t *mMoovBoxBuffer;
-    off_t mMoovBoxBufferOffset;
+    off64_t mMoovBoxBufferOffset;
     bool  mWriteMoovBoxToMemory;
-    off_t mFreeBoxOffset;
+    off64_t mFreeBoxOffset;
     bool mStreamableFile;
-    off_t mEstimatedMoovBoxSize;
+    off64_t mEstimatedMoovBoxSize;
     uint32_t mInterleaveDurationUs;
     int32_t mTimeScale;
     int64_t mStartTimestampUs;
@@ -83,7 +84,7 @@
 
     List<Track *> mTracks;
 
-    List<off_t> mBoxes;
+    List<off64_t> mBoxes;
 
     void setStartTimestampUs(int64_t timeUs);
     int64_t getStartTimestampUs();  // Not const
@@ -145,10 +146,10 @@
     void unlock();
 
     // Acquire lock before calling these methods
-    off_t addSample_l(MediaBuffer *buffer);
-    off_t addLengthPrefixedSample_l(MediaBuffer *buffer);
+    off64_t addSample_l(MediaBuffer *buffer);
+    off64_t addLengthPrefixedSample_l(MediaBuffer *buffer);
 
-    inline size_t write(const void *ptr, size_t size, size_t nmemb, FILE* stream);
+    inline size_t write(const void *ptr, size_t size, size_t nmemb);
     bool exceedsFileSizeLimit();
     bool use32BitFileOffset() const;
     bool exceedsFileDurationLimit();
diff --git a/include/media/stagefright/MediaBuffer.h b/include/media/stagefright/MediaBuffer.h
index 339e6fb..c1c4f94 100644
--- a/include/media/stagefright/MediaBuffer.h
+++ b/include/media/stagefright/MediaBuffer.h
@@ -25,6 +25,7 @@
 
 namespace android {
 
+class GraphicBuffer;
 class MediaBuffer;
 class MediaBufferObserver;
 class MetaData;
@@ -48,6 +49,8 @@
 
     MediaBuffer(size_t size);
 
+    MediaBuffer(const sp<GraphicBuffer>& graphicBuffer);
+
     // Decrements the reference count and returns the buffer to its
     // associated MediaBufferGroup if the reference count drops to 0.
     void release();
@@ -63,6 +66,8 @@
 
     void set_range(size_t offset, size_t length);
 
+    sp<GraphicBuffer> graphicBuffer() const;
+
     sp<MetaData> meta_data();
 
     // Clears meta data and resets the range to the full extent.
@@ -94,6 +99,7 @@
 
     void *mData;
     size_t mSize, mRangeOffset, mRangeLength;
+    sp<GraphicBuffer> mGraphicBuffer;
 
     bool mOwnsData;
 
diff --git a/include/media/stagefright/MediaDefs.h b/include/media/stagefright/MediaDefs.h
index 92ce068..2d50ca5 100644
--- a/include/media/stagefright/MediaDefs.h
+++ b/include/media/stagefright/MediaDefs.h
@@ -44,6 +44,8 @@
 extern const char *MEDIA_MIMETYPE_CONTAINER_MATROSKA;
 extern const char *MEDIA_MIMETYPE_CONTAINER_MPEG2TS;
 
+extern const char *MEDIA_MIMETYPE_CONTAINER_WVM;
+
 }  // namespace android
 
 #endif  // MEDIA_DEFS_H_
diff --git a/include/media/stagefright/MediaSource.h b/include/media/stagefright/MediaSource.h
index dafc621..a31395e 100644
--- a/include/media/stagefright/MediaSource.h
+++ b/include/media/stagefright/MediaSource.h
@@ -78,31 +78,18 @@
         void clearSeekTo();
         bool getSeekTo(int64_t *time_us, SeekMode *mode) const;
 
-        // Option allows encoder to skip some frames until the specified
-        // time stamp.
-        // To prevent from being abused, when the skipFrame timestamp is
-        // found to be more than 1 second later than the current timestamp,
-        // an error will be returned from read().
-        void clearSkipFrame();
-        bool getSkipFrame(int64_t *timeUs) const;
-        void setSkipFrame(int64_t timeUs);
-
         void setLateBy(int64_t lateness_us);
         int64_t getLateBy() const;
 
     private:
         enum Options {
-            // Bit map
             kSeekTo_Option      = 1,
-            kSkipFrame_Option   = 2,
         };
 
         uint32_t mOptions;
         int64_t mSeekTimeUs;
         SeekMode mSeekMode;
         int64_t mLatenessUs;
-
-        int64_t mSkipFrameUntilTimeUs;
     };
 
     // Causes this source to suspend pulling data from its upstream source
diff --git a/include/media/stagefright/MediaSourceSplitter.h b/include/media/stagefright/MediaSourceSplitter.h
new file mode 100644
index 0000000..568f4c2
--- /dev/null
+++ b/include/media/stagefright/MediaSourceSplitter.h
@@ -0,0 +1,193 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// This class provides a way to split a single media source into multiple sources.
+// The constructor takes in the real mediaSource and createClient() can then be
+// used to create multiple sources served from this real mediaSource.
+//
+// Usage:
+// - Create MediaSourceSplitter by passing in a real mediaSource from which
+// multiple duplicate channels are needed.
+// - Create a client using createClient() and use it as any other mediaSource.
+//
+// Note that multiple clients can be created using createClient() and
+// started/stopped in any order. MediaSourceSplitter stops the real source only
+// when all clients have been stopped.
+//
+// If a new client is created/started after some existing clients have already
+// started, the new client will start getting its read frames from the current
+// time.
+
+#ifndef MEDIA_SOURCE_SPLITTER_H_
+
+#define MEDIA_SOURCE_SPLITTER_H_
+
+#include <media/stagefright/MediaSource.h>
+#include <utils/threads.h>
+#include <utils/Vector.h>
+#include <utils/RefBase.h>
+
+namespace android {
+
+class MediaBuffer;
+class MetaData;
+
+class MediaSourceSplitter : public RefBase {
+public:
+    // Constructor
+    // mediaSource: The real mediaSource. The class keeps a reference to it to
+    // implement the various clients.
+    MediaSourceSplitter(sp<MediaSource> mediaSource);
+
+    ~MediaSourceSplitter();
+
+    // Creates a new client of base type MediaSource. Multiple clients can be
+    // created which get their data through the same real mediaSource. These
+    // clients can then be used like any other MediaSource, all of which provide
+    // data from the same real source.
+    sp<MediaSource> createClient();
+
+private:
+    // Total number of clients created through createClient().
+    int32_t mNumberOfClients;
+
+    // reference to the real MediaSource passed to the constructor.
+    sp<MediaSource> mSource;
+
+    // Stores pointer to the MediaBuffer read from the real MediaSource.
+    // All clients use this to implement the read() call.
+    MediaBuffer *mLastReadMediaBuffer;
+
+    // Status code for read from the real MediaSource. All clients return
+    // this for their read().
+    status_t mLastReadStatus;
+
+    // Boolean telling whether the real MediaSource has started.
+    bool mSourceStarted;
+
+    // List of booleans, one for each client, storing whether the corresponding
+    // client's start() has been called.
+    Vector<bool> mClientsStarted;
+
+    // Stores the number of clients which are currently started.
+    int32_t mNumberOfClientsStarted;
+
+    // Since different clients call read() asynchronously, we need to keep track
+    // of what data is currently read into the mLastReadMediaBuffer.
+    // mCurrentReadBit stores the bit for the current read buffer. This bit
+    // flips each time a new buffer is read from the source.
+    // mClientsDesiredReadBit stores the bit for the next desired read buffer
+    // for each client. This bit flips each time read() is completed for this
+    // client.
+    bool mCurrentReadBit;
+    Vector<bool> mClientsDesiredReadBit;
+
+    // Number of clients whose current read has been completed.
+    int32_t mNumberOfCurrentReads;
+
+    // Boolean telling whether the last read has been completed for all clients.
+    // The variable is reset to false each time a buffer is read from the real
+    // source.
+    bool mLastReadCompleted;
+
+    // A global mutex for access to critical sections.
+    Mutex mLock;
+
+    // Condition variable for waiting on read from source to complete.
+    Condition mReadFromSourceCondition;
+
+    // Condition variable for waiting on all client's last read to complete.
+    Condition mAllReadsCompleteCondition;
+
+    // Functions used by Client to implement the MediaSource interface.
+
+    // If the real source has not been started yet by any client, starts it.
+    status_t start(int clientId, MetaData *params);
+
+    // Stops the real source after all clients have called stop().
+    status_t stop(int clientId);
+
+    // returns the real source's getFormat().
+    sp<MetaData> getFormat(int clientId);
+
+    // If the client's desired buffer has already been read into
+    // mLastReadMediaBuffer, points the returned buffer to it. Otherwise, if
+    // this is the master client, reads a new buffer from the source; else
+    // waits for the master client to read the buffer and then uses that.
+    status_t read(int clientId,
+            MediaBuffer **buffer, const MediaSource::ReadOptions *options = NULL);
+
+    // Not implemented right now.
+    status_t pause(int clientId);
+
+    // Function which reads a buffer from the real source into
+    // mLastReadMediaBuffer
+    void readFromSource_lock(const MediaSource::ReadOptions *options);
+
+    // Waits until read from the real source has been completed.
+    // _lock means that the function should be called when the thread has already
+    // obtained the lock for the mutex mLock.
+    void waitForReadFromSource_lock(int32_t clientId);
+
+    // Waits until all clients have finished reading the current buffer, as
+    // indicated by mLastReadCompleted.
+    void waitForAllClientsLastRead_lock(int32_t clientId);
+
+    // Each client calls this after it completes its read(). Once all clients
+    // have called this for the current buffer, the function calls
+    // mAllReadsCompleteCondition.broadcast() to signal the waiting clients.
+    void signalReadComplete_lock(bool readAborted);
+
+    // Make these constructors private.
+    MediaSourceSplitter();
+    MediaSourceSplitter(const MediaSourceSplitter &);
+    MediaSourceSplitter &operator=(const MediaSourceSplitter &);
+
+    // This class implements the MediaSource interface. Each client stores a
+    // reference to the parent MediaSourceSplitter and uses it to complete the
+    // various calls.
+    class Client : public MediaSource {
+    public:
+        // Constructor stores a reference to the parent MediaSourceSplitter and
+        // its client id.
+        Client(sp<MediaSourceSplitter> splitter, int32_t clientId);
+
+        // MediaSource interface
+        virtual status_t start(MetaData *params = NULL);
+
+        virtual status_t stop();
+
+        virtual sp<MetaData> getFormat();
+
+        virtual status_t read(
+                MediaBuffer **buffer, const ReadOptions *options = NULL);
+
+        virtual status_t pause();
+
+    private:
+        // Reference to the parent MediaSourceSplitter
+        sp<MediaSourceSplitter> mSplitter;
+
+        // Id of this client.
+        int32_t mClientId;
+    };
+
+    friend class Client;
+};
+
+}  // namespace android
+
+#endif  // MEDIA_SOURCE_SPLITTER_H_
diff --git a/include/media/stagefright/MetaData.h b/include/media/stagefright/MetaData.h
index ea2fa52..5170a2c 100644
--- a/include/media/stagefright/MetaData.h
+++ b/include/media/stagefright/MetaData.h
@@ -32,12 +32,17 @@
     kKeyMIMEType          = 'mime',  // cstring
     kKeyWidth             = 'widt',  // int32_t
     kKeyHeight            = 'heig',  // int32_t
+
+    // a rectangle, if absent assumed to be (0, 0, width - 1, height - 1)
+    kKeyCropRect          = 'crop',
+
     kKeyRotation          = 'rotA',  // int32_t (angle in degrees)
     kKeyIFramesInterval   = 'ifiv',  // int32_t
     kKeyStride            = 'strd',  // int32_t
     kKeySliceHeight       = 'slht',  // int32_t
     kKeyChannelCount      = '#chn',  // int32_t
-    kKeySampleRate        = 'srte',  // int32_t (also video frame rate)
+    kKeySampleRate        = 'srte',  // int32_t (audio sampling rate Hz)
+    kKeyFrameRate         = 'frmR',  // int32_t (video frame rate fps)
     kKeyBitRate           = 'brte',  // int32_t (bps)
     kKeyESDS              = 'esds',  // raw data
     kKeyAVCC              = 'avcc',  // raw data
@@ -94,7 +99,6 @@
     // Track authoring progress status
     // kKeyTrackTimeStatus is used to track progress in elapsed time
     kKeyTrackTimeStatus   = 'tktm',  // int64_t
-    kKeyRotationDegree    = 'rdge',  // int32_t (clockwise, in degree)
 
     kKeyNotRealTime       = 'ntrt',  // bool (int32_t)
 
@@ -104,6 +108,9 @@
     kKeyValidSamples      = 'valD',  // int32_t
 
     kKeyIsUnreadable      = 'unre',  // bool (int32_t)
+
+    // An indication that a video buffer has been rendered.
+    kKeyRendered          = 'rend',  // bool (int32_t)
 };
 
 enum {
@@ -123,6 +130,7 @@
         TYPE_INT64    = 'in64',
         TYPE_FLOAT    = 'floa',
         TYPE_POINTER  = 'ptr ',
+        TYPE_RECT     = 'rect',
     };
 
     void clear();
@@ -134,12 +142,22 @@
     bool setFloat(uint32_t key, float value);
     bool setPointer(uint32_t key, void *value);
 
+    bool setRect(
+            uint32_t key,
+            int32_t left, int32_t top,
+            int32_t right, int32_t bottom);
+
     bool findCString(uint32_t key, const char **value);
     bool findInt32(uint32_t key, int32_t *value);
     bool findInt64(uint32_t key, int64_t *value);
     bool findFloat(uint32_t key, float *value);
     bool findPointer(uint32_t key, void **value);
 
+    bool findRect(
+            uint32_t key,
+            int32_t *left, int32_t *top,
+            int32_t *right, int32_t *bottom);
+
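+    // A minimal usage sketch (hypothetical caller code; "meta", "width" and
+    // "height" are placeholders provided by the caller):
+    //   meta->setRect(kKeyCropRect, 0, 0, width - 1, height - 1);
+    //   int32_t left, top, right, bottom;
+    //   meta->findRect(kKeyCropRect, &left, &top, &right, &bottom);
+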
     bool setData(uint32_t key, uint32_t type, const void *data, size_t size);
 
     bool findData(uint32_t key, uint32_t *type,
@@ -185,6 +203,10 @@
         }
     };
 
+    struct Rect {
+        int32_t mLeft, mTop, mRight, mBottom;
+    };
+
     KeyedVector<uint32_t, typed_data> mItems;
 
     // MetaData &operator=(const MetaData &);
diff --git a/include/media/stagefright/OMXCodec.h b/include/media/stagefright/OMXCodec.h
index fed6761..f8daa4f 100644
--- a/include/media/stagefright/OMXCodec.h
+++ b/include/media/stagefright/OMXCodec.h
@@ -18,6 +18,7 @@
 
 #define OMX_CODEC_H_
 
+#include <android/native_window.h>
 #include <media/IOMX.h>
 #include <media/stagefright/MediaBuffer.h>
 #include <media/stagefright/MediaSource.h>
@@ -38,13 +39,22 @@
         // The client wants to access the output buffer's video
         // data for example for thumbnail extraction.
         kClientNeedsFramebuffer  = 4,
+
+        // Request for software or hardware codecs. If the request
+        // cannot be fulfilled, Create() returns NULL.
+        kSoftwareCodecsOnly      = 8,
+        kHardwareCodecsOnly      = 16,
+
+        // Store meta data in video buffers
+        kStoreMetaDataInVideoBuffers = 32,
     };
     static sp<MediaSource> Create(
             const sp<IOMX> &omx,
             const sp<MetaData> &meta, bool createEncoder,
             const sp<MediaSource> &source,
             const char *matchComponentName = NULL,
-            uint32_t flags = 0);
+            uint32_t flags = 0,
+            const sp<ANativeWindow> &nativeWindow = NULL);
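+
+    // A minimal usage sketch (hypothetical caller code; "omx", "meta", "source"
+    // and "nativeWindow" are placeholders provided by the caller):
+    //   sp<MediaSource> decoder = OMXCodec::Create(
+    //           omx, meta, false /* createEncoder */, source,
+    //           NULL /* matchComponentName */, kHardwareCodecsOnly,
+    //           nativeWindow);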
 
     static void setComponentRole(
             const sp<IOMX> &omx, IOMX::node_id node, bool isEncoder,
@@ -60,8 +70,6 @@
 
     virtual status_t pause();
 
-    void on_message(const omx_message &msg);
-
     // from MediaBufferObserver
     virtual void signalBufferReturned(MediaBuffer *buffer);
 
@@ -69,6 +77,13 @@
     virtual ~OMXCodec();
 
 private:
+
+    // Make sure mLock is accessible to OMXCodecObserver
+    friend class OMXCodecObserver;
+
+    // Call this with mLock held
+    void on_message(const omx_message &msg);
+
     enum State {
         DEAD,
         LOADED,
@@ -109,12 +124,18 @@
         kAvoidMemcopyInputRecordingFrames     = 2048,
         kRequiresLargerEncoderOutputBuffer    = 4096,
         kOutputBuffersAreUnreadable           = 8192,
-        kStoreMetaDataInInputVideoBuffers     = 16384,
+    };
+
+    enum BufferStatus {
+        OWNED_BY_US,
+        OWNED_BY_COMPONENT,
+        OWNED_BY_NATIVE_WINDOW,
+        OWNED_BY_CLIENT,
     };
 
     struct BufferInfo {
         IOMX::buffer_id mBuffer;
-        bool mOwnedByComponent;
+        BufferStatus mStatus;
         sp<IMemory> mMem;
         size_t mSize;
         void *mData;
@@ -151,7 +172,6 @@
     int64_t mSeekTimeUs;
     ReadOptions::SeekMode mSeekMode;
     int64_t mTargetTimeUs;
-    int64_t mSkipTimeUs;
 
     MediaBuffer *mLeftOverBuffer;
 
@@ -160,13 +180,23 @@
 
     bool mPaused;
 
+    sp<ANativeWindow> mNativeWindow;
+
+    // The index in each of the mPortBuffers arrays of the buffer that will be
+    // submitted to OMX next.  This only applies when using buffers from a
+    // native window.
+    size_t mNextNativeBufferIndex[2];
+
     // A list of indices into mPortStatus[kPortIndexOutput] filled with data.
     List<size_t> mFilledBuffers;
     Condition mBufferFilled;
 
+    bool mIsMetaDataStoredInVideoBuffers;
+
     OMXCodec(const sp<IOMX> &omx, IOMX::node_id node, uint32_t quirks,
              bool isEncoder, const char *mime, const char *componentName,
-             const sp<MediaSource> &source);
+             const sp<MediaSource> &source,
+             const sp<ANativeWindow> &nativeWindow);
 
     void addCodecSpecificData(const void *data, size_t size);
     void clearCodecSpecificData();
@@ -217,13 +247,20 @@
 
     status_t allocateBuffers();
     status_t allocateBuffersOnPort(OMX_U32 portIndex);
+    status_t allocateOutputBuffersFromNativeWindow();
+
+    status_t queueBufferToNativeWindow(BufferInfo *info);
+    status_t cancelBufferToNativeWindow(BufferInfo *info);
+    BufferInfo* dequeueBufferFromNativeWindow();
 
     status_t freeBuffersOnPort(
             OMX_U32 portIndex, bool onlyThoseWeOwn = false);
 
-    void drainInputBuffer(IOMX::buffer_id buffer);
+    status_t freeBuffer(OMX_U32 portIndex, size_t bufIndex);
+
+    bool drainInputBuffer(IOMX::buffer_id buffer);
     void fillOutputBuffer(IOMX::buffer_id buffer);
-    void drainInputBuffer(BufferInfo *info);
+    bool drainInputBuffer(BufferInfo *info);
     void fillOutputBuffer(BufferInfo *info);
 
     void drainInputBuffers();
@@ -251,6 +288,7 @@
 
     status_t init();
     void initOutputFormat(const sp<MetaData> &inputFormat);
+    status_t initNativeWindow();
 
     void dumpPortStatus(OMX_U32 portIndex);
 
@@ -265,6 +303,8 @@
             uint32_t flags,
             Vector<String8> *matchingCodecs);
 
+    void restorePatchedDataPointer(BufferInfo *info);
+
     OMXCodec(const OMXCodec &);
     OMXCodec &operator=(const OMXCodec &);
 };
@@ -277,6 +317,7 @@
 struct CodecCapabilities {
     String8 mComponentName;
     Vector<CodecProfileLevel> mProfileLevels;
+    Vector<OMX_U32> mColorFormats;
 };
 
 // Return a vector of componentNames with supported profile/level pairs
diff --git a/include/media/stagefright/VideoRenderer.h b/include/media/stagefright/VideoRenderer.h
deleted file mode 100644
index f80b277..0000000
--- a/include/media/stagefright/VideoRenderer.h
+++ /dev/null
@@ -1,41 +0,0 @@
-/*
- * Copyright (C) 2009 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef VIDEO_RENDERER_H_
-
-#define VIDEO_RENDERER_H_
-
-#include <sys/types.h>
-
-namespace android {
-
-class VideoRenderer {
-public:
-    virtual ~VideoRenderer() {}
-
-    virtual void render(
-            const void *data, size_t size, void *platformPrivate) = 0;
-
-protected:
-    VideoRenderer() {}
-
-    VideoRenderer(const VideoRenderer &);
-    VideoRenderer &operator=(const VideoRenderer &);
-};
-
-}  // namespace android
-
-#endif  // VIDEO_RENDERER_H_
diff --git a/include/media/stagefright/VideoSourceDownSampler.h b/include/media/stagefright/VideoSourceDownSampler.h
new file mode 100644
index 0000000..439918c
--- /dev/null
+++ b/include/media/stagefright/VideoSourceDownSampler.h
@@ -0,0 +1,97 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// VideoSourceDownSampler implements the MediaSource interface,
+// downsampling frames provided from a real video source.
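+//
+// A minimal usage sketch (hypothetical caller code; "cameraSource" is assumed
+// to be an existing video MediaSource):
+//
+//   sp<MediaSource> downSampled =
+//       new VideoSourceDownSampler(cameraSource, 320, 240);
+//   downSampled->start();
+//   MediaBuffer *frame;
+//   status_t err = downSampled->read(&frame);  // frames are now 320x240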
+
+#ifndef VIDEO_SOURCE_DOWN_SAMPLER_H_
+
+#define VIDEO_SOURCE_DOWN_SAMPLER_H_
+
+#include <media/stagefright/MediaSource.h>
+#include <utils/RefBase.h>
+
+namespace android {
+
+class IMemory;
+class MediaBuffer;
+class MetaData;
+
+class VideoSourceDownSampler : public MediaSource {
+public:
+    virtual ~VideoSourceDownSampler();
+
+    // Constructor:
+    // videoSource: The real video source which provides the original frames.
+    // width, height: The desired width, height. These should be less than or equal
+    // to those of the real video source. We then downsample the original frames to
+    // this size.
+    VideoSourceDownSampler(const sp<MediaSource> &videoSource,
+        int32_t width, int32_t height);
+
+    // MediaSource interface
+    virtual status_t start(MetaData *params = NULL);
+
+    virtual status_t stop();
+
+    virtual sp<MetaData> getFormat();
+
+    virtual status_t read(
+            MediaBuffer **buffer, const ReadOptions *options = NULL);
+
+    virtual status_t pause();
+
+private:
+    // Reference to the real video source.
+    sp<MediaSource> mRealVideoSource;
+
+    // Size of frames to be provided by this source.
+    int32_t mWidth;
+    int32_t mHeight;
+
+    // Size of frames provided by the real source.
+    int32_t mRealSourceWidth;
+    int32_t mRealSourceHeight;
+
+    // Downsampling parameters.
+    int32_t mDownSampleOffsetX;
+    int32_t mDownSampleOffsetY;
+    int32_t mDownSampleSkipX;
+    int32_t mDownSampleSkipY;
+
+    // True if we need to crop the still video image to get the video frame.
+    bool mNeedDownSampling;
+
+    // Meta data. This is a copy of the real source's meta data, except for
+    // the width and height parameters.
+    sp<MetaData> mMeta;
+
+    // Computes the offset, skip parameters for downsampling the original frame
+    // to the desired size.
+    void computeDownSamplingParameters();
+
+    // Downsamples the frame in sourceBuffer to size (mWidth x mHeight). A new
+    // buffer is created which stores the downsampled image.
+    void downSampleYUVImage(const MediaBuffer &sourceBuffer, MediaBuffer **buffer) const;
+
+    // Disallow these.
+    VideoSourceDownSampler(const VideoSourceDownSampler &);
+    VideoSourceDownSampler &operator=(const VideoSourceDownSampler &);
+};
+
+}  // namespace android
+
+#endif  // VIDEO_SOURCE_DOWN_SAMPLER_H_
diff --git a/include/media/stagefright/YUVCanvas.h b/include/media/stagefright/YUVCanvas.h
new file mode 100644
index 0000000..ff70923
--- /dev/null
+++ b/include/media/stagefright/YUVCanvas.h
@@ -0,0 +1,79 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// YUVCanvas holds a reference to a YUVImage on which it can do various
+// drawing operations. It provides various utility functions for filling,
+// cropping, etc.
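+//
+// A minimal usage sketch (hypothetical caller code, not part of this header):
+//
+//   YUVImage image(YUVImage::YUV420Planar, 176, 144);
+//   YUVCanvas canvas(image);
+//   canvas.FillYUV(16, 128, 128);  // fill the whole image with a single color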
+
+
+#ifndef YUV_CANVAS_H_
+
+#define YUV_CANVAS_H_
+
+#include <stdint.h>
+
+namespace android {
+
+class YUVImage;
+class Rect;
+
+class YUVCanvas {
+public:
+
+    // Constructor takes a reference to a yuvImage on which it can do
+    // various drawing operations.
+    YUVCanvas(YUVImage &yuvImage);
+    ~YUVCanvas();
+
+    // Fills the entire image with the given YUV values.
+    void FillYUV(uint8_t yValue, uint8_t uValue, uint8_t vValue);
+
+    // Fills the rectangular region given by rect with the given YUV values.
+    void FillYUVRectangle(const Rect& rect,
+            uint8_t yValue, uint8_t uValue, uint8_t vValue);
+
+    // Copies the region given by srcRect from srcImage into the
+    // canvas' target image (mYUVImage) starting at
+    // (destStartX, destStartY).
+    // Note that undefined behavior may occur if srcImage is the same as the
+    // canvas' target image.
+    void CopyImageRect(
+            const Rect& srcRect,
+            int32_t destStartX, int32_t destStartY,
+            const YUVImage &srcImage);
+
+    // Downsamples the srcImage into the canvas' target image (mYUVImage)
+    // The downsampling copies pixels from the source image starting at
+    // (srcOffsetX, srcOffsetY) to the target image, starting at (0, 0).
+    // For each X increment in the target image, skipX pixels are skipped
+    // in the source image.
+    // Similarly for each Y increment in the target image, skipY pixels
+    // are skipped in the source image.
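+    //
+    // For example (hypothetical numbers, assuming each target increment
+    // advances skipX/skipY pixels in the source): skipX = skipY = 2 maps a
+    // 640x480 source onto a 320x240 target.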
+    void downsample(
+            int32_t srcOffsetX, int32_t srcOffsetY,
+            int32_t skipX, int32_t skipY,
+            const YUVImage &srcImage);
+
+private:
+    YUVImage& mYUVImage;
+
+    YUVCanvas(const YUVCanvas &);
+    YUVCanvas &operator=(const YUVCanvas &);
+};
+
+}  // namespace android
+
+#endif  // YUV_CANVAS_H_
diff --git a/include/media/stagefright/YUVImage.h b/include/media/stagefright/YUVImage.h
new file mode 100644
index 0000000..4e98618
--- /dev/null
+++ b/include/media/stagefright/YUVImage.h
@@ -0,0 +1,178 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// A container class to hold YUV data and provide various utilities,
+// e.g. to set/get pixel values.
+// Supported formats:
+//  - YUV420 Planar
+//  - YUV420 Semi Planar
+//
+//  Currently does not support variable strides.
+//
+//  Implementation: Two simple abstractions simplify access to the YUV
+//  channels for different formats:
+//  - initializeYUVPointers() sets up pointers (mYdata, mUdata, mVdata) to
+//  point to the right start locations of the different channel data depending
+//  on the format.
+//  - getOffsets() returns the correct offset for the different channels
+//  depending on the format.
+//  Location of any pixel's YUV channels can then be easily computed using these.
+//
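+// A minimal usage sketch (hypothetical caller code, not part of this header):
+//
+//   YUVImage image(YUVImage::YUV420Planar, 176, 144);
+//   image.setPixelValue(10, 20, 128, 64, 192);  // y, u, v values
+//   uint8_t y, u, v;
+//   if (image.getPixelValue(10, 20, &y, &u, &v)) {
+//       // y, u, v now hold the channel values at pixel (10, 20)
+//   }
+//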
+
+#ifndef YUV_IMAGE_H_
+
+#define YUV_IMAGE_H_
+
+#include <stdint.h>
+#include <cstring>
+
+namespace android {
+
+class Rect;
+
+class YUVImage {
+public:
+    // Supported YUV formats
+    enum YUVFormat {
+        YUV420Planar,
+        YUV420SemiPlanar
+    };
+
+    // Constructs an image with the given size, format. Also allocates and owns
+    // the required memory.
+    YUVImage(YUVFormat yuvFormat, int32_t width, int32_t height);
+
+    // Constructs an image with the given size, format. The memory is provided
+    // by the caller and we don't own it.
+    YUVImage(YUVFormat yuvFormat, int32_t width, int32_t height, uint8_t *buffer);
+
+    // Destructor to delete the memory if it owns it.
+    ~YUVImage();
+
+    // Returns the size of the buffer required to store the YUV data for the given
+    // format and geometry. Useful when the caller wants to allocate the requisite
+    // memory.
+    static size_t bufferSize(YUVFormat yuvFormat, int32_t width, int32_t height);
+
+    int32_t width() const {return mWidth;}
+    int32_t height() const {return mHeight;}
+
+    // Returns true if the pixel is in the range [0, width-1] x [0, height-1]
+    // and false otherwise.
+    bool validPixel(int32_t x, int32_t y) const;
+
+    // Get the pixel YUV value at pixel (x,y).
+    // Note that the range of x is [0, width-1] and the range of y is [0, height-1].
+    // Returns true if get was successful and false otherwise.
+    bool getPixelValue(int32_t x, int32_t y,
+            uint8_t *yPtr, uint8_t *uPtr, uint8_t *vPtr) const;
+
+    // Set the pixel YUV value at pixel (x,y).
+    // Note that the range of x is [0, width-1] and the range of y is [0, height-1].
+    // Returns true if set was successful and false otherwise.
+    bool setPixelValue(int32_t x, int32_t y,
+            uint8_t yValue, uint8_t uValue, uint8_t vValue);
+
+    // Uses memcpy to copy an entire row of data
+    static void fastCopyRectangle420Planar(
+            const Rect& srcRect,
+            int32_t destStartX, int32_t destStartY,
+            const YUVImage &srcImage, YUVImage &destImage);
+
+    // Uses memcpy to copy an entire row of data
+    static void fastCopyRectangle420SemiPlanar(
+            const Rect& srcRect,
+            int32_t destStartX, int32_t destStartY,
+            const YUVImage &srcImage, YUVImage &destImage);
+
+    // Tries to use memcpy to copy entire rows of data.
+    // Returns false if fast copy is not possible for the passed image formats.
+    static bool fastCopyRectangle(
+            const Rect& srcRect,
+            int32_t destStartX, int32_t destStartY,
+            const YUVImage &srcImage, YUVImage &destImage);
+
+    // Convert the given YUV value to RGB.
+    void yuv2rgb(uint8_t yValue, uint8_t uValue, uint8_t vValue,
+        uint8_t *r, uint8_t *g, uint8_t *b) const;
+
+    // Write the image to a human readable PPM file.
+    // Returns true if the write was successful and false otherwise.
+    bool writeToPPM(const char *filename) const;
+
+private:
+    // YUV Format of the image.
+    YUVFormat mYUVFormat;
+
+    int32_t mWidth;
+    int32_t mHeight;
+
+    // Pointer to the memory buffer.
+    uint8_t *mBuffer;
+
+    // Boolean telling whether we own the memory buffer.
+    bool mOwnBuffer;
+
+    // Pointer to start of the Y data plane.
+    uint8_t *mYdata;
+
+    // Pointer to start of the U data plane. Note that in case of interleaved formats like
+    // YUV420 semiplanar, mUdata points to the start of the U data in the UV plane.
+    uint8_t *mUdata;
+
+    // Pointer to start of the V data plane. Note that in case of interleaved formats like
+    // YUV420 semiplanar, mVdata points to the start of the V data in the UV plane.
+    uint8_t *mVdata;
+
+    // Initialize the pointers mYdata, mUdata, mVdata to point to the right locations for
+    // the given format and geometry.
+    // Returns true if initialization was successful and false otherwise.
+    bool initializeYUVPointers();
+
+    // For the given pixel location, this returns the offset of the location of y, u and v
+    // data from the corresponding base pointers -- mYdata, mUdata, mVdata.
+    // Note that the range of x is [0, width-1] and the range of y is [0, height-1].
+    // Returns true if getting offsets was successful and false otherwise.
+    bool getOffsets(int32_t x, int32_t y,
+        int32_t *yOffset, int32_t *uOffset, int32_t *vOffset) const;
+
+    // Returns the offset increments incurred in going from one data row to the next data row
+    // for the YUV channels. Note that this corresponds to data rows and not pixel rows.
+    // E.g. depending on formats, U/V channels may have only one data row corresponding
+    // to two pixel rows.
+    bool getOffsetIncrementsPerDataRow(
+        int32_t *yDataOffsetIncrement,
+        int32_t *uDataOffsetIncrement,
+        int32_t *vDataOffsetIncrement) const;
+
+    // Given the offset return the address of the corresponding channel's data.
+    uint8_t* getYAddress(int32_t offset) const;
+    uint8_t* getUAddress(int32_t offset) const;
+    uint8_t* getVAddress(int32_t offset) const;
+
+    // Given the pixel location, returns the address of the corresponding channel's data.
+    // Note that the range of x is [0, width-1] and the range of y is [0, height-1].
+    bool getYUVAddresses(int32_t x, int32_t y,
+        uint8_t **yAddr, uint8_t **uAddr, uint8_t **vAddr) const;
+
+    // Disallow implicit casting and copying.
+    YUVImage(const YUVImage &);
+    YUVImage &operator=(const YUVImage &);
+};
+
+}  // namespace android
+
+#endif  // YUV_IMAGE_H_
diff --git a/include/media/stagefright/foundation/ADebug.h b/include/media/stagefright/foundation/ADebug.h
index 69021d8..eb5e494 100644
--- a/include/media/stagefright/foundation/ADebug.h
+++ b/include/media/stagefright/foundation/ADebug.h
@@ -32,6 +32,7 @@
 #define CHECK(condition)                                \
     LOG_ALWAYS_FATAL_IF(                                \
             !(condition),                               \
+            "%s",                                       \
             __FILE__ ":" LITERAL_TO_STRING(__LINE__)    \
             " CHECK(" #condition ") failed.")
 
@@ -58,10 +59,12 @@
     do {                                                                \
         AString ___res = Compare_##suffix(x, y);                        \
         if (!___res.empty()) {                                          \
-            LOG_ALWAYS_FATAL(                                           \
-                    __FILE__ ":" LITERAL_TO_STRING(__LINE__)            \
-                    " CHECK_" #suffix "( " #x "," #y ") failed: %s",    \
-                    ___res.c_str());                                    \
+            AString ___full =                                           \
+                __FILE__ ":" LITERAL_TO_STRING(__LINE__)                \
+                    " CHECK_" #suffix "( " #x "," #y ") failed: ";      \
+            ___full.append(___res);                                     \
+                                                                        \
+            LOG_ALWAYS_FATAL("%s", ___full.c_str());                    \
         }                                                               \
     } while (false)
 
diff --git a/include/media/stagefright/foundation/AHierarchicalStateMachine.h b/include/media/stagefright/foundation/AHierarchicalStateMachine.h
new file mode 100644
index 0000000..b5786fb
--- /dev/null
+++ b/include/media/stagefright/foundation/AHierarchicalStateMachine.h
@@ -0,0 +1,49 @@
+#ifndef A_HIERARCHICAL_STATE_MACHINE_H_
+
+#define A_HIERARCHICAL_STATE_MACHINE_H_
+
+#include <media/stagefright/foundation/AHandler.h>
+
+namespace android {
+
+struct AState : public RefBase {
+    AState(const sp<AState> &parentState = NULL);
+
+    sp<AState> parentState();
+
+protected:
+    virtual ~AState();
+
+    virtual void stateEntered();
+    virtual void stateExited();
+
+    virtual bool onMessageReceived(const sp<AMessage> &msg) = 0;
+
+private:
+    friend struct AHierarchicalStateMachine;
+
+    sp<AState> mParentState;
+
+    DISALLOW_EVIL_CONSTRUCTORS(AState);
+};
+
+struct AHierarchicalStateMachine : public AHandler {
+    AHierarchicalStateMachine();
+
+protected:
+    virtual ~AHierarchicalStateMachine();
+
+    virtual void onMessageReceived(const sp<AMessage> &msg);
+
+    // Only to be called in response to a message.
+    void changeState(const sp<AState> &state);
+
+private:
+    sp<AState> mState;
+
+    DISALLOW_EVIL_CONSTRUCTORS(AHierarchicalStateMachine);
+};
+
+}  // namespace android
+
+#endif  // A_HIERARCHICAL_STATE_MACHINE_H_
diff --git a/include/media/stagefright/foundation/AMessage.h b/include/media/stagefright/foundation/AMessage.h
index c674cba..72dc730 100644
--- a/include/media/stagefright/foundation/AMessage.h
+++ b/include/media/stagefright/foundation/AMessage.h
@@ -26,16 +26,22 @@
 namespace android {
 
 struct AString;
+struct Parcel;
 
 struct AMessage : public RefBase {
     AMessage(uint32_t what = 0, ALooper::handler_id target = 0);
 
+    static sp<AMessage> FromParcel(const Parcel &parcel);
+    void writeToParcel(Parcel *parcel) const;
+
     void setWhat(uint32_t what);
     uint32_t what() const;
 
     void setTarget(ALooper::handler_id target);
     ALooper::handler_id target() const;
 
+    void clear();
+
     void setInt32(const char *name, int32_t value);
     void setInt64(const char *name, int64_t value);
     void setSize(const char *name, size_t value);
@@ -46,6 +52,10 @@
     void setObject(const char *name, const sp<RefBase> &obj);
     void setMessage(const char *name, const sp<AMessage> &obj);
 
+    void setRect(
+            const char *name,
+            int32_t left, int32_t top, int32_t right, int32_t bottom);
+
     bool findInt32(const char *name, int32_t *value) const;
     bool findInt64(const char *name, int64_t *value) const;
     bool findSize(const char *name, size_t *value) const;
@@ -56,8 +66,15 @@
     bool findObject(const char *name, sp<RefBase> *obj) const;
     bool findMessage(const char *name, sp<AMessage> *obj) const;
 
+    bool findRect(
+            const char *name,
+            int32_t *left, int32_t *top, int32_t *right, int32_t *bottom) const;
+
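+    // A minimal usage sketch (hypothetical caller code; "msg" and the key name
+    // "crop" are placeholders):
+    //   msg->setRect("crop", 0, 0, width - 1, height - 1);
+    //   int32_t left, top, right, bottom;
+    //   msg->findRect("crop", &left, &top, &right, &bottom);
+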
     void post(int64_t delayUs = 0);
 
+    // Performs a deep-copy of "this", contained messages are in turn "dup'ed".
+    // Warning: RefBase items, i.e. "objects" are _not_ copied but only have
+    // their refcount incremented.
     sp<AMessage> dup() const;
 
     AString debugString(int32_t indent = 0) const;
@@ -76,11 +93,16 @@
         kTypeString,
         kTypeObject,
         kTypeMessage,
+        kTypeRect,
     };
 
     uint32_t mWhat;
     ALooper::handler_id mTarget;
 
+    struct Rect {
+        int32_t mLeft, mTop, mRight, mBottom;
+    };
+
     struct Item {
         union {
             int32_t int32Value;
@@ -91,6 +113,7 @@
             void *ptrValue;
             RefBase *refValue;
             AString *stringValue;
+            Rect rectValue;
         } u;
         const char *mName;
         Type mType;
@@ -102,7 +125,6 @@
     Item mItems[kMaxNumItems];
     size_t mNumItems;
 
-    void clear();
     Item *allocateItem(const char *name);
     void freeItem(Item *item);
     const Item *findItem(const char *name, Type type) const;
diff --git a/include/private/surfaceflinger/SharedBufferStack.h b/include/private/surfaceflinger/SharedBufferStack.h
index d6ae5e9..9d589cf 100644
--- a/include/private/surfaceflinger/SharedBufferStack.h
+++ b/include/private/surfaceflinger/SharedBufferStack.h
@@ -285,6 +285,8 @@
     uint32_t getTransform(int buffer) const;
 
     status_t resize(int newNumBuffers);
+    status_t grow(int newNumBuffers);
+    status_t shrink(int newNumBuffers);
 
     SharedBufferStack::Statistics getStats() const;
     
@@ -346,6 +348,14 @@
     int mNumBuffers;
     BufferList mBufferList;
 
+    struct BuffersAvailableCondition : public ConditionBase {
+        int mNumBuffers;
+        inline BuffersAvailableCondition(SharedBufferServer* sbs,
+                int numBuffers);
+        inline bool operator()() const;
+        inline const char* name() const { return "BuffersAvailableCondition"; }
+    };
+
     struct UnlockUpdate : public UpdateBase {
         const int lockedBuffer;
         inline UnlockUpdate(SharedBufferBase* sbb, int lockedBuffer);
diff --git a/include/private/ui/sw_gralloc_handle.h b/include/private/ui/sw_gralloc_handle.h
deleted file mode 100644
index b3d333e..0000000
--- a/include/private/ui/sw_gralloc_handle.h
+++ /dev/null
@@ -1,83 +0,0 @@
-/*
- * Copyright (C) 2008 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANDROID_UI_PRIVATE_SW_GRALLOC_HANDLE_H
-#define ANDROID_UI_PRIVATE_SW_GRALLOC_HANDLE_H
-
-#include <stdint.h>
-#include <limits.h>
-#include <sys/cdefs.h>
-#include <hardware/gralloc.h>
-#include <errno.h>
-
-#include <cutils/native_handle.h>
-
-namespace android {
-
-/*****************************************************************************/
-
-struct sw_gralloc_handle_t : public native_handle 
-{
-    // file-descriptors
-    int     fd;
-    // ints
-    int     magic;
-    int     size;
-    int     base;
-    int     prot;
-    int     pid;
-
-    static const int sNumInts = 5;
-    static const int sNumFds = 1;
-    static const int sMagic = '_sgh';
-
-    sw_gralloc_handle_t() :
-        fd(-1), magic(sMagic), size(0), base(0), prot(0), pid(getpid())
-    {
-        version = sizeof(native_handle);
-        numInts = sNumInts;
-        numFds = sNumFds;
-    }
-    ~sw_gralloc_handle_t() {
-        magic = 0;
-    }
-
-    static int validate(const native_handle* h) {
-        const sw_gralloc_handle_t* hnd = (const sw_gralloc_handle_t*)h;
-        if (!h || h->version != sizeof(native_handle) ||
-                h->numInts != sNumInts || h->numFds != sNumFds ||
-                hnd->magic != sMagic) 
-        {
-            return -EINVAL;
-        }
-        return 0;
-    }
-
-    static status_t alloc(uint32_t w, uint32_t h, int format,
-            int usage, buffer_handle_t* handle, int32_t* stride);
-    static status_t free(sw_gralloc_handle_t* hnd);
-    static status_t registerBuffer(sw_gralloc_handle_t* hnd);
-    static status_t unregisterBuffer(sw_gralloc_handle_t* hnd);
-    static status_t lock(sw_gralloc_handle_t* hnd, int usage,
-            int l, int t, int w, int h, void** vaddr);
-    static status_t unlock(sw_gralloc_handle_t* hnd);
-};
-
-/*****************************************************************************/
-
-}; // namespace android
-
-#endif /* ANDROID_UI_PRIVATE_SW_GRALLOC_HANDLE_H */
diff --git a/media/libeffects/visualizer/Android.mk b/media/libeffects/visualizer/Android.mk
index 48b45ff..e6ff654 100644
--- a/media/libeffects/visualizer/Android.mk
+++ b/media/libeffects/visualizer/Android.mk
@@ -27,4 +27,4 @@
 
 LOCAL_PRELINK_MODULE := false
 
-include $(BUILD_SHARED_LIBRARY)
\ No newline at end of file
+include $(BUILD_SHARED_LIBRARY)
diff --git a/media/libmedia/Android.mk b/media/libmedia/Android.mk
index 2e5cbe3..74fb531 100644
--- a/media/libmedia/Android.mk
+++ b/media/libmedia/Android.mk
@@ -15,6 +15,7 @@
     IMediaRecorderClient.cpp \
     IMediaPlayer.cpp \
     IMediaRecorder.cpp \
+    IStreamSource.cpp \
     Metadata.cpp \
     mediarecorder.cpp \
     IMediaMetadataRetriever.cpp \
@@ -35,7 +36,8 @@
     fixedfft.cpp.arm
 
 LOCAL_SHARED_LIBRARIES := \
-	libui libcutils libutils libbinder libsonivox libicuuc libexpat libsurfaceflinger_client libcamera_client
+	libui libcutils libutils libbinder libsonivox libicuuc libexpat \
+        libsurfaceflinger_client libcamera_client libstagefright_foundation
 
 LOCAL_MODULE:= libmedia
 
diff --git a/media/libmedia/AudioEffect.cpp b/media/libmedia/AudioEffect.cpp
index 88b8c86..aadeba5 100644
--- a/media/libmedia/AudioEffect.cpp
+++ b/media/libmedia/AudioEffect.cpp
@@ -27,7 +27,6 @@
 #include <media/AudioEffect.h>
 
 #include <utils/Log.h>
-#include <cutils/atomic.h>
 #include <binder/IPCThreadState.h>
 
 
@@ -207,18 +206,22 @@
         return INVALID_OPERATION;
     }
 
-    if (enabled) {
-        LOGV("enable %p", this);
-        if (android_atomic_or(1, &mEnabled) == 0) {
-           return mIEffect->enable();
+    status_t status = NO_ERROR;
+
+    AutoMutex lock(mLock);
+    if (enabled != mEnabled) {
+        if (enabled) {
+            LOGV("enable %p", this);
+            status = mIEffect->enable();
+        } else {
+            LOGV("disable %p", this);
+            status = mIEffect->disable();
         }
-    } else {
-        LOGV("disable %p", this);
-        if (android_atomic_and(~1, &mEnabled) == 1) {
-           return mIEffect->disable();
+        if (status == NO_ERROR) {
+            mEnabled = enabled;
         }
     }
-    return NO_ERROR;
+    return status;
 }
 
 status_t AudioEffect::command(uint32_t cmdCode,
@@ -232,26 +235,26 @@
         return INVALID_OPERATION;
     }
 
-    if ((cmdCode == EFFECT_CMD_ENABLE || cmdCode == EFFECT_CMD_DISABLE) &&
-            (replySize == NULL || *replySize != sizeof(status_t) || replyData == NULL)) {
-        return BAD_VALUE;
+    if (cmdCode == EFFECT_CMD_ENABLE || cmdCode == EFFECT_CMD_DISABLE) {
+        if (mEnabled == (cmdCode == EFFECT_CMD_ENABLE)) {
+            return NO_ERROR;
+        }
+        if (replySize == NULL || *replySize != sizeof(status_t) || replyData == NULL) {
+            return BAD_VALUE;
+        }
+        mLock.lock();
     }
 
     status_t status = mIEffect->command(cmdCode, cmdSize, cmdData, replySize, replyData);
-    if (status != NO_ERROR) {
-        return status;
-    }
 
     if (cmdCode == EFFECT_CMD_ENABLE || cmdCode == EFFECT_CMD_DISABLE) {
-        status = *(status_t *)replyData;
-        if (status != NO_ERROR) {
-            return status;
+        if (status == NO_ERROR) {
+            status = *(status_t *)replyData;
         }
-        if (cmdCode == EFFECT_CMD_ENABLE) {
-            android_atomic_or(1, &mEnabled);
-        } else {
-            android_atomic_and(~1, &mEnabled);
+        if (status == NO_ERROR) {
+            mEnabled = (cmdCode == EFFECT_CMD_ENABLE);
         }
+        mLock.unlock();
     }
 
     return status;
@@ -370,11 +373,7 @@
 {
     LOGV("enableStatusChanged %p enabled %d mCbf %p", this, enabled, mCbf);
     if (mStatus == ALREADY_EXISTS) {
-        if (enabled) {
-            android_atomic_or(1, &mEnabled);
-        } else {
-            android_atomic_and(~1, &mEnabled);
-        }
+        mEnabled = enabled;
         if (mCbf) {
             mCbf(EVENT_ENABLE_STATUS_CHANGED, mUserData, &enabled);
         }
diff --git a/media/libmedia/AudioRecord.cpp b/media/libmedia/AudioRecord.cpp
index a6c515c..1d6ffa0 100644
--- a/media/libmedia/AudioRecord.cpp
+++ b/media/libmedia/AudioRecord.cpp
@@ -35,7 +35,6 @@
 #include <binder/Parcel.h>
 #include <binder/IPCThreadState.h>
 #include <utils/Timers.h>
-#include <cutils/atomic.h>
 
 #define LIKELY( exp )       (__builtin_expect( (exp) != 0, true  ))
 #define UNLIKELY( exp )     (__builtin_expect( (exp) != 0, false ))
@@ -282,7 +281,9 @@
         t->mLock.lock();
      }
 
-    if (android_atomic_or(1, &mActive) == 0) {
+    AutoMutex lock(mLock);
+    if (mActive == 0) {
+        mActive = 1;
         ret = mAudioRecord->start();
         if (ret == DEAD_OBJECT) {
             LOGV("start() dead IAudioRecord: creating a new one");
@@ -302,8 +303,7 @@
                 setpriority(PRIO_PROCESS, 0, THREAD_PRIORITY_AUDIO_CLIENT);
             }
         } else {
-            LOGV("start() failed");
-            android_atomic_and(~1, &mActive);
+            mActive = 0;
         }
     }
 
@@ -322,9 +322,11 @@
 
     if (t != 0) {
         t->mLock.lock();
-     }
+    }
 
-    if (android_atomic_and(~1, &mActive) == 1) {
+    AutoMutex lock(mLock);
+    if (mActive == 1) {
+        mActive = 0;
         mCblk->cv.signal();
         mAudioRecord->stop();
         // the record head position will reset to 0, so if a marker is set, we need
diff --git a/media/libmedia/AudioSystem.cpp b/media/libmedia/AudioSystem.cpp
index 9c2a8ba..1a3fcd6 100644
--- a/media/libmedia/AudioSystem.cpp
+++ b/media/libmedia/AudioSystem.cpp
@@ -835,6 +835,7 @@
 const char *AudioParameter::keyFormat = "format";
 const char *AudioParameter::keyChannels = "channels";
 const char *AudioParameter::keyFrameCount = "frame_count";
+const char *AudioParameter::keyInputSource = "input_source";
 
 AudioParameter::AudioParameter(const String8& keyValuePairs)
 {
diff --git a/media/libmedia/AudioTrack.cpp b/media/libmedia/AudioTrack.cpp
index 587c8ff..c1bed59 100644
--- a/media/libmedia/AudioTrack.cpp
+++ b/media/libmedia/AudioTrack.cpp
@@ -35,7 +35,6 @@
 #include <binder/Parcel.h>
 #include <binder/IPCThreadState.h>
 #include <utils/Timers.h>
-#include <cutils/atomic.h>
 
 #define LIKELY( exp )       (__builtin_expect( (exp) != 0, true  ))
 #define UNLIKELY( exp )     (__builtin_expect( (exp) != 0, false ))
@@ -312,7 +311,9 @@
         t->mLock.lock();
      }
 
-    if (android_atomic_or(1, &mActive) == 0) {
+    AutoMutex lock(mLock);
+    if (mActive == 0) {
+        mActive = 1;
         mNewPosition = mCblk->server + mUpdatePeriod;
         mCblk->bufferTimeoutMs = MAX_STARTUP_TIMEOUT_MS;
         mCblk->waitTimeMs = 0;
@@ -344,7 +345,7 @@
         }
         if (status != NO_ERROR) {
             LOGV("start() failed");
-            android_atomic_and(~1, &mActive);
+            mActive = 0;
             if (t != 0) {
                 t->requestExit();
             } else {
@@ -367,7 +368,9 @@
         t->mLock.lock();
     }
 
-    if (android_atomic_and(~1, &mActive) == 1) {
+    AutoMutex lock(mLock);
+    if (mActive == 1) {
+        mActive = 0;
         mCblk->cv.signal();
         mAudioTrack->stop();
         // Cancel loops (If we are in the middle of a loop, playback
@@ -407,7 +410,6 @@
     mMarkerReached = false;
     mUpdatePeriod = 0;
 
-
     if (!mActive) {
         mAudioTrack->flush();
         // Release AudioTrack callback thread in case it was waiting for new buffers
@@ -419,7 +421,9 @@
 void AudioTrack::pause()
 {
     LOGV("pause");
-    if (android_atomic_and(~1, &mActive) == 1) {
+    AutoMutex lock(mLock);
+    if (mActive == 1) {
+        mActive = 0;
         mAudioTrack->pause();
     }
 }
diff --git a/media/libmedia/IMediaMetadataRetriever.cpp b/media/libmedia/IMediaMetadataRetriever.cpp
index e529d25..0193e25 100644
--- a/media/libmedia/IMediaMetadataRetriever.cpp
+++ b/media/libmedia/IMediaMetadataRetriever.cpp
@@ -82,8 +82,7 @@
     SET_DATA_SOURCE_URL,
     SET_DATA_SOURCE_FD,
     SET_MODE,
-    GET_MODE,
-    CAPTURE_FRAME,
+    GET_FRAME_AT_TIME,
     EXTRACT_ALBUM_ART,
     EXTRACT_METADATA,
 };
@@ -133,23 +132,17 @@
         return reply.readInt32();
     }
 
-    status_t getMode(int* mode) const
+    sp<IMemory> getFrameAtTime(int64_t timeUs, int option)
     {
+        LOGV("getTimeAtTime: time(%lld us) and option(%d)", timeUs, option);
         Parcel data, reply;
         data.writeInterfaceToken(IMediaMetadataRetriever::getInterfaceDescriptor());
-        remote()->transact(GET_MODE, data, &reply);
-        *mode = reply.readInt32();
-        return reply.readInt32();
-    }
-
-    sp<IMemory> captureFrame()
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IMediaMetadataRetriever::getInterfaceDescriptor());
+        data.writeInt64(timeUs);
+        data.writeInt32(option);
 #ifndef DISABLE_GROUP_SCHEDULE_HACK
         sendSchedPolicy(data);
 #endif
-        remote()->transact(CAPTURE_FRAME, data, &reply);
+        remote()->transact(GET_FRAME_AT_TIME, data, &reply);
         status_t ret = reply.readInt32();
         if (ret != NO_ERROR) {
             return NULL;
@@ -222,20 +215,15 @@
             reply->writeInt32(setMode(mode));
             return NO_ERROR;
         } break;
-        case GET_MODE: {
+        case GET_FRAME_AT_TIME: {
             CHECK_INTERFACE(IMediaMetadataRetriever, data, reply);
-            int mode;
-            status_t status = getMode(&mode);
-            reply->writeInt32(mode);
-            reply->writeInt32(status);
-            return NO_ERROR;
-        } break;
-        case CAPTURE_FRAME: {
-            CHECK_INTERFACE(IMediaMetadataRetriever, data, reply);
+            int64_t timeUs = data.readInt64();
+            int option = data.readInt32();
+            LOGV("getTimeAtTime: time(%lld us) and option(%d)", timeUs, option);
 #ifndef DISABLE_GROUP_SCHEDULE_HACK
             setSchedPolicy(data);
 #endif
-            sp<IMemory> bitmap = captureFrame();
+            sp<IMemory> bitmap = getFrameAtTime(timeUs, option);
             if (bitmap != 0) {  // Don't send NULL across the binder interface
                 reply->writeInt32(NO_ERROR);
                 reply->writeStrongBinder(bitmap->asBinder());
diff --git a/media/libmedia/IMediaPlayer.cpp b/media/libmedia/IMediaPlayer.cpp
index 0f55b19..c287c0a 100644
--- a/media/libmedia/IMediaPlayer.cpp
+++ b/media/libmedia/IMediaPlayer.cpp
@@ -22,6 +22,7 @@
 
 #include <media/IMediaPlayer.h>
 #include <surfaceflinger/ISurface.h>
+#include <surfaceflinger/Surface.h>
 
 namespace android {
 
@@ -43,8 +44,6 @@
     INVOKE,
     SET_METADATA_FILTER,
     GET_METADATA,
-    SUSPEND,
-    RESUME,
     SET_AUX_EFFECT_SEND_LEVEL,
     ATTACH_AUX_EFFECT
 };
@@ -65,11 +64,11 @@
         remote()->transact(DISCONNECT, data, &reply);
     }
 
-    status_t setVideoSurface(const sp<ISurface>& surface)
+    status_t setVideoSurface(const sp<Surface>& surface)
     {
         Parcel data, reply;
         data.writeInterfaceToken(IMediaPlayer::getInterfaceDescriptor());
-        data.writeStrongBinder(surface->asBinder());
+        Surface::writeToParcel(surface, &data);
         remote()->transact(SET_VIDEO_SURFACE, data, &reply);
         return reply.readInt32();
     }
@@ -204,26 +203,6 @@
         return reply->readInt32();
     }
 
-    status_t suspend() {
-        Parcel request;
-        request.writeInterfaceToken(IMediaPlayer::getInterfaceDescriptor());
-
-        Parcel reply;
-        remote()->transact(SUSPEND, request, &reply);
-
-        return reply.readInt32();
-    }
-
-    status_t resume() {
-        Parcel request;
-        request.writeInterfaceToken(IMediaPlayer::getInterfaceDescriptor());
-
-        Parcel reply;
-        remote()->transact(RESUME, request, &reply);
-
-        return reply.readInt32();
-    }
-
     status_t setAuxEffectSendLevel(float level)
     {
         Parcel data, reply;
@@ -258,7 +237,7 @@
         } break;
         case SET_VIDEO_SURFACE: {
             CHECK_INTERFACE(IMediaPlayer, data, reply);
-            sp<ISurface> surface = interface_cast<ISurface>(data.readStrongBinder());
+            sp<Surface> surface = Surface::readFromParcel(data);
             reply->writeInt32(setVideoSurface(surface));
             return NO_ERROR;
         } break;
@@ -341,16 +320,6 @@
             reply->writeInt32(setMetadataFilter(data));
             return NO_ERROR;
         } break;
-        case SUSPEND: {
-            CHECK_INTERFACE(IMediaPlayer, data, reply);
-            reply->writeInt32(suspend());
-            return NO_ERROR;
-        } break;
-        case RESUME: {
-            CHECK_INTERFACE(IMediaPlayer, data, reply);
-            reply->writeInt32(resume());
-            return NO_ERROR;
-        } break;
         case GET_METADATA: {
             CHECK_INTERFACE(IMediaPlayer, data, reply);
             const status_t retcode = getMetadata(data.readInt32(), data.readInt32(), reply);
diff --git a/media/libmedia/IMediaPlayerService.cpp b/media/libmedia/IMediaPlayerService.cpp
index 4abfa75..77199e1 100644
--- a/media/libmedia/IMediaPlayerService.cpp
+++ b/media/libmedia/IMediaPlayerService.cpp
@@ -23,6 +23,7 @@
 #include <media/IMediaPlayerService.h>
 #include <media/IMediaRecorder.h>
 #include <media/IOMX.h>
+#include <media/IStreamSource.h>
 
 #include <utils/Errors.h>  // for status_t
 
@@ -31,6 +32,7 @@
 enum {
     CREATE_URL = IBinder::FIRST_CALL_TRANSACTION,
     CREATE_FD,
+    CREATE_STREAM,
     DECODE_URL,
     DECODE_FD,
     CREATE_MEDIA_RECORDER,
@@ -107,6 +109,21 @@
         return interface_cast<IMediaPlayer>(reply.readStrongBinder());;
     }
 
+    virtual sp<IMediaPlayer> create(
+            pid_t pid, const sp<IMediaPlayerClient> &client,
+            const sp<IStreamSource> &source, int audioSessionId) {
+        Parcel data, reply;
+        data.writeInterfaceToken(IMediaPlayerService::getInterfaceDescriptor());
+        data.writeInt32(static_cast<int32_t>(pid));
+        data.writeStrongBinder(client->asBinder());
+        data.writeStrongBinder(source->asBinder());
+        data.writeInt32(static_cast<int32_t>(audioSessionId));
+
+        remote()->transact(CREATE_STREAM, data, &reply);
+
+        return interface_cast<IMediaPlayer>(reply.readStrongBinder());
+    }
+
     virtual sp<IMemory> decode(const char* url, uint32_t *pSampleRate, int* pNumChannels, int* pFormat)
     {
         Parcel data, reply;
@@ -184,6 +201,27 @@
             reply->writeStrongBinder(player->asBinder());
             return NO_ERROR;
         } break;
+        case CREATE_STREAM:
+        {
+            CHECK_INTERFACE(IMediaPlayerService, data, reply);
+
+            pid_t pid = static_cast<pid_t>(data.readInt32());
+
+            sp<IMediaPlayerClient> client =
+                interface_cast<IMediaPlayerClient>(data.readStrongBinder());
+
+            sp<IStreamSource> source =
+                interface_cast<IStreamSource>(data.readStrongBinder());
+
+            int audioSessionId = static_cast<int>(data.readInt32());
+
+            sp<IMediaPlayer> player =
+                create(pid, client, source, audioSessionId);
+
+            reply->writeStrongBinder(player->asBinder());
+            return OK;
+            break;
+        }
         case DECODE_URL: {
             CHECK_INTERFACE(IMediaPlayerService, data, reply);
             const char* url = data.readCString();
diff --git a/media/libmedia/IMediaRecorder.cpp b/media/libmedia/IMediaRecorder.cpp
index 947ff34..59cd1b7 100644
--- a/media/libmedia/IMediaRecorder.cpp
+++ b/media/libmedia/IMediaRecorder.cpp
@@ -19,7 +19,7 @@
 #define LOG_TAG "IMediaRecorder"
 #include <utils/Log.h>
 #include <binder/Parcel.h>
-#include <surfaceflinger/ISurface.h>
+#include <surfaceflinger/Surface.h>
 #include <camera/ICamera.h>
 #include <media/IMediaRecorderClient.h>
 #include <media/IMediaRecorder.h>
@@ -43,6 +43,7 @@
     SET_AUDIO_ENCODER,
     SET_OUTPUT_FILE_PATH,
     SET_OUTPUT_FILE_FD,
+    SET_OUTPUT_FILE_AUXILIARY_FD,
     SET_VIDEO_SIZE,
     SET_VIDEO_FRAMERATE,
     SET_PARAMETERS,
@@ -69,12 +70,12 @@
         return reply.readInt32();
     }
 
-    status_t setPreviewSurface(const sp<ISurface>& surface)
+    status_t setPreviewSurface(const sp<Surface>& surface)
     {
         LOGV("setPreviewSurface(%p)", surface.get());
         Parcel data, reply;
         data.writeInterfaceToken(IMediaRecorder::getInterfaceDescriptor());
-        data.writeStrongBinder(surface->asBinder());
+        Surface::writeToParcel(surface, &data);
         remote()->transact(SET_PREVIEW_SURFACE, data, &reply);
         return reply.readInt32();
     }
@@ -159,6 +160,15 @@
         return reply.readInt32();
     }
 
+    status_t setOutputFileAuxiliary(int fd) {
+        LOGV("setOutputFileAuxiliary(%d)", fd);
+        Parcel data, reply;
+        data.writeInterfaceToken(IMediaRecorder::getInterfaceDescriptor());
+        data.writeFileDescriptor(fd);
+        remote()->transact(SET_OUTPUT_FILE_AUXILIARY_FD, data, &reply);
+        return reply.readInt32();
+    }
+
     status_t setVideoSize(int width, int height)
     {
         LOGV("setVideoSize(%dx%d)", width, height);
@@ -377,6 +387,13 @@
             ::close(fd);
             return NO_ERROR;
         } break;
+        case SET_OUTPUT_FILE_AUXILIARY_FD: {
+            LOGV("SET_OUTPUT_FILE_AUXILIARY_FD");
+            CHECK_INTERFACE(IMediaRecorder, data, reply);
+            int fd = dup(data.readFileDescriptor());
+            reply->writeInt32(setOutputFileAuxiliary(fd));
+            return NO_ERROR;
+        } break;
         case SET_VIDEO_SIZE: {
             LOGV("SET_VIDEO_SIZE");
             CHECK_INTERFACE(IMediaRecorder, data, reply);
@@ -409,7 +426,7 @@
         case SET_PREVIEW_SURFACE: {
             LOGV("SET_PREVIEW_SURFACE");
             CHECK_INTERFACE(IMediaRecorder, data, reply);
-            sp<ISurface> surface = interface_cast<ISurface>(data.readStrongBinder());
+            sp<Surface> surface = Surface::readFromParcel(data);
             reply->writeInt32(setPreviewSurface(surface));
             return NO_ERROR;
         } break;
diff --git a/media/libmedia/IOMX.cpp b/media/libmedia/IOMX.cpp
index ae6c2bf..9ce6738 100644
--- a/media/libmedia/IOMX.cpp
+++ b/media/libmedia/IOMX.cpp
@@ -21,59 +21,19 @@
     SET_PARAMETER,
     GET_CONFIG,
     SET_CONFIG,
+    ENABLE_GRAPHIC_BUFFERS,
     USE_BUFFER,
+    USE_GRAPHIC_BUFFER,
+    STORE_META_DATA_IN_BUFFERS,
     ALLOC_BUFFER,
     ALLOC_BUFFER_WITH_BACKUP,
     FREE_BUFFER,
     FILL_BUFFER,
     EMPTY_BUFFER,
     GET_EXTENSION_INDEX,
-    CREATE_RENDERER,
     OBSERVER_ON_MSG,
-    RENDERER_RENDER,
 };
 
-sp<IOMXRenderer> IOMX::createRenderer(
-        const sp<Surface> &surface,
-        const char *componentName,
-        OMX_COLOR_FORMATTYPE colorFormat,
-        size_t encodedWidth, size_t encodedHeight,
-        size_t displayWidth, size_t displayHeight,
-        int32_t rotationDegrees) {
-    return createRenderer(
-            surface->getISurface(),
-            componentName, colorFormat, encodedWidth, encodedHeight,
-            displayWidth, displayHeight,
-            rotationDegrees);
-}
-
-sp<IOMXRenderer> IOMX::createRendererFromJavaSurface(
-        JNIEnv *env, jobject javaSurface,
-        const char *componentName,
-        OMX_COLOR_FORMATTYPE colorFormat,
-        size_t encodedWidth, size_t encodedHeight,
-        size_t displayWidth, size_t displayHeight,
-        int32_t rotationDegrees) {
-    jclass surfaceClass = env->FindClass("android/view/Surface");
-    if (surfaceClass == NULL) {
-        LOGE("Can't find android/view/Surface");
-        return NULL;
-    }
-
-    jfieldID surfaceID = env->GetFieldID(surfaceClass, ANDROID_VIEW_SURFACE_JNI_ID, "I");
-    if (surfaceID == NULL) {
-        LOGE("Can't find Surface.mSurface");
-        return NULL;
-    }
-
-    sp<Surface> surface = (Surface *)env->GetIntField(javaSurface, surfaceID);
-
-    return createRenderer(
-            surface, componentName, colorFormat, encodedWidth,
-            encodedHeight, displayWidth, displayHeight,
-            rotationDegrees);
-}
-
 class BpOMX : public BpInterface<IOMX> {
 public:
     BpOMX(const sp<IBinder> &impl)
@@ -220,6 +180,19 @@
         return reply.readInt32();
     }
 
+    virtual status_t enableGraphicBuffers(
+            node_id node, OMX_U32 port_index, OMX_BOOL enable) {
+        Parcel data, reply;
+        data.writeInterfaceToken(IOMX::getInterfaceDescriptor());
+        data.writeIntPtr((intptr_t)node);
+        data.writeInt32(port_index);
+        data.writeInt32((uint32_t)enable);
+        remote()->transact(ENABLE_GRAPHIC_BUFFERS, data, &reply);
+
+        status_t err = reply.readInt32();
+        return err;
+    }
+
     virtual status_t useBuffer(
             node_id node, OMX_U32 port_index, const sp<IMemory> &params,
             buffer_id *buffer) {
@@ -242,6 +215,42 @@
         return err;
     }
 
+
+    virtual status_t useGraphicBuffer(
+            node_id node, OMX_U32 port_index,
+            const sp<GraphicBuffer> &graphicBuffer, buffer_id *buffer) {
+        Parcel data, reply;
+        data.writeInterfaceToken(IOMX::getInterfaceDescriptor());
+        data.writeIntPtr((intptr_t)node);
+        data.writeInt32(port_index);
+        data.write(*graphicBuffer);
+        remote()->transact(USE_GRAPHIC_BUFFER, data, &reply);
+
+        status_t err = reply.readInt32();
+        if (err != OK) {
+            *buffer = 0;
+
+            return err;
+        }
+
+        *buffer = (void*)reply.readIntPtr();
+
+        return err;
+    }
+
+    virtual status_t storeMetaDataInBuffers(
+            node_id node, OMX_U32 port_index, OMX_BOOL enable) {
+        Parcel data, reply;
+        data.writeInterfaceToken(IOMX::getInterfaceDescriptor());
+        data.writeIntPtr((intptr_t)node);
+        data.writeInt32(port_index);
+        data.writeInt32((uint32_t)enable);
+        remote()->transact(STORE_META_DATA_IN_BUFFERS, data, &reply);
+
+        status_t err = reply.readInt32();
+        return err;
+    }
+
     virtual status_t allocateBuffer(
             node_id node, OMX_U32 port_index, size_t size,
             buffer_id *buffer, void **buffer_data) {
@@ -347,30 +356,6 @@
 
         return err;
     }
-
-    virtual sp<IOMXRenderer> createRenderer(
-            const sp<ISurface> &surface,
-            const char *componentName,
-            OMX_COLOR_FORMATTYPE colorFormat,
-            size_t encodedWidth, size_t encodedHeight,
-            size_t displayWidth, size_t displayHeight,
-            int32_t rotationDegrees) {
-        Parcel data, reply;
-        data.writeInterfaceToken(IOMX::getInterfaceDescriptor());
-
-        data.writeStrongBinder(surface->asBinder());
-        data.writeCString(componentName);
-        data.writeInt32(colorFormat);
-        data.writeInt32(encodedWidth);
-        data.writeInt32(encodedHeight);
-        data.writeInt32(displayWidth);
-        data.writeInt32(displayHeight);
-        data.writeInt32(rotationDegrees);
-
-        remote()->transact(CREATE_RENDERER, data, &reply);
-
-        return interface_cast<IOMXRenderer>(reply.readStrongBinder());
-    }
 };
 
 IMPLEMENT_META_INTERFACE(OMX, "android.hardware.IOMX");
@@ -547,6 +532,20 @@
             return NO_ERROR;
         }
 
+        case ENABLE_GRAPHIC_BUFFERS:
+        {
+            CHECK_INTERFACE(IOMX, data, reply);
+
+            node_id node = (void*)data.readIntPtr();
+            OMX_U32 port_index = data.readInt32();
+            OMX_BOOL enable = (OMX_BOOL)data.readInt32();
+
+            status_t err = enableGraphicBuffers(node, port_index, enable);
+            reply->writeInt32(err);
+
+            return NO_ERROR;
+        }
+
         case USE_BUFFER:
         {
             CHECK_INTERFACE(IOMX, data, reply);
@@ -567,6 +566,41 @@
             return NO_ERROR;
         }
 
+        case USE_GRAPHIC_BUFFER:
+        {
+            CHECK_INTERFACE(IOMX, data, reply);
+
+            node_id node = (void*)data.readIntPtr();
+            OMX_U32 port_index = data.readInt32();
+            sp<GraphicBuffer> graphicBuffer = new GraphicBuffer();
+            data.read(*graphicBuffer);
+
+            buffer_id buffer;
+            status_t err = useGraphicBuffer(
+                    node, port_index, graphicBuffer, &buffer);
+            reply->writeInt32(err);
+
+            if (err == OK) {
+                reply->writeIntPtr((intptr_t)buffer);
+            }
+
+            return NO_ERROR;
+        }
+
+        case STORE_META_DATA_IN_BUFFERS:
+        {
+            CHECK_INTERFACE(IOMX, data, reply);
+
+            node_id node = (void*)data.readIntPtr();
+            OMX_U32 port_index = data.readInt32();
+            OMX_BOOL enable = (OMX_BOOL)data.readInt32();
+
+            status_t err = storeMetaDataInBuffers(node, port_index, enable);
+            reply->writeInt32(err);
+
+            return NO_ERROR;
+        }
+
         case ALLOC_BUFFER:
         {
             CHECK_INTERFACE(IOMX, data, reply);
@@ -672,35 +706,6 @@
             return OK;
         }
 
-        case CREATE_RENDERER:
-        {
-            CHECK_INTERFACE(IOMX, data, reply);
-
-            sp<ISurface> isurface =
-                interface_cast<ISurface>(data.readStrongBinder());
-
-            const char *componentName = data.readCString();
-
-            OMX_COLOR_FORMATTYPE colorFormat =
-                static_cast<OMX_COLOR_FORMATTYPE>(data.readInt32());
-
-            size_t encodedWidth = (size_t)data.readInt32();
-            size_t encodedHeight = (size_t)data.readInt32();
-            size_t displayWidth = (size_t)data.readInt32();
-            size_t displayHeight = (size_t)data.readInt32();
-            int32_t rotationDegrees = data.readInt32();
-
-            sp<IOMXRenderer> renderer =
-                createRenderer(isurface, componentName, colorFormat,
-                               encodedWidth, encodedHeight,
-                               displayWidth, displayHeight,
-                               rotationDegrees);
-
-            reply->writeStrongBinder(renderer->asBinder());
-
-            return OK;
-        }
-
         default:
             return BBinder::onTransact(code, data, reply, flags);
     }
@@ -746,44 +751,4 @@
     }
 }
 
-////////////////////////////////////////////////////////////////////////////////
-
-class BpOMXRenderer : public BpInterface<IOMXRenderer> {
-public:
-    BpOMXRenderer(const sp<IBinder> &impl)
-        : BpInterface<IOMXRenderer>(impl) {
-    }
-
-    virtual void render(IOMX::buffer_id buffer) {
-        Parcel data, reply;
-        data.writeInterfaceToken(IOMXRenderer::getInterfaceDescriptor());
-        data.writeIntPtr((intptr_t)buffer);
-
-        // NOTE: Do NOT make this a ONE_WAY call, it must be synchronous
-        // so that the caller knows when to recycle the buffer.
-        remote()->transact(RENDERER_RENDER, data, &reply);
-    }
-};
-
-IMPLEMENT_META_INTERFACE(OMXRenderer, "android.hardware.IOMXRenderer");
-
-status_t BnOMXRenderer::onTransact(
-    uint32_t code, const Parcel &data, Parcel *reply, uint32_t flags) {
-    switch (code) {
-        case RENDERER_RENDER:
-        {
-            CHECK_INTERFACE(IOMXRenderer, data, reply);
-
-            IOMX::buffer_id buffer = (void*)data.readIntPtr();
-
-            render(buffer);
-
-            return NO_ERROR;
-        }
-
-        default:
-            return BBinder::onTransact(code, data, reply, flags);
-    }
-}
-
 }  // namespace android
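
For reference, a hedged sketch of how a client might drive the two new IOMX calls, assuming an already-allocated OMX node and GraphicBuffer (everything outside the enableGraphicBuffers()/useGraphicBuffer() signatures shown above is illustrative):

    #include <media/IOMX.h>
    #include <ui/GraphicBuffer.h>

    using namespace android;

    // Enables graphic-buffer usage on a port, then registers one buffer,
    // mirroring the ENABLE_GRAPHIC_BUFFERS / USE_GRAPHIC_BUFFER transactions.
    static status_t registerOneGraphicBuffer(
            const sp<IOMX> &omx, IOMX::node_id node, OMX_U32 portIndex,
            const sp<GraphicBuffer> &gb, IOMX::buffer_id *outId) {
        status_t err = omx->enableGraphicBuffers(node, portIndex, OMX_TRUE);
        if (err != OK) {
            return err;
        }
        // The proxy flattens the GraphicBuffer into the Parcel; the remote
        // side reconstructs it and hands back the OMX buffer id on success.
        return omx->useGraphicBuffer(node, portIndex, gb, outId);
    }
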
diff --git a/media/libmedia/IStreamSource.cpp b/media/libmedia/IStreamSource.cpp
new file mode 100644
index 0000000..5069002
--- /dev/null
+++ b/media/libmedia/IStreamSource.cpp
@@ -0,0 +1,186 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "IStreamSource"
+#include <utils/Log.h>
+
+#include <media/IStreamSource.h>
+#include <media/stagefright/foundation/AMessage.h>
+
+#include <binder/IMemory.h>
+#include <binder/Parcel.h>
+
+namespace android {
+
+enum {
+    // IStreamSource
+    SET_LISTENER = IBinder::FIRST_CALL_TRANSACTION,
+    SET_BUFFERS,
+    ON_BUFFER_AVAILABLE,
+
+    // IStreamListener
+    QUEUE_BUFFER,
+    ISSUE_COMMAND,
+};
+
+struct BpStreamSource : public BpInterface<IStreamSource> {
+    BpStreamSource(const sp<IBinder> &impl)
+        : BpInterface<IStreamSource>(impl) {
+    }
+
+    virtual void setListener(const sp<IStreamListener> &listener) {
+        Parcel data, reply;
+        data.writeInterfaceToken(IStreamSource::getInterfaceDescriptor());
+        data.writeStrongBinder(listener->asBinder());
+        remote()->transact(SET_LISTENER, data, &reply);
+    }
+
+    virtual void setBuffers(const Vector<sp<IMemory> > &buffers) {
+        Parcel data, reply;
+        data.writeInterfaceToken(IStreamSource::getInterfaceDescriptor());
+        data.writeInt32(static_cast<int32_t>(buffers.size()));
+        for (size_t i = 0; i < buffers.size(); ++i) {
+            data.writeStrongBinder(buffers.itemAt(i)->asBinder());
+        }
+        remote()->transact(SET_BUFFERS, data, &reply);
+    }
+
+    virtual void onBufferAvailable(size_t index) {
+        Parcel data, reply;
+        data.writeInterfaceToken(IStreamSource::getInterfaceDescriptor());
+        data.writeInt32(static_cast<int32_t>(index));
+        remote()->transact(
+                ON_BUFFER_AVAILABLE, data, &reply, IBinder::FLAG_ONEWAY);
+    }
+};
+
+IMPLEMENT_META_INTERFACE(StreamSource, "android.hardware.IStreamSource");
+
+status_t BnStreamSource::onTransact(
+        uint32_t code, const Parcel &data, Parcel *reply, uint32_t flags) {
+    switch (code) {
+        case SET_LISTENER:
+        {
+            CHECK_INTERFACE(IStreamSource, data, reply);
+            setListener(
+                    interface_cast<IStreamListener>(data.readStrongBinder()));
+            break;
+        }
+
+        case SET_BUFFERS:
+        {
+            CHECK_INTERFACE(IStreamSource, data, reply);
+            size_t n = static_cast<size_t>(data.readInt32());
+            Vector<sp<IMemory> > buffers;
+            for (size_t i = 0; i < n; ++i) {
+                sp<IMemory> mem =
+                    interface_cast<IMemory>(data.readStrongBinder());
+
+                buffers.push(mem);
+            }
+            setBuffers(buffers);
+            break;
+        }
+
+        case ON_BUFFER_AVAILABLE:
+        {
+            CHECK_INTERFACE(IStreamSource, data, reply);
+            onBufferAvailable(static_cast<size_t>(data.readInt32()));
+            break;
+        }
+
+        default:
+            return BBinder::onTransact(code, data, reply, flags);
+    }
+
+    return OK;
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+struct BpStreamListener : public BpInterface<IStreamListener> {
+    BpStreamListener(const sp<IBinder> &impl)
+        : BpInterface<IStreamListener>(impl) {
+    }
+
+    virtual void queueBuffer(size_t index, size_t size) {
+        Parcel data, reply;
+        data.writeInterfaceToken(IStreamListener::getInterfaceDescriptor());
+        data.writeInt32(static_cast<int32_t>(index));
+        data.writeInt32(static_cast<int32_t>(size));
+
+        remote()->transact(QUEUE_BUFFER, data, &reply, IBinder::FLAG_ONEWAY);
+    }
+
+    virtual void issueCommand(
+            Command cmd, bool synchronous, const sp<AMessage> &msg) {
+        Parcel data, reply;
+        data.writeInterfaceToken(IStreamListener::getInterfaceDescriptor());
+        data.writeInt32(static_cast<int32_t>(cmd));
+        data.writeInt32(static_cast<int32_t>(synchronous));
+
+        if (msg != NULL) {
+            data.writeInt32(1);
+            msg->writeToParcel(&data);
+        } else {
+            data.writeInt32(0);
+        }
+
+        remote()->transact(ISSUE_COMMAND, data, &reply, IBinder::FLAG_ONEWAY);
+    }
+};
+
+IMPLEMENT_META_INTERFACE(StreamListener, "android.hardware.IStreamListener");
+
+status_t BnStreamListener::onTransact(
+        uint32_t code, const Parcel &data, Parcel *reply, uint32_t flags) {
+    switch (code) {
+        case QUEUE_BUFFER:
+        {
+            CHECK_INTERFACE(IStreamListener, data, reply);
+            size_t index = static_cast<size_t>(data.readInt32());
+            size_t size = static_cast<size_t>(data.readInt32());
+
+            queueBuffer(index, size);
+            break;
+        }
+
+        case ISSUE_COMMAND:
+        {
+            CHECK_INTERFACE(IStreamListener, data, reply);
+            Command cmd = static_cast<Command>(data.readInt32());
+
+            bool synchronous = static_cast<bool>(data.readInt32());
+
+            sp<AMessage> msg;
+
+            if (data.readInt32()) {
+                msg = AMessage::FromParcel(data);
+            }
+
+            issueCommand(cmd, synchronous, msg);
+            break;
+        }
+
+        default:
+            return BBinder::onTransact(code, data, reply, flags);
+    }
+
+    return OK;
+}
+
+}  // namespace android
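
For reference, a minimal listener built on the new interface, assuming queueBuffer() and issueCommand() (the two callbacks dispatched by BnStreamListener::onTransact above) are its only pure virtuals; the class name and logging are illustrative:

    #define LOG_TAG "LoggingStreamListener"
    #include <utils/Log.h>

    #include <media/IStreamSource.h>
    #include <media/stagefright/foundation/AMessage.h>

    using namespace android;

    // Logs filled-buffer notifications and out-of-band commands pushed by the
    // stream source over the one-way transactions defined above.
    struct LoggingStreamListener : public BnStreamListener {
        virtual void queueBuffer(size_t index, size_t size) {
            LOGI("buffer %d now holds %d bytes", (int)index, (int)size);
        }

        virtual void issueCommand(
                Command cmd, bool synchronous, const sp<AMessage> &msg) {
            LOGI("command %d (synchronous=%d, extra message %s)",
                 (int)cmd, synchronous, msg != NULL ? "present" : "absent");
        }
    };
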
diff --git a/media/libmedia/MediaProfiles.cpp b/media/libmedia/MediaProfiles.cpp
index 3869389..9ad63f0 100644
--- a/media/libmedia/MediaProfiles.cpp
+++ b/media/libmedia/MediaProfiles.cpp
@@ -59,8 +59,21 @@
 };
 
 const MediaProfiles::NameToTagMap MediaProfiles::sCamcorderQualityNameMap[] = {
+    {"low", CAMCORDER_QUALITY_LOW},
     {"high", CAMCORDER_QUALITY_HIGH},
-    {"low",  CAMCORDER_QUALITY_LOW}
+    {"qcif", CAMCORDER_QUALITY_QCIF},
+    {"cif", CAMCORDER_QUALITY_CIF},
+    {"480p", CAMCORDER_QUALITY_480P},
+    {"720p", CAMCORDER_QUALITY_720P},
+    {"1080p", CAMCORDER_QUALITY_1080P},
+
+    {"timelapselow",  CAMCORDER_QUALITY_TIME_LAPSE_LOW},
+    {"timelapsehigh", CAMCORDER_QUALITY_TIME_LAPSE_HIGH},
+    {"timelapseqcif", CAMCORDER_QUALITY_TIME_LAPSE_QCIF},
+    {"timelapsecif", CAMCORDER_QUALITY_TIME_LAPSE_CIF},
+    {"timelapse480p", CAMCORDER_QUALITY_TIME_LAPSE_480P},
+    {"timelapse720p", CAMCORDER_QUALITY_TIME_LAPSE_720P},
+    {"timelapse1080p", CAMCORDER_QUALITY_TIME_LAPSE_1080P}
 };
 
 /*static*/ void
@@ -411,16 +424,16 @@
 }
 
 /*static*/ MediaProfiles::CamcorderProfile*
-MediaProfiles::createDefaultCamcorderHighProfile()
+MediaProfiles::createDefaultCamcorderTimeLapseQcifProfile(camcorder_quality quality)
 {
     MediaProfiles::VideoCodec *videoCodec =
-        new MediaProfiles::VideoCodec(VIDEO_ENCODER_H263, 360000, 352, 288, 20);
+        new MediaProfiles::VideoCodec(VIDEO_ENCODER_H263, 1000000, 176, 144, 20);
 
     AudioCodec *audioCodec = new AudioCodec(AUDIO_ENCODER_AMR_NB, 12200, 8000, 1);
     CamcorderProfile *profile = new MediaProfiles::CamcorderProfile;
     profile->mCameraId = 0;
     profile->mFileFormat = OUTPUT_FORMAT_THREE_GPP;
-    profile->mQuality = CAMCORDER_QUALITY_HIGH;
+    profile->mQuality = quality;
     profile->mDuration = 60;
     profile->mVideoCodec = videoCodec;
     profile->mAudioCodec = audioCodec;
@@ -428,7 +441,40 @@
 }
 
 /*static*/ MediaProfiles::CamcorderProfile*
-MediaProfiles::createDefaultCamcorderLowProfile()
+MediaProfiles::createDefaultCamcorderTimeLapse480pProfile(camcorder_quality quality)
+{
+    MediaProfiles::VideoCodec *videoCodec =
+        new MediaProfiles::VideoCodec(VIDEO_ENCODER_H263, 20000000, 720, 480, 20);
+
+    AudioCodec *audioCodec = new AudioCodec(AUDIO_ENCODER_AMR_NB, 12200, 8000, 1);
+    CamcorderProfile *profile = new MediaProfiles::CamcorderProfile;
+    profile->mCameraId = 0;
+    profile->mFileFormat = OUTPUT_FORMAT_THREE_GPP;
+    profile->mQuality = quality;
+    profile->mDuration = 60;
+    profile->mVideoCodec = videoCodec;
+    profile->mAudioCodec = audioCodec;
+    return profile;
+}
+
+/*static*/ void
+MediaProfiles::createDefaultCamcorderTimeLapseLowProfiles(
+        MediaProfiles::CamcorderProfile **lowTimeLapseProfile,
+        MediaProfiles::CamcorderProfile **lowSpecificTimeLapseProfile) {
+    *lowTimeLapseProfile = createDefaultCamcorderTimeLapseQcifProfile(CAMCORDER_QUALITY_TIME_LAPSE_LOW);
+    *lowSpecificTimeLapseProfile = createDefaultCamcorderTimeLapseQcifProfile(CAMCORDER_QUALITY_TIME_LAPSE_QCIF);
+}
+
+/*static*/ void
+MediaProfiles::createDefaultCamcorderTimeLapseHighProfiles(
+        MediaProfiles::CamcorderProfile **highTimeLapseProfile,
+        MediaProfiles::CamcorderProfile **highSpecificTimeLapseProfile) {
+    *highTimeLapseProfile = createDefaultCamcorderTimeLapse480pProfile(CAMCORDER_QUALITY_TIME_LAPSE_HIGH);
+    *highSpecificTimeLapseProfile = createDefaultCamcorderTimeLapse480pProfile(CAMCORDER_QUALITY_TIME_LAPSE_480P);
+}
+
+/*static*/ MediaProfiles::CamcorderProfile*
+MediaProfiles::createDefaultCamcorderQcifProfile(camcorder_quality quality)
 {
     MediaProfiles::VideoCodec *videoCodec =
         new MediaProfiles::VideoCodec(VIDEO_ENCODER_H263, 192000, 176, 144, 20);
@@ -439,18 +485,72 @@
     MediaProfiles::CamcorderProfile *profile = new MediaProfiles::CamcorderProfile;
     profile->mCameraId = 0;
     profile->mFileFormat = OUTPUT_FORMAT_THREE_GPP;
-    profile->mQuality = CAMCORDER_QUALITY_LOW;
+    profile->mQuality = quality;
     profile->mDuration = 30;
     profile->mVideoCodec = videoCodec;
     profile->mAudioCodec = audioCodec;
     return profile;
 }
 
+/*static*/ MediaProfiles::CamcorderProfile*
+MediaProfiles::createDefaultCamcorderCifProfile(camcorder_quality quality)
+{
+    MediaProfiles::VideoCodec *videoCodec =
+        new MediaProfiles::VideoCodec(VIDEO_ENCODER_H263, 360000, 352, 288, 20);
+
+    AudioCodec *audioCodec = new AudioCodec(AUDIO_ENCODER_AMR_NB, 12200, 8000, 1);
+    CamcorderProfile *profile = new MediaProfiles::CamcorderProfile;
+    profile->mCameraId = 0;
+    profile->mFileFormat = OUTPUT_FORMAT_THREE_GPP;
+    profile->mQuality = quality;
+    profile->mDuration = 60;
+    profile->mVideoCodec = videoCodec;
+    profile->mAudioCodec = audioCodec;
+    return profile;
+}
+
+/*static*/ void
+MediaProfiles::createDefaultCamcorderLowProfiles(
+        MediaProfiles::CamcorderProfile **lowProfile,
+        MediaProfiles::CamcorderProfile **lowSpecificProfile) {
+    *lowProfile = createDefaultCamcorderQcifProfile(CAMCORDER_QUALITY_LOW);
+    *lowSpecificProfile = createDefaultCamcorderQcifProfile(CAMCORDER_QUALITY_QCIF);
+}
+
+/*static*/ void
+MediaProfiles::createDefaultCamcorderHighProfiles(
+        MediaProfiles::CamcorderProfile **highProfile,
+        MediaProfiles::CamcorderProfile **highSpecificProfile) {
+    *highProfile = createDefaultCamcorderCifProfile(CAMCORDER_QUALITY_HIGH);
+    *highSpecificProfile = createDefaultCamcorderCifProfile(CAMCORDER_QUALITY_CIF);
+}
+
 /*static*/ void
 MediaProfiles::createDefaultCamcorderProfiles(MediaProfiles *profiles)
 {
-    profiles->mCamcorderProfiles.add(createDefaultCamcorderHighProfile());
-    profiles->mCamcorderProfiles.add(createDefaultCamcorderLowProfile());
+    // low camcorder profiles.
+    MediaProfiles::CamcorderProfile *lowProfile, *lowSpecificProfile;
+    createDefaultCamcorderLowProfiles(&lowProfile, &lowSpecificProfile);
+    profiles->mCamcorderProfiles.add(lowProfile);
+    profiles->mCamcorderProfiles.add(lowSpecificProfile);
+
+    // high camcorder profiles.
+    MediaProfiles::CamcorderProfile *highProfile, *highSpecificProfile;
+    createDefaultCamcorderHighProfiles(&highProfile, &highSpecificProfile);
+    profiles->mCamcorderProfiles.add(highProfile);
+    profiles->mCamcorderProfiles.add(highSpecificProfile);
+
+    // low camcorder time lapse profiles.
+    MediaProfiles::CamcorderProfile *lowTimeLapseProfile, *lowSpecificTimeLapseProfile;
+    createDefaultCamcorderTimeLapseLowProfiles(&lowTimeLapseProfile, &lowSpecificTimeLapseProfile);
+    profiles->mCamcorderProfiles.add(lowTimeLapseProfile);
+    profiles->mCamcorderProfiles.add(lowSpecificTimeLapseProfile);
+
+    // high camcorder time lapse profiles.
+    MediaProfiles::CamcorderProfile *highTimeLapseProfile, *highSpecificTimeLapseProfile;
+    createDefaultCamcorderTimeLapseHighProfiles(&highTimeLapseProfile, &highSpecificTimeLapseProfile);
+    profiles->mCamcorderProfiles.add(highTimeLapseProfile);
+    profiles->mCamcorderProfiles.add(highSpecificTimeLapseProfile);
 }
 
 /*static*/ void
@@ -668,13 +768,8 @@
     return decoders;  // copy out
 }
 
-int MediaProfiles::getCamcorderProfileParamByName(const char *name,
-                                                  int cameraId,
-                                                  camcorder_quality quality) const
+int MediaProfiles::getCamcorderProfileIndex(int cameraId, camcorder_quality quality) const
 {
-    LOGV("getCamcorderProfileParamByName: %s for camera %d, quality %d",
-         name, cameraId, quality);
-
     int index = -1;
     for (size_t i = 0, n = mCamcorderProfiles.size(); i < n; ++i) {
         if (mCamcorderProfiles[i]->mCameraId == cameraId &&
@@ -683,6 +778,17 @@
             break;
         }
     }
+    return index;
+}
+
+int MediaProfiles::getCamcorderProfileParamByName(const char *name,
+                                                  int cameraId,
+                                                  camcorder_quality quality) const
+{
+    LOGV("getCamcorderProfileParamByName: %s for camera %d, quality %d",
+         name, cameraId, quality);
+
+    int index = getCamcorderProfileIndex(cameraId, quality);
     if (index == -1) {
         LOGE("The given camcorder profile camera %d quality %d is not found",
              cameraId, quality);
@@ -705,6 +811,11 @@
     return -1;
 }
 
+bool MediaProfiles::hasCamcorderProfile(int cameraId, camcorder_quality quality) const
+{
+    return (getCamcorderProfileIndex(cameraId, quality) != -1);
+}
+
 Vector<int> MediaProfiles::getImageEncodingQualityLevels(int cameraId) const
 {
     Vector<int> result;
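
For reference, the enlarged quality table pairs with the new hasCamcorderProfile() query: a caller can probe for a size-specific profile before falling back to the generic one. A small sketch (the 720p preference is only an example):

    #include <media/MediaProfiles.h>

    using namespace android;

    // Picks a size-specific quality if the device declares it, otherwise
    // falls back to the generic "high" profile. Quality names map to these
    // enum values through sCamcorderQualityNameMap above.
    static camcorder_quality pickQuality(int cameraId) {
        MediaProfiles *profiles = MediaProfiles::getInstance();
        if (profiles->hasCamcorderProfile(cameraId, CAMCORDER_QUALITY_720P)) {
            return CAMCORDER_QUALITY_720P;
        }
        return CAMCORDER_QUALITY_HIGH;
    }
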
diff --git a/media/libmedia/MediaScanner.cpp b/media/libmedia/MediaScanner.cpp
index c5112a5..5ec573e 100644
--- a/media/libmedia/MediaScanner.cpp
+++ b/media/libmedia/MediaScanner.cpp
@@ -48,8 +48,7 @@
 }
 
 status_t MediaScanner::processDirectory(
-        const char *path, const char *extensions,
-        MediaScannerClient &client,
+        const char *path, MediaScannerClient &client,
         ExceptionCheck exceptionCheck, void *exceptionEnv) {
     int pathLength = strlen(path);
     if (pathLength >= PATH_MAX) {
@@ -72,38 +71,20 @@
 
     status_t result =
         doProcessDirectory(
-                pathBuffer, pathRemaining, extensions, client,
-                exceptionCheck, exceptionEnv);
+                pathBuffer, pathRemaining, client, exceptionCheck, exceptionEnv);
 
     free(pathBuffer);
 
     return result;
 }
 
-static bool fileMatchesExtension(const char* path, const char* extensions) {
-    const char* extension = strrchr(path, '.');
-    if (!extension) return false;
-    ++extension;    // skip the dot
-    if (extension[0] == 0) return false;
-
-    while (extensions[0]) {
-        const char* comma = strchr(extensions, ',');
-        size_t length = (comma ? comma - extensions : strlen(extensions));
-        if (length == strlen(extension) && strncasecmp(extension, extensions, length) == 0) return true;
-        extensions += length;
-        if (extensions[0] == ',') ++extensions;
-    }
-
-    return false;
-}
-
 status_t MediaScanner::doProcessDirectory(
-        char *path, int pathRemaining, const char *extensions,
-        MediaScannerClient &client, ExceptionCheck exceptionCheck,
-        void *exceptionEnv) {
+        char *path, int pathRemaining, MediaScannerClient &client,
+        ExceptionCheck exceptionCheck, void *exceptionEnv) {
     // place to copy file or directory name
     char* fileSpot = path + strlen(path);
     struct dirent* entry;
+    struct stat statbuf;
 
     // ignore directories that contain a ".nomedia" file
     if (pathRemaining >= 8 /* strlen(".nomedia") */ ) {
@@ -133,12 +114,18 @@
             continue;
         }
 
+        int nameLength = strlen(name);
+        if (nameLength + 1 > pathRemaining) {
+            // path too long!
+            continue;
+        }
+        strcpy(fileSpot, name);
+
         int type = entry->d_type;
         if (type == DT_UNKNOWN) {
             // If the type is unknown, stat() the file instead.
             // This is sometimes necessary when accessing NFS mounted filesystems, but
             // could be needed in other cases as well.
-            struct stat statbuf;
             if (stat(path, &statbuf) == 0) {
                 if (S_ISREG(statbuf.st_mode)) {
                     type = DT_REG;
@@ -150,34 +137,29 @@
             }
         }
         if (type == DT_REG || type == DT_DIR) {
-            int nameLength = strlen(name);
-            bool isDirectory = (type == DT_DIR);
-
-            if (nameLength > pathRemaining || (isDirectory && nameLength + 1 > pathRemaining)) {
-                // path too long!
-                continue;
-            }
-
-            strcpy(fileSpot, name);
-            if (isDirectory) {
+            if (type == DT_DIR) {
                 // ignore directories with a name that starts with '.'
                 // for example, the Mac ".Trashes" directory
                 if (name[0] == '.') continue;
 
+                // report the directory to the client
+                if (stat(path, &statbuf) == 0) {
+                    client.scanFile(path, statbuf.st_mtime, 0, true);
+                }
+
+                // and now process its contents
                 strcat(fileSpot, "/");
-                int err = doProcessDirectory(path, pathRemaining - nameLength - 1, extensions, client, exceptionCheck, exceptionEnv);
+                int err = doProcessDirectory(path, pathRemaining - nameLength - 1, client,
+                        exceptionCheck, exceptionEnv);
                 if (err) {
                     // pass exceptions up - ignore other errors
                     if (exceptionCheck && exceptionCheck(exceptionEnv)) goto failure;
                     LOGE("Error processing '%s' - skipping\n", path);
                     continue;
                 }
-            } else if (fileMatchesExtension(path, extensions)) {
-                struct stat statbuf;
+            } else {
                 stat(path, &statbuf);
-                if (statbuf.st_size > 0) {
-                    client.scanFile(path, statbuf.st_mtime, statbuf.st_size);
-                }
+                client.scanFile(path, statbuf.st_mtime, statbuf.st_size, false);
                 if (exceptionCheck && exceptionCheck(exceptionEnv)) goto failure;
             }
         }
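
For reference, a self-contained miniature of the traversal behaviour after this change: each directory is reported before its contents, every regular file is reported regardless of extension (extension filtering now happens on the caller's side), and entries that do not fit in the shared path buffer are skipped. Plain POSIX code, not the framework class; path must end in '/' and have at least 'remaining' spare bytes:

    #include <dirent.h>
    #include <sys/stat.h>
    #include <cstdio>
    #include <cstring>

    static void walk(char *path, size_t remaining) {
        DIR *dir = opendir(path);
        if (dir == NULL) return;
        char *spot = path + strlen(path);
        struct dirent *entry;
        while ((entry = readdir(dir)) != NULL) {
            const char *name = entry->d_name;
            if (name[0] == '.') continue;                // skip hidden, "." and ".."
            if (strlen(name) + 1 > remaining) continue;  // name will not fit
            strcpy(spot, name);
            struct stat st;
            if (stat(path, &st) == 0) {
                if (S_ISDIR(st.st_mode)) {
                    printf("dir  %s\n", path);           // report, then recurse
                    strcat(spot, "/");
                    walk(path, remaining - strlen(name) - 1);
                } else if (S_ISREG(st.st_mode)) {
                    printf("file %s (%lld bytes)\n", path, (long long)st.st_size);
                }
            }
            *spot = '\0';                                // restore the shared buffer
        }
        closedir(dir);
    }
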
diff --git a/media/libmedia/mediametadataretriever.cpp b/media/libmedia/mediametadataretriever.cpp
index e2712ba..39b5bc3 100644
--- a/media/libmedia/mediametadataretriever.cpp
+++ b/media/libmedia/mediametadataretriever.cpp
@@ -134,26 +134,15 @@
     return mRetriever->setMode(mode);
 }
 
-status_t MediaMetadataRetriever::getMode(int* mode)
+sp<IMemory> MediaMetadataRetriever::getFrameAtTime(int64_t timeUs, int option)
 {
-    LOGV("getMode");
-    Mutex::Autolock _l(mLock);
-    if (mRetriever == 0) {
-        LOGE("retriever is not initialized");
-        return INVALID_OPERATION;
-    }
-    return mRetriever->getMode(mode);
-}
-
-sp<IMemory> MediaMetadataRetriever::captureFrame()
-{
-    LOGV("captureFrame");
+    LOGV("getFrameAtTime: time(%lld us) option(%d)", timeUs, option);
     Mutex::Autolock _l(mLock);
     if (mRetriever == 0) {
         LOGE("retriever is not initialized");
         return NULL;
     }
-    return mRetriever->captureFrame();
+    return mRetriever->getFrameAtTime(timeUs, option);
 }
 
 const char* MediaMetadataRetriever::extractMetadata(int keyCode)
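
For reference, a hedged sketch of the replacement call from the client side (the option value, timestamp and path handling are placeholders; the seek-mode constants live in the retriever's headers):

    #include <binder/IMemory.h>
    #include <media/mediametadataretriever.h>

    using namespace android;

    // Returns a frame near the five-second mark of the given clip, or NULL on
    // failure. getFrameAtTime() supersedes the removed captureFrame().
    static sp<IMemory> grabFrame(const char *path) {
        sp<MediaMetadataRetriever> retriever = new MediaMetadataRetriever;
        if (retriever->setDataSource(path) != NO_ERROR) {
            return NULL;
        }
        return retriever->getFrameAtTime(5000000LL /* us */, 0 /* option */);
    }
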
diff --git a/media/libmedia/mediaplayer.cpp b/media/libmedia/mediaplayer.cpp
index cc41e66..a098d69 100644
--- a/media/libmedia/mediaplayer.cpp
+++ b/media/libmedia/mediaplayer.cpp
@@ -172,16 +172,6 @@
     return INVALID_OPERATION;
 }
 
-status_t MediaPlayer::suspend() {
-    Mutex::Autolock _l(mLock);
-    return mPlayer->suspend();
-}
-
-status_t MediaPlayer::resume() {
-    Mutex::Autolock _l(mLock);
-    return mPlayer->resume();
-}
-
 status_t MediaPlayer::setMetadataFilter(const Parcel& filter)
 {
     LOGD("setMetadataFilter");
@@ -207,10 +197,8 @@
     LOGV("setVideoSurface");
     Mutex::Autolock _l(mLock);
     if (mPlayer == 0) return NO_INIT;
-    if (surface != NULL)
-        return  mPlayer->setVideoSurface(surface->getISurface());
-    else
-        return  mPlayer->setVideoSurface(NULL);
+
+    return mPlayer->setVideoSurface(surface);
 }
 
 // must call with lock held
@@ -449,6 +437,9 @@
         } else {
             mCurrentState = MEDIA_PLAYER_IDLE;
         }
+        // setDataSource has to be called again to create a
+        // new mediaplayer.
+        mPlayer = 0;
         return ret;
     }
     clear_l();
diff --git a/media/libmedia/mediarecorder.cpp b/media/libmedia/mediarecorder.cpp
index e20e3ba..fd575fe 100644
--- a/media/libmedia/mediarecorder.cpp
+++ b/media/libmedia/mediarecorder.cpp
@@ -65,7 +65,7 @@
         return INVALID_OPERATION;
     }
 
-    status_t ret = mMediaRecorder->setPreviewSurface(surface->getISurface());
+    status_t ret = mMediaRecorder->setPreviewSurface(surface);
     if (OK != ret) {
         LOGV("setPreviewSurface failed: %d", ret);
         mCurrentState = MEDIA_RECORDER_ERROR;
@@ -308,6 +308,32 @@
     return ret;
 }
 
+status_t MediaRecorder::setOutputFileAuxiliary(int fd)
+{
+    LOGV("setOutputFileAuxiliary(%d)", fd);
+    if (mMediaRecorder == NULL) {
+        LOGE("media recorder is not initialized yet");
+        return INVALID_OPERATION;
+    }
+    if (mIsAuxiliaryOutputFileSet) {
+        LOGE("auxiliary output file has already been set");
+        return INVALID_OPERATION;
+    }
+    if (!(mCurrentState & MEDIA_RECORDER_DATASOURCE_CONFIGURED)) {
+        LOGE("setOutputFileAuxiliary called in an invalid state (%d)", mCurrentState);
+        return INVALID_OPERATION;
+    }
+
+    status_t ret = mMediaRecorder->setOutputFileAuxiliary(fd);
+    if (OK != ret) {
+        LOGV("setOutputFileAuxiliary failed: %d", ret);
+        mCurrentState = MEDIA_RECORDER_ERROR;
+        return ret;
+    }
+    mIsAuxiliaryOutputFileSet = true;
+    return ret;
+}
+
 status_t MediaRecorder::setVideoSize(int width, int height)
 {
     LOGV("setVideoSize(%d, %d)", width, height);
@@ -571,6 +597,7 @@
     mIsAudioEncoderSet = false;
     mIsVideoEncoderSet = false;
     mIsOutputFileSet   = false;
+    mIsAuxiliaryOutputFileSet = false;
 }
 
 // Release should be OK in any state
@@ -643,4 +670,3 @@
 }
 
 }; // namespace android
-
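
For reference, the state checks above require the auxiliary descriptor to be supplied while the recorder is in the DATASOURCE_CONFIGURED state, that is after setOutputFormat() and before prepare(). A call-order sketch (descriptors are assumed to be opened by the caller; camera, sizes and preview surface setup are omitted):

    #include <media/mediarecorder.h>

    using namespace android;

    // Configures a main and an auxiliary output on an already-created
    // MediaRecorder; prepare()/start() would follow the usual sequence.
    static status_t configureDualOutput(
            const sp<MediaRecorder> &recorder, int fd, int auxFd) {
        status_t err;
        if ((err = recorder->setVideoSource(VIDEO_SOURCE_CAMERA)) != OK) return err;
        if ((err = recorder->setOutputFormat(OUTPUT_FORMAT_THREE_GPP)) != OK) return err;
        if ((err = recorder->setVideoEncoder(VIDEO_ENCODER_H263)) != OK) return err;
        if ((err = recorder->setOutputFile(fd, 0, 0)) != OK) return err;
        return recorder->setOutputFileAuxiliary(auxFd);
    }
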
diff --git a/media/libmediaplayerservice/Android.mk b/media/libmediaplayerservice/Android.mk
index 55846be..f7f0d95 100644
--- a/media/libmediaplayerservice/Android.mk
+++ b/media/libmediaplayerservice/Android.mk
@@ -31,20 +31,12 @@
 	libandroid_runtime    			\
 	libstagefright        			\
 	libstagefright_omx    			\
-	libstagefright_color_conversion         \
 	libstagefright_foundation               \
 	libsurfaceflinger_client
 
 LOCAL_STATIC_LIBRARIES := \
-        libstagefright_rtsp
-
-ifneq ($(BUILD_WITHOUT_PV),true)
-LOCAL_SHARED_LIBRARIES += \
-	libopencore_player    \
-	libopencore_author
-else
-LOCAL_CFLAGS += -DNO_OPENCORE
-endif
+        libstagefright_rtsp                     \
+        libstagefright_nuplayer                 \
 
 ifneq ($(TARGET_SIMULATOR),true)
 LOCAL_SHARED_LIBRARIES += libdl
@@ -56,9 +48,11 @@
 	$(TOP)/frameworks/base/include/media/stagefright/openmax \
 	$(TOP)/frameworks/base/media/libstagefright/include             \
 	$(TOP)/frameworks/base/media/libstagefright/rtsp                \
-        $(TOP)/external/tremolo/Tremolo
+        $(TOP)/external/tremolo/Tremolo \
 
 LOCAL_MODULE:= libmediaplayerservice
 
 include $(BUILD_SHARED_LIBRARY)
 
+include $(call all-makefiles-under,$(LOCAL_PATH))
+
diff --git a/media/libmediaplayerservice/MediaPlayerService.cpp b/media/libmediaplayerservice/MediaPlayerService.cpp
index c43e9bb..439e4ce 100644
--- a/media/libmediaplayerservice/MediaPlayerService.cpp
+++ b/media/libmediaplayerservice/MediaPlayerService.cpp
@@ -56,9 +56,9 @@
 #include "MetadataRetrieverClient.h"
 
 #include "MidiFile.h"
-#include <media/PVPlayer.h>
 #include "TestPlayerStub.h"
 #include "StagefrightPlayer.h"
+#include "nuplayer/NuPlayerDriver.h"
 
 #include <OMX.h>
 
@@ -196,11 +196,6 @@
         {".rtttl", SONIVOX_PLAYER},
         {".rtx", SONIVOX_PLAYER},
         {".ota", SONIVOX_PLAYER},
-#ifndef NO_OPENCORE
-        {".wma", PV_PLAYER},
-        {".wmv", PV_PLAYER},
-        {".asf", PV_PLAYER},
-#endif
 };
 
 // TODO: Find real cause of Audio/Video delay in PV framework and remove this workaround
@@ -284,6 +279,26 @@
     return c;
 }
 
+sp<IMediaPlayer> MediaPlayerService::create(
+        pid_t pid, const sp<IMediaPlayerClient> &client,
+        const sp<IStreamSource> &source, int audioSessionId) {
+    int32_t connId = android_atomic_inc(&mNextConnId);
+    sp<Client> c = new Client(this, pid, connId, client, audioSessionId);
+
+    LOGV("Create new client(%d) from pid %d, audioSessionId=%d",
+         connId, pid, audioSessionId);
+
+    if (OK != c->setDataSource(source)) {
+        c.clear();
+    } else {
+        wp<Client> w = c;
+        Mutex::Autolock lock(mLock);
+        mClients.add(w);
+    }
+
+    return c;
+}
+
 sp<IOMX> MediaPlayerService::getOMX() {
     Mutex::Autolock autoLock(mLock);
 
@@ -691,14 +706,6 @@
     if (ident == 0x5367674f) // 'OggS'
         return STAGEFRIGHT_PLAYER;
 
-#ifndef NO_OPENCORE
-    if (ident == 0x75b22630) {
-        // The magic number for .asf files, i.e. wmv and wma content.
-        // These are not currently supported through stagefright.
-        return PV_PLAYER;
-    }
-#endif
-
     // Some kind of MIDI?
     EAS_DATA_HANDLE easdata;
     if (EAS_Init(&easdata) == EAS_SUCCESS) {
@@ -725,6 +732,21 @@
         return TEST_PLAYER;
     }
 
+    char value[PROPERTY_VALUE_MAX];
+    if (!property_get("media.httplive.disable-nuplayer", value, NULL)
+            || (strcasecmp(value, "true") && strcmp(value, "1"))) {
+        if (!strncasecmp("http://", url, 7)) {
+            size_t len = strlen(url);
+            if (len >= 5 && !strcasecmp(".m3u8", &url[len - 5])) {
+                return NU_PLAYER;
+            }
+
+            if (strstr(url,"m3u8")) {
+                return NU_PLAYER;
+            }
+        }
+    }
+
     // use MidiFile for MIDI extensions
     int lenURL = strlen(url);
     for (int i = 0; i < NELEM(FILE_EXTS); ++i) {
@@ -737,16 +759,6 @@
         }
     }
 
-    if (!strncasecmp(url, "rtsp://", 7)) {
-        char value[PROPERTY_VALUE_MAX];
-        if (property_get("media.stagefright.enable-rtsp", value, NULL)
-            && (strcmp(value, "1") && strcasecmp(value, "true"))) {
-            // For now, we're going to use PV for rtsp-based playback
-            // by default until we can clear up a few more issues.
-            return PV_PLAYER;
-        }
-    }
-
     return getDefaultPlayerType();
 }
 
@@ -755,12 +767,6 @@
 {
     sp<MediaPlayerBase> p;
     switch (playerType) {
-#ifndef NO_OPENCORE
-        case PV_PLAYER:
-            LOGV(" create PVPlayer");
-            p = new PVPlayer();
-            break;
-#endif
         case SONIVOX_PLAYER:
             LOGV(" create MidiFile");
             p = new MidiFile();
@@ -769,10 +775,17 @@
             LOGV(" create StagefrightPlayer");
             p = new StagefrightPlayer;
             break;
+        case NU_PLAYER:
+            LOGV(" create NuPlayer");
+            p = new NuPlayerDriver;
+            break;
         case TEST_PLAYER:
             LOGV("Create Test Player stub");
             p = new TestPlayerStub();
             break;
+        default:
+            LOGE("Unknown player type: %d", playerType);
+            return NULL;
     }
     if (p != NULL) {
         if (p->initCheck() == NO_ERROR) {
@@ -891,7 +904,31 @@
     return mStatus;
 }
 
-status_t MediaPlayerService::Client::setVideoSurface(const sp<ISurface>& surface)
+status_t MediaPlayerService::Client::setDataSource(
+        const sp<IStreamSource> &source) {
+    // create the right type of player
+    sp<MediaPlayerBase> p = createPlayer(NU_PLAYER);
+
+    if (p == NULL) {
+        return NO_INIT;
+    }
+
+    if (!p->hardwareOutput()) {
+        mAudioOutput = new AudioOutput(mAudioSessionId);
+        static_cast<MediaPlayerInterface*>(p.get())->setAudioSink(mAudioOutput);
+    }
+
+    // now set data source
+    mStatus = p->setDataSource(source);
+
+    if (mStatus == OK) {
+        mPlayer = p;
+    }
+
+    return mStatus;
+}
+
+status_t MediaPlayerService::Client::setVideoSurface(const sp<Surface>& surface)
 {
     LOGV("[%d] setVideoSurface(%p)", mConnId, surface.get());
     sp<MediaPlayerBase> p = getPlayer();
@@ -966,20 +1003,6 @@
     return OK;
 }
 
-status_t MediaPlayerService::Client::suspend() {
-    sp<MediaPlayerBase> p = getPlayer();
-    if (p == 0) return UNKNOWN_ERROR;
-
-    return p->suspend();
-}
-
-status_t MediaPlayerService::Client::resume() {
-    sp<MediaPlayerBase> p = getPlayer();
-    if (p == 0) return UNKNOWN_ERROR;
-
-    return p->resume();
-}
-
 status_t MediaPlayerService::Client::prepareAsync()
 {
     LOGV("[%d] prepareAsync", mConnId);
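
For reference, the HTTP-live routing added to getPlayerType() reduces to the predicate below, restated standalone for clarity; the media.httplive.disable-nuplayer system property checked above can still force the old path:

    #include <cstring>
    #include <strings.h>

    // True if the URL should be routed to NuPlayer under the new rule: an
    // http:// URL that ends in ".m3u8", or that merely contains "m3u8"
    // anywhere in it.
    static bool looksLikeHttpLive(const char *url) {
        if (strncasecmp(url, "http://", 7)) {
            return false;
        }
        size_t len = strlen(url);
        if (len >= 5 && !strcasecmp(&url[len - 5], ".m3u8")) {
            return true;
        }
        return strstr(url, "m3u8") != NULL;
    }
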
diff --git a/media/libmediaplayerservice/MediaPlayerService.h b/media/libmediaplayerservice/MediaPlayerService.h
index 4492e20..62f8ed6 100644
--- a/media/libmediaplayerservice/MediaPlayerService.h
+++ b/media/libmediaplayerservice/MediaPlayerService.h
@@ -191,6 +191,11 @@
             const KeyedVector<String8, String8> *headers, int audioSessionId);
 
     virtual sp<IMediaPlayer>    create(pid_t pid, const sp<IMediaPlayerClient>& client, int fd, int64_t offset, int64_t length, int audioSessionId);
+
+    virtual sp<IMediaPlayer>    create(
+            pid_t pid, const sp<IMediaPlayerClient> &client,
+            const sp<IStreamSource> &source, int audioSessionId);
+
     virtual sp<IMemory>         decode(const char* url, uint32_t *pSampleRate, int* pNumChannels, int* pFormat);
     virtual sp<IMemory>         decode(int fd, int64_t offset, int64_t length, uint32_t *pSampleRate, int* pNumChannels, int* pFormat);
     virtual sp<IOMX>            getOMX();
@@ -206,7 +211,7 @@
 
         // IMediaPlayer interface
         virtual void            disconnect();
-        virtual status_t        setVideoSurface(const sp<ISurface>& surface);
+        virtual status_t        setVideoSurface(const sp<Surface>& surface);
         virtual status_t        prepareAsync();
         virtual status_t        start();
         virtual status_t        stop();
@@ -224,8 +229,6 @@
         virtual status_t        getMetadata(bool update_only,
                                             bool apply_filter,
                                             Parcel *reply);
-        virtual status_t        suspend();
-        virtual status_t        resume();
         virtual status_t        setAuxEffectSendLevel(float level);
         virtual status_t        attachAuxEffect(int effectId);
 
@@ -236,6 +239,9 @@
                         const KeyedVector<String8, String8> *headers);
 
                 status_t        setDataSource(int fd, int64_t offset, int64_t length);
+
+                status_t        setDataSource(const sp<IStreamSource> &source);
+
         static  void            notify(void* cookie, int msg, int ext1, int ext2);
 
                 pid_t           pid() const { return mPid; }
diff --git a/media/libmediaplayerservice/MediaRecorderClient.cpp b/media/libmediaplayerservice/MediaRecorderClient.cpp
index 19915f1..1a1780c 100644
--- a/media/libmediaplayerservice/MediaRecorderClient.cpp
+++ b/media/libmediaplayerservice/MediaRecorderClient.cpp
@@ -31,10 +31,6 @@
 #include <binder/MemoryHeapBase.h>
 #include <binder/MemoryBase.h>
 
-#ifndef NO_OPENCORE
-#include <media/PVMediaRecorder.h>
-#endif
-
 #include <utils/String16.h>
 
 #include <media/AudioTrack.h>
@@ -70,7 +66,7 @@
     return mRecorder->setCamera(camera);
 }
 
-status_t MediaRecorderClient::setPreviewSurface(const sp<ISurface>& surface)
+status_t MediaRecorderClient::setPreviewSurface(const sp<Surface>& surface)
 {
     LOGV("setPreviewSurface");
     Mutex::Autolock lock(mLock);
@@ -164,6 +160,17 @@
     return mRecorder->setOutputFile(fd, offset, length);
 }
 
+status_t MediaRecorderClient::setOutputFileAuxiliary(int fd)
+{
+    LOGV("setOutputFileAuxiliary(%d)", fd);
+    Mutex::Autolock lock(mLock);
+    if (mRecorder == NULL) {
+        LOGE("recorder is not initialized");
+        return NO_INIT;
+    }
+    return mRecorder->setOutputFileAuxiliary(fd);
+}
+
 status_t MediaRecorderClient::setVideoSize(int width, int height)
 {
     LOGV("setVideoSize(%dx%d)", width, height);
@@ -293,22 +300,7 @@
 {
     LOGV("Client constructor");
     mPid = pid;
-
-    char value[PROPERTY_VALUE_MAX];
-    if (!property_get("media.stagefright.enable-record", value, NULL)
-        || !strcmp(value, "1") || !strcasecmp(value, "true")) {
-        mRecorder = new StagefrightRecorder;
-    } else
-#ifndef NO_OPENCORE
-    {
-        mRecorder = new PVMediaRecorder();
-    }
-#else
-    {
-        mRecorder = NULL;
-    }
-#endif
-
+    mRecorder = new StagefrightRecorder;
     mMediaPlayerService = service;
 }
 
@@ -337,4 +329,3 @@
 }
 
 }; // namespace android
-
diff --git a/media/libmediaplayerservice/MediaRecorderClient.h b/media/libmediaplayerservice/MediaRecorderClient.h
index 1d1913d..fded98e 100644
--- a/media/libmediaplayerservice/MediaRecorderClient.h
+++ b/media/libmediaplayerservice/MediaRecorderClient.h
@@ -29,7 +29,7 @@
 {
 public:
     virtual     status_t        setCamera(const sp<ICamera>& camera);
-    virtual     status_t        setPreviewSurface(const sp<ISurface>& surface);
+    virtual     status_t        setPreviewSurface(const sp<Surface>& surface);
     virtual     status_t        setVideoSource(int vs);
     virtual     status_t        setAudioSource(int as);
     virtual     status_t        setOutputFormat(int of);
@@ -37,6 +37,7 @@
     virtual     status_t        setAudioEncoder(int ae);
     virtual     status_t        setOutputFile(const char* path);
     virtual     status_t        setOutputFile(int fd, int64_t offset, int64_t length);
+    virtual     status_t        setOutputFileAuxiliary(int fd);
     virtual     status_t        setVideoSize(int width, int height);
     virtual     status_t        setVideoFrameRate(int frames_per_second);
     virtual     status_t        setParameters(const String8& params);
@@ -66,4 +67,3 @@
 }; // namespace android
 
 #endif // ANDROID_MEDIARECORDERCLIENT_H
-
diff --git a/media/libmediaplayerservice/MetadataRetrieverClient.cpp b/media/libmediaplayerservice/MetadataRetrieverClient.cpp
index 39fce81..abaec02 100644
--- a/media/libmediaplayerservice/MetadataRetrieverClient.cpp
+++ b/media/libmediaplayerservice/MetadataRetrieverClient.cpp
@@ -35,7 +35,6 @@
 #include <binder/IServiceManager.h>
 #include <media/MediaMetadataRetrieverInterface.h>
 #include <media/MediaPlayerInterface.h>
-#include <media/PVMetadataRetriever.h>
 #include <private/media/VideoFrame.h>
 #include "MidiMetadataRetriever.h"
 #include "MetadataRetrieverClient.h"
@@ -107,12 +106,6 @@
             p = new StagefrightMetadataRetriever;
             break;
         }
-#ifndef NO_OPENCORE
-        case PV_PLAYER:
-            LOGV("create pv metadata retriever");
-            p = new PVMetadataRetriever();
-            break;
-#endif
         case SONIVOX_PLAYER:
             LOGV("create midi metadata retriever");
             p = new MidiMetadataRetriever();
@@ -203,33 +196,16 @@
     return NO_ERROR;
 }
 
-status_t MetadataRetrieverClient::getMode(int* mode) const
+sp<IMemory> MetadataRetrieverClient::getFrameAtTime(int64_t timeUs, int option)
 {
-    LOGV("getMode");
-    Mutex::Autolock lock(mLock);
-
-    // TODO:
-    // This may not be necessary.
-    // If setDataSource() has not been called, return the cached value
-    // otherwise, return the value retrieved from the retriever
-    if (mRetriever == NULL) {
-        *mode = mMode;
-    } else {
-        mRetriever->getMode(mode);
-    }
-    return NO_ERROR;
-}
-
-sp<IMemory> MetadataRetrieverClient::captureFrame()
-{
-    LOGV("captureFrame");
+    LOGV("getFrameAtTime: time(%lld us) option(%d)", timeUs, option);
     Mutex::Autolock lock(mLock);
     mThumbnail.clear();
     if (mRetriever == NULL) {
         LOGE("retriever is not initialized");
         return NULL;
     }
-    VideoFrame *frame = mRetriever->captureFrame();
+    VideoFrame *frame = mRetriever->getFrameAtTime(timeUs, option);
     if (frame == NULL) {
         LOGE("failed to capture a video frame");
         return NULL;
diff --git a/media/libmediaplayerservice/MetadataRetrieverClient.h b/media/libmediaplayerservice/MetadataRetrieverClient.h
index 4aab94f..8b4c0c7 100644
--- a/media/libmediaplayerservice/MetadataRetrieverClient.h
+++ b/media/libmediaplayerservice/MetadataRetrieverClient.h
@@ -44,8 +44,7 @@
     virtual status_t                setDataSource(const char *url);
     virtual status_t                setDataSource(int fd, int64_t offset, int64_t length);
     virtual status_t                setMode(int mode);
-    virtual status_t                getMode(int* mode) const;
-    virtual sp<IMemory>             captureFrame();
+    virtual sp<IMemory>             getFrameAtTime(int64_t timeUs, int option);
     virtual sp<IMemory>             extractAlbumArt();
     virtual const char*             extractMetadata(int keyCode);
 
diff --git a/media/libmediaplayerservice/MidiFile.h b/media/libmediaplayerservice/MidiFile.h
index 4a60ece..aa8f3f0 100644
--- a/media/libmediaplayerservice/MidiFile.h
+++ b/media/libmediaplayerservice/MidiFile.h
@@ -35,7 +35,7 @@
             const char* path, const KeyedVector<String8, String8> *headers);
 
     virtual status_t    setDataSource(int fd, int64_t offset, int64_t length);
-    virtual status_t    setVideoSurface(const sp<ISurface>& surface) { return UNKNOWN_ERROR; }
+    virtual status_t    setVideoSurface(const sp<Surface>& surface) { return UNKNOWN_ERROR; }
     virtual status_t    prepare();
     virtual status_t    prepareAsync();
     virtual status_t    start();
diff --git a/media/libmediaplayerservice/StagefrightPlayer.cpp b/media/libmediaplayerservice/StagefrightPlayer.cpp
index 6bded09..da564dc 100644
--- a/media/libmediaplayerservice/StagefrightPlayer.cpp
+++ b/media/libmediaplayerservice/StagefrightPlayer.cpp
@@ -44,10 +44,14 @@
     return mPlayer->setDataSource(dup(fd), offset, length);
 }
 
-status_t StagefrightPlayer::setVideoSurface(const sp<ISurface> &surface) {
+status_t StagefrightPlayer::setDataSource(const sp<IStreamSource> &source) {
+    return mPlayer->setDataSource(source);
+}
+
+status_t StagefrightPlayer::setVideoSurface(const sp<Surface> &surface) {
     LOGV("setVideoSurface");
 
-    mPlayer->setISurface(surface);
+    mPlayer->setSurface(surface);
     return OK;
 }
 
@@ -140,16 +144,6 @@
     return STAGEFRIGHT_PLAYER;
 }
 
-status_t StagefrightPlayer::suspend() {
-    LOGV("suspend");
-    return mPlayer->suspend();
-}
-
-status_t StagefrightPlayer::resume() {
-    LOGV("resume");
-    return mPlayer->resume();
-}
-
 status_t StagefrightPlayer::invoke(const Parcel &request, Parcel *reply) {
     return INVALID_OPERATION;
 }
diff --git a/media/libmediaplayerservice/StagefrightPlayer.h b/media/libmediaplayerservice/StagefrightPlayer.h
index 781eb44..fc72bfb 100644
--- a/media/libmediaplayerservice/StagefrightPlayer.h
+++ b/media/libmediaplayerservice/StagefrightPlayer.h
@@ -35,7 +35,10 @@
             const char *url, const KeyedVector<String8, String8> *headers);
 
     virtual status_t setDataSource(int fd, int64_t offset, int64_t length);
-    virtual status_t setVideoSurface(const sp<ISurface> &surface);
+
+    virtual status_t setDataSource(const sp<IStreamSource> &source);
+
+    virtual status_t setVideoSurface(const sp<Surface> &surface);
     virtual status_t prepare();
     virtual status_t prepareAsync();
     virtual status_t start();
@@ -50,8 +53,6 @@
     virtual player_type playerType();
     virtual status_t invoke(const Parcel &request, Parcel *reply);
     virtual void setAudioSink(const sp<AudioSink> &audioSink);
-    virtual status_t suspend();
-    virtual status_t resume();
 
     virtual status_t getMetadata(
             const media::Metadata::Filter& ids, Parcel *records);
diff --git a/media/libmediaplayerservice/StagefrightRecorder.cpp b/media/libmediaplayerservice/StagefrightRecorder.cpp
index 3261fe6..d372ee6 100644
--- a/media/libmediaplayerservice/StagefrightRecorder.cpp
+++ b/media/libmediaplayerservice/StagefrightRecorder.cpp
@@ -20,10 +20,12 @@
 
 #include "StagefrightRecorder.h"
 
-#include <binder/IPCThreadState.h>
 #include <media/stagefright/AudioSource.h>
 #include <media/stagefright/AMRWriter.h>
 #include <media/stagefright/CameraSource.h>
+#include <media/stagefright/VideoSourceDownSampler.h>
+#include <media/stagefright/CameraSourceTimeLapse.h>
+#include <media/stagefright/MediaSourceSplitter.h>
 #include <media/stagefright/MPEG2TSWriter.h>
 #include <media/stagefright/MPEG4Writer.h>
 #include <media/stagefright/MediaDebug.h>
@@ -33,21 +35,20 @@
 #include <media/stagefright/OMXCodec.h>
 #include <media/MediaProfiles.h>
 #include <camera/ICamera.h>
-#include <camera/Camera.h>
 #include <camera/CameraParameters.h>
-#include <surfaceflinger/ISurface.h>
+#include <surfaceflinger/Surface.h>
 #include <utils/Errors.h>
 #include <sys/types.h>
-#include <unistd.h>
 #include <ctype.h>
+#include <unistd.h>
 
 #include "ARTPWriter.h"
 
 namespace android {
 
 StagefrightRecorder::StagefrightRecorder()
-    : mWriter(NULL),
-      mOutputFd(-1) {
+    : mWriter(NULL), mWriterAux(NULL),
+      mOutputFd(-1), mOutputFdAux(-1) {
 
     LOGV("Constructor");
     reset();
@@ -164,7 +165,8 @@
 
 status_t StagefrightRecorder::setVideoFrameRate(int frames_per_second) {
     LOGV("setVideoFrameRate: %d", frames_per_second);
-    if (frames_per_second <= 0 || frames_per_second > 30) {
+    if ((frames_per_second <= 0 && frames_per_second != -1) ||
+        frames_per_second > 120) {
         LOGE("Invalid video frame rate: %d", frames_per_second);
         return BAD_VALUE;
     }
@@ -182,26 +184,11 @@
         return BAD_VALUE;
     }
 
-    int64_t token = IPCThreadState::self()->clearCallingIdentity();
-    mFlags &= ~FLAGS_HOT_CAMERA;
-    mCamera = Camera::create(camera);
-    if (mCamera == 0) {
-        LOGE("Unable to connect to camera");
-        IPCThreadState::self()->restoreCallingIdentity(token);
-        return -EBUSY;
-    }
-
-    LOGV("Connected to camera");
-    if (mCamera->previewEnabled()) {
-        LOGV("camera is hot");
-        mFlags |= FLAGS_HOT_CAMERA;
-    }
-    IPCThreadState::self()->restoreCallingIdentity(token);
-
+    mCamera = camera;
     return OK;
 }
 
-status_t StagefrightRecorder::setPreviewSurface(const sp<ISurface> &surface) {
+status_t StagefrightRecorder::setPreviewSurface(const sp<Surface> &surface) {
     LOGV("setPreviewSurface: %p", surface.get());
     mPreviewSurface = surface;
 
@@ -235,6 +222,24 @@
     return OK;
 }
 
+status_t StagefrightRecorder::setOutputFileAuxiliary(int fd) {
+    LOGV("setOutputFileAuxiliary: %d", fd);
+
+    if (fd < 0) {
+        LOGE("Invalid file descriptor: %d", fd);
+        return -EBADF;
+    }
+
+    mCaptureAuxVideo = true;
+
+    if (mOutputFdAux >= 0) {
+        ::close(mOutputFdAux);
+    }
+    mOutputFdAux = dup(fd);
+
+    return OK;
+}
+
 // Attempt to parse an int64 literal optionally surrounded by whitespace,
 // returns true on success, false otherwise.
 static bool safe_strtoi64(const char *s, int64_t *val) {
@@ -353,6 +358,8 @@
 
 status_t StagefrightRecorder::setParamMaxFileDurationUs(int64_t timeUs) {
     LOGV("setParamMaxFileDurationUs: %lld us", timeUs);
+
+    // This is meant for backward compatibility with MediaRecorder.java
     if (timeUs <= 0) {
         LOGW("Max file duration is not positive: %lld us. Disabling duration limit.", timeUs);
         timeUs = 0; // Disable the duration limit for zero or negative values.
@@ -370,7 +377,13 @@
 
 status_t StagefrightRecorder::setParamMaxFileSizeBytes(int64_t bytes) {
     LOGV("setParamMaxFileSizeBytes: %lld bytes", bytes);
-    if (bytes <= 1024) {  // XXX: 1 kB
+
+    // This is meant for backward compatibility with MediaRecorder.java
+    if (bytes <= 0) {
+        LOGW("Max file size is not positive: %lld bytes. "
+             "Disabling file size limit.", bytes);
+        bytes = 0; // Disable the file size limit for zero or negative values.
+    } else if (bytes <= 1024) {  // XXX: 1 kB
         LOGE("Max file size is too small: %lld bytes", bytes);
         return BAD_VALUE;
     }
@@ -493,6 +506,68 @@
     return OK;
 }
 
+status_t StagefrightRecorder::setParamTimeLapseEnable(int32_t timeLapseEnable) {
+    LOGV("setParamTimeLapseEnable: %d", timeLapseEnable);
+
+    if (timeLapseEnable == 0) {
+        mCaptureTimeLapse = false;
+    } else if (timeLapseEnable == 1) {
+        mCaptureTimeLapse = true;
+    } else {
+        return BAD_VALUE;
+    }
+    return OK;
+}
+
+status_t StagefrightRecorder::setParamTimeBetweenTimeLapseFrameCapture(int64_t timeUs) {
+    LOGV("setParamTimeBetweenTimeLapseFrameCapture: %lld us", timeUs);
+
+    // Do not allow intervals longer than one day.
+    if (timeUs <= 0 || timeUs > 86400*1E6) {
+        LOGE("Time between time lapse frame capture (%lld us) is out of range (0, 1 day]", timeUs);
+        return BAD_VALUE;
+    }
+
+    mTimeBetweenTimeLapseFrameCaptureUs = timeUs;
+    return OK;
+}
+
+status_t StagefrightRecorder::setParamAuxVideoWidth(int32_t width) {
+    LOGV("setParamAuxVideoWidth: %d", width);
+
+    if (width <= 0) {
+        LOGE("Width (%d) is not positive", width);
+        return BAD_VALUE;
+    }
+
+    mAuxVideoWidth = width;
+    return OK;
+}
+
+status_t StagefrightRecorder::setParamAuxVideoHeight(int32_t height) {
+    LOGV("setParamAuxVideoHeight: %d", height);
+
+    if (height <= 0) {
+        LOGE("Height (%d) is not positive", height);
+        return BAD_VALUE;
+    }
+
+    mAuxVideoHeight = height;
+    return OK;
+}
+
+status_t StagefrightRecorder::setParamAuxVideoEncodingBitRate(int32_t bitRate) {
+    LOGV("setParamAuxVideoEncodingBitRate: %d", bitRate);
+
+    if (bitRate <= 0) {
+        LOGE("Invalid video encoding bit rate: %d", bitRate);
+        return BAD_VALUE;
+    }
+
+    mAuxVideoBitRate = bitRate;
+    return OK;
+}
+
 status_t StagefrightRecorder::setParameter(
         const String8 &key, const String8 &value) {
     LOGV("setParameter: key (%s) => value (%s)", key.string(), value.string());
@@ -581,6 +656,32 @@
         if (safe_strtoi32(value.string(), &timeScale)) {
             return setParamVideoTimeScale(timeScale);
         }
+    } else if (key == "time-lapse-enable") {
+        int32_t timeLapseEnable;
+        if (safe_strtoi32(value.string(), &timeLapseEnable)) {
+            return setParamTimeLapseEnable(timeLapseEnable);
+        }
+    } else if (key == "time-between-time-lapse-frame-capture") {
+        int64_t timeBetweenTimeLapseFrameCaptureMs;
+        if (safe_strtoi64(value.string(), &timeBetweenTimeLapseFrameCaptureMs)) {
+            return setParamTimeBetweenTimeLapseFrameCapture(
+                    1000LL * timeBetweenTimeLapseFrameCaptureMs);
+        }
+    } else if (key == "video-aux-param-width") {
+        int32_t auxWidth;
+        if (safe_strtoi32(value.string(), &auxWidth)) {
+            return setParamAuxVideoWidth(auxWidth);
+        }
+    } else if (key == "video-aux-param-height") {
+        int32_t auxHeight;
+        if (safe_strtoi32(value.string(), &auxHeight)) {
+            return setParamAuxVideoHeight(auxHeight);
+        }
+    } else if (key == "video-aux-param-encoding-bitrate") {
+        int32_t auxVideoBitRate;
+        if (safe_strtoi32(value.string(), &auxVideoBitRate)) {
+            return setParamAuxVideoEncodingBitRate(auxVideoBitRate);
+        }
     } else {
         LOGE("setParameter: failed to find key %s", key.string());
     }
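
For reference, the new keys parsed above are reachable from a client through MediaRecorder::setParameters(), which accepts semicolon-separated key=value pairs. A sketch with placeholder values (the capture-interval key is in milliseconds; the recorder converts it to microseconds):

    #include <media/mediarecorder.h>
    #include <utils/String8.h>

    using namespace android;

    // Turns on time-lapse capture at one frame per second and sizes the
    // auxiliary video stream; the values are examples only.
    static status_t enableTimeLapse(const sp<MediaRecorder> &recorder) {
        String8 params(
                "time-lapse-enable=1;"
                "time-between-time-lapse-frame-capture=1000;"
                "video-aux-param-width=176;"
                "video-aux-param-height=144;"
                "video-aux-param-encoding-bitrate=192000");
        return recorder->setParameters(params);
    }
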
@@ -779,7 +880,7 @@
         return UNKNOWN_ERROR;
     }
 
-    mWriter = new AMRWriter(dup(mOutputFd));
+    mWriter = new AMRWriter(mOutputFd);
     mWriter->addSource(audioEncoder);
 
     if (mMaxFileDurationUs != 0) {
@@ -814,13 +915,20 @@
     if (mAudioSource != AUDIO_SOURCE_LIST_END) {
         source = createAudioSource();
     } else {
-        status_t err = setupVideoEncoder(&source);
+
+        sp<CameraSource> cameraSource;
+        status_t err = setupCameraSource(&cameraSource);
+        if (err != OK) {
+            return err;
+        }
+
+        err = setupVideoEncoder(cameraSource, mVideoBitRate, &source);
         if (err != OK) {
             return err;
         }
     }
 
-    mWriter = new ARTPWriter(dup(mOutputFd));
+    mWriter = new ARTPWriter(mOutputFd);
     mWriter->addSource(source);
     mWriter->setListener(mListener);
 
@@ -830,7 +938,7 @@
 status_t StagefrightRecorder::startMPEG2TSRecording() {
     CHECK_EQ(mOutputFormat, OUTPUT_FORMAT_MPEG2TS);
 
-    sp<MediaWriter> writer = new MPEG2TSWriter(dup(mOutputFd));
+    sp<MediaWriter> writer = new MPEG2TSWriter(mOutputFd);
 
     if (mAudioSource != AUDIO_SOURCE_LIST_END) {
         if (mAudioEncoder != AUDIO_ENCODER_AAC) {
@@ -850,8 +958,14 @@
             return ERROR_UNSUPPORTED;
         }
 
+        sp<CameraSource> cameraSource;
+        status_t err = setupCameraSource(&cameraSource);
+        if (err != OK) {
+            return err;
+        }
+
         sp<MediaSource> encoder;
-        status_t err = setupVideoEncoder(&encoder);
+        err = setupVideoEncoder(cameraSource, mVideoBitRate, &encoder);
 
         if (err != OK) {
             return err;
@@ -879,7 +993,7 @@
                         "enc.vid.fps.min", mVideoEncoder);
     int maxFrameRate = mEncoderProfiles->getVideoEncoderParamByName(
                         "enc.vid.fps.max", mVideoEncoder);
-    if (mFrameRate < minFrameRate) {
+    if (mFrameRate < minFrameRate && mFrameRate != -1) {
         LOGW("Intended video encoding frame rate (%d fps) is too small"
              " and will be set to (%d fps)", mFrameRate, minFrameRate);
         mFrameRate = minFrameRate;
@@ -924,57 +1038,15 @@
     }
 }
 
-status_t StagefrightRecorder::setupCameraSource() {
-    clipVideoBitRate();
-    clipVideoFrameRate();
-    clipVideoFrameWidth();
-    clipVideoFrameHeight();
-
-    int64_t token = IPCThreadState::self()->clearCallingIdentity();
-    if (mCamera == 0) {
-        mCamera = Camera::connect(mCameraId);
-        if (mCamera == 0) {
-            LOGE("Camera connection could not be established.");
-            return -EBUSY;
-        }
-        mFlags &= ~FLAGS_HOT_CAMERA;
-        mCamera->lock();
+status_t StagefrightRecorder::checkVideoEncoderCapabilities() {
+    if (!mCaptureTimeLapse) {
+        // Don't clip for time lapse capture, as the encoder will have enough
+        // time to encode because of the slow capture rate of time lapse.
+        clipVideoBitRate();
+        clipVideoFrameRate();
+        clipVideoFrameWidth();
+        clipVideoFrameHeight();
     }
-
-    // Set the actual video recording frame size
-    CameraParameters params(mCamera->getParameters());
-    params.setPreviewSize(mVideoWidth, mVideoHeight);
-    params.setPreviewFrameRate(mFrameRate);
-    String8 s = params.flatten();
-    if (OK != mCamera->setParameters(s)) {
-        LOGE("Could not change settings."
-             " Someone else is using camera %d?", mCameraId);
-        return -EBUSY;
-    }
-    CameraParameters newCameraParams(mCamera->getParameters());
-
-    // Check on video frame size
-    int frameWidth = 0, frameHeight = 0;
-    newCameraParams.getPreviewSize(&frameWidth, &frameHeight);
-    if (frameWidth  < 0 || frameWidth  != mVideoWidth ||
-        frameHeight < 0 || frameHeight != mVideoHeight) {
-        LOGE("Failed to set the video frame size to %dx%d",
-                mVideoWidth, mVideoHeight);
-        IPCThreadState::self()->restoreCallingIdentity(token);
-        return UNKNOWN_ERROR;
-    }
-
-    // Check on video frame rate
-    int frameRate = newCameraParams.getPreviewFrameRate();
-    if (frameRate < 0 || (frameRate - mFrameRate) != 0) {
-        LOGE("Failed to set frame rate to %d fps. The actual "
-             "frame rate is %d", mFrameRate, frameRate);
-    }
-
-    // This CHECK is good, since we just passed the lock/unlock
-    // check earlier by calling mCamera->setParameters().
-    CHECK_EQ(OK, mCamera->setPreviewDisplay(mPreviewSurface));
-    IPCThreadState::self()->restoreCallingIdentity(token);
     return OK;
 }
 
@@ -995,18 +1067,64 @@
     }
 }
 
-status_t StagefrightRecorder::setupVideoEncoder(sp<MediaSource> *source) {
+status_t StagefrightRecorder::setupCameraSource(
+        sp<CameraSource> *cameraSource) {
+    status_t err = OK;
+    if ((err = checkVideoEncoderCapabilities()) != OK) {
+        return err;
+    }
+    Size videoSize;
+    videoSize.width = mVideoWidth;
+    videoSize.height = mVideoHeight;
+    if (mCaptureTimeLapse) {
+        mCameraSourceTimeLapse = CameraSourceTimeLapse::CreateFromCamera(
+                mCamera, mCameraId,
+                videoSize, mFrameRate, mPreviewSurface,
+                mTimeBetweenTimeLapseFrameCaptureUs);
+        *cameraSource = mCameraSourceTimeLapse;
+    } else {
+        *cameraSource = CameraSource::CreateFromCamera(
+                mCamera, mCameraId, videoSize, mFrameRate,
+                mPreviewSurface, true /*storeMetaDataInVideoBuffers*/);
+    }
+    if (*cameraSource == NULL) {
+        return UNKNOWN_ERROR;
+    }
+
+    if ((*cameraSource)->initCheck() != OK) {
+        (*cameraSource).clear();
+        *cameraSource = NULL;
+        return NO_INIT;
+    }
+
+    // When the frame rate is not set, fall back to the frame rate
+    // the camera is currently using.
+    if (mFrameRate == -1) {
+        int32_t frameRate = 0;
+        CHECK((*cameraSource)->getFormat()->findInt32(
+                    kKeyFrameRate, &frameRate));
+        LOGI("Frame rate is not explicitly set. Using the current frame "
+             "rate (%d fps)", frameRate);
+        mFrameRate = frameRate;
+    }
+
+    CHECK(mFrameRate != -1);
+
+    mIsMetaDataStoredInVideoBuffers =
+        (*cameraSource)->isMetaDataStoredInVideoBuffers();
+
+    return OK;
+}
+
+status_t StagefrightRecorder::setupVideoEncoder(
+        sp<MediaSource> cameraSource,
+        int32_t videoBitRate,
+        sp<MediaSource> *source) {
     source->clear();
 
-    status_t err = setupCameraSource();
-    if (err != OK) return err;
-
-    sp<CameraSource> cameraSource = CameraSource::CreateFromCamera(mCamera);
-    CHECK(cameraSource != NULL);
-
     sp<MetaData> enc_meta = new MetaData;
-    enc_meta->setInt32(kKeyBitRate, mVideoBitRate);
-    enc_meta->setInt32(kKeySampleRate, mFrameRate);
+    enc_meta->setInt32(kKeyBitRate, videoBitRate);
+    enc_meta->setInt32(kKeyFrameRate, mFrameRate);
 
     switch (mVideoEncoder) {
         case VIDEO_ENCODER_H263:
@@ -1049,15 +1167,36 @@
     }
     if (mVideoEncoderLevel != -1) {
         enc_meta->setInt32(kKeyVideoLevel, mVideoEncoderLevel);
+    } else if (mCaptureTimeLapse) {
+        // Check if we are using high resolution and/or high bitrate and
+        // set appropriate level for the software AVCEncoder.
+        if ((width * height >= 921600) // 720p
+                || (videoBitRate >= 20000000)) {
+            enc_meta->setInt32(kKeyVideoLevel, OMX_VIDEO_AVCLevel5);
+        }
     }
 
     OMXClient client;
     CHECK_EQ(client.connect(), OK);
 
+    // Use software codec for time lapse
+    uint32_t encoder_flags = 0;
+    if (mCaptureTimeLapse) {
+        encoder_flags |= OMXCodec::kPreferSoftwareCodecs;
+    } else if (mIsMetaDataStoredInVideoBuffers) {
+        encoder_flags |= OMXCodec::kHardwareCodecsOnly;
+        encoder_flags |= OMXCodec::kStoreMetaDataInVideoBuffers;
+    }
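+    // Assumption: kStoreMetaDataInVideoBuffers makes the camera source hand
+    // out buffer metadata (handles) instead of copied YUV frames, which only
+    // hardware encoders can consume, hence kHardwareCodecsOnly above.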
     sp<MediaSource> encoder = OMXCodec::Create(
             client.interface(), enc_meta,
-            true /* createEncoder */, cameraSource);
+            true /* createEncoder */, cameraSource,
+            NULL, encoder_flags);
     if (encoder == NULL) {
+        LOGW("Failed to create the encoder");
+        // When the encoder fails to be created, we need to release
+        // the camera source because of the camera's lock and unlock
+        // mechanism.
+        cameraSource->stop();
         return UNKNOWN_ERROR;
     }
 
@@ -1087,54 +1226,150 @@
     return OK;
 }
 
-status_t StagefrightRecorder::startMPEG4Recording() {
-    int32_t totalBitRate = 0;
+status_t StagefrightRecorder::setupMPEG4Recording(
+        bool useSplitCameraSource,
+        int outputFd,
+        int32_t videoWidth, int32_t videoHeight,
+        int32_t videoBitRate,
+        int32_t *totalBitRate,
+        sp<MediaWriter> *mediaWriter) {
+    mediaWriter->clear();
+    *totalBitRate = 0;
     status_t err = OK;
-    sp<MediaWriter> writer = new MPEG4Writer(dup(mOutputFd));
+    sp<MediaWriter> writer = new MPEG4Writer(outputFd);
 
     // Add audio source first if it exists
-    if (mAudioSource != AUDIO_SOURCE_LIST_END) {
+    if (!mCaptureTimeLapse && (mAudioSource != AUDIO_SOURCE_LIST_END)) {
         err = setupAudioEncoder(writer);
         if (err != OK) return err;
-        totalBitRate += mAudioBitRate;
+        *totalBitRate += mAudioBitRate;
     }
     if (mVideoSource == VIDEO_SOURCE_DEFAULT
             || mVideoSource == VIDEO_SOURCE_CAMERA) {
+
+        sp<MediaSource> cameraMediaSource;
+        if (useSplitCameraSource) {
+            LOGV("Using Split camera source");
+            cameraMediaSource = mCameraSourceSplitter->createClient();
+        } else {
+            sp<CameraSource> cameraSource;
+            err = setupCameraSource(&cameraSource);
+            cameraMediaSource = cameraSource;
+        }
+        if ((videoWidth != mVideoWidth) || (videoHeight != mVideoHeight)) {
+            // Use downsampling from the original source.
+            cameraMediaSource =
+                new VideoSourceDownSampler(cameraMediaSource, videoWidth, videoHeight);
+        }
+        if (err != OK) {
+            return err;
+        }
+
         sp<MediaSource> encoder;
-        err = setupVideoEncoder(&encoder);
-        if (err != OK) return err;
+        err = setupVideoEncoder(cameraMediaSource, videoBitRate, &encoder);
+        if (err != OK) {
+            return err;
+        }
+
         writer->addSource(encoder);
-        totalBitRate += mVideoBitRate;
+        *totalBitRate += videoBitRate;
     }
 
     if (mInterleaveDurationUs > 0) {
         reinterpret_cast<MPEG4Writer *>(writer.get())->
             setInterleaveDuration(mInterleaveDurationUs);
     }
-
     if (mMaxFileDurationUs != 0) {
         writer->setMaxFileDuration(mMaxFileDurationUs);
     }
     if (mMaxFileSizeBytes != 0) {
         writer->setMaxFileSize(mMaxFileSizeBytes);
     }
-    sp<MetaData> meta = new MetaData;
-    meta->setInt64(kKeyTime, systemTime() / 1000);
-    meta->setInt32(kKeyFileType, mOutputFormat);
-    meta->setInt32(kKeyBitRate, totalBitRate);
-    meta->setInt32(kKey64BitFileOffset, mUse64BitFileOffset);
+
+    writer->setListener(mListener);
+    *mediaWriter = writer;
+    return OK;
+}
+
+void StagefrightRecorder::setupMPEG4MetaData(int64_t startTimeUs, int32_t totalBitRate,
+        sp<MetaData> *meta) {
+    (*meta)->setInt64(kKeyTime, startTimeUs);
+    (*meta)->setInt32(kKeyFileType, mOutputFormat);
+    (*meta)->setInt32(kKeyBitRate, totalBitRate);
+    (*meta)->setInt32(kKey64BitFileOffset, mUse64BitFileOffset);
     if (mMovieTimeScale > 0) {
-        meta->setInt32(kKeyTimeScale, mMovieTimeScale);
+        (*meta)->setInt32(kKeyTimeScale, mMovieTimeScale);
     }
     if (mTrackEveryTimeDurationUs > 0) {
-        meta->setInt64(kKeyTrackTimeStatus, mTrackEveryTimeDurationUs);
+        (*meta)->setInt64(kKeyTrackTimeStatus, mTrackEveryTimeDurationUs);
     }
     if (mRotationDegrees != 0) {
-        meta->setInt32(kKeyRotationDegree, mRotationDegrees);
+        (*meta)->setInt32(kKeyRotation, mRotationDegrees);
     }
-    writer->setListener(mListener);
-    mWriter = writer;
-    return mWriter->start(meta.get());
+}
+
+status_t StagefrightRecorder::startMPEG4Recording() {
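+    // Auxiliary (lower resolution) capture shares the single time lapse
+    // camera source: MediaSourceSplitter fans the frames out to both the
+    // main and the auxiliary writer set up below, and the auxiliary track
+    // is scaled down by VideoSourceDownSampler inside setupMPEG4Recording().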
+    if (mCaptureAuxVideo) {
+        if (!mCaptureTimeLapse) {
+            LOGE("Auxiliary video can be captured only in time lapse mode");
+            return UNKNOWN_ERROR;
+        }
+        LOGV("Creating MediaSourceSplitter");
+        sp<CameraSource> cameraSource;
+        status_t err = setupCameraSource(&cameraSource);
+        if (err != OK) {
+            return err;
+        }
+        mCameraSourceSplitter = new MediaSourceSplitter(cameraSource);
+    } else {
+        mCameraSourceSplitter = NULL;
+    }
+
+    int32_t totalBitRate;
+    status_t err = setupMPEG4Recording(mCaptureAuxVideo,
+            mOutputFd, mVideoWidth, mVideoHeight,
+            mVideoBitRate, &totalBitRate, &mWriter);
+    if (err != OK) {
+        return err;
+    }
+
+    int64_t startTimeUs = systemTime() / 1000;
+    sp<MetaData> meta = new MetaData;
+    setupMPEG4MetaData(startTimeUs, totalBitRate, &meta);
+
+    err = mWriter->start(meta.get());
+    if (err != OK) {
+        return err;
+    }
+
+    if (mCaptureAuxVideo) {
+        CHECK(mOutputFdAux >= 0);
+        if (mWriterAux != NULL) {
+            LOGE("Auxiliary File writer is not avaialble");
+            return UNKNOWN_ERROR;
+        }
+        if ((mAuxVideoWidth > mVideoWidth) || (mAuxVideoHeight > mVideoHeight) ||
+                ((mAuxVideoWidth == mVideoWidth) && (mAuxVideoHeight == mVideoHeight))) {
+            LOGE("Auxiliary video size (%d x %d) is the same as or larger than the main video size (%d x %d)",
+                    mAuxVideoWidth, mAuxVideoHeight, mVideoWidth, mVideoHeight);
+            return UNKNOWN_ERROR;
+        }
+
+        int32_t totalBitrateAux;
+        err = setupMPEG4Recording(mCaptureAuxVideo,
+                mOutputFdAux, mAuxVideoWidth, mAuxVideoHeight,
+                mAuxVideoBitRate, &totalBitrateAux, &mWriterAux);
+        if (err != OK) {
+            return err;
+        }
+
+        sp<MetaData> metaAux = new MetaData;
+        setupMPEG4MetaData(startTimeUs, totalBitrateAux, &metaAux);
+
+        return mWriterAux->start(metaAux.get());
+    }
+
+    return OK;
 }
 
 status_t StagefrightRecorder::pause() {
@@ -1143,35 +1378,50 @@
         return UNKNOWN_ERROR;
     }
     mWriter->pause();
+
+    if (mCaptureAuxVideo) {
+        if (mWriterAux == NULL) {
+            return UNKNOWN_ERROR;
+        }
+        mWriterAux->pause();
+    }
+
     return OK;
 }
 
 status_t StagefrightRecorder::stop() {
     LOGV("stop");
     status_t err = OK;
+
+    if (mCaptureTimeLapse && mCameraSourceTimeLapse != NULL) {
+        mCameraSourceTimeLapse->startQuickReadReturns();
+        mCameraSourceTimeLapse = NULL;
+    }
+
+    if (mCaptureAuxVideo) {
+        if (mWriterAux != NULL) {
+            mWriterAux->stop();
+            mWriterAux.clear();
+        }
+    }
+
     if (mWriter != NULL) {
         err = mWriter->stop();
         mWriter.clear();
     }
 
-    if (mCamera != 0) {
-        LOGV("Disconnect camera");
-        int64_t token = IPCThreadState::self()->clearCallingIdentity();
-        if ((mFlags & FLAGS_HOT_CAMERA) == 0) {
-            LOGV("Camera was cold when we started, stopping preview");
-            mCamera->stopPreview();
-        }
-        mCamera->unlock();
-        mCamera.clear();
-        IPCThreadState::self()->restoreCallingIdentity(token);
-        mFlags = 0;
-    }
-
     if (mOutputFd >= 0) {
         ::close(mOutputFd);
         mOutputFd = -1;
     }
 
+    if (mCaptureAuxVideo) {
+        if (mOutputFdAux >= 0) {
+            ::close(mOutputFdAux);
+            mOutputFdAux = -1;
+        }
+    }
+
     return err;
 }
 
@@ -1196,8 +1446,11 @@
     mVideoEncoder  = VIDEO_ENCODER_H263;
     mVideoWidth    = 176;
     mVideoHeight   = 144;
-    mFrameRate     = 20;
+    mAuxVideoWidth    = 176;
+    mAuxVideoHeight   = 144;
+    mFrameRate     = -1;
     mVideoBitRate  = 192000;
+    mAuxVideoBitRate = 192000;
     mSampleRate    = 8000;
     mAudioChannels = 1;
     mAudioBitRate  = 12200;
@@ -1214,11 +1467,17 @@
     mMaxFileDurationUs = 0;
     mMaxFileSizeBytes = 0;
     mTrackEveryTimeDurationUs = 0;
-    mRotationDegrees = 0;
+    mCaptureTimeLapse = false;
+    mTimeBetweenTimeLapseFrameCaptureUs = -1;
+    mCaptureAuxVideo = false;
+    mCameraSourceSplitter = NULL;
+    mCameraSourceTimeLapse = NULL;
+    mIsMetaDataStoredInVideoBuffers = false;
     mEncoderProfiles = MediaProfiles::getInstance();
+    mRotationDegrees = 0;
 
     mOutputFd = -1;
-    mFlags = 0;
+    mOutputFdAux = -1;
 
     return OK;
 }
@@ -1255,6 +1514,8 @@
     snprintf(buffer, SIZE, "   Recorder: %p\n", this);
     snprintf(buffer, SIZE, "   Output file (fd %d):\n", mOutputFd);
     result.append(buffer);
+    snprintf(buffer, SIZE, "   Output file Auxiliary (fd %d):\n", mOutputFdAux);
+    result.append(buffer);
     snprintf(buffer, SIZE, "     File format: %d\n", mOutputFormat);
     result.append(buffer);
     snprintf(buffer, SIZE, "     Max file size (bytes): %lld\n", mMaxFileSizeBytes);
@@ -1287,8 +1548,6 @@
     result.append(buffer);
     snprintf(buffer, SIZE, "     Camera Id: %d\n", mCameraId);
     result.append(buffer);
-    snprintf(buffer, SIZE, "     Camera flags: %d\n", mFlags);
-    result.append(buffer);
     snprintf(buffer, SIZE, "     Encoder: %d\n", mVideoEncoder);
     result.append(buffer);
     snprintf(buffer, SIZE, "     Encoder profile: %d\n", mVideoEncoderProfile);
@@ -1299,10 +1558,14 @@
     result.append(buffer);
     snprintf(buffer, SIZE, "     Frame size (pixels): %dx%d\n", mVideoWidth, mVideoHeight);
     result.append(buffer);
+    snprintf(buffer, SIZE, "     Aux Frame size (pixels): %dx%d\n", mAuxVideoWidth, mAuxVideoHeight);
+    result.append(buffer);
     snprintf(buffer, SIZE, "     Frame rate (fps): %d\n", mFrameRate);
     result.append(buffer);
     snprintf(buffer, SIZE, "     Bit rate (bps): %d\n", mVideoBitRate);
     result.append(buffer);
+    snprintf(buffer, SIZE, "     Aux Bit rate (bps): %d\n", mAuxVideoBitRate);
+    result.append(buffer);
     ::write(fd, result.string(), result.size());
     return OK;
 }
diff --git a/media/libmediaplayerservice/StagefrightRecorder.h b/media/libmediaplayerservice/StagefrightRecorder.h
index e42df2e..36a15a8 100644
--- a/media/libmediaplayerservice/StagefrightRecorder.h
+++ b/media/libmediaplayerservice/StagefrightRecorder.h
@@ -19,13 +19,18 @@
 #define STAGEFRIGHT_RECORDER_H_
 
 #include <media/MediaRecorderBase.h>
+#include <camera/CameraParameters.h>
 #include <utils/String8.h>
 
 namespace android {
 
 class Camera;
+class CameraSource;
+class CameraSourceTimeLapse;
+class MediaSourceSplitter;
 struct MediaSource;
 struct MediaWriter;
+class MetaData;
 struct AudioSource;
 class MediaProfiles;
 
@@ -42,9 +47,10 @@
     virtual status_t setVideoSize(int width, int height);
     virtual status_t setVideoFrameRate(int frames_per_second);
     virtual status_t setCamera(const sp<ICamera>& camera);
-    virtual status_t setPreviewSurface(const sp<ISurface>& surface);
+    virtual status_t setPreviewSurface(const sp<Surface>& surface);
     virtual status_t setOutputFile(const char *path);
     virtual status_t setOutputFile(int fd, int64_t offset, int64_t length);
+    virtual status_t setOutputFileAuxiliary(int fd);
     virtual status_t setParameters(const String8& params);
     virtual status_t setListener(const sp<IMediaRecorderClient>& listener);
     virtual status_t prepare();
@@ -57,15 +63,10 @@
     virtual status_t dump(int fd, const Vector<String16>& args) const;
 
 private:
-    enum CameraFlags {
-        FLAGS_SET_CAMERA = 1L << 0,
-        FLAGS_HOT_CAMERA = 1L << 1,
-    };
-
-    sp<Camera> mCamera;
-    sp<ISurface> mPreviewSurface;
+    sp<ICamera> mCamera;
+    sp<Surface> mPreviewSurface;
     sp<IMediaRecorderClient> mListener;
-    sp<MediaWriter> mWriter;
+    sp<MediaWriter> mWriter, mWriterAux;
     sp<AudioSource> mAudioSourceNode;
 
     audio_source mAudioSource;
@@ -75,8 +76,9 @@
     video_encoder mVideoEncoder;
     bool mUse64BitFileOffset;
     int32_t mVideoWidth, mVideoHeight;
+    int32_t mAuxVideoWidth, mAuxVideoHeight;
     int32_t mFrameRate;
-    int32_t mVideoBitRate;
+    int32_t mVideoBitRate, mAuxVideoBitRate;
     int32_t mAudioBitRate;
     int32_t mAudioChannels;
     int32_t mSampleRate;
@@ -93,21 +95,40 @@
     int64_t mTrackEveryTimeDurationUs;
     int32_t mRotationDegrees;  // Clockwise
 
-    String8 mParams;
-    int mOutputFd;
-    int32_t mFlags;
+    bool mCaptureTimeLapse;
+    int64_t mTimeBetweenTimeLapseFrameCaptureUs;
+    bool mCaptureAuxVideo;
+    sp<MediaSourceSplitter> mCameraSourceSplitter;
+    sp<CameraSourceTimeLapse> mCameraSourceTimeLapse;
 
+    String8 mParams;
+    int mOutputFd, mOutputFdAux;
+
+    bool mIsMetaDataStoredInVideoBuffers;
     MediaProfiles *mEncoderProfiles;
 
+    status_t setupMPEG4Recording(
+        bool useSplitCameraSource,
+        int outputFd,
+        int32_t videoWidth, int32_t videoHeight,
+        int32_t videoBitRate,
+        int32_t *totalBitRate,
+        sp<MediaWriter> *mediaWriter);
+    void setupMPEG4MetaData(int64_t startTimeUs, int32_t totalBitRate,
+        sp<MetaData> *meta);
     status_t startMPEG4Recording();
     status_t startAMRRecording();
     status_t startAACRecording();
     status_t startRTPRecording();
     status_t startMPEG2TSRecording();
     sp<MediaSource> createAudioSource();
-    status_t setupCameraSource();
+    status_t checkVideoEncoderCapabilities();
+    status_t setupCameraSource(sp<CameraSource> *cameraSource);
     status_t setupAudioEncoder(const sp<MediaWriter>& writer);
-    status_t setupVideoEncoder(sp<MediaSource> *source);
+    status_t setupVideoEncoder(
+            sp<MediaSource> cameraSource,
+            int32_t videoBitRate,
+            sp<MediaSource> *source);
 
     // Encoding parameter handling utilities
     status_t setParameter(const String8 &key, const String8 &value);
@@ -115,6 +136,11 @@
     status_t setParamAudioNumberOfChannels(int32_t channels);
     status_t setParamAudioSamplingRate(int32_t sampleRate);
     status_t setParamAudioTimeScale(int32_t timeScale);
+    status_t setParamTimeLapseEnable(int32_t timeLapseEnable);
+    status_t setParamTimeBetweenTimeLapseFrameCapture(int64_t timeUs);
+    status_t setParamAuxVideoHeight(int32_t height);
+    status_t setParamAuxVideoWidth(int32_t width);
+    status_t setParamAuxVideoEncodingBitRate(int32_t bitRate);
     status_t setParamVideoEncodingBitRate(int32_t bitRate);
     status_t setParamVideoIFramesInterval(int32_t seconds);
     status_t setParamVideoEncoderProfile(int32_t profile);
@@ -140,4 +166,3 @@
 }  // namespace android
 
 #endif  // STAGEFRIGHT_RECORDER_H_
-
diff --git a/media/libmediaplayerservice/TestPlayerStub.h b/media/libmediaplayerservice/TestPlayerStub.h
index 6e6c3cd..6abd8e3 100644
--- a/media/libmediaplayerservice/TestPlayerStub.h
+++ b/media/libmediaplayerservice/TestPlayerStub.h
@@ -75,7 +75,7 @@
 
 
     // All the methods below wrap the mPlayer instance.
-    virtual status_t setVideoSurface(const android::sp<android::ISurface>& s)  {
+    virtual status_t setVideoSurface(const android::sp<android::Surface>& s)  {
         return mPlayer->setVideoSurface(s);
     }
     virtual status_t prepare() {return mPlayer->prepare();}
diff --git a/media/libmediaplayerservice/nuplayer/Android.mk b/media/libmediaplayerservice/nuplayer/Android.mk
new file mode 100644
index 0000000..c20e279
--- /dev/null
+++ b/media/libmediaplayerservice/nuplayer/Android.mk
@@ -0,0 +1,25 @@
+LOCAL_PATH:= $(call my-dir)
+include $(CLEAR_VARS)
+
+LOCAL_SRC_FILES:=                       \
+        HTTPLiveSource.cpp              \
+        NuPlayer.cpp                    \
+        NuPlayerDecoder.cpp             \
+        NuPlayerDriver.cpp              \
+        NuPlayerRenderer.cpp            \
+        NuPlayerStreamListener.cpp      \
+        DecoderWrapper.cpp              \
+        StreamingSource.cpp             \
+
+LOCAL_C_INCLUDES := \
+        $(TOP)/frameworks/base/include/media/stagefright/openmax        \
+        $(TOP)/frameworks/base/media/libstagefright/include             \
+        $(TOP)/frameworks/base/media/libstagefright/mpeg2ts             \
+        $(TOP)/frameworks/base/media/libstagefright/httplive            \
+
+LOCAL_MODULE:= libstagefright_nuplayer
+
+LOCAL_MODULE_TAGS := eng
+
+include $(BUILD_STATIC_LIBRARY)
+
diff --git a/media/libmediaplayerservice/nuplayer/DecoderWrapper.cpp b/media/libmediaplayerservice/nuplayer/DecoderWrapper.cpp
new file mode 100644
index 0000000..802d1fb
--- /dev/null
+++ b/media/libmediaplayerservice/nuplayer/DecoderWrapper.cpp
@@ -0,0 +1,576 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "DecoderWrapper"
+#include <utils/Log.h>
+
+#include "DecoderWrapper.h"
+
+#include "AACDecoder.h"
+
+#include <media/stagefright/foundation/hexdump.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/ACodec.h>
+#include <media/stagefright/MediaBuffer.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MediaSource.h>
+#include <media/stagefright/MetaData.h>
+
+namespace android {
+
+struct DecoderWrapper::WrapperSource : public MediaSource {
+    WrapperSource(
+            const sp<MetaData> &meta,
+            const sp<AMessage> &notify);
+
+    virtual status_t start(MetaData *params);
+    virtual status_t stop();
+    virtual sp<MetaData> getFormat();
+
+    virtual status_t read(
+            MediaBuffer **buffer, const ReadOptions *options);
+
+    void queueBuffer(const sp<ABuffer> &buffer);
+    void queueEOS(status_t finalResult);
+    void clear();
+
+protected:
+    virtual ~WrapperSource();
+
+private:
+    Mutex mLock;
+    Condition mCondition;
+
+    sp<MetaData> mMeta;
+    sp<AMessage> mNotify;
+
+    List<sp<ABuffer> > mQueue;
+    status_t mFinalResult;
+
+    DISALLOW_EVIL_CONSTRUCTORS(WrapperSource);
+};
+
+DecoderWrapper::WrapperSource::WrapperSource(
+        const sp<MetaData> &meta, const sp<AMessage> &notify)
+    : mMeta(meta),
+      mNotify(notify),
+      mFinalResult(OK) {
+}
+
+DecoderWrapper::WrapperSource::~WrapperSource() {
+}
+
+status_t DecoderWrapper::WrapperSource::start(MetaData *params) {
+    return OK;
+}
+
+status_t DecoderWrapper::WrapperSource::stop() {
+    return OK;
+}
+
+sp<MetaData> DecoderWrapper::WrapperSource::getFormat() {
+    return mMeta;
+}
+
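+// Blocking pull: read() waits until queueBuffer() supplies input or
+// queueEOS() flags the end of the stream; the first wait posts mNotify so
+// the wrapper requests another input buffer from its client.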
+status_t DecoderWrapper::WrapperSource::read(
+        MediaBuffer **out, const ReadOptions *options) {
+    Mutex::Autolock autoLock(mLock);
+
+    bool requestedBuffer = false;
+
+    while (mQueue.empty() && mFinalResult == OK) {
+        if (!requestedBuffer) {
+            mNotify->dup()->post();
+            requestedBuffer = true;
+        }
+
+        mCondition.wait(mLock);
+    }
+
+    if (mQueue.empty()) {
+        return mFinalResult;
+    }
+
+    sp<ABuffer> src = *mQueue.begin();
+    mQueue.erase(mQueue.begin());
+
+    MediaBuffer *dst = new MediaBuffer(src->size());
+    memcpy(dst->data(), src->data(), src->size());
+
+    int64_t timeUs;
+    CHECK(src->meta()->findInt64("timeUs", &timeUs));
+
+    dst->meta_data()->setInt64(kKeyTime, timeUs);
+
+    *out = dst;
+
+    return OK;
+}
+
+void DecoderWrapper::WrapperSource::queueBuffer(const sp<ABuffer> &buffer) {
+    Mutex::Autolock autoLock(mLock);
+    mQueue.push_back(buffer);
+    mCondition.broadcast();
+}
+
+void DecoderWrapper::WrapperSource::queueEOS(status_t finalResult) {
+    CHECK_NE(finalResult, (status_t)OK);
+
+    Mutex::Autolock autoLock(mLock);
+    mFinalResult = finalResult;
+    mCondition.broadcast();
+}
+
+void DecoderWrapper::WrapperSource::clear() {
+    Mutex::Autolock autoLock(mLock);
+    mQueue.clear();
+    mFinalResult = OK;
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
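+// WrapperReader drives the wrapped software decoder on its own looper and
+// re-posts each decoded buffer (or EOS / format change) to DecoderWrapper
+// using ACodec-style "what" notifications.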
+struct DecoderWrapper::WrapperReader : public AHandler {
+    WrapperReader(
+            const sp<MediaSource> &decoder,
+            const sp<AMessage> &notify);
+
+    void start();
+    void stop();
+    void readMore(bool flush = false);
+
+protected:
+    virtual ~WrapperReader();
+
+    virtual void onMessageReceived(const sp<AMessage> &msg);
+
+private:
+    enum {
+        kWhatRead
+    };
+
+    sp<MediaSource> mDecoder;
+    sp<AMessage> mNotify;
+    bool mEOS;
+    bool mSentFormat;
+
+    void sendFormatChange();
+
+    DISALLOW_EVIL_CONSTRUCTORS(WrapperReader);
+};
+
+DecoderWrapper::WrapperReader::WrapperReader(
+        const sp<MediaSource> &decoder, const sp<AMessage> &notify)
+    : mDecoder(decoder),
+      mNotify(notify),
+      mEOS(false),
+      mSentFormat(false) {
+}
+
+DecoderWrapper::WrapperReader::~WrapperReader() {
+}
+
+void DecoderWrapper::WrapperReader::start() {
+    CHECK_EQ(mDecoder->start(), (status_t)OK);
+    readMore();
+}
+
+void DecoderWrapper::WrapperReader::stop() {
+    CHECK_EQ(mDecoder->stop(), (status_t)OK);
+}
+
+void DecoderWrapper::WrapperReader::readMore(bool flush) {
+    if (!flush && mEOS) {
+        return;
+    }
+
+    sp<AMessage> msg = new AMessage(kWhatRead, id());
+    msg->setInt32("flush", static_cast<int32_t>(flush));
+    msg->post();
+}
+
+void DecoderWrapper::WrapperReader::onMessageReceived(
+        const sp<AMessage> &msg) {
+    switch (msg->what()) {
+        case kWhatRead:
+        {
+            int32_t flush;
+            CHECK(msg->findInt32("flush", &flush));
+
+            MediaSource::ReadOptions options;
+            if (flush) {
+                // Dummy seek
+                options.setSeekTo(0);
+                mEOS = false;
+            }
+
+            CHECK(!mEOS);
+
+            MediaBuffer *src;
+            status_t err = mDecoder->read(&src, &options);
+
+            if (err == OK) {
+                if (!mSentFormat) {
+                    sendFormatChange();
+                    mSentFormat = true;
+                }
+
+                sp<AMessage> notify = mNotify->dup();
+
+                sp<AMessage> realNotify;
+                CHECK(notify->findMessage("real-notify", &realNotify));
+
+                realNotify->setInt32("what", ACodec::kWhatDrainThisBuffer);
+
+                sp<ABuffer> dst = new ABuffer(src->range_length());
+                memcpy(dst->data(),
+                       (const uint8_t *)src->data() + src->range_offset(),
+                       src->range_length());
+
+                int64_t timeUs;
+                CHECK(src->meta_data()->findInt64(kKeyTime, &timeUs));
+                src->release();
+                src = NULL;
+
+                dst->meta()->setInt64("timeUs", timeUs);
+
+                realNotify->setObject("buffer", dst);
+
+                notify->post();
+            } else if (err == INFO_FORMAT_CHANGED) {
+                sendFormatChange();
+
+                readMore(false /* flush */);
+            } else {
+                sp<AMessage> notify = mNotify->dup();
+
+                sp<AMessage> realNotify;
+                CHECK(notify->findMessage("real-notify", &realNotify));
+
+                realNotify->setInt32("what", ACodec::kWhatEOS);
+                mEOS = true;
+
+                notify->post();
+            }
+            break;
+        }
+
+        default:
+            TRESPASS();
+            break;
+    }
+}
+
+void DecoderWrapper::WrapperReader::sendFormatChange() {
+    sp<AMessage> notify = mNotify->dup();
+
+    sp<AMessage> realNotify;
+    CHECK(notify->findMessage("real-notify", &realNotify));
+
+    realNotify->setInt32("what", ACodec::kWhatOutputFormatChanged);
+
+    sp<MetaData> meta = mDecoder->getFormat();
+
+    const char *mime;
+    CHECK(meta->findCString(kKeyMIMEType, &mime));
+
+    realNotify->setString("mime", mime);
+
+    if (!strncasecmp("audio/", mime, 6)) {
+        int32_t numChannels;
+        CHECK(meta->findInt32(kKeyChannelCount, &numChannels));
+
+        int32_t sampleRate;
+        CHECK(meta->findInt32(kKeySampleRate, &sampleRate));
+
+        realNotify->setInt32("channel-count", numChannels);
+        realNotify->setInt32("sample-rate", sampleRate);
+    } else {
+        CHECK(!strncasecmp("video/", mime, 6));
+
+        int32_t width, height;
+        CHECK(meta->findInt32(kKeyWidth, &width));
+        CHECK(meta->findInt32(kKeyHeight, &height));
+
+        realNotify->setInt32("width", width);
+        realNotify->setInt32("height", height);
+
+        int32_t cropLeft, cropTop, cropRight, cropBottom;
+        if (!meta->findRect(
+                    kKeyCropRect,
+                    &cropLeft, &cropTop, &cropRight, &cropBottom)) {
+            cropLeft = 0;
+            cropTop = 0;
+            cropRight = width - 1;
+            cropBottom = height - 1;
+        }
+
+        realNotify->setRect("crop", cropLeft, cropTop, cropRight, cropBottom);
+    }
+
+    notify->post();
+
+    mSentFormat = true;
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+DecoderWrapper::DecoderWrapper()
+    : mNumOutstandingInputBuffers(0),
+      mNumOutstandingOutputBuffers(0),
+      mNumPendingDecodes(0),
+      mFlushing(false) {
+}
+
+DecoderWrapper::~DecoderWrapper() {
+}
+
+void DecoderWrapper::setNotificationMessage(const sp<AMessage> &msg) {
+    mNotify = msg;
+}
+
+void DecoderWrapper::initiateSetup(const sp<AMessage> &msg) {
+    msg->setWhat(kWhatSetup);
+    msg->setTarget(id());
+    msg->post();
+}
+
+void DecoderWrapper::initiateShutdown() {
+    (new AMessage(kWhatShutdown, id()))->post();
+}
+
+void DecoderWrapper::signalFlush() {
+    (new AMessage(kWhatFlush, id()))->post();
+}
+
+void DecoderWrapper::signalResume() {
+    (new AMessage(kWhatResume, id()))->post();
+}
+
+void DecoderWrapper::onMessageReceived(const sp<AMessage> &msg) {
+    switch (msg->what()) {
+        case kWhatSetup:
+            onSetup(msg);
+            break;
+
+        case kWhatShutdown:
+            onShutdown();
+            break;
+
+        case kWhatInputDataRequested:
+        {
+            postFillBuffer();
+            ++mNumOutstandingInputBuffers;
+            break;
+        }
+
+        case kWhatInputBufferFilled:
+        {
+            CHECK_GT(mNumOutstandingInputBuffers, 0);
+            --mNumOutstandingInputBuffers;
+
+            if (mFlushing) {
+                mSource->queueEOS(INFO_DISCONTINUITY);
+
+                completeFlushIfPossible();
+                break;
+            }
+
+            sp<RefBase> obj;
+            if (!msg->findObject("buffer", &obj)) {
+                int32_t err = OK;
+                CHECK(msg->findInt32("err", &err));
+
+                mSource->queueEOS(err);
+                break;
+            }
+
+            sp<ABuffer> buffer = static_cast<ABuffer *>(obj.get());
+
+            mSource->queueBuffer(buffer);
+            break;
+        }
+
+        case kWhatFillBufferDone:
+        {
+            sp<AMessage> notify;
+            CHECK(msg->findMessage("real-notify", &notify));
+
+            int32_t what;
+            CHECK(notify->findInt32("what", &what));
+
+            if (what == ACodec::kWhatDrainThisBuffer) {
+                CHECK_GT(mNumPendingDecodes, 0);
+                --mNumPendingDecodes;
+
+                sp<AMessage> reply =
+                    new AMessage(kWhatOutputBufferDrained, id());
+
+                notify->setMessage("reply", reply);
+
+                ++mNumOutstandingOutputBuffers;
+            } else if (what == ACodec::kWhatEOS) {
+                CHECK_GT(mNumPendingDecodes, 0);
+                --mNumPendingDecodes;
+
+                if (mFlushing) {
+                    completeFlushIfPossible();
+                    break;
+                }
+            }
+
+            notify->post();
+            break;
+        }
+
+        case kWhatOutputBufferDrained:
+        {
+            CHECK_GT(mNumOutstandingOutputBuffers, 0);
+            --mNumOutstandingOutputBuffers;
+
+            if (mFlushing) {
+                completeFlushIfPossible();
+                break;
+            }
+
+            ++mNumPendingDecodes;
+            mReader->readMore();
+            break;
+        }
+
+        case kWhatFlush:
+        {
+            onFlush();
+            break;
+        }
+
+        case kWhatResume:
+        {
+            onResume();
+            break;
+        }
+
+        default:
+            TRESPASS();
+            break;
+    }
+}
+
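+// Note: the wrapper currently handles only AAC audio (the CHECK below) and
+// decodes it with the software AACDecoder.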
+void DecoderWrapper::onSetup(const sp<AMessage> &msg) {
+    AString mime;
+    CHECK(msg->findString("mime", &mime));
+
+    CHECK(!strcasecmp(mime.c_str(), MEDIA_MIMETYPE_AUDIO_AAC));
+
+    int32_t numChannels, sampleRate;
+    CHECK(msg->findInt32("channel-count", &numChannels));
+    CHECK(msg->findInt32("sample-rate", &sampleRate));
+
+    sp<RefBase> obj;
+    CHECK(msg->findObject("esds", &obj));
+    sp<ABuffer> esds = static_cast<ABuffer *>(obj.get());
+
+    sp<MetaData> meta = new MetaData;
+    meta->setCString(kKeyMIMEType, mime.c_str());
+    meta->setInt32(kKeySampleRate, sampleRate);
+    meta->setInt32(kKeyChannelCount, numChannels);
+    meta->setData(kKeyESDS, 0, esds->data(), esds->size());
+
+    mSource = new WrapperSource(
+            meta, new AMessage(kWhatInputDataRequested, id()));
+
+    sp<MediaSource> decoder = new AACDecoder(mSource);
+
+    mReaderLooper = new ALooper;
+    mReaderLooper->setName("DecoderWrapper looper");
+
+    mReaderLooper->start(
+            false, /* runOnCallingThread */
+            false, /* canCallJava */
+            PRIORITY_AUDIO);
+
+    sp<AMessage> notify = new AMessage(kWhatFillBufferDone, id());
+    notify->setMessage("real-notify", mNotify);
+
+    mReader = new WrapperReader(decoder, notify);
+    mReaderLooper->registerHandler(mReader);
+
+    mReader->start();
+    ++mNumPendingDecodes;
+}
+
+void DecoderWrapper::onShutdown() {
+    mReaderLooper->stop();
+    mReaderLooper.clear();
+
+    mReader->stop();
+    mReader.clear();
+
+    mSource.clear();
+
+    mNumOutstandingInputBuffers = 0;
+    mNumOutstandingOutputBuffers = 0;
+    mNumPendingDecodes = 0;
+    mFlushing = false;
+
+    sp<AMessage> notify = mNotify->dup();
+    notify->setInt32("what", ACodec::kWhatShutdownCompleted);
+    notify->post();
+}
+
+void DecoderWrapper::postFillBuffer() {
+    sp<AMessage> notify = mNotify->dup();
+    notify->setInt32("what", ACodec::kWhatFillThisBuffer);
+    sp<AMessage> reply = new AMessage(kWhatInputBufferFilled, id());
+    notify->setMessage("reply", reply);
+    notify->post();
+}
+
+void DecoderWrapper::onFlush() {
+    CHECK(!mFlushing);
+    mFlushing = true;
+
+    completeFlushIfPossible();
+}
+
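+// A flush is only reported complete once every outstanding input buffer,
+// output buffer and pending decode has drained; only then is
+// kWhatFlushCompleted posted back to the client.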
+void DecoderWrapper::completeFlushIfPossible() {
+    CHECK(mFlushing);
+
+    if (mNumOutstandingInputBuffers > 0
+            || mNumOutstandingOutputBuffers > 0
+            || mNumPendingDecodes > 0) {
+        return;
+    }
+
+    mFlushing = false;
+
+    sp<AMessage> notify = mNotify->dup();
+    notify->setInt32("what", ACodec::kWhatFlushCompleted);
+    notify->post();
+}
+
+void DecoderWrapper::onResume() {
+    CHECK(!mFlushing);
+
+    ++mNumPendingDecodes;
+
+    mSource->clear();
+    mReader->readMore(true /* flush */);
+}
+
+}  // namespace android
diff --git a/media/libmediaplayerservice/nuplayer/DecoderWrapper.h b/media/libmediaplayerservice/nuplayer/DecoderWrapper.h
new file mode 100644
index 0000000..b9be12c
--- /dev/null
+++ b/media/libmediaplayerservice/nuplayer/DecoderWrapper.h
@@ -0,0 +1,82 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef DECODER_WRAPPER_H_
+
+#define DECODER_WRAPPER_H_
+
+#include <media/stagefright/foundation/AHandler.h>
+
+namespace android {
+
+struct MediaSource;
+
+struct DecoderWrapper : public AHandler {
+    DecoderWrapper();
+
+    void setNotificationMessage(const sp<AMessage> &msg);
+    void initiateSetup(const sp<AMessage> &msg);
+    void initiateShutdown();
+    void signalFlush();
+    void signalResume();
+
+protected:
+    virtual ~DecoderWrapper();
+
+    virtual void onMessageReceived(const sp<AMessage> &msg);
+
+private:
+    struct WrapperSource;
+    struct WrapperReader;
+
+    enum {
+        kWhatSetup,
+        kWhatInputBufferFilled,
+        kWhatOutputBufferDrained,
+        kWhatShutdown,
+        kWhatFillBufferDone,
+        kWhatInputDataRequested,
+        kWhatFlush,
+        kWhatResume,
+    };
+
+    sp<AMessage> mNotify;
+
+    sp<WrapperSource> mSource;
+
+    sp<ALooper> mReaderLooper;
+    sp<WrapperReader> mReader;
+
+    int32_t mNumOutstandingInputBuffers;
+    int32_t mNumOutstandingOutputBuffers;
+    int32_t mNumPendingDecodes;
+    bool mFlushing;
+
+    void onSetup(const sp<AMessage> &msg);
+    void onShutdown();
+    void onFlush();
+    void onResume();
+
+    void postFillBuffer();
+    void completeFlushIfPossible();
+
+    DISALLOW_EVIL_CONSTRUCTORS(DecoderWrapper);
+};
+
+}  // namespace android
+
+#endif  // DECODER_WRAPPER_H_
+
diff --git a/media/libmediaplayerservice/nuplayer/HTTPLiveSource.cpp b/media/libmediaplayerservice/nuplayer/HTTPLiveSource.cpp
new file mode 100644
index 0000000..6bf6dd3
--- /dev/null
+++ b/media/libmediaplayerservice/nuplayer/HTTPLiveSource.cpp
@@ -0,0 +1,152 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "HTTPLiveSource"
+#include <utils/Log.h>
+
+#include "HTTPLiveSource.h"
+
+#include "ATSParser.h"
+#include "AnotherPacketSource.h"
+#include "LiveDataSource.h"
+#include "LiveSession.h"
+
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/MediaErrors.h>
+#include <media/stagefright/MetaData.h>
+
+namespace android {
+
+NuPlayer::HTTPLiveSource::HTTPLiveSource(const char *url)
+    : mURL(url),
+      mEOS(false),
+      mOffset(0) {
+}
+
+NuPlayer::HTTPLiveSource::~HTTPLiveSource() {
+    mLiveSession->disconnect();
+    mLiveLooper->stop();
+}
+
+void NuPlayer::HTTPLiveSource::start() {
+    mLiveLooper = new ALooper;
+    mLiveLooper->setName("http live");
+    mLiveLooper->start();
+
+    mLiveSession = new LiveSession;
+    mLiveLooper->registerHandler(mLiveSession);
+
+    mLiveSession->connect(mURL.c_str());
+
+    mTSParser = new ATSParser;
+}
+
+sp<MetaData> NuPlayer::HTTPLiveSource::getFormat(bool audio) {
+    ATSParser::SourceType type =
+        audio ? ATSParser::MPEG2ADTS_AUDIO : ATSParser::AVC_VIDEO;
+
+    sp<AnotherPacketSource> source =
+        static_cast<AnotherPacketSource *>(mTSParser->getSource(type).get());
+
+    if (source == NULL) {
+        return NULL;
+    }
+
+    return source->getFormat();
+}
+
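+// Feeds at most 50 transport stream packets (188 bytes each, the MPEG-2 TS
+// packet size) per call without blocking. A packet whose first byte is 0x00
+// cannot be a real TS packet (the sync byte is 0x47) and is treated as a
+// legacy discontinuity marker instead.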
+bool NuPlayer::HTTPLiveSource::feedMoreTSData() {
+    if (mEOS) {
+        return false;
+    }
+
+    sp<LiveDataSource> source =
+        static_cast<LiveDataSource *>(mLiveSession->getDataSource().get());
+
+    for (int32_t i = 0; i < 50; ++i) {
+        char buffer[188];
+        ssize_t n = source->readAtNonBlocking(mOffset, buffer, sizeof(buffer));
+
+        if (n == -EWOULDBLOCK) {
+            break;
+        } else if (n < 0) {
+            LOGI("input data EOS reached.");
+            mTSParser->signalEOS(n);
+            mEOS = true;
+            break;
+        } else {
+            if (buffer[0] == 0x00) {
+                // XXX legacy
+                mTSParser->signalDiscontinuity(
+                        buffer[1] == 0x00
+                            ? ATSParser::DISCONTINUITY_SEEK
+                            : ATSParser::DISCONTINUITY_FORMATCHANGE);
+            } else {
+                mTSParser->feedTSPacket(buffer, sizeof(buffer));
+            }
+
+            mOffset += n;
+        }
+    }
+
+    return true;
+}
+
+status_t NuPlayer::HTTPLiveSource::dequeueAccessUnit(
+        bool audio, sp<ABuffer> *accessUnit) {
+    ATSParser::SourceType type =
+        audio ? ATSParser::MPEG2ADTS_AUDIO : ATSParser::AVC_VIDEO;
+
+    sp<AnotherPacketSource> source =
+        static_cast<AnotherPacketSource *>(mTSParser->getSource(type).get());
+
+    if (source == NULL) {
+        return -EWOULDBLOCK;
+    }
+
+    status_t finalResult;
+    if (!source->hasBufferAvailable(&finalResult)) {
+        return finalResult == OK ? -EWOULDBLOCK : finalResult;
+    }
+
+    return source->dequeueAccessUnit(accessUnit);
+}
+
+status_t NuPlayer::HTTPLiveSource::getDuration(int64_t *durationUs) {
+    return mLiveSession->getDuration(durationUs);
+}
+
+status_t NuPlayer::HTTPLiveSource::seekTo(int64_t seekTimeUs) {
+    // Do not seek until the very first PTS timestamp of the stream
+    // has been seen.
+    while (!mTSParser->PTSTimeDeltaEstablished() && feedMoreTSData()) {
+        usleep(100000);
+    }
+
+    mLiveSession->seekTo(seekTimeUs);
+
+    return OK;
+}
+
+bool NuPlayer::HTTPLiveSource::isSeekable() {
+    return mLiveSession->isSeekable();
+}
+
+}  // namespace android
+
diff --git a/media/libmediaplayerservice/nuplayer/HTTPLiveSource.h b/media/libmediaplayerservice/nuplayer/HTTPLiveSource.h
new file mode 100644
index 0000000..f3f539a
--- /dev/null
+++ b/media/libmediaplayerservice/nuplayer/HTTPLiveSource.h
@@ -0,0 +1,60 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef HTTP_LIVE_SOURCE_H_
+
+#define HTTP_LIVE_SOURCE_H_
+
+#include "NuPlayer.h"
+#include "NuPlayerSource.h"
+
+namespace android {
+
+struct ATSParser;
+struct LiveSession;
+
+struct NuPlayer::HTTPLiveSource : public NuPlayer::Source {
+    HTTPLiveSource(const char *url);
+
+    virtual void start();
+
+    // Returns true iff more data was available, false on EOS.
+    virtual bool feedMoreTSData();
+
+    virtual sp<MetaData> getFormat(bool audio);
+    virtual status_t dequeueAccessUnit(bool audio, sp<ABuffer> *accessUnit);
+
+    virtual status_t getDuration(int64_t *durationUs);
+    virtual status_t seekTo(int64_t seekTimeUs);
+    virtual bool isSeekable();
+
+protected:
+    virtual ~HTTPLiveSource();
+
+private:
+    AString mURL;
+    bool mEOS;
+    off64_t mOffset;
+    sp<ALooper> mLiveLooper;
+    sp<LiveSession> mLiveSession;
+    sp<ATSParser> mTSParser;
+
+    DISALLOW_EVIL_CONSTRUCTORS(HTTPLiveSource);
+};
+
+}  // namespace android
+
+#endif  // HTTP_LIVE_SOURCE_H_
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
new file mode 100644
index 0000000..7f534c0
--- /dev/null
+++ b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
@@ -0,0 +1,649 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "NuPlayer"
+#include <utils/Log.h>
+
+#include "NuPlayer.h"
+
+#include "HTTPLiveSource.h"
+#include "NuPlayerDecoder.h"
+#include "NuPlayerDriver.h"
+#include "NuPlayerRenderer.h"
+#include "NuPlayerSource.h"
+#include "StreamingSource.h"
+
+#include "ATSParser.h"
+
+#include <media/stagefright/foundation/hexdump.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/ACodec.h>
+#include <media/stagefright/MediaErrors.h>
+#include <media/stagefright/MetaData.h>
+#include <surfaceflinger/Surface.h>
+
+namespace android {
+
+////////////////////////////////////////////////////////////////////////////////
+
+NuPlayer::NuPlayer()
+    : mAudioEOS(false),
+      mVideoEOS(false),
+      mScanSourcesPending(false),
+      mScanSourcesGeneration(0),
+      mFlushingAudio(NONE),
+      mFlushingVideo(NONE),
+      mResetInProgress(false),
+      mResetPostponed(false) {
+}
+
+NuPlayer::~NuPlayer() {
+}
+
+void NuPlayer::setDriver(const wp<NuPlayerDriver> &driver) {
+    mDriver = driver;
+}
+
+void NuPlayer::setDataSource(const sp<IStreamSource> &source) {
+    sp<AMessage> msg = new AMessage(kWhatSetDataSource, id());
+
+    msg->setObject("source", new StreamingSource(source));
+    msg->post();
+}
+
+void NuPlayer::setDataSource(
+        const char *url, const KeyedVector<String8, String8> *headers) {
+    sp<AMessage> msg = new AMessage(kWhatSetDataSource, id());
+
+    msg->setObject("source", new HTTPLiveSource(url));
+    msg->post();
+}
+
+void NuPlayer::setVideoSurface(const sp<Surface> &surface) {
+    sp<AMessage> msg = new AMessage(kWhatSetVideoSurface, id());
+    msg->setObject("surface", surface);
+    msg->post();
+}
+
+void NuPlayer::setAudioSink(const sp<MediaPlayerBase::AudioSink> &sink) {
+    sp<AMessage> msg = new AMessage(kWhatSetAudioSink, id());
+    msg->setObject("sink", sink);
+    msg->post();
+}
+
+void NuPlayer::start() {
+    (new AMessage(kWhatStart, id()))->post();
+}
+
+void NuPlayer::pause() {
+    // XXX to be implemented
+}
+
+void NuPlayer::resume() {
+    // XXX to be implemented
+}
+
+void NuPlayer::resetAsync() {
+    (new AMessage(kWhatReset, id()))->post();
+}
+
+void NuPlayer::seekToAsync(int64_t seekTimeUs) {
+    sp<AMessage> msg = new AMessage(kWhatSeek, id());
+    msg->setInt64("seekTimeUs", seekTimeUs);
+    msg->post();
+}
+
+// static
+bool NuPlayer::IsFlushingState(FlushStatus state, bool *needShutdown) {
+    switch (state) {
+        case FLUSHING_DECODER:
+            if (needShutdown != NULL) {
+                *needShutdown = false;
+            }
+            return true;
+
+        case FLUSHING_DECODER_SHUTDOWN:
+            if (needShutdown != NULL) {
+                *needShutdown = true;
+            }
+            return true;
+
+        default:
+            return false;
+    }
+}
+
+void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {
+    switch (msg->what()) {
+        case kWhatSetDataSource:
+        {
+            LOGV("kWhatSetDataSource");
+
+            CHECK(mSource == NULL);
+
+            sp<RefBase> obj;
+            CHECK(msg->findObject("source", &obj));
+
+            mSource = static_cast<Source *>(obj.get());
+            break;
+        }
+
+        case kWhatSetVideoSurface:
+        {
+            LOGV("kWhatSetVideoSurface");
+
+            sp<RefBase> obj;
+            CHECK(msg->findObject("surface", &obj));
+
+            mSurface = static_cast<Surface *>(obj.get());
+            break;
+        }
+
+        case kWhatSetAudioSink:
+        {
+            LOGV("kWhatSetAudioSink");
+
+            sp<RefBase> obj;
+            CHECK(msg->findObject("sink", &obj));
+
+            mAudioSink = static_cast<MediaPlayerBase::AudioSink *>(obj.get());
+            break;
+        }
+
+        case kWhatStart:
+        {
+            LOGV("kWhatStart");
+
+            mAudioEOS = false;
+            mVideoEOS = false;
+
+            mSource->start();
+
+            mRenderer = new Renderer(
+                    mAudioSink,
+                    new AMessage(kWhatRendererNotify, id()));
+
+            looper()->registerHandler(mRenderer);
+
+            postScanSources();
+            break;
+        }
+
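+        // Decoders are instantiated lazily: this message re-posts itself
+        // every 100 ms until both the audio and the video decoder exist or
+        // the source runs out of data.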
+        case kWhatScanSources:
+        {
+            int32_t generation;
+            CHECK(msg->findInt32("generation", &generation));
+            if (generation != mScanSourcesGeneration) {
+                // Drop obsolete msg.
+                break;
+            }
+
+            mScanSourcesPending = false;
+
+            LOGV("scanning sources haveAudio=%d, haveVideo=%d",
+                 mAudioDecoder != NULL, mVideoDecoder != NULL);
+
+            instantiateDecoder(false, &mVideoDecoder);
+
+            if (mAudioSink != NULL) {
+                instantiateDecoder(true, &mAudioDecoder);
+            }
+
+            if (!mSource->feedMoreTSData()) {
+                if (mAudioDecoder == NULL && mVideoDecoder == NULL) {
+                    // We're not currently decoding anything (no audio or
+                    // video tracks found) and we just ran out of input data.
+                    notifyListener(MEDIA_PLAYBACK_COMPLETE, 0, 0);
+                }
+                break;
+            }
+
+            if (mAudioDecoder == NULL || mVideoDecoder == NULL) {
+                msg->post(100000ll);
+                mScanSourcesPending = true;
+            }
+            break;
+        }
+
+        case kWhatVideoNotify:
+        case kWhatAudioNotify:
+        {
+            bool audio = msg->what() == kWhatAudioNotify;
+
+            sp<AMessage> codecRequest;
+            CHECK(msg->findMessage("codec-request", &codecRequest));
+
+            int32_t what;
+            CHECK(codecRequest->findInt32("what", &what));
+
+            if (what == ACodec::kWhatFillThisBuffer) {
+                status_t err = feedDecoderInputData(
+                        audio, codecRequest);
+
+                if (err == -EWOULDBLOCK) {
+                    if (mSource->feedMoreTSData()) {
+                        msg->post();
+                    }
+                }
+            } else if (what == ACodec::kWhatEOS) {
+                mRenderer->queueEOS(audio, ERROR_END_OF_STREAM);
+            } else if (what == ACodec::kWhatFlushCompleted) {
+                bool needShutdown;
+
+                if (audio) {
+                    CHECK(IsFlushingState(mFlushingAudio, &needShutdown));
+                    mFlushingAudio = FLUSHED;
+                } else {
+                    CHECK(IsFlushingState(mFlushingVideo, &needShutdown));
+                    mFlushingVideo = FLUSHED;
+                }
+
+                LOGV("decoder %s flush completed", audio ? "audio" : "video");
+
+                if (needShutdown) {
+                    LOGV("initiating %s decoder shutdown",
+                         audio ? "audio" : "video");
+
+                    (audio ? mAudioDecoder : mVideoDecoder)->initiateShutdown();
+
+                    if (audio) {
+                        mFlushingAudio = SHUTTING_DOWN_DECODER;
+                    } else {
+                        mFlushingVideo = SHUTTING_DOWN_DECODER;
+                    }
+                }
+
+                finishFlushIfPossible();
+            } else if (what == ACodec::kWhatOutputFormatChanged) {
+                if (audio) {
+                    int32_t numChannels;
+                    CHECK(codecRequest->findInt32("channel-count", &numChannels));
+
+                    int32_t sampleRate;
+                    CHECK(codecRequest->findInt32("sample-rate", &sampleRate));
+
+                    LOGV("Audio output format changed to %d Hz, %d channels",
+                         sampleRate, numChannels);
+
+                    mAudioSink->close();
+                    CHECK_EQ(mAudioSink->open(sampleRate, numChannels), (status_t)OK);
+                    mAudioSink->start();
+
+                    mRenderer->signalAudioSinkChanged();
+                } else {
+                    // video
+
+                    int32_t width, height;
+                    CHECK(codecRequest->findInt32("width", &width));
+                    CHECK(codecRequest->findInt32("height", &height));
+
+                    int32_t cropLeft, cropTop, cropRight, cropBottom;
+                    CHECK(codecRequest->findRect(
+                                "crop",
+                                &cropLeft, &cropTop, &cropRight, &cropBottom));
+
+                    LOGV("Video output format changed to %d x %d "
+                         "(crop: %d, %d, %d, %d)",
+                         width, height,
+                         cropLeft, cropTop, cropRight, cropBottom);
+
+                    notifyListener(
+                            MEDIA_SET_VIDEO_SIZE,
+                            cropRight - cropLeft + 1,
+                            cropBottom - cropTop + 1);
+                }
+            } else if (what == ACodec::kWhatShutdownCompleted) {
+                LOGV("%s shutdown completed", audio ? "audio" : "video");
+                if (audio) {
+                    mAudioDecoder.clear();
+
+                    CHECK_EQ((int)mFlushingAudio, (int)SHUTTING_DOWN_DECODER);
+                    mFlushingAudio = SHUT_DOWN;
+                } else {
+                    mVideoDecoder.clear();
+
+                    CHECK_EQ((int)mFlushingVideo, (int)SHUTTING_DOWN_DECODER);
+                    mFlushingVideo = SHUT_DOWN;
+                }
+
+                finishFlushIfPossible();
+            } else {
+                CHECK_EQ((int)what, (int)ACodec::kWhatDrainThisBuffer);
+
+                renderBuffer(audio, codecRequest);
+            }
+
+            break;
+        }
+
+        case kWhatRendererNotify:
+        {
+            int32_t what;
+            CHECK(msg->findInt32("what", &what));
+
+            if (what == Renderer::kWhatEOS) {
+                int32_t audio;
+                CHECK(msg->findInt32("audio", &audio));
+
+                if (audio) {
+                    mAudioEOS = true;
+                } else {
+                    mVideoEOS = true;
+                }
+
+                LOGV("reached %s EOS", audio ? "audio" : "video");
+
+                if ((mAudioEOS || mAudioDecoder == NULL)
+                        && (mVideoEOS || mVideoDecoder == NULL)) {
+                    notifyListener(MEDIA_PLAYBACK_COMPLETE, 0, 0);
+                }
+            } else if (what == Renderer::kWhatPosition) {
+                int64_t positionUs;
+                CHECK(msg->findInt64("positionUs", &positionUs));
+
+                if (mDriver != NULL) {
+                    sp<NuPlayerDriver> driver = mDriver.promote();
+                    if (driver != NULL) {
+                        driver->notifyPosition(positionUs);
+                    }
+                }
+            } else {
+                CHECK_EQ(what, (int32_t)Renderer::kWhatFlushComplete);
+
+                int32_t audio;
+                CHECK(msg->findInt32("audio", &audio));
+
+                LOGV("renderer %s flush completed.", audio ? "audio" : "video");
+            }
+            break;
+        }
+
+        case kWhatMoreDataQueued:
+        {
+            break;
+        }
+
+        case kWhatReset:
+        {
+            LOGV("kWhatReset");
+
+            if (mFlushingAudio != NONE || mFlushingVideo != NONE) {
+                // We're currently flushing, postpone the reset until that's
+                // completed.
+
+                LOGV("postponing reset");
+
+                mResetPostponed = true;
+                break;
+            }
+
+            if (mAudioDecoder == NULL && mVideoDecoder == NULL) {
+                finishReset();
+                break;
+            }
+
+            if (mAudioDecoder != NULL) {
+                flushDecoder(true /* audio */, true /* needShutdown */);
+            }
+
+            if (mVideoDecoder != NULL) {
+                flushDecoder(false /* audio */, true /* needShutdown */);
+            }
+
+            mResetInProgress = true;
+            break;
+        }
+
+        case kWhatSeek:
+        {
+            int64_t seekTimeUs;
+            CHECK(msg->findInt64("seekTimeUs", &seekTimeUs));
+
+            LOGV("kWhatSeek seekTimeUs=%lld us (%.2f secs)",
+                 seekTimeUs, seekTimeUs / 1E6);
+
+            mSource->seekTo(seekTimeUs);
+
+            if (mDriver != NULL) {
+                sp<NuPlayerDriver> driver = mDriver.promote();
+                if (driver != NULL) {
+                    driver->notifySeekComplete();
+                }
+            }
+
+            break;
+        }
+
+        default:
+            TRESPASS();
+            break;
+    }
+}
+
+void NuPlayer::finishFlushIfPossible() {
+    if (mFlushingAudio != FLUSHED && mFlushingAudio != SHUT_DOWN) {
+        return;
+    }
+
+    if (mFlushingVideo != FLUSHED && mFlushingVideo != SHUT_DOWN) {
+        return;
+    }
+
+    LOGV("both audio and video are flushed now.");
+
+    mRenderer->signalTimeDiscontinuity();
+
+    if (mAudioDecoder != NULL) {
+        mAudioDecoder->signalResume();
+    }
+
+    if (mVideoDecoder != NULL) {
+        mVideoDecoder->signalResume();
+    }
+
+    mFlushingAudio = NONE;
+    mFlushingVideo = NONE;
+
+    if (mResetInProgress) {
+        LOGV("reset completed");
+
+        mResetInProgress = false;
+        finishReset();
+    } else if (mResetPostponed) {
+        (new AMessage(kWhatReset, id()))->post();
+        mResetPostponed = false;
+    } else if (mAudioDecoder == NULL || mVideoDecoder == NULL) {
+        postScanSources();
+    }
+}
+
+void NuPlayer::finishReset() {
+    CHECK(mAudioDecoder == NULL);
+    CHECK(mVideoDecoder == NULL);
+
+    mRenderer.clear();
+    mSource.clear();
+
+    if (mDriver != NULL) {
+        sp<NuPlayerDriver> driver = mDriver.promote();
+        if (driver != NULL) {
+            driver->notifyResetComplete();
+        }
+    }
+}
+
+void NuPlayer::postScanSources() {
+    if (mScanSourcesPending) {
+        return;
+    }
+
+    sp<AMessage> msg = new AMessage(kWhatScanSources, id());
+    msg->setInt32("generation", mScanSourcesGeneration);
+    msg->post();
+
+    mScanSourcesPending = true;
+}
+
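postScanSources() and the kWhatScanSources case above rely on a generation counter: any scan message still in flight when flushDecoder() bumps mScanSourcesGeneration is simply dropped when it arrives. Below is a minimal, self-contained sketch of that pattern in plain C++ (no Android looper; the Poller type, its queue and the drain() call are illustrative stand-ins, not part of the patch):

    #include <cstdint>
    #include <functional>
    #include <iostream>
    #include <vector>

    struct Poller {
        int32_t mGeneration = 0;
        bool mPending = false;
        // Deferred "messages": each remembers the generation it was posted with.
        std::vector<std::pair<int32_t, std::function<void(int32_t)>>> mQueue;

        void postScan() {
            if (mPending) {
                return;                        // at most one scan in flight
            }
            mPending = true;
            mQueue.emplace_back(mGeneration, [this](int32_t gen) {
                if (gen != mGeneration) {      // obsolete message, drop it
                    std::cout << "dropping stale scan\n";
                    return;
                }
                mPending = false;
                std::cout << "scanning sources\n";
            });
        }

        void invalidate() {                    // e.g. when a flush starts
            ++mGeneration;
            mPending = false;
        }

        void drain() {                         // stands in for the looper
            for (auto &entry : mQueue) {
                entry.second(entry.first);
            }
            mQueue.clear();
        }
    };

    int main() {
        Poller p;
        p.postScan();    // queued with generation 0
        p.invalidate();  // flush: generation becomes 1, the queued scan is now stale
        p.postScan();    // queued with generation 1
        p.drain();       // prints "dropping stale scan", then "scanning sources"
        return 0;
    }

The renderer further down uses the same trick for kWhatDrainAudioQueue and kWhatDrainVideoQueue via mAudioQueueGeneration and mVideoQueueGeneration.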
+status_t NuPlayer::instantiateDecoder(bool audio, sp<Decoder> *decoder) {
+    if (*decoder != NULL) {
+        return OK;
+    }
+
+    sp<MetaData> meta = mSource->getFormat(audio);
+
+    if (meta == NULL) {
+        return -EWOULDBLOCK;
+    }
+
+    sp<AMessage> notify =
+        new AMessage(audio ? kWhatAudioNotify : kWhatVideoNotify,
+                     id());
+
+    *decoder = new Decoder(notify, audio ? NULL : mSurface);
+    looper()->registerHandler(*decoder);
+
+    (*decoder)->configure(meta);
+
+    int64_t durationUs;
+    if (mDriver != NULL && mSource->getDuration(&durationUs) == OK) {
+        sp<NuPlayerDriver> driver = mDriver.promote();
+        if (driver != NULL) {
+            driver->notifyDuration(durationUs);
+        }
+    }
+
+    return OK;
+}
+
+status_t NuPlayer::feedDecoderInputData(bool audio, const sp<AMessage> &msg) {
+    sp<AMessage> reply;
+    CHECK(msg->findMessage("reply", &reply));
+
+    if ((audio && IsFlushingState(mFlushingAudio))
+            || (!audio && IsFlushingState(mFlushingVideo))) {
+        reply->setInt32("err", INFO_DISCONTINUITY);
+        reply->post();
+        return OK;
+    }
+
+    sp<ABuffer> accessUnit;
+    status_t err = mSource->dequeueAccessUnit(audio, &accessUnit);
+
+    if (err == -EWOULDBLOCK) {
+        return err;
+    } else if (err != OK) {
+        if (err == INFO_DISCONTINUITY) {
+            int32_t type;
+            CHECK(accessUnit->meta()->findInt32("discontinuity", &type));
+
+            bool formatChange =
+                type == ATSParser::DISCONTINUITY_FORMATCHANGE;
+
+            LOGV("%s discontinuity (formatChange=%d)",
+                 audio ? "audio" : "video", formatChange);
+
+            flushDecoder(audio, formatChange);
+        }
+
+        reply->setInt32("err", err);
+        reply->post();
+        return OK;
+    }
+
+    // LOGV("returned a valid buffer of %s data", audio ? "audio" : "video");
+
+#if 0
+    int64_t mediaTimeUs;
+    CHECK(accessUnit->meta()->findInt64("timeUs", &mediaTimeUs));
+    LOGV("feeding %s input buffer at media time %.2f secs",
+         audio ? "audio" : "video",
+         mediaTimeUs / 1E6);
+#endif
+
+    reply->setObject("buffer", accessUnit);
+    reply->post();
+
+    return OK;
+}
+
+void NuPlayer::renderBuffer(bool audio, const sp<AMessage> &msg) {
+    // LOGV("renderBuffer %s", audio ? "audio" : "video");
+
+    sp<AMessage> reply;
+    CHECK(msg->findMessage("reply", &reply));
+
+    sp<RefBase> obj;
+    CHECK(msg->findObject("buffer", &obj));
+
+    sp<ABuffer> buffer = static_cast<ABuffer *>(obj.get());
+
+    mRenderer->queueBuffer(audio, buffer, reply);
+}
+
+void NuPlayer::notifyListener(int msg, int ext1, int ext2) {
+    if (mDriver == NULL) {
+        return;
+    }
+
+    sp<NuPlayerDriver> driver = mDriver.promote();
+
+    if (driver == NULL) {
+        return;
+    }
+
+    driver->sendEvent(msg, ext1, ext2);
+}
+
+void NuPlayer::flushDecoder(bool audio, bool needShutdown) {
+    // Make sure we don't continue to scan sources until we finish flushing.
+    ++mScanSourcesGeneration;
+    mScanSourcesPending = false;
+
+    (audio ? mAudioDecoder : mVideoDecoder)->signalFlush();
+    mRenderer->flush(audio);
+
+    FlushStatus newStatus =
+        needShutdown ? FLUSHING_DECODER_SHUTDOWN : FLUSHING_DECODER;
+
+    if (audio) {
+        CHECK(mFlushingAudio == NONE
+                || mFlushingAudio == AWAITING_DISCONTINUITY);
+
+        mFlushingAudio = newStatus;
+
+        if (mFlushingVideo == NONE) {
+            mFlushingVideo = (mVideoDecoder != NULL)
+                ? AWAITING_DISCONTINUITY
+                : FLUSHED;
+        }
+    } else {
+        CHECK(mFlushingVideo == NONE
+                || mFlushingVideo == AWAITING_DISCONTINUITY);
+
+        mFlushingVideo = newStatus;
+
+        if (mFlushingAudio == NONE) {
+            mFlushingAudio = (mAudioDecoder != NULL)
+                ? AWAITING_DISCONTINUITY
+                : FLUSHED;
+        }
+    }
+}
+
+}  // namespace android
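Taken together, flushDecoder(), the kWhatFlushCompleted/kWhatShutdownCompleted handling and finishFlushIfPossible() drive each stream's FlushStatus through a small state machine; the stream that is not being flushed is parked in AWAITING_DISCONTINUITY (or marked FLUSHED outright if it has no decoder). A compile-and-run walkthrough of the two possible paths, using the real enum values from NuPlayer.h but otherwise purely illustrative:

    #include <cassert>

    enum FlushStatus {
        NONE,
        AWAITING_DISCONTINUITY,
        FLUSHING_DECODER,
        FLUSHING_DECODER_SHUTDOWN,
        SHUTTING_DOWN_DECODER,
        FLUSHED,
        SHUT_DOWN,
    };

    int main() {
        // Path 1: plain discontinuity, the decoder survives the flush.
        FlushStatus s = NONE;
        s = FLUSHING_DECODER;           // flushDecoder(audio, needShutdown = false)
        s = FLUSHED;                    // ACodec::kWhatFlushCompleted
        assert(s == FLUSHED);           // finishFlushIfPossible() then resets to NONE

        // Path 2: format change or reset, the decoder is torn down.
        s = FLUSHING_DECODER_SHUTDOWN;  // flushDecoder(audio, needShutdown = true)
        s = SHUTTING_DOWN_DECODER;      // kWhatFlushCompleted + initiateShutdown()
        s = SHUT_DOWN;                  // ACodec::kWhatShutdownCompleted
        assert(s == SHUT_DOWN);         // finishFlushIfPossible() then resets to NONE

        return 0;
    }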
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.h b/media/libmediaplayerservice/nuplayer/NuPlayer.h
new file mode 100644
index 0000000..339b628
--- /dev/null
+++ b/media/libmediaplayerservice/nuplayer/NuPlayer.h
@@ -0,0 +1,130 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef NU_PLAYER_H_
+
+#define NU_PLAYER_H_
+
+#include <media/MediaPlayerInterface.h>
+#include <media/stagefright/foundation/AHandler.h>
+
+namespace android {
+
+struct ACodec;
+struct MetaData;
+struct NuPlayerDriver;
+
+struct NuPlayer : public AHandler {
+    NuPlayer();
+
+    void setDriver(const wp<NuPlayerDriver> &driver);
+
+    void setDataSource(const sp<IStreamSource> &source);
+
+    void setDataSource(
+            const char *url, const KeyedVector<String8, String8> *headers);
+
+    void setVideoSurface(const sp<Surface> &surface);
+    void setAudioSink(const sp<MediaPlayerBase::AudioSink> &sink);
+    void start();
+
+    void pause();
+    void resume();
+
+    // Will notify the driver through "notifyResetComplete" once finished.
+    void resetAsync();
+
+    // Will notify the driver through "notifySeekComplete" once finished.
+    void seekToAsync(int64_t seekTimeUs);
+
+protected:
+    virtual ~NuPlayer();
+
+    virtual void onMessageReceived(const sp<AMessage> &msg);
+
+private:
+    struct Decoder;
+    struct HTTPLiveSource;
+    struct NuPlayerStreamListener;
+    struct Renderer;
+    struct Source;
+    struct StreamingSource;
+
+    enum {
+        kWhatSetDataSource,
+        kWhatSetVideoSurface,
+        kWhatSetAudioSink,
+        kWhatMoreDataQueued,
+        kWhatStart,
+        kWhatScanSources,
+        kWhatVideoNotify,
+        kWhatAudioNotify,
+        kWhatRendererNotify,
+        kWhatReset,
+        kWhatSeek,
+    };
+
+    wp<NuPlayerDriver> mDriver;
+    sp<Source> mSource;
+    sp<Surface> mSurface;
+    sp<MediaPlayerBase::AudioSink> mAudioSink;
+    sp<Decoder> mVideoDecoder;
+    sp<Decoder> mAudioDecoder;
+    sp<Renderer> mRenderer;
+
+    bool mAudioEOS;
+    bool mVideoEOS;
+
+    bool mScanSourcesPending;
+    int32_t mScanSourcesGeneration;
+
+    enum FlushStatus {
+        NONE,
+        AWAITING_DISCONTINUITY,
+        FLUSHING_DECODER,
+        FLUSHING_DECODER_SHUTDOWN,
+        SHUTTING_DOWN_DECODER,
+        FLUSHED,
+        SHUT_DOWN,
+    };
+
+    FlushStatus mFlushingAudio;
+    FlushStatus mFlushingVideo;
+    bool mResetInProgress;
+    bool mResetPostponed;
+
+    status_t instantiateDecoder(bool audio, sp<Decoder> *decoder);
+
+    status_t feedDecoderInputData(bool audio, const sp<AMessage> &msg);
+    void renderBuffer(bool audio, const sp<AMessage> &msg);
+
+    void notifyListener(int msg, int ext1, int ext2);
+
+    void finishFlushIfPossible();
+
+    void flushDecoder(bool audio, bool needShutdown);
+
+    static bool IsFlushingState(FlushStatus state, bool *needShutdown = NULL);
+
+    void finishReset();
+    void postScanSources();
+
+    DISALLOW_EVIL_CONSTRUCTORS(NuPlayer);
+};
+
+}  // namespace android
+
+#endif  // NU_PLAYER_H_
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp
new file mode 100644
index 0000000..761dfa4
--- /dev/null
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp
@@ -0,0 +1,296 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "NuPlayerDecoder"
+#include <utils/Log.h>
+
+#include "NuPlayerDecoder.h"
+
+#include "DecoderWrapper.h"
+#include "ESDS.h"
+
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/ACodec.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MetaData.h>
+#include <media/stagefright/Utils.h>
+#include <surfaceflinger/Surface.h>
+
+namespace android {
+
+NuPlayer::Decoder::Decoder(
+        const sp<AMessage> &notify, const sp<Surface> &surface)
+    : mNotify(notify),
+      mSurface(surface) {
+}
+
+NuPlayer::Decoder::~Decoder() {
+}
+
+void NuPlayer::Decoder::configure(const sp<MetaData> &meta) {
+    CHECK(mCodec == NULL);
+    CHECK(mWrapper == NULL);
+
+    const char *mime;
+    CHECK(meta->findCString(kKeyMIMEType, &mime));
+
+    sp<AMessage> notifyMsg =
+        new AMessage(kWhatCodecNotify, id());
+
+    sp<AMessage> format = makeFormat(meta);
+
+    if (mSurface != NULL) {
+        format->setObject("surface", mSurface);
+    }
+
+    if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AAC)) {
+        mWrapper = new DecoderWrapper;
+        looper()->registerHandler(mWrapper);
+
+        mWrapper->setNotificationMessage(notifyMsg);
+        mWrapper->initiateSetup(format);
+    } else {
+        mCodec = new ACodec;
+        looper()->registerHandler(mCodec);
+
+        mCodec->setNotificationMessage(notifyMsg);
+        mCodec->initiateSetup(format);
+    }
+}
+
+void NuPlayer::Decoder::onMessageReceived(const sp<AMessage> &msg) {
+    switch (msg->what()) {
+        case kWhatCodecNotify:
+        {
+            int32_t what;
+            CHECK(msg->findInt32("what", &what));
+
+            if (what == ACodec::kWhatFillThisBuffer) {
+                onFillThisBuffer(msg);
+            } else {
+                sp<AMessage> notify = mNotify->dup();
+                notify->setMessage("codec-request", msg);
+                notify->post();
+            }
+            break;
+        }
+
+        default:
+            TRESPASS();
+            break;
+    }
+}
+
+sp<AMessage> NuPlayer::Decoder::makeFormat(const sp<MetaData> &meta) {
+    CHECK(mCSD.isEmpty());
+
+    const char *mime;
+    CHECK(meta->findCString(kKeyMIMEType, &mime));
+
+    sp<AMessage> msg = new AMessage;
+    msg->setString("mime", mime);
+
+    if (!strncasecmp("video/", mime, 6)) {
+        int32_t width, height;
+        CHECK(meta->findInt32(kKeyWidth, &width));
+        CHECK(meta->findInt32(kKeyHeight, &height));
+
+        msg->setInt32("width", width);
+        msg->setInt32("height", height);
+    } else {
+        CHECK(!strncasecmp("audio/", mime, 6));
+
+        int32_t numChannels, sampleRate;
+        CHECK(meta->findInt32(kKeyChannelCount, &numChannels));
+        CHECK(meta->findInt32(kKeySampleRate, &sampleRate));
+
+        msg->setInt32("channel-count", numChannels);
+        msg->setInt32("sample-rate", sampleRate);
+    }
+
+    int32_t maxInputSize;
+    if (meta->findInt32(kKeyMaxInputSize, &maxInputSize)) {
+        msg->setInt32("max-input-size", maxInputSize);
+    }
+
+    mCSDIndex = 0;
+
+    uint32_t type;
+    const void *data;
+    size_t size;
+    if (meta->findData(kKeyAVCC, &type, &data, &size)) {
+        // Parse the AVCDecoderConfigurationRecord
+
+        const uint8_t *ptr = (const uint8_t *)data;
+
+        CHECK(size >= 7);
+        CHECK_EQ((unsigned)ptr[0], 1u);  // configurationVersion == 1
+        uint8_t profile = ptr[1];
+        uint8_t level = ptr[3];
+
+        // There is decodable content out there that fails the following
+        // assertion; let's be lenient for now...
+        // CHECK((ptr[4] >> 2) == 0x3f);  // reserved
+
+        size_t lengthSize = 1 + (ptr[4] & 3);
+
+        // the check below is commented out because H264_QVGA_500_NO_AUDIO.3gp
+        // violates it...
+        // CHECK((ptr[5] >> 5) == 7);  // reserved
+
+        size_t numSeqParameterSets = ptr[5] & 31;
+
+        ptr += 6;
+        size -= 6;
+
+        sp<ABuffer> buffer = new ABuffer(1024);
+        buffer->setRange(0, 0);
+
+        for (size_t i = 0; i < numSeqParameterSets; ++i) {
+            CHECK(size >= 2);
+            size_t length = U16_AT(ptr);
+
+            ptr += 2;
+            size -= 2;
+
+            CHECK(size >= length);
+
+            memcpy(buffer->data() + buffer->size(), "\x00\x00\x00\x01", 4);
+            memcpy(buffer->data() + buffer->size() + 4, ptr, length);
+            buffer->setRange(0, buffer->size() + 4 + length);
+
+            ptr += length;
+            size -= length;
+        }
+
+        buffer->meta()->setInt32("csd", true);
+        mCSD.push(buffer);
+
+        buffer = new ABuffer(1024);
+        buffer->setRange(0, 0);
+
+        CHECK(size >= 1);
+        size_t numPictureParameterSets = *ptr;
+        ++ptr;
+        --size;
+
+        for (size_t i = 0; i < numPictureParameterSets; ++i) {
+            CHECK(size >= 2);
+            size_t length = U16_AT(ptr);
+
+            ptr += 2;
+            size -= 2;
+
+            CHECK(size >= length);
+
+            memcpy(buffer->data() + buffer->size(), "\x00\x00\x00\x01", 4);
+            memcpy(buffer->data() + buffer->size() + 4, ptr, length);
+            buffer->setRange(0, buffer->size() + 4 + length);
+
+            ptr += length;
+            size -= length;
+        }
+
+        buffer->meta()->setInt32("csd", true);
+        mCSD.push(buffer);
+
+        msg->setObject("csd", buffer);
+    } else if (meta->findData(kKeyESDS, &type, &data, &size)) {
+#if 0
+        ESDS esds((const char *)data, size);
+        CHECK_EQ(esds.InitCheck(), (status_t)OK);
+
+        const void *codec_specific_data;
+        size_t codec_specific_data_size;
+        esds.getCodecSpecificInfo(
+                &codec_specific_data, &codec_specific_data_size);
+
+        sp<ABuffer> buffer = new ABuffer(codec_specific_data_size);
+
+        memcpy(buffer->data(), codec_specific_data,
+               codec_specific_data_size);
+
+        buffer->meta()->setInt32("csd", true);
+        mCSD.push(buffer);
+#else
+        sp<ABuffer> buffer = new ABuffer(size);
+        memcpy(buffer->data(), data, size);
+
+        msg->setObject("esds", buffer);
+#endif
+    }
+
+    return msg;
+}
+
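The kKeyAVCC branch of makeFormat() above rewrites the length-prefixed parameter sets of an AVCDecoderConfigurationRecord into Annex-B form by prepending a 00 00 00 01 start code to each SPS and PPS. A standalone illustration of that rewrite, using a hand-made and deliberately truncated avcC blob (the two SPS bytes are not a real SPS):

    #include <cstdint>
    #include <cstdio>
    #include <vector>

    int main() {
        // version, profile, compat, level, 0xfc | (lengthSize - 1),
        // 0xe0 | numSPS, SPS length (2), SPS bytes, numPPS (1), PPS length (1), PPS byte
        const uint8_t avcc[] = {
            1, 0x42, 0x00, 0x1e, 0xff,
            0xe1, 0x00, 0x02, 0x67, 0x42,
            0x01, 0x00, 0x01, 0x68
        };

        const uint8_t *ptr = avcc + 6;                    // skip the 6-byte header
        const size_t spsLength = (ptr[0] << 8) | ptr[1];  // U16_AT() equivalent

        std::vector<uint8_t> csd = {0x00, 0x00, 0x00, 0x01};  // Annex-B start code
        csd.insert(csd.end(), ptr + 2, ptr + 2 + spsLength);

        for (uint8_t b : csd) {
            printf("%02x ", b);
        }
        printf("\n");   // prints: 00 00 00 01 67 42
        return 0;
    }

The real code pushes each converted buffer into mCSD with its "csd" meta flag set, so that onFillThisBuffer() hands the parameter sets to the codec before any actual access units.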
+void NuPlayer::Decoder::onFillThisBuffer(const sp<AMessage> &msg) {
+    sp<AMessage> reply;
+    CHECK(msg->findMessage("reply", &reply));
+
+#if 0
+    sp<RefBase> obj;
+    CHECK(msg->findObject("buffer", &obj));
+    sp<ABuffer> outBuffer = static_cast<ABuffer *>(obj.get());
+#else
+    sp<ABuffer> outBuffer;
+#endif
+
+    if (mCSDIndex < mCSD.size()) {
+        outBuffer = mCSD.editItemAt(mCSDIndex++);
+        outBuffer->meta()->setInt64("timeUs", 0);
+
+        reply->setObject("buffer", outBuffer);
+        reply->post();
+        return;
+    }
+
+    sp<AMessage> notify = mNotify->dup();
+    notify->setMessage("codec-request", msg);
+    notify->post();
+}
+
+void NuPlayer::Decoder::signalFlush() {
+    if (mCodec != NULL) {
+        mCodec->signalFlush();
+    } else {
+        CHECK(mWrapper != NULL);
+        mWrapper->signalFlush();
+    }
+}
+
+void NuPlayer::Decoder::signalResume() {
+    if (mCodec != NULL) {
+        mCodec->signalResume();
+    } else {
+        CHECK(mWrapper != NULL);
+        mWrapper->signalResume();
+    }
+}
+
+void NuPlayer::Decoder::initiateShutdown() {
+    if (mCodec != NULL) {
+        mCodec->initiateShutdown();
+    } else {
+        CHECK(mWrapper != NULL);
+        mWrapper->initiateShutdown();
+    }
+}
+
+}  // namespace android
+
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.h b/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.h
new file mode 100644
index 0000000..3874cfe
--- /dev/null
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.h
@@ -0,0 +1,67 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef NUPLAYER_DECODER_H_
+
+#define NUPLAYER_DECODER_H_
+
+#include "NuPlayer.h"
+
+#include <media/stagefright/foundation/AHandler.h>
+
+namespace android {
+
+struct ABuffer;
+struct DecoderWrapper;
+
+struct NuPlayer::Decoder : public AHandler {
+    Decoder(const sp<AMessage> &notify, const sp<Surface> &surface = NULL);
+
+    void configure(const sp<MetaData> &meta);
+
+    void signalFlush();
+    void signalResume();
+    void initiateShutdown();
+
+protected:
+    virtual ~Decoder();
+
+    virtual void onMessageReceived(const sp<AMessage> &msg);
+
+private:
+    enum {
+        kWhatCodecNotify,
+    };
+
+    sp<AMessage> mNotify;
+    sp<Surface> mSurface;
+
+    sp<ACodec> mCodec;
+    sp<DecoderWrapper> mWrapper;
+
+    Vector<sp<ABuffer> > mCSD;
+    size_t mCSDIndex;
+
+    sp<AMessage> makeFormat(const sp<MetaData> &meta);
+
+    void onFillThisBuffer(const sp<AMessage> &msg);
+
+    DISALLOW_EVIL_CONSTRUCTORS(Decoder);
+};
+
+}  // namespace android
+
+#endif  // NUPLAYER_DECODER_H_
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
new file mode 100644
index 0000000..ac19a2f
--- /dev/null
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
@@ -0,0 +1,268 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "NuPlayerDriver"
+#include <utils/Log.h>
+
+#include "NuPlayerDriver.h"
+
+#include "NuPlayer.h"
+
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/ALooper.h>
+
+namespace android {
+
+NuPlayerDriver::NuPlayerDriver()
+    : mResetInProgress(false),
+      mDurationUs(-1),
+      mPositionUs(-1),
+      mLooper(new ALooper),
+      mState(UNINITIALIZED),
+      mStartupSeekTimeUs(-1) {
+    mLooper->setName("NuPlayerDriver Looper");
+
+    mLooper->start(
+            false, /* runOnCallingThread */
+            true,  /* canCallJava */
+            PRIORITY_AUDIO);
+
+    mPlayer = new NuPlayer;
+    mLooper->registerHandler(mPlayer);
+
+    mPlayer->setDriver(this);
+}
+
+NuPlayerDriver::~NuPlayerDriver() {
+    mLooper->stop();
+}
+
+status_t NuPlayerDriver::initCheck() {
+    return OK;
+}
+
+status_t NuPlayerDriver::setDataSource(
+        const char *url, const KeyedVector<String8, String8> *headers) {
+    CHECK_EQ((int)mState, (int)UNINITIALIZED);
+
+    mPlayer->setDataSource(url, headers);
+
+    mState = STOPPED;
+
+    return OK;
+}
+
+status_t NuPlayerDriver::setDataSource(int fd, int64_t offset, int64_t length) {
+    return INVALID_OPERATION;
+}
+
+status_t NuPlayerDriver::setDataSource(const sp<IStreamSource> &source) {
+    CHECK_EQ((int)mState, (int)UNINITIALIZED);
+
+    mPlayer->setDataSource(source);
+
+    mState = STOPPED;
+
+    return OK;
+}
+
+status_t NuPlayerDriver::setVideoSurface(const sp<Surface> &surface) {
+    mPlayer->setVideoSurface(surface);
+
+    return OK;
+}
+
+status_t NuPlayerDriver::prepare() {
+    return OK;
+}
+
+status_t NuPlayerDriver::prepareAsync() {
+    sendEvent(MEDIA_PREPARED);
+
+    return OK;
+}
+
+status_t NuPlayerDriver::start() {
+    switch (mState) {
+        case UNINITIALIZED:
+            return INVALID_OPERATION;
+        case STOPPED:
+        {
+            mPlayer->start();
+
+            if (mStartupSeekTimeUs >= 0) {
+                mPlayer->seekToAsync(mStartupSeekTimeUs);
+                mStartupSeekTimeUs = -1;
+            }
+            break;
+        }
+        case PLAYING:
+            return OK;
+        default:
+        {
+            CHECK_EQ((int)mState, (int)PAUSED);
+
+            mPlayer->resume();
+            break;
+        }
+    }
+
+    mState = PLAYING;
+
+    return OK;
+}
+
+status_t NuPlayerDriver::stop() {
+    return pause();
+}
+
+status_t NuPlayerDriver::pause() {
+    switch (mState) {
+        case UNINITIALIZED:
+            return INVALID_OPERATION;
+        case STOPPED:
+            return OK;
+        case PLAYING:
+            mPlayer->pause();
+            break;
+        default:
+        {
+            CHECK_EQ((int)mState, (int)PAUSED);
+            return OK;
+        }
+    }
+
+    mState = PAUSED;
+
+    return OK;
+}
+
+bool NuPlayerDriver::isPlaying() {
+    return mState == PLAYING;
+}
+
+status_t NuPlayerDriver::seekTo(int msec) {
+    int64_t seekTimeUs = msec * 1000ll;
+
+    switch (mState) {
+        case UNINITIALIZED:
+            return INVALID_OPERATION;
+        case STOPPED:
+        {
+            mStartupSeekTimeUs = seekTimeUs;
+            break;
+        }
+        case PLAYING:
+        case PAUSED:
+        {
+            mPlayer->seekToAsync(seekTimeUs);
+            break;
+        }
+
+        default:
+            TRESPASS();
+            break;
+    }
+
+    return OK;
+}
+
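The switch statements in start(), pause() and seekTo() implement a small driver state machine: UNINITIALIZED until a data source is set, STOPPED until start(), then PLAYING and PAUSED, with a seek issued before start() remembered in mStartupSeekTimeUs. A simplified, runnable model of those transitions (no locking, no calls into the player, purely illustrative):

    #include <cassert>

    enum State { UNINITIALIZED, STOPPED, PLAYING, PAUSED };

    struct DriverModel {
        State state = UNINITIALIZED;
        long long startupSeekUs = -1;

        bool setDataSource() { state = STOPPED; return true; }

        bool start() {
            if (state == UNINITIALIZED) return false;   // INVALID_OPERATION
            state = PLAYING;                            // STOPPED/PAUSED/PLAYING all end here
            return true;
        }

        bool pause() {
            if (state == UNINITIALIZED) return false;   // INVALID_OPERATION
            if (state == PLAYING) state = PAUSED;       // STOPPED/PAUSED are no-ops
            return true;
        }

        bool seekTo(long long us) {
            if (state == UNINITIALIZED) return false;   // INVALID_OPERATION
            if (state == STOPPED) startupSeekUs = us;   // deferred until start()
            return true;                                // PLAYING/PAUSED seek right away
        }
    };

    int main() {
        DriverModel d;
        assert(!d.start());                 // no data source yet
        d.setDataSource();
        d.seekTo(5000000);                  // remembered as the startup seek
        assert(d.startupSeekUs == 5000000);
        d.start();                          // would replay the startup seek here
        d.pause();
        assert(d.state == PAUSED);
        return 0;
    }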
+status_t NuPlayerDriver::getCurrentPosition(int *msec) {
+    Mutex::Autolock autoLock(mLock);
+
+    if (mPositionUs < 0) {
+        *msec = 0;
+    } else {
+        *msec = (mPositionUs + 500ll) / 1000;
+    }
+
+    return OK;
+}
+
+status_t NuPlayerDriver::getDuration(int *msec) {
+    Mutex::Autolock autoLock(mLock);
+
+    if (mDurationUs < 0) {
+        *msec = 0;
+    } else {
+        *msec = (mDurationUs + 500ll) / 1000;
+    }
+
+    return OK;
+}
+
+status_t NuPlayerDriver::reset() {
+    Mutex::Autolock autoLock(mLock);
+    mResetInProgress = true;
+
+    mPlayer->resetAsync();
+
+    while (mResetInProgress) {
+        mCondition.wait(mLock);
+    }
+
+    mDurationUs = -1;
+    mPositionUs = -1;
+    mState = UNINITIALIZED;
+    mStartupSeekTimeUs = -1;
+
+    return OK;
+}
+
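reset() is the one synchronous entry point in the driver: it kicks off the asynchronous resetAsync() and blocks on mCondition until notifyResetComplete() (further down in this file) clears mResetInProgress and broadcasts. The same wait/notify pairing, sketched with the standard library instead of the Android Mutex/Condition classes, assuming equivalent semantics:

    #include <condition_variable>
    #include <mutex>
    #include <thread>

    std::mutex gLock;                      // plays the role of mLock
    std::condition_variable gCondition;    // plays the role of mCondition
    bool gResetInProgress = false;

    void notifyResetComplete() {           // called from the player's looper thread
        std::lock_guard<std::mutex> lock(gLock);
        gResetInProgress = false;
        gCondition.notify_all();           // Condition::broadcast() equivalent
    }

    void reset() {                         // called from the client thread
        std::unique_lock<std::mutex> lock(gLock);
        gResetInProgress = true;

        // Stands in for mPlayer->resetAsync(); the real player posts kWhatReset
        // and eventually reaches notifyResetComplete() via finishReset().
        std::thread worker(notifyResetComplete);

        gCondition.wait(lock, [] { return !gResetInProgress; });
        worker.join();
    }

    int main() {
        reset();
        return 0;
    }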
+status_t NuPlayerDriver::setLooping(int loop) {
+    return INVALID_OPERATION;
+}
+
+player_type NuPlayerDriver::playerType() {
+    return NU_PLAYER;
+}
+
+status_t NuPlayerDriver::invoke(const Parcel &request, Parcel *reply) {
+    return INVALID_OPERATION;
+}
+
+void NuPlayerDriver::setAudioSink(const sp<AudioSink> &audioSink) {
+    mPlayer->setAudioSink(audioSink);
+}
+
+status_t NuPlayerDriver::getMetadata(
+        const media::Metadata::Filter& ids, Parcel *records) {
+    return INVALID_OPERATION;
+}
+
+void NuPlayerDriver::notifyResetComplete() {
+    Mutex::Autolock autoLock(mLock);
+    CHECK(mResetInProgress);
+    mResetInProgress = false;
+    mCondition.broadcast();
+}
+
+void NuPlayerDriver::notifyDuration(int64_t durationUs) {
+    Mutex::Autolock autoLock(mLock);
+    mDurationUs = durationUs;
+}
+
+void NuPlayerDriver::notifyPosition(int64_t positionUs) {
+    Mutex::Autolock autoLock(mLock);
+    mPositionUs = positionUs;
+}
+
+void NuPlayerDriver::notifySeekComplete() {
+    sendEvent(MEDIA_SEEK_COMPLETE);
+}
+
+}  // namespace android
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h
new file mode 100644
index 0000000..e3a5de4
--- /dev/null
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h
@@ -0,0 +1,95 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <media/MediaPlayerInterface.h>
+
+#include <media/stagefright/foundation/ABase.h>
+
+namespace android {
+
+struct ALooper;
+struct NuPlayer;
+
+struct NuPlayerDriver : public MediaPlayerInterface {
+    NuPlayerDriver();
+
+    virtual status_t initCheck();
+
+    virtual status_t setDataSource(
+            const char *url, const KeyedVector<String8, String8> *headers);
+
+    virtual status_t setDataSource(int fd, int64_t offset, int64_t length);
+
+    virtual status_t setDataSource(const sp<IStreamSource> &source);
+
+    virtual status_t setVideoSurface(const sp<Surface> &surface);
+    virtual status_t prepare();
+    virtual status_t prepareAsync();
+    virtual status_t start();
+    virtual status_t stop();
+    virtual status_t pause();
+    virtual bool isPlaying();
+    virtual status_t seekTo(int msec);
+    virtual status_t getCurrentPosition(int *msec);
+    virtual status_t getDuration(int *msec);
+    virtual status_t reset();
+    virtual status_t setLooping(int loop);
+    virtual player_type playerType();
+    virtual status_t invoke(const Parcel &request, Parcel *reply);
+    virtual void setAudioSink(const sp<AudioSink> &audioSink);
+
+    virtual status_t getMetadata(
+            const media::Metadata::Filter& ids, Parcel *records);
+
+    void notifyResetComplete();
+    void notifyDuration(int64_t durationUs);
+    void notifyPosition(int64_t positionUs);
+    void notifySeekComplete();
+
+protected:
+    virtual ~NuPlayerDriver();
+
+private:
+    Mutex mLock;
+    Condition mCondition;
+
+    // The following are protected through "mLock"
+    // >>>
+    bool mResetInProgress;
+    int64_t mDurationUs;
+    int64_t mPositionUs;
+    // <<<
+
+    sp<ALooper> mLooper;
+    sp<NuPlayer> mPlayer;
+
+    enum State {
+        UNINITIALIZED,
+        STOPPED,
+        PLAYING,
+        PAUSED
+    };
+
+    State mState;
+
+    int64_t mStartupSeekTimeUs;
+
+    DISALLOW_EVIL_CONSTRUCTORS(NuPlayerDriver);
+};
+
+}  // namespace android
+
+
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
new file mode 100644
index 0000000..5833697
--- /dev/null
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
@@ -0,0 +1,532 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "NuPlayerRenderer"
+#include <utils/Log.h>
+
+#include "NuPlayerRenderer.h"
+
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+
+namespace android {
+
+NuPlayer::Renderer::Renderer(
+        const sp<MediaPlayerBase::AudioSink> &sink,
+        const sp<AMessage> &notify)
+    : mAudioSink(sink),
+      mNotify(notify),
+      mNumFramesWritten(0),
+      mDrainAudioQueuePending(false),
+      mDrainVideoQueuePending(false),
+      mAudioQueueGeneration(0),
+      mVideoQueueGeneration(0),
+      mAnchorTimeMediaUs(-1),
+      mAnchorTimeRealUs(-1),
+      mFlushingAudio(false),
+      mFlushingVideo(false),
+      mHasAudio(mAudioSink != NULL),
+      mHasVideo(true),
+      mSyncQueues(mHasAudio && mHasVideo) {
+}
+
+NuPlayer::Renderer::~Renderer() {
+}
+
+void NuPlayer::Renderer::queueBuffer(
+        bool audio,
+        const sp<ABuffer> &buffer,
+        const sp<AMessage> &notifyConsumed) {
+    sp<AMessage> msg = new AMessage(kWhatQueueBuffer, id());
+    msg->setInt32("audio", static_cast<int32_t>(audio));
+    msg->setObject("buffer", buffer);
+    msg->setMessage("notifyConsumed", notifyConsumed);
+    msg->post();
+}
+
+void NuPlayer::Renderer::queueEOS(bool audio, status_t finalResult) {
+    CHECK_NE(finalResult, (status_t)OK);
+
+    sp<AMessage> msg = new AMessage(kWhatQueueEOS, id());
+    msg->setInt32("audio", static_cast<int32_t>(audio));
+    msg->setInt32("finalResult", finalResult);
+    msg->post();
+}
+
+void NuPlayer::Renderer::flush(bool audio) {
+    {
+        Mutex::Autolock autoLock(mFlushLock);
+        if (audio) {
+            CHECK(!mFlushingAudio);
+            mFlushingAudio = true;
+        } else {
+            CHECK(!mFlushingVideo);
+            mFlushingVideo = true;
+        }
+    }
+
+    sp<AMessage> msg = new AMessage(kWhatFlush, id());
+    msg->setInt32("audio", static_cast<int32_t>(audio));
+    msg->post();
+}
+
+void NuPlayer::Renderer::signalTimeDiscontinuity() {
+    CHECK(mAudioQueue.empty());
+    CHECK(mVideoQueue.empty());
+    mAnchorTimeMediaUs = -1;
+    mAnchorTimeRealUs = -1;
+    mSyncQueues = mHasAudio && mHasVideo;
+}
+
+void NuPlayer::Renderer::onMessageReceived(const sp<AMessage> &msg) {
+    switch (msg->what()) {
+        case kWhatDrainAudioQueue:
+        {
+            int32_t generation;
+            CHECK(msg->findInt32("generation", &generation));
+            if (generation != mAudioQueueGeneration) {
+                break;
+            }
+
+            mDrainAudioQueuePending = false;
+
+            onDrainAudioQueue();
+
+            postDrainAudioQueue();
+            break;
+        }
+
+        case kWhatDrainVideoQueue:
+        {
+            int32_t generation;
+            CHECK(msg->findInt32("generation", &generation));
+            if (generation != mVideoQueueGeneration) {
+                break;
+            }
+
+            mDrainVideoQueuePending = false;
+
+            onDrainVideoQueue();
+
+            postDrainVideoQueue();
+            break;
+        }
+
+        case kWhatQueueBuffer:
+        {
+            onQueueBuffer(msg);
+            break;
+        }
+
+        case kWhatQueueEOS:
+        {
+            onQueueEOS(msg);
+            break;
+        }
+
+        case kWhatFlush:
+        {
+            onFlush(msg);
+            break;
+        }
+
+        case kWhatAudioSinkChanged:
+        {
+            onAudioSinkChanged();
+            break;
+        }
+
+        default:
+            TRESPASS();
+            break;
+    }
+}
+
+void NuPlayer::Renderer::postDrainAudioQueue() {
+    if (mDrainAudioQueuePending || mSyncQueues) {
+        return;
+    }
+
+    if (mAudioQueue.empty()) {
+        return;
+    }
+
+    mDrainAudioQueuePending = true;
+    sp<AMessage> msg = new AMessage(kWhatDrainAudioQueue, id());
+    msg->setInt32("generation", mAudioQueueGeneration);
+    msg->post(10000);  // drain again in 10 ms
+}
+
+void NuPlayer::Renderer::signalAudioSinkChanged() {
+    (new AMessage(kWhatAudioSinkChanged, id()))->post();
+}
+
+void NuPlayer::Renderer::onDrainAudioQueue() {
+    uint32_t numFramesPlayed;
+    CHECK_EQ(mAudioSink->getPosition(&numFramesPlayed), (status_t)OK);
+
+    ssize_t numFramesAvailableToWrite =
+        mAudioSink->frameCount() - (mNumFramesWritten - numFramesPlayed);
+
+    CHECK_GE(numFramesAvailableToWrite, 0);
+
+    size_t numBytesAvailableToWrite =
+        numFramesAvailableToWrite * mAudioSink->frameSize();
+
+    while (numBytesAvailableToWrite > 0) {
+        if (mAudioQueue.empty()) {
+            break;
+        }
+
+        QueueEntry *entry = &*mAudioQueue.begin();
+
+        if (entry->mBuffer == NULL) {
+            // EOS
+
+            notifyEOS(true /* audio */);
+
+            mAudioQueue.erase(mAudioQueue.begin());
+            entry = NULL;
+            return;
+        }
+
+        if (entry->mOffset == 0) {
+            int64_t mediaTimeUs;
+            CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
+
+            LOGV("rendering audio at media time %.2f secs", mediaTimeUs / 1E6);
+
+            mAnchorTimeMediaUs = mediaTimeUs;
+
+            uint32_t numFramesPlayed;
+            CHECK_EQ(mAudioSink->getPosition(&numFramesPlayed), (status_t)OK);
+
+            uint32_t numFramesPendingPlayout =
+                mNumFramesWritten - numFramesPlayed;
+
+            int64_t realTimeOffsetUs =
+                (mAudioSink->latency() / 2  /* XXX */
+                    + numFramesPendingPlayout
+                        * mAudioSink->msecsPerFrame()) * 1000ll;
+
+            // LOGI("realTimeOffsetUs = %lld us", realTimeOffsetUs);
+
+            mAnchorTimeRealUs =
+                ALooper::GetNowUs() + realTimeOffsetUs;
+        }
+
+        size_t copy = entry->mBuffer->size() - entry->mOffset;
+        if (copy > numBytesAvailableToWrite) {
+            copy = numBytesAvailableToWrite;
+        }
+
+        CHECK_EQ(mAudioSink->write(
+                    entry->mBuffer->data() + entry->mOffset, copy),
+                 (ssize_t)copy);
+
+        entry->mOffset += copy;
+        if (entry->mOffset == entry->mBuffer->size()) {
+            entry->mNotifyConsumed->post();
+            mAudioQueue.erase(mAudioQueue.begin());
+            entry = NULL;
+        }
+
+        numBytesAvailableToWrite -= copy;
+        mNumFramesWritten += copy / mAudioSink->frameSize();
+    }
+
+    notifyPosition();
+}
+
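The anchor set up in onDrainAudioQueue() estimates when the first byte of the buffer currently being written will actually be heard: half the reported sink latency plus the playout time of the frames already written but not yet played. A worked example with made-up numbers (44.1 kHz output, 40 ms sink latency, 2205 frames pending):

    #include <cmath>
    #include <cstdio>

    int main() {
        const double latencyMs     = 40.0;              // AudioSink::latency(), assumed in ms
        const double msecsPerFrame = 1000.0 / 44100.0;  // AudioSink::msecsPerFrame()
        const long   framesWritten = 44100;             // mNumFramesWritten
        const long   framesPlayed  = 41895;             // AudioSink::getPosition()

        const long pendingFrames = framesWritten - framesPlayed;   // 2205 frames

        // Same formula as onDrainAudioQueue(): half the latency plus the playout
        // time of everything written but not yet played, converted to microseconds.
        const long long realTimeOffsetUs =
            std::llround((latencyMs / 2 + pendingFrames * msecsPerFrame) * 1000);

        printf("realTimeOffsetUs = %lld us (~%.0f ms)\n",
               realTimeOffsetUs, realTimeOffsetUs / 1000.0);   // 70000 us, ~70 ms
        return 0;
    }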
+void NuPlayer::Renderer::postDrainVideoQueue() {
+    if (mDrainVideoQueuePending || mSyncQueues) {
+        return;
+    }
+
+    if (mVideoQueue.empty()) {
+        return;
+    }
+
+    QueueEntry &entry = *mVideoQueue.begin();
+
+    sp<AMessage> msg = new AMessage(kWhatDrainVideoQueue, id());
+    msg->setInt32("generation", mVideoQueueGeneration);
+
+    int64_t delayUs;
+
+    if (entry.mBuffer == NULL) {
+        // EOS doesn't carry a timestamp.
+        delayUs = 0;
+    } else {
+        int64_t mediaTimeUs;
+        CHECK(entry.mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
+
+        if (mAnchorTimeMediaUs < 0) {
+            delayUs = 0;
+
+            if (!mHasAudio) {
+                mAnchorTimeMediaUs = mediaTimeUs;
+                mAnchorTimeRealUs = ALooper::GetNowUs();
+            }
+        } else {
+            int64_t realTimeUs =
+                (mediaTimeUs - mAnchorTimeMediaUs) + mAnchorTimeRealUs;
+
+            delayUs = realTimeUs - ALooper::GetNowUs();
+        }
+    }
+
+    msg->post(delayUs);
+
+    mDrainVideoQueuePending = true;
+}
+
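postDrainVideoQueue() then schedules each video frame relative to that audio anchor: a delta in media time maps one-to-one onto a delta in real time, and the message delay is whatever part of that delta has not elapsed yet. A worked example with made-up timestamps:

    #include <cstdio>

    int main() {
        const long long anchorMediaUs = 1000000;    // mAnchorTimeMediaUs
        const long long anchorRealUs  = 500000000;  // mAnchorTimeRealUs (wall clock)
        const long long nowUs         = 500040000;  // ALooper::GetNowUs(), 40 ms later
        const long long mediaTimeUs   = 1100000;    // timestamp of the queued frame

        // Same mapping as postDrainVideoQueue(): media-time delta -> real-time target.
        const long long realTimeUs = (mediaTimeUs - anchorMediaUs) + anchorRealUs;
        const long long delayUs    = realTimeUs - nowUs;

        printf("post the drain message in %lld us\n", delayUs);
        // 60000 us: the frame sits 100 ms of media past the anchor and 40 ms of
        // real time have already gone by.
        return 0;
    }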
+void NuPlayer::Renderer::onDrainVideoQueue() {
+    if (mVideoQueue.empty()) {
+        return;
+    }
+
+    QueueEntry *entry = &*mVideoQueue.begin();
+
+    if (entry->mBuffer == NULL) {
+        // EOS
+
+        notifyEOS(false /* audio */);
+
+        mVideoQueue.erase(mVideoQueue.begin());
+        entry = NULL;
+        return;
+    }
+
+#if 0
+    int64_t mediaTimeUs;
+    CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
+
+    LOGI("rendering video at media time %.2f secs", mediaTimeUs / 1E6);
+#endif
+
+    entry->mNotifyConsumed->setInt32("render", true);
+    entry->mNotifyConsumed->post();
+    mVideoQueue.erase(mVideoQueue.begin());
+    entry = NULL;
+
+    notifyPosition();
+}
+
+void NuPlayer::Renderer::notifyEOS(bool audio) {
+    sp<AMessage> notify = mNotify->dup();
+    notify->setInt32("what", kWhatEOS);
+    notify->setInt32("audio", static_cast<int32_t>(audio));
+    notify->post();
+}
+
+void NuPlayer::Renderer::onQueueBuffer(const sp<AMessage> &msg) {
+    int32_t audio;
+    CHECK(msg->findInt32("audio", &audio));
+
+    if (dropBufferWhileFlushing(audio, msg)) {
+        return;
+    }
+
+    sp<RefBase> obj;
+    CHECK(msg->findObject("buffer", &obj));
+    sp<ABuffer> buffer = static_cast<ABuffer *>(obj.get());
+
+    sp<AMessage> notifyConsumed;
+    CHECK(msg->findMessage("notifyConsumed", &notifyConsumed));
+
+    QueueEntry entry;
+    entry.mBuffer = buffer;
+    entry.mNotifyConsumed = notifyConsumed;
+    entry.mOffset = 0;
+    entry.mFinalResult = OK;
+
+    if (audio) {
+        mAudioQueue.push_back(entry);
+        postDrainAudioQueue();
+    } else {
+        mVideoQueue.push_back(entry);
+        postDrainVideoQueue();
+    }
+
+    if (mSyncQueues && !mAudioQueue.empty() && !mVideoQueue.empty()) {
+        int64_t firstAudioTimeUs;
+        int64_t firstVideoTimeUs;
+        CHECK((*mAudioQueue.begin()).mBuffer->meta()
+                ->findInt64("timeUs", &firstAudioTimeUs));
+        CHECK((*mVideoQueue.begin()).mBuffer->meta()
+                ->findInt64("timeUs", &firstVideoTimeUs));
+
+        int64_t diff = firstVideoTimeUs - firstAudioTimeUs;
+
+        LOGV("queueDiff = %.2f secs", diff / 1E6);
+
+        if (diff > 100000ll) {
+            // Audio data starts more than 0.1 secs before video.
+            // Drop some audio.
+
+            (*mAudioQueue.begin()).mNotifyConsumed->post();
+            mAudioQueue.erase(mAudioQueue.begin());
+            return;
+        }
+
+        syncQueuesDone();
+    }
+}
+
+void NuPlayer::Renderer::syncQueuesDone() {
+    if (!mSyncQueues) {
+        return;
+    }
+
+    mSyncQueues = false;
+
+    if (!mAudioQueue.empty()) {
+        postDrainAudioQueue();
+    }
+
+    if (!mVideoQueue.empty()) {
+        postDrainVideoQueue();
+    }
+}
+
+void NuPlayer::Renderer::onQueueEOS(const sp<AMessage> &msg) {
+    int32_t audio;
+    CHECK(msg->findInt32("audio", &audio));
+
+    if (dropBufferWhileFlushing(audio, msg)) {
+        return;
+    }
+
+    int32_t finalResult;
+    CHECK(msg->findInt32("finalResult", &finalResult));
+
+    QueueEntry entry;
+    entry.mOffset = 0;
+    entry.mFinalResult = finalResult;
+
+    if (audio) {
+        mAudioQueue.push_back(entry);
+        postDrainAudioQueue();
+    } else {
+        mVideoQueue.push_back(entry);
+        postDrainVideoQueue();
+    }
+}
+
+void NuPlayer::Renderer::onFlush(const sp<AMessage> &msg) {
+    int32_t audio;
+    CHECK(msg->findInt32("audio", &audio));
+
+    // If we're currently syncing the queues, i.e. dropping audio while
+    // aligning the first audio/video buffer times and only one of the
+    // two queues has data, we may starve that queue by not requesting
+    // more buffers from the decoder. If the source then encounters
+    // a discontinuity that leads to flushing, we'll never find the
+    // corresponding discontinuity on the other queue.
+    // Therefore we'll stop syncing the queues if at least one of them
+    // is flushed.
+    syncQueuesDone();
+
+    if (audio) {
+        flushQueue(&mAudioQueue);
+
+        Mutex::Autolock autoLock(mFlushLock);
+        mFlushingAudio = false;
+
+        mDrainAudioQueuePending = false;
+        ++mAudioQueueGeneration;
+    } else {
+        flushQueue(&mVideoQueue);
+
+        Mutex::Autolock autoLock(mFlushLock);
+        mFlushingVideo = false;
+
+        mDrainVideoQueuePending = false;
+        ++mVideoQueueGeneration;
+    }
+
+    notifyFlushComplete(audio);
+}
+
+void NuPlayer::Renderer::flushQueue(List<QueueEntry> *queue) {
+    while (!queue->empty()) {
+        QueueEntry *entry = &*queue->begin();
+
+        if (entry->mBuffer != NULL) {
+            entry->mNotifyConsumed->post();
+        }
+
+        queue->erase(queue->begin());
+        entry = NULL;
+    }
+}
+
+void NuPlayer::Renderer::notifyFlushComplete(bool audio) {
+    sp<AMessage> notify = mNotify->dup();
+    notify->setInt32("what", kWhatFlushComplete);
+    notify->setInt32("audio", static_cast<int32_t>(audio));
+    notify->post();
+}
+
+bool NuPlayer::Renderer::dropBufferWhileFlushing(
+        bool audio, const sp<AMessage> &msg) {
+    bool flushing = false;
+
+    {
+        Mutex::Autolock autoLock(mFlushLock);
+        if (audio) {
+            flushing = mFlushingAudio;
+        } else {
+            flushing = mFlushingVideo;
+        }
+    }
+
+    if (!flushing) {
+        return false;
+    }
+
+    sp<AMessage> notifyConsumed;
+    if (msg->findMessage("notifyConsumed", &notifyConsumed)) {
+        notifyConsumed->post();
+    }
+
+    return true;
+}
+
+void NuPlayer::Renderer::onAudioSinkChanged() {
+    CHECK(!mDrainAudioQueuePending);
+    mNumFramesWritten = 0;
+}
+
+void NuPlayer::Renderer::notifyPosition() {
+    if (mAnchorTimeRealUs < 0 || mAnchorTimeMediaUs < 0) {
+        return;
+    }
+
+    int64_t nowUs = ALooper::GetNowUs();
+    int64_t positionUs = (nowUs - mAnchorTimeRealUs) + mAnchorTimeMediaUs;
+
+    sp<AMessage> notify = mNotify->dup();
+    notify->setInt32("what", kWhatPosition);
+    notify->setInt64("positionUs", positionUs);
+    notify->post();
+}
+
+}  // namespace android
+
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h
new file mode 100644
index 0000000..dbf3ecf
--- /dev/null
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h
@@ -0,0 +1,118 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef NUPLAYER_RENDERER_H_
+
+#define NUPLAYER_RENDERER_H_
+
+#include "NuPlayer.h"
+
+namespace android {
+
+struct ABuffer;
+
+struct NuPlayer::Renderer : public AHandler {
+    Renderer(const sp<MediaPlayerBase::AudioSink> &sink,
+             const sp<AMessage> &notify);
+
+    void queueBuffer(
+            bool audio,
+            const sp<ABuffer> &buffer,
+            const sp<AMessage> &notifyConsumed);
+
+    void queueEOS(bool audio, status_t finalResult);
+
+    void flush(bool audio);
+
+    void signalTimeDiscontinuity();
+
+    void signalAudioSinkChanged();
+
+    enum {
+        kWhatEOS,
+        kWhatFlushComplete,
+        kWhatPosition,
+    };
+
+protected:
+    virtual ~Renderer();
+
+    virtual void onMessageReceived(const sp<AMessage> &msg);
+
+private:
+    enum {
+        kWhatDrainAudioQueue,
+        kWhatDrainVideoQueue,
+        kWhatQueueBuffer,
+        kWhatQueueEOS,
+        kWhatFlush,
+        kWhatAudioSinkChanged,
+    };
+
+    struct QueueEntry {
+        sp<ABuffer> mBuffer;
+        sp<AMessage> mNotifyConsumed;
+        size_t mOffset;
+        status_t mFinalResult;
+    };
+
+    sp<MediaPlayerBase::AudioSink> mAudioSink;
+    sp<AMessage> mNotify;
+    List<QueueEntry> mAudioQueue;
+    List<QueueEntry> mVideoQueue;
+    uint32_t mNumFramesWritten;
+
+    bool mDrainAudioQueuePending;
+    bool mDrainVideoQueuePending;
+    int32_t mAudioQueueGeneration;
+    int32_t mVideoQueueGeneration;
+
+    int64_t mAnchorTimeMediaUs;
+    int64_t mAnchorTimeRealUs;
+
+    Mutex mFlushLock;  // protects the following 2 member vars.
+    bool mFlushingAudio;
+    bool mFlushingVideo;
+
+    bool mHasAudio;
+    bool mHasVideo;
+    bool mSyncQueues;
+
+    void onDrainAudioQueue();
+    void postDrainAudioQueue();
+
+    void onDrainVideoQueue();
+    void postDrainVideoQueue();
+
+    void onQueueBuffer(const sp<AMessage> &msg);
+    void onQueueEOS(const sp<AMessage> &msg);
+    void onFlush(const sp<AMessage> &msg);
+    void onAudioSinkChanged();
+
+    void notifyEOS(bool audio);
+    void notifyFlushComplete(bool audio);
+    void notifyPosition();
+
+    void flushQueue(List<QueueEntry> *queue);
+    bool dropBufferWhileFlushing(bool audio, const sp<AMessage> &msg);
+    void syncQueuesDone();
+
+    DISALLOW_EVIL_CONSTRUCTORS(Renderer);
+};
+
+}  // namespace android
+
+#endif  // NUPLAYER_RENDERER_H_
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerSource.h b/media/libmediaplayerservice/nuplayer/NuPlayerSource.h
new file mode 100644
index 0000000..5e55487
--- /dev/null
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerSource.h
@@ -0,0 +1,62 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef NUPLAYER_SOURCE_H_
+
+#define NUPLAYER_SOURCE_H_
+
+#include "NuPlayer.h"
+
+namespace android {
+
+struct ABuffer;
+
+struct NuPlayer::Source : public RefBase {
+    Source() {}
+
+    virtual void start() = 0;
+
+    // Returns true iff more data was available, false on EOS.
+    virtual bool feedMoreTSData() = 0;
+
+    virtual sp<MetaData> getFormat(bool audio) = 0;
+
+    virtual status_t dequeueAccessUnit(
+            bool audio, sp<ABuffer> *accessUnit) = 0;
+
+    virtual status_t getDuration(int64_t *durationUs) {
+        return INVALID_OPERATION;
+    }
+
+    virtual status_t seekTo(int64_t seekTimeUs) {
+        return INVALID_OPERATION;
+    }
+
+    virtual bool isSeekable() {
+        return false;
+    }
+
+protected:
+    virtual ~Source() {}
+
+private:
+    DISALLOW_EVIL_CONSTRUCTORS(Source);
+};
+
+}  // namespace android
+
+#endif  // NUPLAYER_SOURCE_H_
+
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerStreamListener.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerStreamListener.cpp
new file mode 100644
index 0000000..a23beb7
--- /dev/null
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerStreamListener.cpp
@@ -0,0 +1,159 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "NuPlayerStreamListener"
+#include <utils/Log.h>
+
+#include "NuPlayerStreamListener.h"
+
+#include <binder/MemoryDealer.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/MediaErrors.h>
+
+namespace android {
+
+NuPlayer::NuPlayerStreamListener::NuPlayerStreamListener(
+        const sp<IStreamSource> &source,
+        ALooper::handler_id id)
+    : mSource(source),
+      mTargetID(id),
+      mEOS(false),
+      mSendDataNotification(true) {
+    mSource->setListener(this);
+
+    mMemoryDealer = new MemoryDealer(kNumBuffers * kBufferSize);
+    for (size_t i = 0; i < kNumBuffers; ++i) {
+        sp<IMemory> mem = mMemoryDealer->allocate(kBufferSize);
+        CHECK(mem != NULL);
+
+        mBuffers.push(mem);
+    }
+    mSource->setBuffers(mBuffers);
+}
+
+void NuPlayer::NuPlayerStreamListener::start() {
+    for (size_t i = 0; i < kNumBuffers; ++i) {
+        mSource->onBufferAvailable(i);
+    }
+}
+
+void NuPlayer::NuPlayerStreamListener::queueBuffer(size_t index, size_t size) {
+    QueueEntry entry;
+    entry.mIsCommand = false;
+    entry.mIndex = index;
+    entry.mSize = size;
+    entry.mOffset = 0;
+
+    Mutex::Autolock autoLock(mLock);
+    mQueue.push_back(entry);
+
+    if (mSendDataNotification) {
+        mSendDataNotification = false;
+
+        if (mTargetID != 0) {
+            (new AMessage(kWhatMoreDataQueued, mTargetID))->post();
+        }
+    }
+}
+
+void NuPlayer::NuPlayerStreamListener::issueCommand(
+        Command cmd, bool synchronous, const sp<AMessage> &extra) {
+    CHECK(!synchronous);
+
+    QueueEntry entry;
+    entry.mIsCommand = true;
+    entry.mCommand = cmd;
+    entry.mExtra = extra;
+
+    Mutex::Autolock autoLock(mLock);
+    mQueue.push_back(entry);
+
+    if (mSendDataNotification) {
+        mSendDataNotification = false;
+
+        if (mTargetID != 0) {
+            (new AMessage(kWhatMoreDataQueued, mTargetID))->post();
+        }
+    }
+}
+
+ssize_t NuPlayer::NuPlayerStreamListener::read(void *data, size_t size) {
+    CHECK_GT(size, 0u);
+
+    Mutex::Autolock autoLock(mLock);
+
+    if (mEOS) {
+        return 0;
+    }
+
+    if (mQueue.empty()) {
+        mSendDataNotification = true;
+
+        return -EWOULDBLOCK;
+    }
+
+    QueueEntry *entry = &*mQueue.begin();
+
+    if (entry->mIsCommand) {
+        switch (entry->mCommand) {
+            case EOS:
+            {
+                mQueue.erase(mQueue.begin());
+                entry = NULL;
+
+                mEOS = true;
+                return 0;
+            }
+
+            case DISCONTINUITY:
+            {
+                mQueue.erase(mQueue.begin());
+                entry = NULL;
+
+                return INFO_DISCONTINUITY;
+            }
+
+            default:
+                TRESPASS();
+                break;
+        }
+    }
+
+    size_t copy = entry->mSize;
+    if (copy > size) {
+        copy = size;
+    }
+
+    memcpy(data,
+           (const uint8_t *)mBuffers.editItemAt(entry->mIndex)->pointer()
+            + entry->mOffset,
+           copy);
+
+    entry->mOffset += copy;
+    entry->mSize -= copy;
+
+    if (entry->mSize == 0) {
+        mSource->onBufferAvailable(entry->mIndex);
+        mQueue.erase(mQueue.begin());
+        entry = NULL;
+    }
+
+    return copy;
+}
+
+}  // namespace android
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerStreamListener.h b/media/libmediaplayerservice/nuplayer/NuPlayerStreamListener.h
new file mode 100644
index 0000000..f88e945
--- /dev/null
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerStreamListener.h
@@ -0,0 +1,74 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef NUPLAYER_STREAM_LISTENER_H_
+
+#define NUPLAYER_STREAM_LISTENER_H_
+
+#include "NuPlayer.h"
+
+#include <media/IStreamSource.h>
+
+namespace android {
+
+struct MemoryDealer;
+
+struct NuPlayer::NuPlayerStreamListener : public BnStreamListener {
+    NuPlayerStreamListener(
+            const sp<IStreamSource> &source,
+            ALooper::handler_id targetID);
+
+    virtual void queueBuffer(size_t index, size_t size);
+
+    virtual void issueCommand(
+            Command cmd, bool synchronous, const sp<AMessage> &extra);
+
+    void start();
+    ssize_t read(void *data, size_t size);
+
+private:
+    enum {
+        kNumBuffers = 16,
+        kBufferSize = 188 * 20
+    };
+
+    struct QueueEntry {
+        bool mIsCommand;
+
+        size_t mIndex;
+        size_t mSize;
+        size_t mOffset;
+
+        Command mCommand;
+        sp<AMessage> mExtra;
+    };
+
+    Mutex mLock;
+
+    sp<IStreamSource> mSource;
+    ALooper::handler_id mTargetID;
+    sp<MemoryDealer> mMemoryDealer;
+    Vector<sp<IMemory> > mBuffers;
+    List<QueueEntry> mQueue;
+    bool mEOS;
+    bool mSendDataNotification;
+
+    DISALLOW_EVIL_CONSTRUCTORS(NuPlayerStreamListener);
+};
+
+}  // namespace android
+
+#endif // NUPLAYER_STREAM_LISTENER_H_
diff --git a/media/libmediaplayerservice/nuplayer/StreamingSource.cpp b/media/libmediaplayerservice/nuplayer/StreamingSource.cpp
new file mode 100644
index 0000000..b85ac9f
--- /dev/null
+++ b/media/libmediaplayerservice/nuplayer/StreamingSource.cpp
@@ -0,0 +1,120 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "StreamingSource"
+#include <utils/Log.h>
+
+#include "StreamingSource.h"
+
+#include "ATSParser.h"
+#include "AnotherPacketSource.h"
+#include "NuPlayerStreamListener.h"
+
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/MediaSource.h>
+#include <media/stagefright/MetaData.h>
+
+namespace android {
+
+NuPlayer::StreamingSource::StreamingSource(const sp<IStreamSource> &source)
+    : mSource(source),
+      mEOS(false) {
+}
+
+NuPlayer::StreamingSource::~StreamingSource() {
+}
+
+void NuPlayer::StreamingSource::start() {
+    mStreamListener = new NuPlayerStreamListener(mSource, 0);
+    mTSParser = new ATSParser;
+
+    mStreamListener->start();
+}
+
+bool NuPlayer::StreamingSource::feedMoreTSData() {
+    if (mEOS) {
+        return false;
+    }
+
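+    // Pull at most 10 transport stream packets (188 bytes each) from the listener per call.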
+    for (int32_t i = 0; i < 10; ++i) {
+        char buffer[188];
+        ssize_t n = mStreamListener->read(buffer, sizeof(buffer));
+
+        if (n == 0) {
+            LOGI("input data EOS reached.");
+            mTSParser->signalEOS(ERROR_END_OF_STREAM);
+            mEOS = true;
+            break;
+        } else if (n == INFO_DISCONTINUITY) {
+            mTSParser->signalDiscontinuity(ATSParser::DISCONTINUITY_SEEK);
+        } else if (n < 0) {
+            CHECK_EQ(n, -EWOULDBLOCK);
+            break;
+        } else {
+            if (buffer[0] == 0x00) {
+                // XXX legacy
+                mTSParser->signalDiscontinuity(
+                        buffer[1] == 0x00
+                            ? ATSParser::DISCONTINUITY_SEEK
+                            : ATSParser::DISCONTINUITY_FORMATCHANGE);
+            } else {
+                mTSParser->feedTSPacket(buffer, sizeof(buffer));
+            }
+        }
+    }
+
+    return true;
+}
+
+sp<MetaData> NuPlayer::StreamingSource::getFormat(bool audio) {
+    ATSParser::SourceType type =
+        audio ? ATSParser::MPEG2ADTS_AUDIO : ATSParser::AVC_VIDEO;
+
+    sp<AnotherPacketSource> source =
+        static_cast<AnotherPacketSource *>(mTSParser->getSource(type).get());
+
+    if (source == NULL) {
+        return NULL;
+    }
+
+    return source->getFormat();
+}
+
+status_t NuPlayer::StreamingSource::dequeueAccessUnit(
+        bool audio, sp<ABuffer> *accessUnit) {
+    ATSParser::SourceType type =
+        audio ? ATSParser::MPEG2ADTS_AUDIO : ATSParser::AVC_VIDEO;
+
+    sp<AnotherPacketSource> source =
+        static_cast<AnotherPacketSource *>(mTSParser->getSource(type).get());
+
+    if (source == NULL) {
+        return -EWOULDBLOCK;
+    }
+
+    status_t finalResult;
+    if (!source->hasBufferAvailable(&finalResult)) {
+        return finalResult == OK ? -EWOULDBLOCK : finalResult;
+    }
+
+    return source->dequeueAccessUnit(accessUnit);
+}
+
+}  // namespace android
+
diff --git a/media/libmediaplayerservice/nuplayer/StreamingSource.h b/media/libmediaplayerservice/nuplayer/StreamingSource.h
new file mode 100644
index 0000000..7abce84
--- /dev/null
+++ b/media/libmediaplayerservice/nuplayer/StreamingSource.h
@@ -0,0 +1,54 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef STREAMING_SOURCE_H_
+
+#define STREAMING_SOURCE_H_
+
+#include "NuPlayer.h"
+#include "NuPlayerSource.h"
+
+namespace android {
+
+struct ABuffer;
+struct ATSParser;
+
+struct NuPlayer::StreamingSource : public NuPlayer::Source {
+    StreamingSource(const sp<IStreamSource> &source);
+
+    virtual void start();
+
+    // Feeds buffered TS data into the parser; returns false once EOS has been reached.
+    virtual bool feedMoreTSData();
+
+    virtual sp<MetaData> getFormat(bool audio);
+    virtual status_t dequeueAccessUnit(bool audio, sp<ABuffer> *accessUnit);
+
+protected:
+    virtual ~StreamingSource();
+
+private:
+    sp<IStreamSource> mSource;
+    bool mEOS;
+    sp<NuPlayerStreamListener> mStreamListener;
+    sp<ATSParser> mTSParser;
+
+    DISALLOW_EVIL_CONSTRUCTORS(StreamingSource);
+};
+
+}  // namespace android
+
+#endif  // STREAMING_SOURCE_H_
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp
new file mode 100644
index 0000000..dfb4e00
--- /dev/null
+++ b/media/libstagefright/ACodec.cpp
@@ -0,0 +1,2208 @@
+//#define LOG_NDEBUG 0
+#define LOG_TAG "ACodec"
+
+#include <media/stagefright/ACodec.h>
+
+#include <binder/MemoryDealer.h>
+
+#include <media/stagefright/foundation/hexdump.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/OMXClient.h>
+
+#include <surfaceflinger/Surface.h>
+
+#include <OMX_Component.h>
+
+namespace android {
+
+template<class T>
+static void InitOMXParams(T *params) {
+    params->nSize = sizeof(T);
+    params->nVersion.s.nVersionMajor = 1;
+    params->nVersion.s.nVersionMinor = 0;
+    params->nVersion.s.nRevision = 0;
+    params->nVersion.s.nStep = 0;
+}
+
+struct CodecObserver : public BnOMXObserver {
+    CodecObserver() {}
+
+    void setNotificationMessage(const sp<AMessage> &msg) {
+        mNotify = msg;
+    }
+
+    // from IOMXObserver
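+    // Repackages each OMX callback as an AMessage and posts it to the notification target.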
+    virtual void onMessage(const omx_message &omx_msg) {
+        sp<AMessage> msg = mNotify->dup();
+
+        msg->setInt32("type", omx_msg.type);
+        msg->setPointer("node", omx_msg.node);
+
+        switch (omx_msg.type) {
+            case omx_message::EVENT:
+            {
+                msg->setInt32("event", omx_msg.u.event_data.event);
+                msg->setInt32("data1", omx_msg.u.event_data.data1);
+                msg->setInt32("data2", omx_msg.u.event_data.data2);
+                break;
+            }
+
+            case omx_message::EMPTY_BUFFER_DONE:
+            {
+                msg->setPointer("buffer", omx_msg.u.buffer_data.buffer);
+                break;
+            }
+
+            case omx_message::FILL_BUFFER_DONE:
+            {
+                msg->setPointer(
+                        "buffer", omx_msg.u.extended_buffer_data.buffer);
+                msg->setInt32(
+                        "range_offset",
+                        omx_msg.u.extended_buffer_data.range_offset);
+                msg->setInt32(
+                        "range_length",
+                        omx_msg.u.extended_buffer_data.range_length);
+                msg->setInt32(
+                        "flags",
+                        omx_msg.u.extended_buffer_data.flags);
+                msg->setInt64(
+                        "timestamp",
+                        omx_msg.u.extended_buffer_data.timestamp);
+                msg->setPointer(
+                        "platform_private",
+                        omx_msg.u.extended_buffer_data.platform_private);
+                msg->setPointer(
+                        "data_ptr",
+                        omx_msg.u.extended_buffer_data.data_ptr);
+                break;
+            }
+
+            default:
+                TRESPASS();
+                break;
+        }
+
+        msg->post();
+    }
+
+protected:
+    virtual ~CodecObserver() {}
+
+private:
+    sp<AMessage> mNotify;
+
+    DISALLOW_EVIL_CONSTRUCTORS(CodecObserver);
+};
+
+////////////////////////////////////////////////////////////////////////////////
+
+struct ACodec::BaseState : public AState {
+    BaseState(ACodec *codec, const sp<AState> &parentState = NULL);
+
+protected:
+    enum PortMode {
+        KEEP_BUFFERS,
+        RESUBMIT_BUFFERS,
+        FREE_BUFFERS,
+    };
+
+    ACodec *mCodec;
+
+    virtual PortMode getPortMode(OMX_U32 portIndex);
+
+    virtual bool onMessageReceived(const sp<AMessage> &msg);
+
+    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);
+
+    virtual void onOutputBufferDrained(const sp<AMessage> &msg);
+    virtual void onInputBufferFilled(const sp<AMessage> &msg);
+
+    void postFillThisBuffer(BufferInfo *info);
+
+private:
+    bool onOMXMessage(const sp<AMessage> &msg);
+
+    bool onOMXEmptyBufferDone(IOMX::buffer_id bufferID);
+
+    bool onOMXFillBufferDone(
+            IOMX::buffer_id bufferID,
+            size_t rangeOffset, size_t rangeLength,
+            OMX_U32 flags,
+            int64_t timeUs,
+            void *platformPrivate,
+            void *dataPtr);
+
+    void getMoreInputDataIfPossible();
+
+    DISALLOW_EVIL_CONSTRUCTORS(BaseState);
+};
+
+////////////////////////////////////////////////////////////////////////////////
+
+struct ACodec::UninitializedState : public ACodec::BaseState {
+    UninitializedState(ACodec *codec);
+
+protected:
+    virtual bool onMessageReceived(const sp<AMessage> &msg);
+
+private:
+    void onSetup(const sp<AMessage> &msg);
+
+    DISALLOW_EVIL_CONSTRUCTORS(UninitializedState);
+};
+
+////////////////////////////////////////////////////////////////////////////////
+
+struct ACodec::LoadedToIdleState : public ACodec::BaseState {
+    LoadedToIdleState(ACodec *codec);
+
+protected:
+    virtual bool onMessageReceived(const sp<AMessage> &msg);
+    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);
+    virtual void stateEntered();
+
+private:
+    status_t allocateBuffers();
+
+    DISALLOW_EVIL_CONSTRUCTORS(LoadedToIdleState);
+};
+
+////////////////////////////////////////////////////////////////////////////////
+
+struct ACodec::IdleToExecutingState : public ACodec::BaseState {
+    IdleToExecutingState(ACodec *codec);
+
+protected:
+    virtual bool onMessageReceived(const sp<AMessage> &msg);
+    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);
+    virtual void stateEntered();
+
+private:
+    DISALLOW_EVIL_CONSTRUCTORS(IdleToExecutingState);
+};
+
+////////////////////////////////////////////////////////////////////////////////
+
+struct ACodec::ExecutingState : public ACodec::BaseState {
+    ExecutingState(ACodec *codec);
+
+    void submitOutputBuffers();
+
+    // Submit output buffers to the decoder, submit input buffers to client
+    // to fill with data.
+    void resume();
+
+protected:
+    virtual PortMode getPortMode(OMX_U32 portIndex);
+    virtual bool onMessageReceived(const sp<AMessage> &msg);
+    virtual void stateEntered();
+
+    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);
+
+private:
+    DISALLOW_EVIL_CONSTRUCTORS(ExecutingState);
+};
+
+////////////////////////////////////////////////////////////////////////////////
+
+struct ACodec::OutputPortSettingsChangedState : public ACodec::BaseState {
+    OutputPortSettingsChangedState(ACodec *codec);
+
+protected:
+    virtual PortMode getPortMode(OMX_U32 portIndex);
+    virtual bool onMessageReceived(const sp<AMessage> &msg);
+    virtual void stateEntered();
+
+    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);
+
+private:
+    DISALLOW_EVIL_CONSTRUCTORS(OutputPortSettingsChangedState);
+};
+
+////////////////////////////////////////////////////////////////////////////////
+
+struct ACodec::ExecutingToIdleState : public ACodec::BaseState {
+    ExecutingToIdleState(ACodec *codec);
+
+protected:
+    virtual bool onMessageReceived(const sp<AMessage> &msg);
+    virtual void stateEntered();
+
+    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);
+
+    virtual void onOutputBufferDrained(const sp<AMessage> &msg);
+    virtual void onInputBufferFilled(const sp<AMessage> &msg);
+
+private:
+    void changeStateIfWeOwnAllBuffers();
+
+    DISALLOW_EVIL_CONSTRUCTORS(ExecutingToIdleState);
+};
+
+////////////////////////////////////////////////////////////////////////////////
+
+struct ACodec::IdleToLoadedState : public ACodec::BaseState {
+    IdleToLoadedState(ACodec *codec);
+
+protected:
+    virtual bool onMessageReceived(const sp<AMessage> &msg);
+    virtual void stateEntered();
+
+    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);
+
+private:
+    DISALLOW_EVIL_CONSTRUCTORS(IdleToLoadedState);
+};
+
+////////////////////////////////////////////////////////////////////////////////
+
+struct ACodec::ErrorState : public ACodec::BaseState {
+    ErrorState(ACodec *codec);
+
+protected:
+    virtual bool onMessageReceived(const sp<AMessage> &msg);
+    virtual void stateEntered();
+
+    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);
+
+private:
+    DISALLOW_EVIL_CONSTRUCTORS(ErrorState);
+};
+
+////////////////////////////////////////////////////////////////////////////////
+
+struct ACodec::FlushingState : public ACodec::BaseState {
+    FlushingState(ACodec *codec);
+
+protected:
+    virtual bool onMessageReceived(const sp<AMessage> &msg);
+    virtual void stateEntered();
+
+    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);
+
+    virtual void onOutputBufferDrained(const sp<AMessage> &msg);
+    virtual void onInputBufferFilled(const sp<AMessage> &msg);
+
+private:
+    bool mFlushComplete[2];
+
+    void changeStateIfWeOwnAllBuffers();
+
+    DISALLOW_EVIL_CONSTRUCTORS(FlushingState);
+};
+
+////////////////////////////////////////////////////////////////////////////////
+
+ACodec::ACodec()
+    : mNode(NULL),
+      mSentFormat(false) {
+    mUninitializedState = new UninitializedState(this);
+    mLoadedToIdleState = new LoadedToIdleState(this);
+    mIdleToExecutingState = new IdleToExecutingState(this);
+    mExecutingState = new ExecutingState(this);
+
+    mOutputPortSettingsChangedState =
+        new OutputPortSettingsChangedState(this);
+
+    mExecutingToIdleState = new ExecutingToIdleState(this);
+    mIdleToLoadedState = new IdleToLoadedState(this);
+    mErrorState = new ErrorState(this);
+    mFlushingState = new FlushingState(this);
+
+    mPortEOS[kPortIndexInput] = mPortEOS[kPortIndexOutput] = false;
+
+    changeState(mUninitializedState);
+}
+
+ACodec::~ACodec() {
+}
+
+void ACodec::setNotificationMessage(const sp<AMessage> &msg) {
+    mNotify = msg;
+}
+
+void ACodec::initiateSetup(const sp<AMessage> &msg) {
+    msg->setWhat(kWhatSetup);
+    msg->setTarget(id());
+    msg->post();
+}
+
+void ACodec::signalFlush() {
+    (new AMessage(kWhatFlush, id()))->post();
+}
+
+void ACodec::signalResume() {
+    (new AMessage(kWhatResume, id()))->post();
+}
+
+void ACodec::initiateShutdown() {
+    (new AMessage(kWhatShutdown, id()))->post();
+}
+
+status_t ACodec::allocateBuffersOnPort(OMX_U32 portIndex) {
+    CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);
+
+    CHECK(mDealer[portIndex] == NULL);
+    CHECK(mBuffers[portIndex].isEmpty());
+
+    if (mNativeWindow != NULL && portIndex == kPortIndexOutput) {
+        return allocateOutputBuffersFromNativeWindow();
+    }
+
+    OMX_PARAM_PORTDEFINITIONTYPE def;
+    InitOMXParams(&def);
+    def.nPortIndex = portIndex;
+
+    status_t err = mOMX->getParameter(
+            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+
+    if (err != OK) {
+        return err;
+    }
+
+    LOGV("[%s] Allocating %lu buffers of size %lu on %s port",
+            mComponentName.c_str(),
+            def.nBufferCountActual, def.nBufferSize,
+            portIndex == kPortIndexInput ? "input" : "output");
+
+    size_t totalSize = def.nBufferCountActual * def.nBufferSize;
+    mDealer[portIndex] = new MemoryDealer(totalSize, "OMXCodec");
+
+    for (OMX_U32 i = 0; i < def.nBufferCountActual; ++i) {
+        sp<IMemory> mem = mDealer[portIndex]->allocate(def.nBufferSize);
+        CHECK(mem.get() != NULL);
+
+        IOMX::buffer_id buffer;
+#if 0
+        err = mOMX->allocateBufferWithBackup(mNode, portIndex, mem, &buffer);
+#else
+        err = mOMX->useBuffer(mNode, portIndex, mem, &buffer);
+#endif
+
+        if (err != OK) {
+            return err;
+        }
+
+        BufferInfo info;
+        info.mBufferID = buffer;
+        info.mStatus = BufferInfo::OWNED_BY_US;
+        info.mData = new ABuffer(mem->pointer(), def.nBufferSize);
+        mBuffers[portIndex].push(info);
+    }
+
+    return OK;
+}
+
+status_t ACodec::allocateOutputBuffersFromNativeWindow() {
+    OMX_PARAM_PORTDEFINITIONTYPE def;
+    InitOMXParams(&def);
+    def.nPortIndex = kPortIndexOutput;
+
+    status_t err = mOMX->getParameter(
+            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+
+    if (err != OK) {
+        return err;
+    }
+
+    err = native_window_set_buffers_geometry(
+            mNativeWindow.get(),
+            def.format.video.nFrameWidth,
+            def.format.video.nFrameHeight,
+            def.format.video.eColorFormat);
+
+    if (err != 0) {
+        LOGE("native_window_set_buffers_geometry failed: %s (%d)",
+                strerror(-err), -err);
+        return err;
+    }
+
+    // Increase the buffer count by one to allow for the ANativeWindow to hold
+    // on to one of the buffers.
+    def.nBufferCountActual++;
+    err = mOMX->setParameter(
+            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+
+    if (err != OK) {
+        return err;
+    }
+
+    // Set up the native window.
+    // XXX TODO: Get the gralloc usage flags from the OMX plugin!
+    err = native_window_set_usage(
+            mNativeWindow.get(),
+            GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_EXTERNAL_DISP);
+
+    if (err != 0) {
+        LOGE("native_window_set_usage failed: %s (%d)", strerror(-err), -err);
+        return err;
+    }
+
+    err = native_window_set_buffer_count(
+            mNativeWindow.get(), def.nBufferCountActual);
+
+    if (err != 0) {
+        LOGE("native_window_set_buffer_count failed: %s (%d)", strerror(-err),
+                -err);
+        return err;
+    }
+
+    // XXX TODO: Do something so the ANativeWindow knows that we'll need to get
+    // the same set of buffers.
+
+    LOGV("[%s] Allocating %lu buffers from a native window of size %lu on "
+         "output port",
+         mComponentName.c_str(), def.nBufferCountActual, def.nBufferSize);
+
+    // Dequeue buffers and send them to OMX
+    OMX_U32 i;
+    for (i = 0; i < def.nBufferCountActual; i++) {
+        android_native_buffer_t *buf;
+        err = mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf);
+        if (err != 0) {
+            LOGE("dequeueBuffer failed: %s (%d)", strerror(-err), -err);
+            break;
+        }
+
+        sp<GraphicBuffer> graphicBuffer(new GraphicBuffer(buf, false));
+        IOMX::buffer_id bufferId;
+        err = mOMX->useGraphicBuffer(mNode, kPortIndexOutput, graphicBuffer,
+                &bufferId);
+        if (err != 0) {
+            break;
+        }
+
+        LOGV("[%s] Registered graphic buffer with ID %p (pointer = %p)",
+             mComponentName.c_str(),
+             bufferId, graphicBuffer.get());
+
+        BufferInfo info;
+        info.mBufferID = bufferId;
+        info.mStatus = BufferInfo::OWNED_BY_US;
+        info.mData = new ABuffer(0);
+        info.mGraphicBuffer = graphicBuffer;
+        mBuffers[kPortIndexOutput].push(info);
+    }
+
+    OMX_U32 cancelStart;
+    OMX_U32 cancelEnd;
+
+    if (err != 0) {
+        // If an error occurred while dequeuing we need to cancel any buffers
+        // that were dequeued.
+        cancelStart = 0;
+        cancelEnd = i;
+    } else {
+        // Return the last two buffers to the native window.
+        // XXX TODO: The number of buffers the native window owns should
+        // probably be queried from it when we put the native window in
+        // fixed buffer pool mode (which needs to be implemented).
+        // Currently it's hard-coded to 2.
+        cancelStart = def.nBufferCountActual - 2;
+        cancelEnd = def.nBufferCountActual;
+    }
+
+    for (OMX_U32 i = cancelStart; i < cancelEnd; i++) {
+        BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i);
+        cancelBufferToNativeWindow(info);
+    }
+
+    return err;
+}
+
+status_t ACodec::cancelBufferToNativeWindow(BufferInfo *info) {
+    CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_US);
+
+    LOGV("[%s] Calling cancelBuffer on buffer %p",
+         mComponentName.c_str(), info->mBufferID);
+
+    int err = mNativeWindow->cancelBuffer(
+        mNativeWindow.get(), info->mGraphicBuffer.get());
+
+    CHECK_EQ(err, 0);
+
+    info->mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW;
+
+    return OK;
+}
+
+ACodec::BufferInfo *ACodec::dequeueBufferFromNativeWindow() {
+    android_native_buffer_t *buf;
+    CHECK_EQ(mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf), 0);
+
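+    // Locate the BufferInfo wrapping the buffer the native window just handed back, matching by handle.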
+    for (size_t i = mBuffers[kPortIndexOutput].size(); i-- > 0;) {
+        BufferInfo *info =
+            &mBuffers[kPortIndexOutput].editItemAt(i);
+
+        if (info->mGraphicBuffer->handle == buf->handle) {
+            CHECK_EQ((int)info->mStatus,
+                     (int)BufferInfo::OWNED_BY_NATIVE_WINDOW);
+
+            info->mStatus = BufferInfo::OWNED_BY_US;
+
+            return info;
+        }
+    }
+
+    TRESPASS();
+
+    return NULL;
+}
+
+status_t ACodec::freeBuffersOnPort(OMX_U32 portIndex) {
+    for (size_t i = mBuffers[portIndex].size(); i-- > 0;) {
+        CHECK_EQ((status_t)OK, freeBuffer(portIndex, i));
+    }
+
+    mDealer[portIndex].clear();
+
+    return OK;
+}
+
+status_t ACodec::freeOutputBuffersOwnedByNativeWindow() {
+    for (size_t i = mBuffers[kPortIndexOutput].size(); i-- > 0;) {
+        BufferInfo *info =
+            &mBuffers[kPortIndexOutput].editItemAt(i);
+
+        if (info->mStatus ==
+                BufferInfo::OWNED_BY_NATIVE_WINDOW) {
+            CHECK_EQ((status_t)OK, freeBuffer(kPortIndexOutput, i));
+        }
+    }
+
+    return OK;
+}
+
+status_t ACodec::freeBuffer(OMX_U32 portIndex, size_t i) {
+    BufferInfo *info = &mBuffers[portIndex].editItemAt(i);
+
+    CHECK(info->mStatus == BufferInfo::OWNED_BY_US
+            || info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW);
+
+    if (portIndex == kPortIndexOutput && mNativeWindow != NULL
+            && info->mStatus == BufferInfo::OWNED_BY_US) {
+        CHECK_EQ((status_t)OK, cancelBufferToNativeWindow(info));
+    }
+
+    CHECK_EQ(mOMX->freeBuffer(
+                mNode, portIndex, info->mBufferID),
+             (status_t)OK);
+
+    mBuffers[portIndex].removeAt(i);
+
+    return OK;
+}
+
+ACodec::BufferInfo *ACodec::findBufferByID(
+        uint32_t portIndex, IOMX::buffer_id bufferID,
+        ssize_t *index) {
+    for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) {
+        BufferInfo *info = &mBuffers[portIndex].editItemAt(i);
+
+        if (info->mBufferID == bufferID) {
+            if (index != NULL) {
+                *index = i;
+            }
+            return info;
+        }
+    }
+
+    TRESPASS();
+
+    return NULL;
+}
+
+void ACodec::setComponentRole(
+        bool isEncoder, const char *mime) {
+    struct MimeToRole {
+        const char *mime;
+        const char *decoderRole;
+        const char *encoderRole;
+    };
+
+    static const MimeToRole kMimeToRole[] = {
+        { MEDIA_MIMETYPE_AUDIO_MPEG,
+            "audio_decoder.mp3", "audio_encoder.mp3" },
+        { MEDIA_MIMETYPE_AUDIO_AMR_NB,
+            "audio_decoder.amrnb", "audio_encoder.amrnb" },
+        { MEDIA_MIMETYPE_AUDIO_AMR_WB,
+            "audio_decoder.amrwb", "audio_encoder.amrwb" },
+        { MEDIA_MIMETYPE_AUDIO_AAC,
+            "audio_decoder.aac", "audio_encoder.aac" },
+        { MEDIA_MIMETYPE_VIDEO_AVC,
+            "video_decoder.avc", "video_encoder.avc" },
+        { MEDIA_MIMETYPE_VIDEO_MPEG4,
+            "video_decoder.mpeg4", "video_encoder.mpeg4" },
+        { MEDIA_MIMETYPE_VIDEO_H263,
+            "video_decoder.h263", "video_encoder.h263" },
+    };
+
+    static const size_t kNumMimeToRole =
+        sizeof(kMimeToRole) / sizeof(kMimeToRole[0]);
+
+    size_t i;
+    for (i = 0; i < kNumMimeToRole; ++i) {
+        if (!strcasecmp(mime, kMimeToRole[i].mime)) {
+            break;
+        }
+    }
+
+    if (i == kNumMimeToRole) {
+        return;
+    }
+
+    const char *role =
+        isEncoder ? kMimeToRole[i].encoderRole
+                  : kMimeToRole[i].decoderRole;
+
+    if (role != NULL) {
+        OMX_PARAM_COMPONENTROLETYPE roleParams;
+        InitOMXParams(&roleParams);
+
+        strncpy((char *)roleParams.cRole,
+                role, OMX_MAX_STRINGNAME_SIZE - 1);
+
+        roleParams.cRole[OMX_MAX_STRINGNAME_SIZE - 1] = '\0';
+
+        status_t err = mOMX->setParameter(
+                mNode, OMX_IndexParamStandardComponentRole,
+                &roleParams, sizeof(roleParams));
+
+        if (err != OK) {
+            LOGW("[%s] Failed to set standard component role '%s'.",
+                 mComponentName.c_str(), role);
+        }
+    }
+}
+
+void ACodec::configureCodec(
+        const char *mime, const sp<AMessage> &msg) {
+    setComponentRole(false /* isEncoder */, mime);
+
+    if (!strncasecmp(mime, "video/", 6)) {
+        int32_t width, height;
+        CHECK(msg->findInt32("width", &width));
+        CHECK(msg->findInt32("height", &height));
+
+        CHECK_EQ(setupVideoDecoder(mime, width, height),
+                 (status_t)OK);
+    } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AAC)) {
+        int32_t numChannels, sampleRate;
+        CHECK(msg->findInt32("channel-count", &numChannels));
+        CHECK(msg->findInt32("sample-rate", &sampleRate));
+
+        CHECK_EQ(setupAACDecoder(numChannels, sampleRate), (status_t)OK);
+    } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_MPEG)) {
+    } else {
+        TRESPASS();
+    }
+
+    int32_t maxInputSize;
+    if (msg->findInt32("max-input-size", &maxInputSize)) {
+        CHECK_EQ(setMinBufferSize(kPortIndexInput, (size_t)maxInputSize),
+                 (status_t)OK);
+    } else if (!strcmp("OMX.Nvidia.aac.decoder", mComponentName.c_str())) {
+        CHECK_EQ(setMinBufferSize(kPortIndexInput, 8192),  // XXX
+                 (status_t)OK);
+    }
+}
+
+status_t ACodec::setMinBufferSize(OMX_U32 portIndex, size_t size) {
+    OMX_PARAM_PORTDEFINITIONTYPE def;
+    InitOMXParams(&def);
+    def.nPortIndex = portIndex;
+
+    status_t err = mOMX->getParameter(
+            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+
+    if (err != OK) {
+        return err;
+    }
+
+    if (def.nBufferSize >= size) {
+        return OK;
+    }
+
+    def.nBufferSize = size;
+
+    err = mOMX->setParameter(
+            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+
+    if (err != OK) {
+        return err;
+    }
+
+    err = mOMX->getParameter(
+            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+
+    if (err != OK) {
+        return err;
+    }
+
+    CHECK(def.nBufferSize >= size);
+
+    return OK;
+}
+
+status_t ACodec::setupAACDecoder(int32_t numChannels, int32_t sampleRate) {
+    OMX_AUDIO_PARAM_AACPROFILETYPE profile;
+    InitOMXParams(&profile);
+    profile.nPortIndex = kPortIndexInput;
+
+    status_t err = mOMX->getParameter(
+            mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile));
+
+    if (err != OK) {
+        return err;
+    }
+
+    profile.nChannels = numChannels;
+    profile.nSampleRate = sampleRate;
+    profile.eAACStreamFormat = OMX_AUDIO_AACStreamFormatMP4ADTS;
+
+    err = mOMX->setParameter(
+            mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile));
+
+    return err;
+}
+
+status_t ACodec::setVideoPortFormatType(
+        OMX_U32 portIndex,
+        OMX_VIDEO_CODINGTYPE compressionFormat,
+        OMX_COLOR_FORMATTYPE colorFormat) {
+    OMX_VIDEO_PARAM_PORTFORMATTYPE format;
+    InitOMXParams(&format);
+    format.nPortIndex = portIndex;
+    format.nIndex = 0;
+    bool found = false;
+
+    OMX_U32 index = 0;
+    for (;;) {
+        format.nIndex = index;
+        status_t err = mOMX->getParameter(
+                mNode, OMX_IndexParamVideoPortFormat,
+                &format, sizeof(format));
+
+        if (err != OK) {
+            return err;
+        }
+
+        // The following assertion is violated by TI's video decoder.
+        // CHECK_EQ(format.nIndex, index);
+
+        if (!strcmp("OMX.TI.Video.encoder", mComponentName.c_str())) {
+            if (portIndex == kPortIndexInput
+                    && colorFormat == format.eColorFormat) {
+                // eCompressionFormat does not seem right.
+                found = true;
+                break;
+            }
+            if (portIndex == kPortIndexOutput
+                    && compressionFormat == format.eCompressionFormat) {
+                // eColorFormat does not seem right.
+                found = true;
+                break;
+            }
+        }
+
+        if (format.eCompressionFormat == compressionFormat
+            && format.eColorFormat == colorFormat) {
+            found = true;
+            break;
+        }
+
+        ++index;
+    }
+
+    if (!found) {
+        return UNKNOWN_ERROR;
+    }
+
+    status_t err = mOMX->setParameter(
+            mNode, OMX_IndexParamVideoPortFormat,
+            &format, sizeof(format));
+
+    return err;
+}
+
+status_t ACodec::setSupportedOutputFormat() {
+    OMX_VIDEO_PARAM_PORTFORMATTYPE format;
+    InitOMXParams(&format);
+    format.nPortIndex = kPortIndexOutput;
+    format.nIndex = 0;
+
+    status_t err = mOMX->getParameter(
+            mNode, OMX_IndexParamVideoPortFormat,
+            &format, sizeof(format));
+    CHECK_EQ(err, (status_t)OK);
+    CHECK_EQ((int)format.eCompressionFormat, (int)OMX_VIDEO_CodingUnused);
+
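+    // Vendor-specific (Qualcomm) color format; values from 0x7F000000 up are OMX vendor extensions.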
+    static const int OMX_QCOM_COLOR_FormatYVU420SemiPlanar = 0x7FA30C00;
+
+    CHECK(format.eColorFormat == OMX_COLOR_FormatYUV420Planar
+           || format.eColorFormat == OMX_COLOR_FormatYUV420SemiPlanar
+           || format.eColorFormat == OMX_COLOR_FormatCbYCrY
+           || format.eColorFormat == OMX_QCOM_COLOR_FormatYVU420SemiPlanar);
+
+    return mOMX->setParameter(
+            mNode, OMX_IndexParamVideoPortFormat,
+            &format, sizeof(format));
+}
+
+status_t ACodec::setupVideoDecoder(
+        const char *mime, int32_t width, int32_t height) {
+    OMX_VIDEO_CODINGTYPE compressionFormat = OMX_VIDEO_CodingUnused;
+    if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_AVC, mime)) {
+        compressionFormat = OMX_VIDEO_CodingAVC;
+    } else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_MPEG4, mime)) {
+        compressionFormat = OMX_VIDEO_CodingMPEG4;
+    } else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_H263, mime)) {
+        compressionFormat = OMX_VIDEO_CodingH263;
+    } else {
+        TRESPASS();
+    }
+
+    status_t err = setVideoPortFormatType(
+            kPortIndexInput, compressionFormat, OMX_COLOR_FormatUnused);
+
+    if (err != OK) {
+        return err;
+    }
+
+    err = setSupportedOutputFormat();
+
+    if (err != OK) {
+        return err;
+    }
+
+    err = setVideoFormatOnPort(
+            kPortIndexInput, width, height, compressionFormat);
+
+    if (err != OK) {
+        return err;
+    }
+
+    err = setVideoFormatOnPort(
+            kPortIndexOutput, width, height, OMX_VIDEO_CodingUnused);
+
+    if (err != OK) {
+        return err;
+    }
+
+    return OK;
+}
+
+status_t ACodec::setVideoFormatOnPort(
+        OMX_U32 portIndex,
+        int32_t width, int32_t height, OMX_VIDEO_CODINGTYPE compressionFormat) {
+    OMX_PARAM_PORTDEFINITIONTYPE def;
+    InitOMXParams(&def);
+    def.nPortIndex = portIndex;
+
+    OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video;
+
+    status_t err = mOMX->getParameter(
+            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+
+    CHECK_EQ(err, (status_t)OK);
+
+    if (portIndex == kPortIndexInput) {
+        // XXX Need a (much) better heuristic to compute input buffer sizes.
+        const size_t X = 64 * 1024;
+        if (def.nBufferSize < X) {
+            def.nBufferSize = X;
+        }
+    }
+
+    CHECK_EQ((int)def.eDomain, (int)OMX_PortDomainVideo);
+
+    video_def->nFrameWidth = width;
+    video_def->nFrameHeight = height;
+
+    if (portIndex == kPortIndexInput) {
+        video_def->eCompressionFormat = compressionFormat;
+        video_def->eColorFormat = OMX_COLOR_FormatUnused;
+    }
+
+    err = mOMX->setParameter(
+            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+
+    return err;
+}
+
+status_t ACodec::initNativeWindow() {
+    if (mNativeWindow != NULL) {
+        return mOMX->enableGraphicBuffers(mNode, kPortIndexOutput, OMX_TRUE);
+    }
+
+    mOMX->enableGraphicBuffers(mNode, kPortIndexOutput, OMX_FALSE);
+    return OK;
+}
+
+bool ACodec::allYourBuffersAreBelongToUs(
+        OMX_U32 portIndex) {
+    for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) {
+        BufferInfo *info = &mBuffers[portIndex].editItemAt(i);
+
+        if (info->mStatus != BufferInfo::OWNED_BY_US
+                && info->mStatus != BufferInfo::OWNED_BY_NATIVE_WINDOW) {
+            LOGV("[%s] Buffer %p on port %ld still has status %d",
+                    mComponentName.c_str(),
+                    info->mBufferID, portIndex, info->mStatus);
+            return false;
+        }
+    }
+
+    return true;
+}
+
+bool ACodec::allYourBuffersAreBelongToUs() {
+    return allYourBuffersAreBelongToUs(kPortIndexInput)
+        && allYourBuffersAreBelongToUs(kPortIndexOutput);
+}
+
+void ACodec::deferMessage(const sp<AMessage> &msg) {
+    // Hold on to the message; it is re-dispatched later via
+    // processDeferredMessages().
+    mDeferredQueue.push_back(msg);
+}
+
+void ACodec::processDeferredMessages() {
+    List<sp<AMessage> > queue = mDeferredQueue;
+    mDeferredQueue.clear();
+
+    List<sp<AMessage> >::iterator it = queue.begin();
+    while (it != queue.end()) {
+        onMessageReceived(*it++);
+    }
+}
+
+void ACodec::sendFormatChange() {
+    sp<AMessage> notify = mNotify->dup();
+    notify->setInt32("what", kWhatOutputFormatChanged);
+
+    OMX_PARAM_PORTDEFINITIONTYPE def;
+    InitOMXParams(&def);
+    def.nPortIndex = kPortIndexOutput;
+
+    CHECK_EQ(mOMX->getParameter(
+                mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)),
+             (status_t)OK);
+
+    CHECK_EQ((int)def.eDir, (int)OMX_DirOutput);
+
+    switch (def.eDomain) {
+        case OMX_PortDomainVideo:
+        {
+            OMX_VIDEO_PORTDEFINITIONTYPE *videoDef = &def.format.video;
+
+            notify->setString("mime", MEDIA_MIMETYPE_VIDEO_RAW);
+            notify->setInt32("width", videoDef->nFrameWidth);
+            notify->setInt32("height", videoDef->nFrameHeight);
+
+            OMX_CONFIG_RECTTYPE rect;
+            InitOMXParams(&rect);
+            rect.nPortIndex = kPortIndexOutput;
+
+            if (mOMX->getConfig(
+                        mNode, OMX_IndexConfigCommonOutputCrop,
+                        &rect, sizeof(rect)) != OK) {
+                rect.nLeft = 0;
+                rect.nTop = 0;
+                rect.nWidth = videoDef->nFrameWidth;
+                rect.nHeight = videoDef->nFrameHeight;
+            }
+
+            CHECK_GE(rect.nLeft, 0);
+            CHECK_GE(rect.nTop, 0);
+            CHECK_GE(rect.nWidth, 0u);
+            CHECK_GE(rect.nHeight, 0u);
+            CHECK_LE(rect.nLeft + rect.nWidth - 1, videoDef->nFrameWidth);
+            CHECK_LE(rect.nTop + rect.nHeight - 1, videoDef->nFrameHeight);
+
+            notify->setRect(
+                    "crop",
+                    rect.nLeft,
+                    rect.nTop,
+                    rect.nLeft + rect.nWidth - 1,
+                    rect.nTop + rect.nHeight - 1);
+
+            if (mNativeWindow != NULL) {
+                android_native_rect_t crop;
+                crop.left = rect.nLeft;
+                crop.top = rect.nTop;
+                crop.right = rect.nLeft + rect.nWidth - 1;
+                crop.bottom = rect.nTop + rect.nHeight - 1;
+
+                CHECK_EQ(0, native_window_set_crop(
+                            mNativeWindow.get(), &crop));
+            }
+            break;
+        }
+
+        case OMX_PortDomainAudio:
+        {
+            OMX_AUDIO_PORTDEFINITIONTYPE *audioDef = &def.format.audio;
+            CHECK_EQ((int)audioDef->eEncoding, (int)OMX_AUDIO_CodingPCM);
+
+            OMX_AUDIO_PARAM_PCMMODETYPE params;
+            InitOMXParams(&params);
+            params.nPortIndex = kPortIndexOutput;
+
+            CHECK_EQ(mOMX->getParameter(
+                        mNode, OMX_IndexParamAudioPcm,
+                        &params, sizeof(params)),
+                     (status_t)OK);
+
+            CHECK(params.nChannels == 1 || params.bInterleaved);
+            CHECK_EQ(params.nBitPerSample, 16u);
+            CHECK_EQ((int)params.eNumData, (int)OMX_NumericalDataSigned);
+            CHECK_EQ((int)params.ePCMMode, (int)OMX_AUDIO_PCMModeLinear);
+
+            notify->setString("mime", MEDIA_MIMETYPE_AUDIO_RAW);
+            notify->setInt32("channel-count", params.nChannels);
+            notify->setInt32("sample-rate", params.nSamplingRate);
+            break;
+        }
+
+        default:
+            TRESPASS();
+    }
+
+    notify->post();
+
+    mSentFormat = true;
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+ACodec::BaseState::BaseState(ACodec *codec, const sp<AState> &parentState)
+    : AState(parentState),
+      mCodec(codec) {
+}
+
+ACodec::BaseState::PortMode ACodec::BaseState::getPortMode(OMX_U32 portIndex) {
+    return KEEP_BUFFERS;
+}
+
+bool ACodec::BaseState::onMessageReceived(const sp<AMessage> &msg) {
+    switch (msg->what()) {
+        case kWhatInputBufferFilled:
+        {
+            onInputBufferFilled(msg);
+            break;
+        }
+
+        case kWhatOutputBufferDrained:
+        {
+            onOutputBufferDrained(msg);
+            break;
+        }
+
+        case ACodec::kWhatOMXMessage:
+        {
+            return onOMXMessage(msg);
+        }
+
+        default:
+            return false;
+    }
+
+    return true;
+}
+
+bool ACodec::BaseState::onOMXMessage(const sp<AMessage> &msg) {
+    int32_t type;
+    CHECK(msg->findInt32("type", &type));
+
+    IOMX::node_id nodeID;
+    CHECK(msg->findPointer("node", &nodeID));
+    CHECK_EQ(nodeID, mCodec->mNode);
+
+    switch (type) {
+        case omx_message::EVENT:
+        {
+            int32_t event, data1, data2;
+            CHECK(msg->findInt32("event", &event));
+            CHECK(msg->findInt32("data1", &data1));
+            CHECK(msg->findInt32("data2", &data2));
+
+            return onOMXEvent(
+                    static_cast<OMX_EVENTTYPE>(event),
+                    static_cast<OMX_U32>(data1),
+                    static_cast<OMX_U32>(data2));
+        }
+
+        case omx_message::EMPTY_BUFFER_DONE:
+        {
+            IOMX::buffer_id bufferID;
+            CHECK(msg->findPointer("buffer", &bufferID));
+
+            return onOMXEmptyBufferDone(bufferID);
+        }
+
+        case omx_message::FILL_BUFFER_DONE:
+        {
+            IOMX::buffer_id bufferID;
+            CHECK(msg->findPointer("buffer", &bufferID));
+
+            int32_t rangeOffset, rangeLength, flags;
+            int64_t timeUs;
+            void *platformPrivate;
+            void *dataPtr;
+
+            CHECK(msg->findInt32("range_offset", &rangeOffset));
+            CHECK(msg->findInt32("range_length", &rangeLength));
+            CHECK(msg->findInt32("flags", &flags));
+            CHECK(msg->findInt64("timestamp", &timeUs));
+            CHECK(msg->findPointer("platform_private", &platformPrivate));
+            CHECK(msg->findPointer("data_ptr", &dataPtr));
+
+            return onOMXFillBufferDone(
+                    bufferID,
+                    (size_t)rangeOffset, (size_t)rangeLength,
+                    (OMX_U32)flags,
+                    timeUs,
+                    platformPrivate,
+                    dataPtr);
+        }
+
+        default:
+            TRESPASS();
+            break;
+    }
+}
+
+bool ACodec::BaseState::onOMXEvent(
+        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
+    if (event != OMX_EventError) {
+        LOGV("[%s] EVENT(%d, 0x%08lx, 0x%08lx)",
+             mCodec->mComponentName.c_str(), event, data1, data2);
+
+        return false;
+    }
+
+    LOGE("[%s] ERROR(0x%08lx, 0x%08lx)",
+         mCodec->mComponentName.c_str(), data1, data2);
+
+    mCodec->changeState(mCodec->mErrorState);
+
+    return true;
+}
+
+bool ACodec::BaseState::onOMXEmptyBufferDone(IOMX::buffer_id bufferID) {
+    BufferInfo *info =
+        mCodec->findBufferByID(kPortIndexInput, bufferID);
+
+    CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_COMPONENT);
+    info->mStatus = BufferInfo::OWNED_BY_US;
+
+    PortMode mode = getPortMode(kPortIndexInput);
+
+    switch (mode) {
+        case KEEP_BUFFERS:
+            break;
+
+        case RESUBMIT_BUFFERS:
+            postFillThisBuffer(info);
+            break;
+
+        default:
+        {
+            CHECK_EQ((int)mode, (int)FREE_BUFFERS);
+            TRESPASS();  // Not currently used
+            break;
+        }
+    }
+
+    return true;
+}
+
+void ACodec::BaseState::postFillThisBuffer(BufferInfo *info) {
+    if (mCodec->mPortEOS[kPortIndexInput]) {
+        return;
+    }
+
+    CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_US);
+
+    sp<AMessage> notify = mCodec->mNotify->dup();
+    notify->setInt32("what", ACodec::kWhatFillThisBuffer);
+    notify->setPointer("buffer-id", info->mBufferID);
+
+    info->mData->meta()->clear();
+    notify->setObject("buffer", info->mData);
+
+    sp<AMessage> reply = new AMessage(kWhatInputBufferFilled, mCodec->id());
+    reply->setPointer("buffer-id", info->mBufferID);
+
+    notify->setMessage("reply", reply);
+
+    notify->post();
+
+    info->mStatus = BufferInfo::OWNED_BY_UPSTREAM;
+}
+
+void ACodec::BaseState::onInputBufferFilled(const sp<AMessage> &msg) {
+    IOMX::buffer_id bufferID;
+    CHECK(msg->findPointer("buffer-id", &bufferID));
+
+    sp<RefBase> obj;
+    int32_t err = OK;
+    if (!msg->findObject("buffer", &obj)) {
+        CHECK(msg->findInt32("err", &err));
+
+        LOGV("[%s] saw error %d instead of an input buffer",
+             mCodec->mComponentName.c_str(), err);
+
+        obj.clear();
+    }
+
+    sp<ABuffer> buffer = static_cast<ABuffer *>(obj.get());
+
+    BufferInfo *info = mCodec->findBufferByID(kPortIndexInput, bufferID);
+    CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_UPSTREAM);
+
+    info->mStatus = BufferInfo::OWNED_BY_US;
+
+    PortMode mode = getPortMode(kPortIndexInput);
+
+    switch (mode) {
+        case KEEP_BUFFERS:
+        {
+            if (buffer == NULL) {
+                mCodec->mPortEOS[kPortIndexInput] = true;
+            }
+            break;
+        }
+
+        case RESUBMIT_BUFFERS:
+        {
+            if (buffer != NULL) {
+                CHECK(!mCodec->mPortEOS[kPortIndexInput]);
+
+                int64_t timeUs;
+                CHECK(buffer->meta()->findInt64("timeUs", &timeUs));
+
+                OMX_U32 flags = OMX_BUFFERFLAG_ENDOFFRAME;
+
+                int32_t isCSD;
+                if (buffer->meta()->findInt32("csd", &isCSD) && isCSD != 0) {
+                    flags |= OMX_BUFFERFLAG_CODECCONFIG;
+                }
+
+                if (buffer != info->mData) {
+                    if (!(flags & OMX_BUFFERFLAG_CODECCONFIG)) {
+                        LOGV("[%s] Needs to copy input data.",
+                             mCodec->mComponentName.c_str());
+                    }
+
+                    CHECK_LE(buffer->size(), info->mData->capacity());
+                    memcpy(info->mData->data(), buffer->data(), buffer->size());
+                }
+
+                CHECK_EQ(mCodec->mOMX->emptyBuffer(
+                            mCodec->mNode,
+                            bufferID,
+                            0,
+                            buffer->size(),
+                            flags,
+                            timeUs),
+                         (status_t)OK);
+
+                info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
+
+                getMoreInputDataIfPossible();
+            } else if (!mCodec->mPortEOS[kPortIndexInput]) {
+                LOGV("[%s] Signalling EOS on the input port",
+                     mCodec->mComponentName.c_str());
+
+                CHECK_EQ(mCodec->mOMX->emptyBuffer(
+                            mCodec->mNode,
+                            bufferID,
+                            0,
+                            0,
+                            OMX_BUFFERFLAG_EOS,
+                            0),
+                         (status_t)OK);
+
+                info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
+
+                mCodec->mPortEOS[kPortIndexInput] = true;
+            }
+            break;
+        }
+
+        default:
+            CHECK_EQ((int)mode, (int)FREE_BUFFERS);
+            break;
+    }
+}
+
+void ACodec::BaseState::getMoreInputDataIfPossible() {
+    if (mCodec->mPortEOS[kPortIndexInput]) {
+        return;
+    }
+
+    BufferInfo *eligible = NULL;
+
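+    // Scan all input buffers; the last one still owned by us becomes the candidate.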
+    for (size_t i = 0; i < mCodec->mBuffers[kPortIndexInput].size(); ++i) {
+        BufferInfo *info = &mCodec->mBuffers[kPortIndexInput].editItemAt(i);
+
+#if 0
+        if (info->mStatus == BufferInfo::OWNED_BY_UPSTREAM) {
+            // There's already a "read" pending.
+            return;
+        }
+#endif
+
+        if (info->mStatus == BufferInfo::OWNED_BY_US) {
+            eligible = info;
+        }
+    }
+
+    if (eligible == NULL) {
+        return;
+    }
+
+    postFillThisBuffer(eligible);
+}
+
+bool ACodec::BaseState::onOMXFillBufferDone(
+        IOMX::buffer_id bufferID,
+        size_t rangeOffset, size_t rangeLength,
+        OMX_U32 flags,
+        int64_t timeUs,
+        void *platformPrivate,
+        void *dataPtr) {
+    ssize_t index;
+    BufferInfo *info =
+        mCodec->findBufferByID(kPortIndexOutput, bufferID, &index);
+
+    CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_COMPONENT);
+
+    info->mStatus = BufferInfo::OWNED_BY_US;
+
+    PortMode mode = getPortMode(kPortIndexOutput);
+
+    switch (mode) {
+        case KEEP_BUFFERS:
+            break;
+
+        case RESUBMIT_BUFFERS:
+        {
+            if (rangeLength == 0) {
+                if (!(flags & OMX_BUFFERFLAG_EOS)) {
+                    CHECK_EQ(mCodec->mOMX->fillBuffer(
+                                mCodec->mNode, info->mBufferID),
+                             (status_t)OK);
+
+                    info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
+                }
+            } else {
+                if (!mCodec->mSentFormat) {
+                    mCodec->sendFormatChange();
+                }
+
+                if (mCodec->mNativeWindow == NULL) {
+                    info->mData->setRange(rangeOffset, rangeLength);
+                }
+
+                info->mData->meta()->setInt64("timeUs", timeUs);
+
+                sp<AMessage> notify = mCodec->mNotify->dup();
+                notify->setInt32("what", ACodec::kWhatDrainThisBuffer);
+                notify->setPointer("buffer-id", info->mBufferID);
+                notify->setObject("buffer", info->mData);
+
+                sp<AMessage> reply =
+                    new AMessage(kWhatOutputBufferDrained, mCodec->id());
+
+                reply->setPointer("buffer-id", info->mBufferID);
+
+                notify->setMessage("reply", reply);
+
+                notify->post();
+
+                info->mStatus = BufferInfo::OWNED_BY_DOWNSTREAM;
+            }
+
+            if (flags & OMX_BUFFERFLAG_EOS) {
+                sp<AMessage> notify = mCodec->mNotify->dup();
+                notify->setInt32("what", ACodec::kWhatEOS);
+                notify->post();
+
+                mCodec->mPortEOS[kPortIndexOutput] = true;
+            }
+            break;
+        }
+
+        default:
+        {
+            CHECK_EQ((int)mode, (int)FREE_BUFFERS);
+
+            CHECK_EQ((status_t)OK,
+                     mCodec->freeBuffer(kPortIndexOutput, index));
+            break;
+        }
+    }
+
+    return true;
+}
+
+void ACodec::BaseState::onOutputBufferDrained(const sp<AMessage> &msg) {
+    IOMX::buffer_id bufferID;
+    CHECK(msg->findPointer("buffer-id", &bufferID));
+
+    ssize_t index;
+    BufferInfo *info =
+        mCodec->findBufferByID(kPortIndexOutput, bufferID, &index);
+    CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_DOWNSTREAM);
+
+    int32_t render;
+    if (mCodec->mNativeWindow != NULL
+            && msg->findInt32("render", &render) && render != 0) {
+        // The client wants this buffer to be rendered.
+
+        CHECK_EQ(mCodec->mNativeWindow->queueBuffer(
+                    mCodec->mNativeWindow.get(),
+                    info->mGraphicBuffer.get()),
+                 0);
+
+        info->mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW;
+    } else {
+        info->mStatus = BufferInfo::OWNED_BY_US;
+    }
+
+    PortMode mode = getPortMode(kPortIndexOutput);
+
+    switch (mode) {
+        case KEEP_BUFFERS:
+        {
+            // XXX fishy, revisit!!! What about the FREE_BUFFERS case below?
+
+            if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
+                // We cannot resubmit the buffer we just rendered, dequeue
+                // the spare instead.
+
+                info = mCodec->dequeueBufferFromNativeWindow();
+            }
+            break;
+        }
+
+        case RESUBMIT_BUFFERS:
+        {
+            if (!mCodec->mPortEOS[kPortIndexOutput]) {
+                if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
+                    // We cannot resubmit the buffer we just rendered, dequeue
+                    // the spare instead.
+
+                    info = mCodec->dequeueBufferFromNativeWindow();
+                }
+
+                CHECK_EQ(mCodec->mOMX->fillBuffer(mCodec->mNode, info->mBufferID),
+                         (status_t)OK);
+
+                info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
+            }
+            break;
+        }
+
+        default:
+        {
+            CHECK_EQ((int)mode, (int)FREE_BUFFERS);
+
+            CHECK_EQ((status_t)OK,
+                     mCodec->freeBuffer(kPortIndexOutput, index));
+            break;
+        }
+    }
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+ACodec::UninitializedState::UninitializedState(ACodec *codec)
+    : BaseState(codec) {
+}
+
+bool ACodec::UninitializedState::onMessageReceived(const sp<AMessage> &msg) {
+    bool handled = false;
+
+    switch (msg->what()) {
+        case ACodec::kWhatSetup:
+        {
+            onSetup(msg);
+
+            handled = true;
+            break;
+        }
+
+        case ACodec::kWhatShutdown:
+        {
+            sp<AMessage> notify = mCodec->mNotify->dup();
+            notify->setInt32("what", ACodec::kWhatShutdownCompleted);
+            notify->post();
+
+            handled = true;
+            break;
+        }
+
+        case ACodec::kWhatFlush:
+        {
+            sp<AMessage> notify = mCodec->mNotify->dup();
+            notify->setInt32("what", ACodec::kWhatFlushCompleted);
+            notify->post();
+
+            handled = true;
+            break;
+        }
+
+        default:
+            return BaseState::onMessageReceived(msg);
+    }
+
+    return handled;
+}
+
+void ACodec::UninitializedState::onSetup(
+        const sp<AMessage> &msg) {
+    OMXClient client;
+    CHECK_EQ(client.connect(), (status_t)OK);
+
+    sp<IOMX> omx = client.interface();
+
+    AString mime;
+    CHECK(msg->findString("mime", &mime));
+
+    AString componentName;
+
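+    // Decoder selection is hard-coded to specific NVIDIA components.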
+    if (!strcasecmp(mime.c_str(), MEDIA_MIMETYPE_VIDEO_AVC)) {
+        componentName = "OMX.Nvidia.h264.decode";
+    } else if (!strcasecmp(mime.c_str(), MEDIA_MIMETYPE_AUDIO_AAC)) {
+        componentName = "OMX.Nvidia.aac.decoder";
+    } else if (!strcasecmp(mime.c_str(), MEDIA_MIMETYPE_AUDIO_MPEG)) {
+        componentName = "OMX.Nvidia.mp3.decoder";
+    } else {
+        TRESPASS();
+    }
+
+    sp<CodecObserver> observer = new CodecObserver;
+
+    IOMX::node_id node;
+    CHECK_EQ(omx->allocateNode(componentName.c_str(), observer, &node),
+             (status_t)OK);
+
+    sp<AMessage> notify = new AMessage(kWhatOMXMessage, mCodec->id());
+    observer->setNotificationMessage(notify);
+
+    mCodec->mComponentName = componentName;
+    mCodec->mOMX = omx;
+    mCodec->mNode = node;
+
+    mCodec->configureCodec(mime.c_str(), msg);
+
+    sp<RefBase> obj;
+    if (msg->findObject("surface", &obj)) {
+        mCodec->mNativeWindow = static_cast<Surface *>(obj.get());
+    }
+
+    CHECK_EQ((status_t)OK, mCodec->initNativeWindow());
+
+    CHECK_EQ(omx->sendCommand(node, OMX_CommandStateSet, OMX_StateIdle),
+             (status_t)OK);
+
+    mCodec->changeState(mCodec->mLoadedToIdleState);
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
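+// Loaded->Idle: buffers are allocated on both ports on entry, and once the
+// component reports OMX_StateIdle we immediately request the transition to
+// OMX_StateExecuting.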
+ACodec::LoadedToIdleState::LoadedToIdleState(ACodec *codec)
+    : BaseState(codec) {
+}
+
+void ACodec::LoadedToIdleState::stateEntered() {
+    LOGV("[%s] Now Loaded->Idle", mCodec->mComponentName.c_str());
+
+    CHECK_EQ(allocateBuffers(), (status_t)OK);
+}
+
+status_t ACodec::LoadedToIdleState::allocateBuffers() {
+    status_t err = mCodec->allocateBuffersOnPort(kPortIndexInput);
+
+    if (err != OK) {
+        return err;
+    }
+
+    return mCodec->allocateBuffersOnPort(kPortIndexOutput);
+}
+
+bool ACodec::LoadedToIdleState::onMessageReceived(const sp<AMessage> &msg) {
+    switch (msg->what()) {
+        case kWhatShutdown:
+        {
+            mCodec->deferMessage(msg);
+            return true;
+        }
+
+        default:
+            return BaseState::onMessageReceived(msg);
+    }
+}
+
+bool ACodec::LoadedToIdleState::onOMXEvent(
+        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
+    switch (event) {
+        case OMX_EventCmdComplete:
+        {
+            CHECK_EQ(data1, (OMX_U32)OMX_CommandStateSet);
+            CHECK_EQ(data2, (OMX_U32)OMX_StateIdle);
+
+            CHECK_EQ(mCodec->mOMX->sendCommand(
+                        mCodec->mNode, OMX_CommandStateSet, OMX_StateExecuting),
+                     (status_t)OK);
+
+            mCodec->changeState(mCodec->mIdleToExecutingState);
+
+            return true;
+        }
+
+        default:
+            return BaseState::onOMXEvent(event, data1, data2);
+    }
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
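+// Idle->Executing: shutdown requests are deferred until the transition is
+// complete; on OMX_StateExecuting we resume through ExecutingState.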
+ACodec::IdleToExecutingState::IdleToExecutingState(ACodec *codec)
+    : BaseState(codec) {
+}
+
+void ACodec::IdleToExecutingState::stateEntered() {
+    LOGV("[%s] Now Idle->Executing", mCodec->mComponentName.c_str());
+}
+
+bool ACodec::IdleToExecutingState::onMessageReceived(const sp<AMessage> &msg) {
+    switch (msg->what()) {
+        case kWhatShutdown:
+        {
+            mCodec->deferMessage(msg);
+            return true;
+        }
+
+        default:
+            return BaseState::onMessageReceived(msg);
+    }
+}
+
+bool ACodec::IdleToExecutingState::onOMXEvent(
+        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
+    switch (event) {
+        case OMX_EventCmdComplete:
+        {
+            CHECK_EQ(data1, (OMX_U32)OMX_CommandStateSet);
+            CHECK_EQ(data2, (OMX_U32)OMX_StateExecuting);
+
+            mCodec->mExecutingState->resume();
+            mCodec->changeState(mCodec->mExecutingState);
+
+            return true;
+        }
+
+        default:
+            return BaseState::onOMXEvent(event, data1, data2);
+    }
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+ACodec::ExecutingState::ExecutingState(ACodec *codec)
+    : BaseState(codec) {
+}
+
+ACodec::BaseState::PortMode ACodec::ExecutingState::getPortMode(
+        OMX_U32 portIndex) {
+    return RESUBMIT_BUFFERS;
+}
+
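+// Returns every output buffer we own to the component. Buffers still held by
+// the native window are skipped; window-backed buffers are locked before
+// being handed back via fillBuffer.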
+void ACodec::ExecutingState::submitOutputBuffers() {
+    for (size_t i = 0; i < mCodec->mBuffers[kPortIndexOutput].size(); ++i) {
+        BufferInfo *info = &mCodec->mBuffers[kPortIndexOutput].editItemAt(i);
+
+        if (mCodec->mNativeWindow != NULL) {
+            CHECK(info->mStatus == BufferInfo::OWNED_BY_US
+                    || info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW);
+
+            if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
+                continue;
+            }
+
+            status_t err = mCodec->mNativeWindow->lockBuffer(
+                    mCodec->mNativeWindow.get(),
+                    info->mGraphicBuffer.get());
+            CHECK_EQ(err, (status_t)OK);
+        } else {
+            CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_US);
+        }
+
+        CHECK_EQ(mCodec->mOMX->fillBuffer(mCodec->mNode, info->mBufferID),
+                 (status_t)OK);
+
+        info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
+    }
+}
+
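+// Re-arm the output port, then request the first piece of input data from
+// the client.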
+void ACodec::ExecutingState::resume() {
+    submitOutputBuffers();
+
+    // Post the first input buffer.
+    CHECK_GT(mCodec->mBuffers[kPortIndexInput].size(), 0u);
+    BufferInfo *info = &mCodec->mBuffers[kPortIndexInput].editItemAt(0);
+
+    postFillThisBuffer(info);
+}
+
+void ACodec::ExecutingState::stateEntered() {
+    LOGV("[%s] Now Executing", mCodec->mComponentName.c_str());
+
+    mCodec->processDeferredMessages();
+}
+
+bool ACodec::ExecutingState::onMessageReceived(const sp<AMessage> &msg) {
+    bool handled = false;
+
+    switch (msg->what()) {
+        case kWhatShutdown:
+        {
+            CHECK_EQ(mCodec->mOMX->sendCommand(
+                        mCodec->mNode, OMX_CommandStateSet, OMX_StateIdle),
+                     (status_t)OK);
+
+            mCodec->changeState(mCodec->mExecutingToIdleState);
+
+            handled = true;
+            break;
+        }
+
+        case kWhatFlush:
+        {
+            CHECK_EQ(mCodec->mOMX->sendCommand(
+                        mCodec->mNode, OMX_CommandFlush, OMX_ALL),
+                     (status_t)OK);
+
+            mCodec->changeState(mCodec->mFlushingState);
+
+            handled = true;
+            break;
+        }
+
+        case kWhatResume:
+        {
+            resume();
+
+            handled = true;
+            break;
+        }
+
+        default:
+            handled = BaseState::onMessageReceived(msg);
+            break;
+    }
+
+    return handled;
+}
+
+bool ACodec::ExecutingState::onOMXEvent(
+        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
+    switch (event) {
+        case OMX_EventPortSettingsChanged:
+        {
+            CHECK_EQ(data1, (OMX_U32)kPortIndexOutput);
+
+            if (data2 == 0 || data2 == OMX_IndexParamPortDefinition) {
+                CHECK_EQ(mCodec->mOMX->sendCommand(
+                            mCodec->mNode,
+                            OMX_CommandPortDisable, kPortIndexOutput),
+                         (status_t)OK);
+
+                if (mCodec->mNativeWindow != NULL) {
+                    CHECK_EQ((status_t)OK,
+                             mCodec->freeOutputBuffersOwnedByNativeWindow());
+                }
+
+                mCodec->changeState(mCodec->mOutputPortSettingsChangedState);
+            } else if (data2 == OMX_IndexConfigCommonOutputCrop) {
+                mCodec->mSentFormat = false;
+            } else {
+                LOGV("[%s] OMX_EventPortSettingsChanged 0x%08lx",
+                     mCodec->mComponentName.c_str(), data2);
+            }
+
+            return true;
+        }
+
+        case OMX_EventBufferFlag:
+        {
+            return true;
+        }
+
+        default:
+            return BaseState::onOMXEvent(event, data1, data2);
+    }
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
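+// While the output port is reconfigured, drained output buffers are freed
+// (FREE_BUFFERS) and the input port keeps running (RESUBMIT_BUFFERS);
+// flush and shutdown requests are deferred until the port is re-enabled.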
+ACodec::OutputPortSettingsChangedState::OutputPortSettingsChangedState(
+        ACodec *codec)
+    : BaseState(codec) {
+}
+
+ACodec::BaseState::PortMode ACodec::OutputPortSettingsChangedState::getPortMode(
+        OMX_U32 portIndex) {
+    if (portIndex == kPortIndexOutput) {
+        return FREE_BUFFERS;
+    }
+
+    CHECK_EQ(portIndex, (OMX_U32)kPortIndexInput);
+
+    return RESUBMIT_BUFFERS;
+}
+
+bool ACodec::OutputPortSettingsChangedState::onMessageReceived(
+        const sp<AMessage> &msg) {
+    bool handled = false;
+
+    switch (msg->what()) {
+        case kWhatFlush:
+        case kWhatShutdown:
+        {
+            mCodec->deferMessage(msg);
+            handled = true;
+            break;
+        }
+
+        default:
+            handled = BaseState::onMessageReceived(msg);
+            break;
+    }
+
+    return handled;
+}
+
+void ACodec::OutputPortSettingsChangedState::stateEntered() {
+    LOGV("[%s] Now handling output port settings change",
+         mCodec->mComponentName.c_str());
+}
+
+bool ACodec::OutputPortSettingsChangedState::onOMXEvent(
+        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
+    switch (event) {
+        case OMX_EventCmdComplete:
+        {
+            if (data1 == (OMX_U32)OMX_CommandPortDisable) {
+                CHECK_EQ(data2, (OMX_U32)kPortIndexOutput);
+
+                LOGV("[%s] Output port now disabled.",
+                        mCodec->mComponentName.c_str());
+
+                CHECK(mCodec->mBuffers[kPortIndexOutput].isEmpty());
+                mCodec->mDealer[kPortIndexOutput].clear();
+
+                CHECK_EQ(mCodec->mOMX->sendCommand(
+                            mCodec->mNode, OMX_CommandPortEnable, kPortIndexOutput),
+                         (status_t)OK);
+
+                CHECK_EQ(mCodec->allocateBuffersOnPort(kPortIndexOutput),
+                         (status_t)OK);
+
+                return true;
+            } else if (data1 == (OMX_U32)OMX_CommandPortEnable) {
+                CHECK_EQ(data2, (OMX_U32)kPortIndexOutput);
+
+                mCodec->mSentFormat = false;
+
+                LOGV("[%s] Output port now reenabled.",
+                        mCodec->mComponentName.c_str());
+
+                mCodec->mExecutingState->submitOutputBuffers();
+                mCodec->changeState(mCodec->mExecutingState);
+
+                return true;
+            }
+
+            return false;
+        }
+
+        default:
+            return false;
+    }
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
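+// First half of shutdown: wait until every buffer is back in our hands, then
+// free them and request the transition on to OMX_StateLoaded.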
+ACodec::ExecutingToIdleState::ExecutingToIdleState(ACodec *codec)
+    : BaseState(codec) {
+}
+
+bool ACodec::ExecutingToIdleState::onMessageReceived(const sp<AMessage> &msg) {
+    bool handled = false;
+
+    switch (msg->what()) {
+        case kWhatFlush:
+        {
+            // Don't send me a flush request if you previously wanted me
+            // to shutdown.
+            TRESPASS();
+            break;
+        }
+
+        case kWhatShutdown:
+        {
+            // We're already doing that...
+
+            handled = true;
+            break;
+        }
+
+        default:
+            handled = BaseState::onMessageReceived(msg);
+            break;
+    }
+
+    return handled;
+}
+
+void ACodec::ExecutingToIdleState::stateEntered() {
+    LOGV("[%s] Now Executing->Idle", mCodec->mComponentName.c_str());
+
+    mCodec->mSentFormat = false;
+}
+
+bool ACodec::ExecutingToIdleState::onOMXEvent(
+        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
+    switch (event) {
+        case OMX_EventCmdComplete:
+        {
+            CHECK_EQ(data1, (OMX_U32)OMX_CommandStateSet);
+            CHECK_EQ(data2, (OMX_U32)OMX_StateIdle);
+
+            changeStateIfWeOwnAllBuffers();
+
+            return true;
+        }
+
+        default:
+            return BaseState::onOMXEvent(event, data1, data2);
+    }
+}
+
+void ACodec::ExecutingToIdleState::changeStateIfWeOwnAllBuffers() {
+    if (mCodec->allYourBuffersAreBelongToUs()) {
+        CHECK_EQ(mCodec->mOMX->sendCommand(
+                    mCodec->mNode, OMX_CommandStateSet, OMX_StateLoaded),
+                 (status_t)OK);
+
+        CHECK_EQ(mCodec->freeBuffersOnPort(kPortIndexInput), (status_t)OK);
+        CHECK_EQ(mCodec->freeBuffersOnPort(kPortIndexOutput), (status_t)OK);
+
+        mCodec->changeState(mCodec->mIdleToLoadedState);
+    }
+}
+
+void ACodec::ExecutingToIdleState::onInputBufferFilled(
+        const sp<AMessage> &msg) {
+    BaseState::onInputBufferFilled(msg);
+
+    changeStateIfWeOwnAllBuffers();
+}
+
+void ACodec::ExecutingToIdleState::onOutputBufferDrained(
+        const sp<AMessage> &msg) {
+    BaseState::onOutputBufferDrained(msg);
+
+    changeStateIfWeOwnAllBuffers();
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
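+// Second half of shutdown: once the component reaches OMX_StateLoaded the
+// node is freed and the shutdown-completed notification is posted.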
+ACodec::IdleToLoadedState::IdleToLoadedState(ACodec *codec)
+    : BaseState(codec) {
+}
+
+bool ACodec::IdleToLoadedState::onMessageReceived(const sp<AMessage> &msg) {
+    bool handled = false;
+
+    switch (msg->what()) {
+        case kWhatShutdown:
+        {
+            // We're already doing that...
+
+            handled = true;
+            break;
+        }
+
+        case kWhatFlush:
+        {
+            // Don't send me a flush request if you previously wanted me
+            // to shutdown.
+            TRESPASS();
+            break;
+        }
+
+        default:
+            handled = BaseState::onMessageReceived(msg);
+            break;
+    }
+
+    return handled;
+}
+
+void ACodec::IdleToLoadedState::stateEntered() {
+    LOGV("[%s] Now Idle->Loaded", mCodec->mComponentName.c_str());
+}
+
+bool ACodec::IdleToLoadedState::onOMXEvent(
+        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
+    switch (event) {
+        case OMX_EventCmdComplete:
+        {
+            CHECK_EQ(data1, (OMX_U32)OMX_CommandStateSet);
+            CHECK_EQ(data2, (OMX_U32)OMX_StateLoaded);
+
+            LOGV("[%s] Now Loaded", mCodec->mComponentName.c_str());
+
+            CHECK_EQ(mCodec->mOMX->freeNode(mCodec->mNode), (status_t)OK);
+
+            mCodec->mNativeWindow.clear();
+            mCodec->mNode = NULL;
+            mCodec->mOMX.clear();
+            mCodec->mComponentName.clear();
+
+            mCodec->changeState(mCodec->mUninitializedState);
+
+            sp<AMessage> notify = mCodec->mNotify->dup();
+            notify->setInt32("what", ACodec::kWhatShutdownCompleted);
+            notify->post();
+
+            return true;
+        }
+
+        default:
+            return BaseState::onOMXEvent(event, data1, data2);
+    }
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+ACodec::ErrorState::ErrorState(ACodec *codec)
+    : BaseState(codec) {
+}
+
+bool ACodec::ErrorState::onMessageReceived(const sp<AMessage> &msg) {
+    return BaseState::onMessageReceived(msg);
+}
+
+void ACodec::ErrorState::stateEntered() {
+    LOGV("[%s] Now in ErrorState", mCodec->mComponentName.c_str());
+}
+
+bool ACodec::ErrorState::onOMXEvent(
+        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
+    LOGV("EVENT(%d, 0x%08lx, 0x%08lx)", event, data1, data2);
+    return true;
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
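+// A flush is only complete once the component has acknowledged the flush on
+// both ports and every buffer has been returned to us; then the client is
+// notified and we return to ExecutingState.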
+ACodec::FlushingState::FlushingState(ACodec *codec)
+    : BaseState(codec) {
+}
+
+void ACodec::FlushingState::stateEntered() {
+    LOGV("[%s] Now Flushing", mCodec->mComponentName.c_str());
+
+    mFlushComplete[kPortIndexInput] = mFlushComplete[kPortIndexOutput] = false;
+}
+
+bool ACodec::FlushingState::onMessageReceived(const sp<AMessage> &msg) {
+    bool handled = false;
+
+    switch (msg->what()) {
+        case kWhatShutdown:
+        {
+            mCodec->deferMessage(msg);
+            handled = true;
+            break;
+        }
+
+        case kWhatFlush:
+        {
+            // We're already doing this right now.
+            handled = true;
+            break;
+        }
+
+        default:
+            handled = BaseState::onMessageReceived(msg);
+            break;
+    }
+
+    return handled;
+}
+
+bool ACodec::FlushingState::onOMXEvent(
+        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
+    switch (event) {
+        case OMX_EventCmdComplete:
+        {
+            CHECK_EQ(data1, (OMX_U32)OMX_CommandFlush);
+
+            if (data2 == kPortIndexInput || data2 == kPortIndexOutput) {
+                CHECK(!mFlushComplete[data2]);
+                mFlushComplete[data2] = true;
+            } else {
+                CHECK_EQ(data2, (OMX_U32)OMX_ALL);
+                CHECK(mFlushComplete[kPortIndexInput]);
+                CHECK(mFlushComplete[kPortIndexOutput]);
+
+                changeStateIfWeOwnAllBuffers();
+            }
+
+            return true;
+        }
+
+        default:
+            return BaseState::onOMXEvent(event, data1, data2);
+    }
+
+    return true;
+}
+
+void ACodec::FlushingState::onOutputBufferDrained(const sp<AMessage> &msg) {
+    BaseState::onOutputBufferDrained(msg);
+
+    changeStateIfWeOwnAllBuffers();
+}
+
+void ACodec::FlushingState::onInputBufferFilled(const sp<AMessage> &msg) {
+    BaseState::onInputBufferFilled(msg);
+
+    changeStateIfWeOwnAllBuffers();
+}
+
+void ACodec::FlushingState::changeStateIfWeOwnAllBuffers() {
+    if (mFlushComplete[kPortIndexInput]
+            && mFlushComplete[kPortIndexOutput]
+            && mCodec->allYourBuffersAreBelongToUs()) {
+        sp<AMessage> notify = mCodec->mNotify->dup();
+        notify->setInt32("what", ACodec::kWhatFlushCompleted);
+        notify->post();
+
+        mCodec->mPortEOS[kPortIndexInput] =
+            mCodec->mPortEOS[kPortIndexOutput] = false;
+
+        mCodec->changeState(mCodec->mExecutingState);
+    }
+}
+
+}  // namespace android
+
diff --git a/media/libstagefright/AMRExtractor.cpp b/media/libstagefright/AMRExtractor.cpp
index 1b05528..ac87c29 100644
--- a/media/libstagefright/AMRExtractor.cpp
+++ b/media/libstagefright/AMRExtractor.cpp
@@ -55,7 +55,7 @@
     size_t mFrameSize;
     bool mIsWide;
 
-    off_t mOffset;
+    off64_t mOffset;
     int64_t mCurrentTimeUs;
     bool mStarted;
     MediaBufferGroup *mGroup;
@@ -115,9 +115,9 @@
 
     mFrameSize = getFrameSize(mIsWide, FT);
 
-    off_t streamSize;
+    off64_t streamSize;
     if (mDataSource->getSize(&streamSize) == OK) {
-        off_t numFrames = streamSize / mFrameSize;
+        off64_t numFrames = streamSize / mFrameSize;
 
         mMeta->setInt64(kKeyDuration, 20000ll * numFrames);
     }
diff --git a/media/libstagefright/AMRWriter.cpp b/media/libstagefright/AMRWriter.cpp
index c0b1abe..0db3d1d 100644
--- a/media/libstagefright/AMRWriter.cpp
+++ b/media/libstagefright/AMRWriter.cpp
@@ -24,20 +24,28 @@
 #include <media/mediarecorder.h>
 #include <sys/prctl.h>
 #include <sys/resource.h>
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <fcntl.h>
 
 namespace android {
 
 AMRWriter::AMRWriter(const char *filename)
-    : mFile(fopen(filename, "wb")),
-      mInitCheck(mFile != NULL ? OK : NO_INIT),
+    : mFd(-1),
+      mInitCheck(NO_INIT),
       mStarted(false),
       mPaused(false),
       mResumed(false) {
+
+    // O_CREAT requires an explicit mode, and the descriptor must be opened
+    // for writing or the subsequent write() calls will fail.
+    mFd = open(filename, O_CREAT | O_LARGEFILE | O_TRUNC | O_RDWR, S_IRUSR | S_IWUSR);
+    if (mFd >= 0) {
+        mInitCheck = OK;
+    }
 }
 
 AMRWriter::AMRWriter(int fd)
-    : mFile(fdopen(fd, "wb")),
-      mInitCheck(mFile != NULL ? OK : NO_INIT),
+    : mFd(dup(fd)),
+      mInitCheck(mFd < 0 ? NO_INIT : OK),
       mStarted(false),
       mPaused(false),
       mResumed(false) {
@@ -48,9 +56,9 @@
         stop();
     }
 
-    if (mFile != NULL) {
-        fclose(mFile);
-        mFile = NULL;
+    if (mFd != -1) {
+        close(mFd);
+        mFd = -1;
     }
 }
 
@@ -90,8 +98,8 @@
     mSource = source;
 
     const char *kHeader = isWide ? "#!AMR-WB\n" : "#!AMR\n";
-    size_t n = strlen(kHeader);
-    if (fwrite(kHeader, 1, n, mFile) != n) {
+    ssize_t n = strlen(kHeader);
+    if (write(mFd, kHeader, n) != n) {
         return ERROR_IO;
     }
 
@@ -240,11 +248,9 @@
             notify(MEDIA_RECORDER_EVENT_INFO, MEDIA_RECORDER_INFO_MAX_DURATION_REACHED, 0);
             break;
         }
-        ssize_t n = fwrite(
-                (const uint8_t *)buffer->data() + buffer->range_offset(),
-                1,
-                buffer->range_length(),
-                mFile);
+        ssize_t n = write(mFd,
+                        (const uint8_t *)buffer->data() + buffer->range_offset(),
+                        buffer->range_length());
 
         if (n < (ssize_t)buffer->range_length()) {
             buffer->release();
@@ -266,9 +272,8 @@
         notify(MEDIA_RECORDER_EVENT_INFO, MEDIA_RECORDER_INFO_COMPLETION_STATUS, UNKNOWN_ERROR);
     }
 
-    fflush(mFile);
-    fclose(mFile);
-    mFile = NULL;
+    close(mFd);
+    mFd = -1;
     mReachedEOS = true;
     if (err == ERROR_END_OF_STREAM) {
         return OK;
diff --git a/media/libstagefright/Android.mk b/media/libstagefright/Android.mk
index d674547..2d486e3 100644
--- a/media/libstagefright/Android.mk
+++ b/media/libstagefright/Android.mk
@@ -4,12 +4,15 @@
 include frameworks/base/media/libstagefright/codecs/common/Config.mk
 
 LOCAL_SRC_FILES:=                         \
+        ACodec.cpp                        \
         AMRExtractor.cpp                  \
         AMRWriter.cpp                     \
         AudioPlayer.cpp                   \
         AudioSource.cpp                   \
         AwesomePlayer.cpp                 \
         CameraSource.cpp                  \
+        CameraSourceTimeLapse.cpp         \
+        VideoSourceDownSampler.cpp        \
         DataSource.cpp                    \
         DRMExtractor.cpp                  \
         ESDS.cpp                          \
@@ -25,6 +28,7 @@
         MediaDefs.cpp                     \
         MediaExtractor.cpp                \
         MediaSource.cpp                   \
+        MediaSourceSplitter.cpp           \
         MetaData.cpp                      \
         NuCachedSource2.cpp               \
         NuHTTPDataSource.cpp              \
@@ -41,9 +45,11 @@
         TimeSource.cpp                    \
         TimedEventQueue.cpp               \
         Utils.cpp                         \
+        VBRISeeker.cpp                    \
         WAVExtractor.cpp                  \
+        WVMExtractor.cpp                  \
+        XINGSeeker.cpp                    \
         avc_utils.cpp                     \
-        string.cpp
 
 LOCAL_C_INCLUDES:= \
 	$(JNI_H_INCLUDE) \
@@ -60,10 +66,13 @@
         libsonivox        \
         libvorbisidec     \
         libsurfaceflinger_client \
+        libstagefright_yuv \
         libcamera_client \
-        libdrmframework
+        libdrmframework  \
+        libcrypto
 
 LOCAL_STATIC_LIBRARIES := \
+        libstagefright_color_conversion \
         libstagefright_aacdec \
         libstagefright_aacenc \
         libstagefright_amrnbdec \
@@ -90,7 +99,6 @@
         libstagefright_enc_common \
         libstagefright_avc_common \
         libstagefright_foundation \
-        libstagefright_color_conversion
 
 ifeq ($(TARGET_OS)-$(TARGET_SIMULATOR),linux-true)
         LOCAL_LDLIBS += -lpthread -ldl
diff --git a/media/libstagefright/AudioSource.cpp b/media/libstagefright/AudioSource.cpp
index 29f16d8..235d752 100644
--- a/media/libstagefright/AudioSource.cpp
+++ b/media/libstagefright/AudioSource.cpp
@@ -140,38 +140,6 @@
     return meta;
 }
 
-/*
- * Returns -1 if frame skipping request is too long.
- * Returns  0 if there is no need to skip frames.
- * Returns  1 if we need to skip frames.
- */
-static int skipFrame(int64_t timestampUs,
-        const MediaSource::ReadOptions *options) {
-
-    int64_t skipFrameUs;
-    if (!options || !options->getSkipFrame(&skipFrameUs)) {
-        return 0;
-    }
-
-    if (skipFrameUs <= timestampUs) {
-        return 0;
-    }
-
-    // Safe guard against the abuse of the kSkipFrame_Option.
-    if (skipFrameUs - timestampUs >= 1E6) {
-        LOGE("Frame skipping requested is way too long: %lld us",
-            skipFrameUs - timestampUs);
-
-        return -1;
-    }
-
-    LOGV("skipFrame: %lld us > timestamp: %lld us",
-        skipFrameUs, timestampUs);
-
-    return 1;
-
-}
-
 void AudioSource::rampVolume(
         int32_t startFrame, int32_t rampDurationFrames,
         uint8_t *data,   size_t bytes) {
@@ -218,7 +186,7 @@
     CHECK_EQ(mGroup->acquire_buffer(&buffer), OK);
 
     int err = 0;
-    while (mStarted) {
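+    // With the frame-skipping support removed there is no retry path left,
+    // so a single pass replaces the old while loop.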
+    if (mStarted) {
 
         uint32_t numFramesRecorded;
         mRecord->getPosition(&numFramesRecorded);
@@ -268,12 +236,6 @@
             if (mCollectStats) {
                 mTotalLostFrames += (numLostBytes >> 1);
             }
-            if ((err = skipFrame(timestampUs, options)) == -1) {
-                buffer->release();
-                return UNKNOWN_ERROR;
-            } else if (err != 0) {
-                continue;
-            }
             memset(buffer->data(), 0, numLostBytes);
             buffer->set_range(0, numLostBytes);
             if (numFramesRecorded == 0) {
@@ -294,12 +256,6 @@
 
         int64_t recordDurationUs = (1000000LL * n >> 1) / sampleRate;
         timestampUs += recordDurationUs;
-        if ((err = skipFrame(timestampUs, options)) == -1) {
-            buffer->release();
-            return UNKNOWN_ERROR;
-        } else if (err != 0) {
-            continue;
-        }
 
         if (mPrevSampleTimeUs - mStartTimeUs < kAutoRampStartUs) {
             // Mute the initial video recording signal
diff --git a/media/libstagefright/AwesomePlayer.cpp b/media/libstagefright/AwesomePlayer.cpp
index 50c5f3e..914e409 100644
--- a/media/libstagefright/AwesomePlayer.cpp
+++ b/media/libstagefright/AwesomePlayer.cpp
@@ -22,7 +22,6 @@
 
 #include "include/ARTSPController.h"
 #include "include/AwesomePlayer.h"
-#include "include/LiveSource.h"
 #include "include/SoftwareRenderer.h"
 #include "include/NuCachedSource2.h"
 #include "include/ThrottledSource.h"
@@ -34,20 +33,26 @@
 #include "UDPPusher.h"
 
 #include <binder/IPCThreadState.h>
+#include <media/stagefright/foundation/hexdump.h>
+#include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/AudioPlayer.h>
 #include <media/stagefright/DataSource.h>
 #include <media/stagefright/FileSource.h>
 #include <media/stagefright/MediaBuffer.h>
 #include <media/stagefright/MediaDefs.h>
 #include <media/stagefright/MediaExtractor.h>
-#include <media/stagefright/MediaDebug.h>
 #include <media/stagefright/MediaSource.h>
 #include <media/stagefright/MetaData.h>
 #include <media/stagefright/OMXCodec.h>
 
-#include <surfaceflinger/ISurface.h>
+#include <surfaceflinger/Surface.h>
 
 #include <media/stagefright/foundation/ALooper.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include "include/LiveSession.h"
+
+#define USE_SURFACE_ALLOC 1
+#define FRAME_DROP_FREQ 7
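+// A late frame is dropped only after more than FRAME_DROP_FREQ frames have
+// been rendered since the previous drop.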
 
 namespace android {
 
@@ -77,49 +82,10 @@
     AwesomeEvent &operator=(const AwesomeEvent &);
 };
 
-struct AwesomeRemoteRenderer : public AwesomeRenderer {
-    AwesomeRemoteRenderer(const sp<IOMXRenderer> &target)
-        : mTarget(target) {
-    }
-
-    virtual status_t initCheck() const {
-        return OK;
-    }
-
-    virtual void render(MediaBuffer *buffer) {
-        void *id;
-        if (buffer->meta_data()->findPointer(kKeyBufferID, &id)) {
-            mTarget->render((IOMX::buffer_id)id);
-        }
-    }
-
-private:
-    sp<IOMXRenderer> mTarget;
-
-    AwesomeRemoteRenderer(const AwesomeRemoteRenderer &);
-    AwesomeRemoteRenderer &operator=(const AwesomeRemoteRenderer &);
-};
-
 struct AwesomeLocalRenderer : public AwesomeRenderer {
     AwesomeLocalRenderer(
-            bool previewOnly,
-            const char *componentName,
-            OMX_COLOR_FORMATTYPE colorFormat,
-            const sp<ISurface> &surface,
-            size_t displayWidth, size_t displayHeight,
-            size_t decodedWidth, size_t decodedHeight,
-            int32_t rotationDegrees)
-        : mInitCheck(NO_INIT),
-          mTarget(NULL),
-          mLibHandle(NULL) {
-            mInitCheck = init(previewOnly, componentName,
-                 colorFormat, surface, displayWidth,
-                 displayHeight, decodedWidth, decodedHeight,
-                 rotationDegrees);
-    }
-
-    virtual status_t initCheck() const {
-        return mInitCheck;
+            const sp<Surface> &surface, const sp<MetaData> &meta)
+        : mTarget(new SoftwareRenderer(surface, meta)) {
     }
 
     virtual void render(MediaBuffer *buffer) {
@@ -135,106 +101,64 @@
     virtual ~AwesomeLocalRenderer() {
         delete mTarget;
         mTarget = NULL;
-
-        if (mLibHandle) {
-            dlclose(mLibHandle);
-            mLibHandle = NULL;
-        }
     }
 
 private:
-    status_t mInitCheck;
-    VideoRenderer *mTarget;
-    void *mLibHandle;
-
-    status_t init(
-            bool previewOnly,
-            const char *componentName,
-            OMX_COLOR_FORMATTYPE colorFormat,
-            const sp<ISurface> &surface,
-            size_t displayWidth, size_t displayHeight,
-            size_t decodedWidth, size_t decodedHeight,
-            int32_t rotationDegrees);
+    SoftwareRenderer *mTarget;
 
     AwesomeLocalRenderer(const AwesomeLocalRenderer &);
     AwesomeLocalRenderer &operator=(const AwesomeLocalRenderer &);;
 };
 
-status_t AwesomeLocalRenderer::init(
-        bool previewOnly,
-        const char *componentName,
-        OMX_COLOR_FORMATTYPE colorFormat,
-        const sp<ISurface> &surface,
-        size_t displayWidth, size_t displayHeight,
-        size_t decodedWidth, size_t decodedHeight,
-        int32_t rotationDegrees) {
-    if (!previewOnly) {
-        // We will stick to the vanilla software-color-converting renderer
-        // for "previewOnly" mode, to avoid unneccessarily switching overlays
-        // more often than necessary.
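+// Renders hardware-decoded output by queueing the decoder's ANativeBuffers
+// directly to the ANativeWindow, so no copy or color conversion is needed.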
+struct AwesomeNativeWindowRenderer : public AwesomeRenderer {
+    AwesomeNativeWindowRenderer(
+            const sp<ANativeWindow> &nativeWindow,
+            int32_t rotationDegrees)
+        : mNativeWindow(nativeWindow) {
+        applyRotation(rotationDegrees);
+    }
 
-        mLibHandle = dlopen("libstagefrighthw.so", RTLD_NOW);
+    virtual void render(MediaBuffer *buffer) {
+        status_t err = mNativeWindow->queueBuffer(
+                mNativeWindow.get(), buffer->graphicBuffer().get());
+        if (err != 0) {
+            LOGE("queueBuffer failed with error %s (%d)", strerror(-err),
+                    -err);
+            return;
+        }
 
-        if (mLibHandle) {
-            typedef VideoRenderer *(*CreateRendererWithRotationFunc)(
-                    const sp<ISurface> &surface,
-                    const char *componentName,
-                    OMX_COLOR_FORMATTYPE colorFormat,
-                    size_t displayWidth, size_t displayHeight,
-                    size_t decodedWidth, size_t decodedHeight,
-                    int32_t rotationDegrees);
+        sp<MetaData> metaData = buffer->meta_data();
+        metaData->setInt32(kKeyRendered, 1);
+    }
 
-            typedef VideoRenderer *(*CreateRendererFunc)(
-                    const sp<ISurface> &surface,
-                    const char *componentName,
-                    OMX_COLOR_FORMATTYPE colorFormat,
-                    size_t displayWidth, size_t displayHeight,
-                    size_t decodedWidth, size_t decodedHeight);
+protected:
+    virtual ~AwesomeNativeWindowRenderer() {}
 
-            CreateRendererWithRotationFunc funcWithRotation =
-                (CreateRendererWithRotationFunc)dlsym(
-                        mLibHandle,
-                        "_Z26createRendererWithRotationRKN7android2spINS_8"
-                        "ISurfaceEEEPKc20OMX_COLOR_FORMATTYPEjjjji");
+private:
+    sp<ANativeWindow> mNativeWindow;
 
-            if (funcWithRotation) {
-                mTarget =
-                    (*funcWithRotation)(
-                            surface, componentName, colorFormat,
-                            displayWidth, displayHeight,
-                            decodedWidth, decodedHeight,
-                            rotationDegrees);
-            } else {
-                if (rotationDegrees != 0) {
-                    LOGW("renderer does not support rotation.");
-                }
+    void applyRotation(int32_t rotationDegrees) {
+        uint32_t transform;
+        switch (rotationDegrees) {
+            case 0: transform = 0; break;
+            case 90: transform = HAL_TRANSFORM_ROT_90; break;
+            case 180: transform = HAL_TRANSFORM_ROT_180; break;
+            case 270: transform = HAL_TRANSFORM_ROT_270; break;
+            default: transform = 0; break;
+        }
 
-                CreateRendererFunc func =
-                    (CreateRendererFunc)dlsym(
-                            mLibHandle,
-                            "_Z14createRendererRKN7android2spINS_8ISurfaceEEEPKc20"
-                            "OMX_COLOR_FORMATTYPEjjjj");
-
-                if (func) {
-                    mTarget =
-                        (*func)(surface, componentName, colorFormat,
-                            displayWidth, displayHeight,
-                            decodedWidth, decodedHeight);
-                }
-            }
+        if (transform) {
+            CHECK_EQ(0, native_window_set_buffers_transform(
+                        mNativeWindow.get(), transform));
         }
     }
 
-    if (mTarget != NULL) {
-        return OK;
-    }
+    AwesomeNativeWindowRenderer(const AwesomeNativeWindowRenderer &);
+    AwesomeNativeWindowRenderer &operator=(
+            const AwesomeNativeWindowRenderer &);
+};
 
-    mTarget = new SoftwareRenderer(
-            colorFormat, surface, displayWidth, displayHeight,
-            decodedWidth, decodedHeight, rotationDegrees);
-
-    return ((SoftwareRenderer *)mTarget)->initCheck();
-}
+////////////////////////////////////////////////////////////////////////////////
 
 AwesomePlayer::AwesomePlayer()
     : mQueueStarted(false),
@@ -243,11 +167,9 @@
       mAudioPlayer(NULL),
       mFlags(0),
       mExtractorFlags(0),
-      mLastVideoBuffer(NULL),
       mVideoBuffer(NULL),
-      mSuspensionState(NULL),
       mDecryptHandle(NULL) {
-    CHECK_EQ(mClient.connect(), OK);
+    CHECK_EQ(mClient.connect(), (status_t)OK);
 
     DataSource::RegisterDefaultSniffers();
 
@@ -307,6 +229,17 @@
 
     mUri = uri;
 
+    if (!strncmp("http://", uri, 7)) {
+        // Hack to support http live.
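+        // URLs ending in (or containing) "m3u8" are rewritten to the
+        // internal "httplive://" scheme so they take the LiveSession path.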
+
+        size_t len = strlen(uri);
+        if (!strcasecmp(&uri[len - 5], ".m3u8")
+                || strstr(&uri[7], "m3u8") != NULL) {
+            mUri = "httplive://";
+            mUri.append(&uri[7]);
+        }
+    }
+
     if (headers) {
         mUriHeaders = *headers;
     }
@@ -337,6 +270,10 @@
     return setDataSource_l(dataSource);
 }
 
+status_t AwesomePlayer::setDataSource(const sp<IStreamSource> &source) {
+    return INVALID_OPERATION;
+}
+
 status_t AwesomePlayer::setDataSource_l(
         const sp<DataSource> &dataSource) {
     sp<MediaExtractor> extractor = MediaExtractor::Create(dataSource);
@@ -421,14 +358,19 @@
 }
 
 void AwesomePlayer::reset() {
+    LOGI("reset");
+
     Mutex::Autolock autoLock(mLock);
     reset_l();
 }
 
 void AwesomePlayer::reset_l() {
+    LOGI("reset_l");
+
     if (mDecryptHandle != NULL) {
             mDrmManagerClient->setPlaybackStatus(mDecryptHandle,
                     Playback::STOP, 0);
+            mDrmManagerClient->closeDecryptSession(mDecryptHandle);
             mDecryptHandle = NULL;
             mDrmManagerClient = NULL;
     }
@@ -439,6 +381,16 @@
             LOGI("interrupting the connection process");
             mConnectingDataSource->disconnect();
         }
+
+        if (mFlags & PREPARING_CONNECTED) {
+            // We are basically done preparing, we're just buffering
+            // enough data to start playback, we can safely interrupt that.
+            finishAsyncPrepare_l();
+        }
+    }
+
+    if (mFlags & PREPARING) {
+        LOGI("waiting until preparation is completes.");
     }
 
     while (mFlags & PREPARING) {
@@ -464,6 +416,8 @@
     }
     mAudioSource.clear();
 
+    LOGI("audio source cleared");
+
     mTimeSource = NULL;
 
     delete mAudioPlayer;
@@ -471,11 +425,6 @@
 
     mVideoRenderer.clear();
 
-    if (mLastVideoBuffer) {
-        mLastVideoBuffer->release();
-        mLastVideoBuffer = NULL;
-    }
-
     if (mVideoBuffer) {
         mVideoBuffer->release();
         mVideoBuffer = NULL;
@@ -486,6 +435,11 @@
         mRTSPController.clear();
     }
 
+    if (mLiveSession != NULL) {
+        mLiveSession->disconnect();
+        mLiveSession.clear();
+    }
+
     mRTPPusher.clear();
     mRTCPPusher.clear();
     mRTPSession.clear();
@@ -504,10 +458,11 @@
         IPCThreadState::self()->flushCommands();
     }
 
+    LOGI("video source cleared");
+
     mDurationUs = -1;
     mFlags = 0;
     mExtractorFlags = 0;
-    mVideoWidth = mVideoHeight = -1;
     mTimeSourceDeltaUs = 0;
     mVideoTimeUs = 0;
 
@@ -520,10 +475,9 @@
 
     mFileSource.clear();
 
-    delete mSuspensionState;
-    mSuspensionState = NULL;
-
     mBitrate = -1;
+
+    LOGI("reset_l completed");
 }
 
 void AwesomePlayer::notifyListener_l(int msg, int ext1, int ext2) {
@@ -537,7 +491,7 @@
 }
 
 bool AwesomePlayer::getBitrate(int64_t *bitrate) {
-    off_t size;
+    off64_t size;
     if (mDurationUs >= 0 && mCachedSource != NULL
             && mCachedSource->getSize(&size) == OK) {
         *bitrate = size * 8000000ll / mDurationUs;  // in bits/sec
@@ -570,6 +524,12 @@
     return false;
 }
 
+void AwesomePlayer::ensureCacheIsFetching_l() {
+    if (mCachedSource != NULL) {
+        mCachedSource->resumeFetchingIfNecessary();
+    }
+}
+
 void AwesomePlayer::onBufferingUpdate() {
     Mutex::Autolock autoLock(mLock);
     if (!mBufferingEventPending) {
@@ -612,6 +572,7 @@
                          kLowWaterMarkBytes);
                     mFlags |= CACHE_UNDERRUN;
                     pause_l();
+                    ensureCacheIsFetching_l();
                     notifyListener_l(MEDIA_INFO, MEDIA_INFO_BUFFERING_START);
                 } else if (eos || cachedDataRemaining > kHighWaterMarkBytes) {
                     if (mFlags & CACHE_UNDERRUN) {
@@ -633,12 +594,16 @@
     int64_t cachedDurationUs;
     bool eos;
     if (getCachedDuration_l(&cachedDurationUs, &eos)) {
+        LOGV("cachedDurationUs = %.2f secs, eos=%d",
+             cachedDurationUs / 1E6, eos);
+
         if ((mFlags & PLAYING) && !eos
                 && (cachedDurationUs < kLowWaterMarkUs)) {
             LOGI("cache is running low (%.2f secs) , pausing.",
                  cachedDurationUs / 1E6);
             mFlags |= CACHE_UNDERRUN;
             pause_l();
+            ensureCacheIsFetching_l();
             notifyListener_l(MEDIA_INFO, MEDIA_INFO_BUFFERING_START);
         } else if (eos || cachedDurationUs > kHighWaterMarkUs) {
             if (mFlags & CACHE_UNDERRUN) {
@@ -664,11 +629,6 @@
 
     mVideoRenderer.clear();
 
-    if (mLastVideoBuffer) {
-        mLastVideoBuffer->release();
-        mLastVideoBuffer = NULL;
-    }
-
     if (mVideoBuffer) {
         mVideoBuffer->release();
         mVideoBuffer = NULL;
@@ -688,7 +648,8 @@
         IPCThreadState::self()->flushCommands();
     }
 
-    CHECK_EQ(OK, initVideoDecoder(OMXCodec::kIgnoreCodecSpecificData));
+    CHECK_EQ((status_t)OK,
+             initVideoDecoder(OMXCodec::kIgnoreCodecSpecificData));
 }
 
 void AwesomePlayer::onStreamDone() {
@@ -844,9 +805,47 @@
     return OK;
 }
 
-status_t AwesomePlayer::initRenderer_l() {
-    if (mISurface == NULL) {
-        return OK;
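+// Reports the displayable dimensions to the client, using the crop rect when
+// the decoder provides one and swapping width/height for 90/270 rotation.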
+void AwesomePlayer::notifyVideoSize_l() {
+    sp<MetaData> meta = mVideoSource->getFormat();
+
+    int32_t cropLeft, cropTop, cropRight, cropBottom;
+    if (!meta->findRect(
+                kKeyCropRect, &cropLeft, &cropTop, &cropRight, &cropBottom)) {
+        int32_t width, height;
+        CHECK(meta->findInt32(kKeyWidth, &width));
+        CHECK(meta->findInt32(kKeyHeight, &height));
+
+        cropLeft = cropTop = 0;
+        cropRight = width - 1;
+        cropBottom = height - 1;
+
+        LOGV("got dimensions only %d x %d", width, height);
+    } else {
+        LOGV("got crop rect %d, %d, %d, %d",
+             cropLeft, cropTop, cropRight, cropBottom);
+    }
+
+    int32_t usableWidth = cropRight - cropLeft + 1;
+    int32_t usableHeight = cropBottom - cropTop + 1;
+
+    int32_t rotationDegrees;
+    if (!mVideoTrack->getFormat()->findInt32(
+                kKeyRotation, &rotationDegrees)) {
+        rotationDegrees = 0;
+    }
+
+    if (rotationDegrees == 90 || rotationDegrees == 270) {
+        notifyListener_l(
+                MEDIA_SET_VIDEO_SIZE, usableHeight, usableWidth);
+    } else {
+        notifyListener_l(
+                MEDIA_SET_VIDEO_SIZE, usableWidth, usableHeight);
+    }
+}
+
+void AwesomePlayer::initRenderer_l() {
+    if (mSurface == NULL) {
+        return;
     }
 
     sp<MetaData> meta = mVideoSource->getFormat();
@@ -871,37 +870,19 @@
     // before creating a new one.
     IPCThreadState::self()->flushCommands();
 
-    if (!strncmp("OMX.", component, 4)) {
-        // Our OMX codecs allocate buffers on the media_server side
-        // therefore they require a remote IOMXRenderer that knows how
-        // to display them.
-
-        sp<IOMXRenderer> native =
-            mClient.interface()->createRenderer(
-                    mISurface, component,
-                    (OMX_COLOR_FORMATTYPE)format,
-                    decodedWidth, decodedHeight,
-                    mVideoWidth, mVideoHeight,
-                    rotationDegrees);
-
-        if (native == NULL) {
-            return NO_INIT;
-        }
-
-        mVideoRenderer = new AwesomeRemoteRenderer(native);
+    if (USE_SURFACE_ALLOC && strncmp(component, "OMX.", 4) == 0) {
+        // Hardware decoders avoid the CPU color conversion by decoding
+        // directly to ANativeBuffers, so we must use a renderer that
+        // just pushes those buffers to the ANativeWindow.
+        mVideoRenderer =
+            new AwesomeNativeWindowRenderer(mSurface, rotationDegrees);
     } else {
         // Other decoders are instantiated locally and as a consequence
-        // allocate their buffers in local address space.
-        mVideoRenderer = new AwesomeLocalRenderer(
-            false,  // previewOnly
-            component,
-            (OMX_COLOR_FORMATTYPE)format,
-            mISurface,
-            mVideoWidth, mVideoHeight,
-            decodedWidth, decodedHeight, rotationDegrees);
+        // allocate their buffers in local address space.  This renderer
+        // then performs a color conversion and copy to get the data
+        // into the ANativeBuffer.
+        mVideoRenderer = new AwesomeLocalRenderer(mSurface, meta);
     }
-
-    return mVideoRenderer->initCheck();
 }
 
 status_t AwesomePlayer::pause() {
@@ -944,10 +925,10 @@
     return (mFlags & PLAYING) || (mFlags & CACHE_UNDERRUN);
 }
 
-void AwesomePlayer::setISurface(const sp<ISurface> &isurface) {
+void AwesomePlayer::setSurface(const sp<Surface> &surface) {
     Mutex::Autolock autoLock(mLock);
 
-    mISurface = isurface;
+    mSurface = surface;
 }
 
 void AwesomePlayer::setAudioSink(
@@ -1064,20 +1045,6 @@
     }
 }
 
-status_t AwesomePlayer::getVideoDimensions(
-        int32_t *width, int32_t *height) const {
-    Mutex::Autolock autoLock(mLock);
-
-    if (mVideoWidth < 0 || mVideoHeight < 0) {
-        return UNKNOWN_ERROR;
-    }
-
-    *width = mVideoWidth;
-    *height = mVideoHeight;
-
-    return OK;
-}
-
 void AwesomePlayer::setAudioSource(sp<MediaSource> source) {
     CHECK(source != NULL);
 
@@ -1135,7 +1102,7 @@
             mClient.interface(), mVideoTrack->getFormat(),
             false, // createEncoder
             mVideoTrack,
-            NULL, flags);
+            NULL, flags, USE_SURFACE_ALLOC ? mSurface : NULL);
 
     if (mVideoSource != NULL) {
         int64_t durationUs;
@@ -1146,9 +1113,6 @@
             }
         }
 
-        CHECK(mVideoTrack->getFormat()->findInt32(kKeyWidth, &mVideoWidth));
-        CHECK(mVideoTrack->getFormat()->findInt32(kKeyHeight, &mVideoHeight));
-
         status_t err = mVideoSource->start();
 
         if (err != OK) {
@@ -1166,7 +1130,7 @@
     }
 
     if (mAudioPlayer != NULL) {
-        LOGV("seeking audio to %lld us (%.2f secs).", timeUs, timeUs / 1E6);
+        LOGV("seeking audio to %lld us (%.2f secs).", videoTimeUs, videoTimeUs / 1E6);
 
         // If we don't have a video time, seek audio to the originally
         // requested seek time instead.
@@ -1184,6 +1148,13 @@
     mFlags |= FIRST_FRAME;
     mSeeking = false;
     mSeekNotificationSent = false;
+
+    if (mDecryptHandle != NULL) {
+        mDrmManagerClient->setPlaybackStatus(mDecryptHandle,
+                Playback::PAUSE, 0);
+        mDrmManagerClient->setPlaybackStatus(mDecryptHandle,
+                Playback::START, videoTimeUs / 1000);
+    }
 }
 
 void AwesomePlayer::onVideoEvent() {
@@ -1196,11 +1167,6 @@
     mVideoEventPending = false;
 
     if (mSeeking) {
-        if (mLastVideoBuffer) {
-            mLastVideoBuffer->release();
-            mLastVideoBuffer = NULL;
-        }
-
         if (mVideoBuffer) {
             mVideoBuffer->release();
             mVideoBuffer = NULL;
@@ -1235,23 +1201,18 @@
             options.clearSeekTo();
 
             if (err != OK) {
-                CHECK_EQ(mVideoBuffer, NULL);
+                CHECK(mVideoBuffer == NULL);
 
                 if (err == INFO_FORMAT_CHANGED) {
                     LOGV("VideoSource signalled format change.");
 
+                    notifyVideoSize_l();
+
                     if (mVideoRenderer != NULL) {
                         mVideoRendererIsPreview = false;
-                        err = initRenderer_l();
-
-                        if (err == OK) {
-                            continue;
-                        }
-
-                        // fall through
-                    } else {
-                        continue;
+                        initRenderer_l();
                     }
+                    continue;
                 }
 
                 // So video playback is complete, but we may still have
@@ -1293,16 +1254,9 @@
 
     TimeSource *ts = (mFlags & AUDIO_AT_EOS) ? &mSystemTimeSource : mTimeSource;
 
-    if (mDecryptHandle != NULL) {
-        mDrmManagerClient->setPlaybackStatus(mDecryptHandle,
-                Playback::PAUSE, 0);
-        mDrmManagerClient->setPlaybackStatus(mDecryptHandle,
-                Playback::START, timeUs / 1000);
-    }
-
     if (mFlags & FIRST_FRAME) {
         mFlags &= ~FIRST_FRAME;
-
+        mSinceLastDropped = 0;
         mTimeSourceDeltaUs = ts->getRealTimeUs() - timeUs;
     }
 
@@ -1330,12 +1284,16 @@
     if (latenessUs > 40000) {
         // We're more than 40ms late.
         LOGV("we're late by %lld us (%.2f secs)", latenessUs, latenessUs / 1E6);
+        if (mSinceLastDropped > FRAME_DROP_FREQ) {
+            LOGV("we're late by %lld us (%.2f secs) dropping one after %d frames",
+                 latenessUs, latenessUs / 1E6, mSinceLastDropped);
+            mSinceLastDropped = 0;
+            mVideoBuffer->release();
+            mVideoBuffer = NULL;
 
-        mVideoBuffer->release();
-        mVideoBuffer = NULL;
-
-        postVideoEvent_l();
-        return;
+            postVideoEvent_l();
+            return;
+        }
     }
 
     if (latenessUs < -10000) {
@@ -1348,26 +1306,15 @@
     if (mVideoRendererIsPreview || mVideoRenderer == NULL) {
         mVideoRendererIsPreview = false;
 
-        status_t err = initRenderer_l();
-
-        if (err != OK) {
-            finishSeekIfNecessary(-1);
-
-            mFlags |= VIDEO_AT_EOS;
-            postStreamDoneEvent_l(err);
-            return;
-        }
+        initRenderer_l();
     }
 
     if (mVideoRenderer != NULL) {
+        mSinceLastDropped++;
         mVideoRenderer->render(mVideoBuffer);
     }
 
-    if (mLastVideoBuffer) {
-        mLastVideoBuffer->release();
-        mLastVideoBuffer = NULL;
-    }
-    mLastVideoBuffer = mVideoBuffer;
+    mVideoBuffer->release();
     mVideoBuffer = NULL;
 
     postVideoEvent_l();
@@ -1527,16 +1474,23 @@
         String8 uri("http://");
         uri.append(mUri.string() + 11);
 
-        sp<LiveSource> liveSource = new LiveSource(uri.string());
+        if (mLooper == NULL) {
+            mLooper = new ALooper;
+            mLooper->setName("httplive");
+            mLooper->start();
+        }
 
-        mCachedSource = new NuCachedSource2(liveSource);
-        dataSource = mCachedSource;
+        mLiveSession = new LiveSession;
+        mLooper->registerHandler(mLiveSession);
+
+        mLiveSession->connect(uri.string());
+        dataSource = mLiveSession->getDataSource();
 
         sp<MediaExtractor> extractor =
             MediaExtractor::Create(dataSource, MEDIA_MIMETYPE_CONTAINER_MPEG2TS);
 
         static_cast<MPEG2TSExtractor *>(extractor.get())
-            ->setLiveSource(liveSource);
+            ->setLiveSession(mLiveSession);
 
         return setDataSource_l(extractor);
     } else if (!strncmp("rtsp://gtalk/", mUri.string(), 13)) {
@@ -1684,9 +1638,15 @@
     }
 
     dataSource->getDrmInfo(&mDecryptHandle, &mDrmManagerClient);
-    if (mDecryptHandle != NULL
-            && RightsStatus::RIGHTS_VALID != mDecryptHandle->status) {
-        notifyListener_l(MEDIA_ERROR, MEDIA_ERROR_UNKNOWN, ERROR_NO_LICENSE);
+    if (mDecryptHandle != NULL) {
+        if (RightsStatus::RIGHTS_VALID == mDecryptHandle->status) {
+            if (DecryptApiType::WV_BASED == mDecryptHandle->decryptApiType) {
+                LOGD("Setting mCachedSource to NULL for WVM\n");
+                mCachedSource.clear();
+            }
+        } else {
+            notifyListener_l(MEDIA_ERROR, MEDIA_ERROR_UNKNOWN, ERROR_NO_LICENSE);
+        }
     }
 
     return setDataSource_l(extractor);
@@ -1700,7 +1660,7 @@
     }
 
     mPrepareResult = err;
-    mFlags &= ~(PREPARING|PREPARE_CANCELLED);
+    mFlags &= ~(PREPARING|PREPARE_CANCELLED|PREPARING_CONNECTED);
     mAsyncPrepareEvent = NULL;
     mPreparedCondition.broadcast();
 }
@@ -1748,6 +1708,8 @@
         }
     }
 
+    mFlags |= PREPARING_CONNECTED;
+
     if (mCachedSource != NULL || mRTSPController != NULL) {
         postBufferingEvent_l();
     } else {
@@ -1757,175 +1719,22 @@
 
 void AwesomePlayer::finishAsyncPrepare_l() {
     if (mIsAsyncPrepare) {
-        if (mVideoWidth < 0 || mVideoHeight < 0) {
+        if (mVideoSource == NULL) {
             notifyListener_l(MEDIA_SET_VIDEO_SIZE, 0, 0);
         } else {
-            int32_t rotationDegrees;
-            if (!mVideoTrack->getFormat()->findInt32(
-                        kKeyRotation, &rotationDegrees)) {
-                rotationDegrees = 0;
-            }
-
-#if 1
-            if (rotationDegrees == 90 || rotationDegrees == 270) {
-                notifyListener_l(
-                        MEDIA_SET_VIDEO_SIZE, mVideoHeight, mVideoWidth);
-            } else
-#endif
-            {
-                notifyListener_l(
-                        MEDIA_SET_VIDEO_SIZE, mVideoWidth, mVideoHeight);
-            }
+            notifyVideoSize_l();
         }
 
         notifyListener_l(MEDIA_PREPARED);
     }
 
     mPrepareResult = OK;
-    mFlags &= ~(PREPARING|PREPARE_CANCELLED);
+    mFlags &= ~(PREPARING|PREPARE_CANCELLED|PREPARING_CONNECTED);
     mFlags |= PREPARED;
     mAsyncPrepareEvent = NULL;
     mPreparedCondition.broadcast();
 }
 
-status_t AwesomePlayer::suspend() {
-    LOGV("suspend");
-    Mutex::Autolock autoLock(mLock);
-
-    if (mSuspensionState != NULL) {
-        if (mLastVideoBuffer == NULL) {
-            //go into here if video is suspended again
-            //after resuming without being played between
-            //them
-            SuspensionState *state = mSuspensionState;
-            mSuspensionState = NULL;
-            reset_l();
-            mSuspensionState = state;
-            return OK;
-        }
-
-        delete mSuspensionState;
-        mSuspensionState = NULL;
-    }
-
-    if (mFlags & PREPARING) {
-        mFlags |= PREPARE_CANCELLED;
-        if (mConnectingDataSource != NULL) {
-            LOGI("interrupting the connection process");
-            mConnectingDataSource->disconnect();
-        }
-    }
-
-    while (mFlags & PREPARING) {
-        mPreparedCondition.wait(mLock);
-    }
-
-    SuspensionState *state = new SuspensionState;
-    state->mUri = mUri;
-    state->mUriHeaders = mUriHeaders;
-    state->mFileSource = mFileSource;
-
-    state->mFlags = mFlags & (PLAYING | AUTO_LOOPING | LOOPING | AT_EOS);
-    getPosition(&state->mPositionUs);
-
-    if (mLastVideoBuffer) {
-        size_t size = mLastVideoBuffer->range_length();
-
-        if (size) {
-            int32_t unreadable;
-            if (!mLastVideoBuffer->meta_data()->findInt32(
-                        kKeyIsUnreadable, &unreadable)
-                    || unreadable == 0) {
-                state->mLastVideoFrameSize = size;
-                state->mLastVideoFrame = malloc(size);
-                memcpy(state->mLastVideoFrame,
-                       (const uint8_t *)mLastVideoBuffer->data()
-                            + mLastVideoBuffer->range_offset(),
-                       size);
-
-                state->mVideoWidth = mVideoWidth;
-                state->mVideoHeight = mVideoHeight;
-
-                sp<MetaData> meta = mVideoSource->getFormat();
-                CHECK(meta->findInt32(kKeyColorFormat, &state->mColorFormat));
-                CHECK(meta->findInt32(kKeyWidth, &state->mDecodedWidth));
-                CHECK(meta->findInt32(kKeyHeight, &state->mDecodedHeight));
-            } else {
-                LOGV("Unable to save last video frame, we have no access to "
-                     "the decoded video data.");
-            }
-        }
-    }
-
-    reset_l();
-
-    mSuspensionState = state;
-
-    return OK;
-}
-
-status_t AwesomePlayer::resume() {
-    LOGV("resume");
-    Mutex::Autolock autoLock(mLock);
-
-    if (mSuspensionState == NULL) {
-        return INVALID_OPERATION;
-    }
-
-    SuspensionState *state = mSuspensionState;
-    mSuspensionState = NULL;
-
-    status_t err;
-    if (state->mFileSource != NULL) {
-        err = setDataSource_l(state->mFileSource);
-
-        if (err == OK) {
-            mFileSource = state->mFileSource;
-        }
-    } else {
-        err = setDataSource_l(state->mUri, &state->mUriHeaders);
-    }
-
-    if (err != OK) {
-        delete state;
-        state = NULL;
-
-        return err;
-    }
-
-    seekTo_l(state->mPositionUs);
-
-    mFlags = state->mFlags & (AUTO_LOOPING | LOOPING | AT_EOS);
-
-    if (state->mLastVideoFrame && mISurface != NULL) {
-        mVideoRenderer =
-            new AwesomeLocalRenderer(
-                    true,  // previewOnly
-                    "",
-                    (OMX_COLOR_FORMATTYPE)state->mColorFormat,
-                    mISurface,
-                    state->mVideoWidth,
-                    state->mVideoHeight,
-                    state->mDecodedWidth,
-                    state->mDecodedHeight,
-                    0);
-
-        mVideoRendererIsPreview = true;
-
-        ((AwesomeLocalRenderer *)mVideoRenderer.get())->render(
-                state->mLastVideoFrame, state->mLastVideoFrameSize);
-    }
-
-    if (state->mFlags & PLAYING) {
-        play_l();
-    }
-
-    mSuspensionState = state;
-    state = NULL;
-
-    return OK;
-}
-
 uint32_t AwesomePlayer::flags() const {
     return mExtractorFlags;
 }
@@ -1939,4 +1748,3 @@
 }
 
 }  // namespace android
-
diff --git a/media/libstagefright/CameraSource.cpp b/media/libstagefright/CameraSource.cpp
index 89cb135..b1c6b18 100644
--- a/media/libstagefright/CameraSource.cpp
+++ b/media/libstagefright/CameraSource.cpp
@@ -27,6 +27,7 @@
 #include <media/stagefright/MetaData.h>
 #include <camera/Camera.h>
 #include <camera/CameraParameters.h>
+#include <surfaceflinger/Surface.h>
 #include <utils/String8.h>
 #include <cutils/properties.h>
 
@@ -65,6 +66,11 @@
 void CameraSourceListener::postData(int32_t msgType, const sp<IMemory> &dataPtr) {
     LOGV("postData(%d, ptr:%p, size:%d)",
          msgType, dataPtr->pointer(), dataPtr->size());
+
+    sp<CameraSource> source = mSource.promote();
+    if (source.get() != NULL) {
+        source->dataCallback(msgType, dataPtr);
+    }
 }
 
 void CameraSourceListener::postDataTimestamp(
@@ -77,6 +83,10 @@
 }
 
 static int32_t getColorFormat(const char* colorFormat) {
+    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV420P)) {
+       return OMX_COLOR_FormatYUV420Planar;
+    }
+
     if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV422SP)) {
        return OMX_COLOR_FormatYUV422SemiPlanar;
     }
@@ -99,72 +109,409 @@
     CHECK_EQ(0, "Unknown color format");
 }
 
-// static
 CameraSource *CameraSource::Create() {
-    sp<Camera> camera = Camera::connect(0);
+    Size size;
+    size.width = -1;
+    size.height = -1;
 
-    if (camera.get() == NULL) {
-        return NULL;
-    }
-
-    return new CameraSource(camera);
+    sp<ICamera> camera;
+    return new CameraSource(camera, 0, size, -1, NULL, false);
 }
 
 // static
-CameraSource *CameraSource::CreateFromCamera(const sp<Camera> &camera) {
-    if (camera.get() == NULL) {
-        return NULL;
-    }
+CameraSource *CameraSource::CreateFromCamera(
+    const sp<ICamera>& camera,
+    int32_t cameraId,
+    Size videoSize,
+    int32_t frameRate,
+    const sp<Surface>& surface,
+    bool storeMetaDataInVideoBuffers) {
 
-    return new CameraSource(camera);
+    CameraSource *source = new CameraSource(camera, cameraId,
+                    videoSize, frameRate, surface,
+                    storeMetaDataInVideoBuffers);
+    return source;
 }
 
-CameraSource::CameraSource(const sp<Camera> &camera)
-    : mCamera(camera),
-      mFirstFrameTimeUs(0),
-      mLastFrameTimestampUs(0),
+CameraSource::CameraSource(
+    const sp<ICamera>& camera,
+    int32_t cameraId,
+    Size videoSize,
+    int32_t frameRate,
+    const sp<Surface>& surface,
+    bool storeMetaDataInVideoBuffers)
+    : mCameraFlags(0),
+      mVideoFrameRate(-1),
+      mCamera(0),
+      mSurface(surface),
       mNumFramesReceived(0),
+      mLastFrameTimestampUs(0),
+      mStarted(false),
+      mFirstFrameTimeUs(0),
       mNumFramesEncoded(0),
       mNumFramesDropped(0),
       mNumGlitches(0),
       mGlitchDurationThresholdUs(200000),
-      mCollectStats(false),
-      mStarted(false) {
+      mCollectStats(false) {
 
+    mVideoSize.width  = -1;
+    mVideoSize.height = -1;
+
+    mInitCheck = init(camera, cameraId,
+                    videoSize, frameRate,
+                    storeMetaDataInVideoBuffers);
+}
+
+status_t CameraSource::initCheck() const {
+    return mInitCheck;
+}
+
+status_t CameraSource::isCameraAvailable(
+    const sp<ICamera>& camera, int32_t cameraId) {
+
+    if (camera == 0) {
+        mCamera = Camera::connect(cameraId);
+        mCameraFlags &= ~FLAGS_HOT_CAMERA;
+    } else {
+        mCamera = Camera::create(camera);
+        mCameraFlags |= FLAGS_HOT_CAMERA;
+    }
+
+    // Is camera available?
+    if (mCamera == 0) {
+        LOGE("Camera connection could not be established.");
+        return -EBUSY;
+    }
+    if (!(mCameraFlags & FLAGS_HOT_CAMERA)) {
+        mCamera->lock();
+    }
+    return OK;
+}
+
+
+/*
+ * Check to see whether the requested video width and height is one
+ * of the supported sizes.
+ * @param width the video frame width in pixels
+ * @param height the video frame height in pixels
+ * @param supportedSizes the vector of sizes that we check against
+ * @return true if the dimension (width and height) is supported.
+ */
+static bool isVideoSizeSupported(
+    int32_t width, int32_t height,
+    const Vector<Size>& supportedSizes) {
+
+    LOGV("isVideoSizeSupported");
+    for (size_t i = 0; i < supportedSizes.size(); ++i) {
+        if (width  == supportedSizes[i].width &&
+            height == supportedSizes[i].height) {
+            return true;
+        }
+    }
+    return false;
+}
+
+/*
+ * If the preview and video output is separate, we only set
+ * the video size, and applications should set the preview size
+ * to some proper value, and the recording framework will not
+ * change the preview size; otherwise, if the video and preview
+ * output is the same, we need to set the preview to be the same
+ * as the requested video size.
+ *
+ */
+/*
+ * Query the camera to retrieve the supported video frame sizes
+ * and also to see whether CameraParameters::setVideoSize()
+ * is supported or not.
+ * @param params CameraParameters to retrieve the information
+ * @param isSetVideoSizeSupported returns whether method
+ *      CameraParameters::setVideoSize() is supported or not.
+ * @param sizes returns the vector of Size objects for the
+ *      supported video frame sizes advertised by the camera.
+ */
+static void getSupportedVideoSizes(
+    const CameraParameters& params,
+    bool *isSetVideoSizeSupported,
+    Vector<Size>& sizes) {
+
+    *isSetVideoSizeSupported = true;
+    params.getSupportedVideoSizes(sizes);
+    if (sizes.size() == 0) {
+        LOGD("Camera does not support setVideoSize()");
+        params.getSupportedPreviewSizes(sizes);
+        *isSetVideoSizeSupported = false;
+    }
+}
+
+/*
+ * Check whether the camera has the supported color format
+ * @param params CameraParameters to retrieve the information
+ * @return OK if no error.
+ */
+status_t CameraSource::isCameraColorFormatSupported(
+        const CameraParameters& params) {
+    mColorFormat = getColorFormat(params.get(
+            CameraParameters::KEY_VIDEO_FRAME_FORMAT));
+    if (mColorFormat == -1) {
+        return BAD_VALUE;
+    }
+    return OK;
+}
+
+/*
+ * Configure the camera to use the requested video size
+ * (width and height) and/or frame rate. If both width and
+ * height are -1, configuration on the video size is skipped.
+ * If frameRate is -1, configuration on the frame rate
+ * is skipped. Skipping the configuration allows one to
+ * use the current camera setting without the need to
+ * actually know the specific values (see Create() method).
+ *
+ * @param params the CameraParameters to be configured
+ * @param width the target video frame width in pixels
+ * @param height the target video frame height in pixels
+ * @param frameRate the target frame rate in frames per second.
+ * @return OK if no error.
+ */
+status_t CameraSource::configureCamera(
+        CameraParameters* params,
+        int32_t width, int32_t height,
+        int32_t frameRate) {
+
+    Vector<Size> sizes;
+    bool isSetVideoSizeSupportedByCamera = true;
+    getSupportedVideoSizes(*params, &isSetVideoSizeSupportedByCamera, sizes);
+    bool isCameraParamChanged = false;
+    if (width != -1 && height != -1) {
+        if (!isVideoSizeSupported(width, height, sizes)) {
+            LOGE("Video dimension (%dx%d) is unsupported", width, height);
+            releaseCamera();
+            return BAD_VALUE;
+        }
+        if (isSetVideoSizeSupportedByCamera) {
+            params->setVideoSize(width, height);
+        } else {
+            params->setPreviewSize(width, height);
+        }
+        isCameraParamChanged = true;
+    } else if ((width == -1 && height != -1) ||
+               (width != -1 && height == -1)) {
+        // If one and only one of the width and height is -1
+        // we reject such a request.
+        LOGE("Requested video size (%dx%d) is not supported", width, height);
+        releaseCamera();
+        return BAD_VALUE;
+    } else {  // width == -1 && height == -1
+        // Do not configure the camera.
+        // Use the current width and height value setting from the camera.
+    }
+
+    if (frameRate != -1) {
+        CHECK(frameRate > 0 && frameRate <= 120);
+        const char* supportedFrameRates =
+                params->get(CameraParameters::KEY_SUPPORTED_PREVIEW_FRAME_RATES);
+        CHECK(supportedFrameRates != NULL);
+        LOGV("Supported frame rates: %s", supportedFrameRates);
+        char buf[4];
+        snprintf(buf, 4, "%d", frameRate);
+        if (strstr(supportedFrameRates, buf) == NULL) {
+            LOGE("Requested frame rate (%d) is not supported: %s",
+                frameRate, supportedFrameRates);
+            releaseCamera();
+            return BAD_VALUE;
+        }
+
+        // The frame rate is supported, set the camera to the requested value.
+        params->setPreviewFrameRate(frameRate);
+        isCameraParamChanged = true;
+    } else {  // frameRate == -1
+        // Do not configure the camera.
+        // Use the current frame rate value setting from the camera
+    }
+
+    if (isCameraParamChanged) {
+        // Either frame rate or frame size needs to be changed.
+        String8 s = params->flatten();
+        if (OK != mCamera->setParameters(s)) {
+            LOGE("Could not change settings."
+                 " Someone else is using camera %p?", mCamera.get());
+            return -EBUSY;
+        }
+    }
+    return OK;
+}
+
+/*
+ * Check whether the requested video frame size
+ * has been successfully configured or not. If both width and height
+ * are -1, check on the current width and height value setting
+ * is performed.
+ *
+ * @param params CameraParameters to retrieve the information
+ * @param width the target video frame width in pixels to check against
+ * @param height the target video frame height in pixels to check against
+ * @return OK if no error
+ */
+status_t CameraSource::checkVideoSize(
+        const CameraParameters& params,
+        int32_t width, int32_t height) {
+
+    // The actual video size is the same as the preview size
+    // if the camera hal does not support separate video and
+    // preview output. In this case, we retrieve the video
+    // size from preview.
+    int32_t frameWidthActual = -1;
+    int32_t frameHeightActual = -1;
+    Vector<Size> sizes;
+    params.getSupportedVideoSizes(sizes);
+    if (sizes.size() == 0) {
+        // video size is the same as preview size
+        params.getPreviewSize(&frameWidthActual, &frameHeightActual);
+    } else {
+        // video size may not be the same as preview
+        params.getVideoSize(&frameWidthActual, &frameHeightActual);
+    }
+    if (frameWidthActual < 0 || frameHeightActual < 0) {
+        LOGE("Failed to retrieve video frame size (%dx%d)",
+                frameWidthActual, frameHeightActual);
+        return UNKNOWN_ERROR;
+    }
+
+    // Check the actual video frame size against the target/requested
+    // video frame size.
+    if (width != -1 && height != -1) {
+        if (frameWidthActual != width || frameHeightActual != height) {
+            LOGE("Failed to set video frame size to %dx%d. "
+                    "The actual video size is %dx%d ", width, height,
+                    frameWidthActual, frameHeightActual);
+            return UNKNOWN_ERROR;
+        }
+    }
+
+    // Good now.
+    mVideoSize.width = frameWidthActual;
+    mVideoSize.height = frameHeightActual;
+    return OK;
+}
+
+/*
+ * Check the requested frame rate has been successfully configured or not.
+ * If the target frameRate is -1, check on the current frame rate value
+ * setting is performed.
+ *
+ * @param params CameraParameters to retrieve the information
+ * @param frameRate the target video frame rate to check against
+ * @return OK if no error.
+ */
+status_t CameraSource::checkFrameRate(
+        const CameraParameters& params,
+        int32_t frameRate) {
+
+    int32_t frameRateActual = params.getPreviewFrameRate();
+    if (frameRateActual < 0) {
+        LOGE("Failed to retrieve preview frame rate (%d)", frameRateActual);
+        return UNKNOWN_ERROR;
+    }
+
+    // Check the actual video frame rate against the target/requested
+    // video frame rate.
+    if (frameRate != -1 && (frameRateActual - frameRate) != 0) {
+        LOGE("Failed to set preview frame rate to %d fps. The actual "
+                "frame rate is %d", frameRate, frameRateActual);
+        return UNKNOWN_ERROR;
+    }
+
+    // Good now.
+    mVideoFrameRate = frameRateActual;
+    return OK;
+}
+
+/*
+ * Initialize the CameraSource so that it becomes
+ * ready for providing the video input streams as requested.
+ * @param camera the camera object used for the video source
+ * @param cameraId if camera == 0, use camera with this id
+ *      as the video source
+ * @param videoSize the target video frame size. If both
+ *      width and height in videoSize are -1, use the current
+ *      width and height settings of the camera
+ * @param frameRate the target frame rate in frames per second.
+ *      if it is -1, use the current camera frame rate setting.
+ * @param storeMetaDataInVideoBuffers request to store meta
+ *      data or real YUV data in video buffers. Request to
+ *      store meta data in video buffers may not be honored
+ *      if the source does not support this feature.
+ *
+ * @return OK if no error.
+ */
+status_t CameraSource::init(
+        const sp<ICamera>& camera,
+        int32_t cameraId,
+        Size videoSize,
+        int32_t frameRate,
+        bool storeMetaDataInVideoBuffers) {
+
+    status_t err = OK;
     int64_t token = IPCThreadState::self()->clearCallingIdentity();
-    String8 s = mCamera->getParameters();
+
+    if ((err  = isCameraAvailable(camera, cameraId)) != OK) {
+        return err;
+    }
+    CameraParameters params(mCamera->getParameters());
+    if ((err = isCameraColorFormatSupported(params)) != OK) {
+        return err;
+    }
+
+    // Set the camera to use the requested video frame size
+    // and/or frame rate.
+    if ((err = configureCamera(&params,
+                    videoSize.width, videoSize.height,
+                    frameRate))) {
+        return err;
+    }
+
+    // Check on video frame size and frame rate.
+    CameraParameters newCameraParams(mCamera->getParameters());
+    if ((err = checkVideoSize(newCameraParams,
+                videoSize.width, videoSize.height)) != OK) {
+        return err;
+    }
+    if ((err = checkFrameRate(newCameraParams, frameRate)) != OK) {
+        return err;
+    }
+
+    // This CHECK is good, since we just passed the lock/unlock
+    // check earlier by calling mCamera->setParameters().
+    CHECK_EQ(OK, mCamera->setPreviewDisplay(mSurface));
+
+    // By default, do not store metadata in video buffers
+    mIsMetaDataStoredInVideoBuffers = false;
+    mCamera->storeMetaDataInBuffers(false);
+    if (storeMetaDataInVideoBuffers) {
+        if (OK == mCamera->storeMetaDataInBuffers(true)) {
+            mIsMetaDataStoredInVideoBuffers = true;
+        }
+    }
+
     IPCThreadState::self()->restoreCallingIdentity(token);
 
-    printf("params: \"%s\"\n", s.string());
-
-    int32_t width, height, stride, sliceHeight;
-    CameraParameters params(s);
-    params.getPreviewSize(&width, &height);
-
-    // Calculate glitch duraton threshold based on frame rate
-    int32_t frameRate = params.getPreviewFrameRate();
-    int64_t glitchDurationUs = (1000000LL / frameRate);
+    int64_t glitchDurationUs = (1000000LL / mVideoFrameRate);
     if (glitchDurationUs > mGlitchDurationThresholdUs) {
         mGlitchDurationThresholdUs = glitchDurationUs;
     }
 
-    const char *colorFormatStr = params.get(CameraParameters::KEY_VIDEO_FRAME_FORMAT);
-    CHECK(colorFormatStr != NULL);
-    int32_t colorFormat = getColorFormat(colorFormatStr);
-
     // XXX: query camera for the stride and slice height
     // when the capability becomes available.
-    stride = width;
-    sliceHeight = height;
-
     mMeta = new MetaData;
-    mMeta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_RAW);
-    mMeta->setInt32(kKeyColorFormat, colorFormat);
-    mMeta->setInt32(kKeyWidth, width);
-    mMeta->setInt32(kKeyHeight, height);
-    mMeta->setInt32(kKeyStride, stride);
-    mMeta->setInt32(kKeySliceHeight, sliceHeight);
-
+    mMeta->setCString(kKeyMIMEType,  MEDIA_MIMETYPE_VIDEO_RAW);
+    mMeta->setInt32(kKeyColorFormat, mColorFormat);
+    mMeta->setInt32(kKeyWidth,       mVideoSize.width);
+    mMeta->setInt32(kKeyHeight,      mVideoSize.height);
+    mMeta->setInt32(kKeyStride,      mVideoSize.width);
+    mMeta->setInt32(kKeySliceHeight, mVideoSize.height);
+    mMeta->setInt32(kKeyFrameRate,   mVideoFrameRate);
+    return OK;
 }
 
 CameraSource::~CameraSource() {
@@ -173,8 +520,17 @@
     }
 }
 
+void CameraSource::startCameraRecording() {
+    CHECK_EQ(OK, mCamera->startRecording());
+    CHECK(mCamera->recordingEnabled());
+}
+
 status_t CameraSource::start(MetaData *meta) {
     CHECK(!mStarted);
+    if (mInitCheck != OK) {
+        LOGE("CameraSource is not initialized yet");
+        return mInitCheck;
+    }
 
     char value[PROPERTY_VALUE_MAX];
     if (property_get("media.stagefright.record-stats", value, NULL)
@@ -188,31 +544,51 @@
         mStartTimeUs = startTimeUs;
     }
 
+    // Call setListener first before calling startCameraRecording()
+    // to avoid recording frames being dropped.
     int64_t token = IPCThreadState::self()->clearCallingIdentity();
     mCamera->setListener(new CameraSourceListener(this));
-    CHECK_EQ(OK, mCamera->startRecording());
+    startCameraRecording();
     IPCThreadState::self()->restoreCallingIdentity(token);
 
     mStarted = true;
     return OK;
 }
 
+void CameraSource::stopCameraRecording() {
+    mCamera->setListener(NULL);
+    mCamera->stopRecording();
+}
+
+void CameraSource::releaseCamera() {
+    LOGV("releaseCamera");
+    if ((mCameraFlags & FLAGS_HOT_CAMERA) == 0) {
+        LOGV("Camera was cold when we started, stopping preview");
+        mCamera->stopPreview();
+    }
+    mCamera->unlock();
+    mCamera.clear();
+    mCamera = 0;
+    mCameraFlags = 0;
+}
+
 status_t CameraSource::stop() {
-    LOGV("stop");
+    LOGD("stop: E");
     Mutex::Autolock autoLock(mLock);
     mStarted = false;
     mFrameAvailableCondition.signal();
 
     int64_t token = IPCThreadState::self()->clearCallingIdentity();
-    mCamera->setListener(NULL);
-    mCamera->stopRecording();
     releaseQueuedFrames();
     while (!mFramesBeingEncoded.empty()) {
-        LOGI("Waiting for outstanding frames being encoded: %d",
+        if (NO_ERROR !=
+            mFrameCompleteCondition.waitRelative(mLock, 3000000000LL)) {
+            LOGW("Timed out waiting for outstanding frames being encoded: %d",
                 mFramesBeingEncoded.size());
-        mFrameCompleteCondition.wait(mLock);
+        }
     }
-    mCamera = NULL;
+    stopCameraRecording();
+    releaseCamera();
     IPCThreadState::self()->restoreCallingIdentity(token);
 
     if (mCollectStats) {
@@ -221,15 +597,27 @@
                 mLastFrameTimestampUs - mFirstFrameTimeUs);
     }
 
+    if (mNumGlitches > 0) {
+        LOGW("%d long delays between neighboring video frames during",
+                mNumGlitches);
+    }
+
     CHECK_EQ(mNumFramesReceived, mNumFramesEncoded + mNumFramesDropped);
+    LOGD("stop: X");
     return OK;
 }
 
+void CameraSource::releaseRecordingFrame(const sp<IMemory>& frame) {
+    if (mCamera != NULL) {
+        mCamera->releaseRecordingFrame(frame);
+    }
+}
+
 void CameraSource::releaseQueuedFrames() {
     List<sp<IMemory> >::iterator it;
     while (!mFramesReceived.empty()) {
         it = mFramesReceived.begin();
-        mCamera->releaseRecordingFrame(*it);
+        releaseRecordingFrame(*it);
         mFramesReceived.erase(it);
         ++mNumFramesDropped;
     }
@@ -241,7 +629,7 @@
 
 void CameraSource::releaseOneRecordingFrame(const sp<IMemory>& frame) {
     int64_t token = IPCThreadState::self()->clearCallingIdentity();
-    mCamera->releaseRecordingFrame(frame);
+    releaseRecordingFrame(frame);
     IPCThreadState::self()->restoreCallingIdentity(token);
 }
 
@@ -251,7 +639,6 @@
     for (List<sp<IMemory> >::iterator it = mFramesBeingEncoded.begin();
          it != mFramesBeingEncoded.end(); ++it) {
         if ((*it)->pointer() ==  buffer->data()) {
-
             releaseOneRecordingFrame((*it));
             mFramesBeingEncoded.erase(it);
             ++mNumFramesEncoded;
@@ -281,45 +668,26 @@
 
     {
         Mutex::Autolock autoLock(mLock);
-        while (mStarted) {
-            while(mFramesReceived.empty()) {
-                mFrameAvailableCondition.wait(mLock);
-            }
-
-            if (!mStarted) {
-                return OK;
-            }
-
-            frame = *mFramesReceived.begin();
-            mFramesReceived.erase(mFramesReceived.begin());
-
-            frameTime = *mFrameTimes.begin();
-            mFrameTimes.erase(mFrameTimes.begin());
-            int64_t skipTimeUs;
-            if (!options || !options->getSkipFrame(&skipTimeUs)) {
-                skipTimeUs = frameTime;
-            }
-            if (skipTimeUs > frameTime) {
-                LOGV("skipTimeUs: %lld us > frameTime: %lld us",
-                    skipTimeUs, frameTime);
-                releaseOneRecordingFrame(frame);
-                ++mNumFramesDropped;
-                // Safeguard against the abuse of the kSkipFrame_Option.
-                if (skipTimeUs - frameTime >= 1E6) {
-                    LOGE("Frame skipping requested is way too long: %lld us",
-                        skipTimeUs - frameTime);
-                    return UNKNOWN_ERROR;
-                }
-            } else {
-                mFramesBeingEncoded.push_back(frame);
-                *buffer = new MediaBuffer(frame->pointer(), frame->size());
-                (*buffer)->setObserver(this);
-                (*buffer)->add_ref();
-                (*buffer)->meta_data()->setInt64(kKeyTime, frameTime);
-
-                return OK;
+        while (mStarted && mFramesReceived.empty()) {
+            if (NO_ERROR !=
+                mFrameAvailableCondition.waitRelative(mLock, 3000000000LL)) {
+                LOGW("Timed out waiting for incoming camera video frames: %lld us",
+                    mLastFrameTimestampUs);
             }
         }
+        if (!mStarted) {
+            return OK;
+        }
+        frame = *mFramesReceived.begin();
+        mFramesReceived.erase(mFramesReceived.begin());
+
+        frameTime = *mFrameTimes.begin();
+        mFrameTimes.erase(mFrameTimes.begin());
+        mFramesBeingEncoded.push_back(frame);
+        *buffer = new MediaBuffer(frame->pointer(), frame->size());
+        (*buffer)->setObserver(this);
+        (*buffer)->add_ref();
+        (*buffer)->meta_data()->setInt64(kKeyTime, frameTime);
     }
     return OK;
 }
@@ -338,11 +706,18 @@
     if (mNumFramesReceived > 0 &&
         timestampUs - mLastFrameTimestampUs > mGlitchDurationThresholdUs) {
         if (mNumGlitches % 10 == 0) {  // Don't spam the log
-            LOGW("Long delay detected in video recording");
+            LOGV("Long delay detected in video recording");
         }
         ++mNumGlitches;
     }
 
+    // May need to skip frame or modify timestamp. Currently implemented
+    // by the subclass CameraSourceTimeLapse.
+    if (skipCurrentFrame(timestampUs)) {
+        releaseOneRecordingFrame(data);
+        return;
+    }
+
     mLastFrameTimestampUs = timestampUs;
     if (mNumFramesReceived == 0) {
         mFirstFrameTimeUs = timestampUs;
@@ -367,4 +742,31 @@
     mFrameAvailableCondition.signal();
 }
 
+size_t CameraSource::getNumberOfVideoBuffers() const {
+    LOGV("getNumberOfVideoBuffers");
+    size_t nBuffers = 0;
+    int64_t token = IPCThreadState::self()->clearCallingIdentity();
+    if (mInitCheck == OK && mCamera != 0) {
+        nBuffers = mCamera->getNumberOfVideoBuffers();
+    }
+    IPCThreadState::self()->restoreCallingIdentity(token);
+    return nBuffers;
+}
+
+sp<IMemory> CameraSource::getVideoBuffer(size_t index) const {
+    LOGV("getVideoBuffer: %d", index);
+    sp<IMemory> buffer = 0;
+    int64_t token = IPCThreadState::self()->clearCallingIdentity();
+    if (mInitCheck == OK && mCamera != 0) {
+        buffer = mCamera->getVideoBuffer(index);
+    }
+    IPCThreadState::self()->restoreCallingIdentity(token);
+    return buffer;
+}
+
+bool CameraSource::isMetaDataStoredInVideoBuffers() const {
+    LOGV("isMetaDataStoredInVideoBuffers");
+    return mIsMetaDataStoredInVideoBuffers;
+}
+
 }  // namespace android
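Illustrative only, not part of the commit: a minimal sketch of how a caller on the
recorder side might use the reworked CameraSource::CreateFromCamera() factory above.
The helper name setUpVideoSource() and the concrete size/rate values are assumptions;
the initCheck() handling mirrors the check that start() now performs.

    #include <camera/ICamera.h>
    #include <camera/CameraParameters.h>         // for Size
    #include <surfaceflinger/Surface.h>
    #include <media/stagefright/CameraSource.h>

    using namespace android;

    // Hypothetical helper: build a CameraSource from an app-supplied camera + surface.
    static sp<CameraSource> setUpVideoSource(
            const sp<ICamera> &camera, const sp<Surface> &previewSurface) {
        Size videoSize;
        videoSize.width  = 1280;   // -1/-1 would keep the camera's current setting
        videoSize.height = 720;    // (see Create() above)

        CameraSource *source = CameraSource::CreateFromCamera(
                camera,
                0 /* cameraId, only used when camera == 0 */,
                videoSize,
                30 /* frames per second */,
                previewSurface,
                true /* request metadata in buffers; may not be honored */);

        // Setup failures are recorded in mInitCheck instead of aborting,
        // so callers must check initCheck() before start().
        if (source != NULL && source->initCheck() != OK) {
            delete source;
            return NULL;
        }
        return source;
    }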
diff --git a/media/libstagefright/CameraSourceTimeLapse.cpp b/media/libstagefright/CameraSourceTimeLapse.cpp
new file mode 100644
index 0000000..31b6ec9
--- /dev/null
+++ b/media/libstagefright/CameraSourceTimeLapse.cpp
@@ -0,0 +1,536 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "CameraSourceTimeLapse"
+
+#include <binder/IPCThreadState.h>
+#include <binder/MemoryBase.h>
+#include <binder/MemoryHeapBase.h>
+#include <media/stagefright/CameraSource.h>
+#include <media/stagefright/CameraSourceTimeLapse.h>
+#include <media/stagefright/MediaDebug.h>
+#include <media/stagefright/MetaData.h>
+#include <media/stagefright/YUVImage.h>
+#include <media/stagefright/YUVCanvas.h>
+#include <camera/Camera.h>
+#include <camera/CameraParameters.h>
+#include <ui/Rect.h>
+#include <utils/String8.h>
+#include <utils/Vector.h>
+#include "OMX_Video.h"
+#include <limits.h>
+
+namespace android {
+
+// static
+CameraSourceTimeLapse *CameraSourceTimeLapse::CreateFromCamera(
+        const sp<ICamera> &camera,
+        int32_t cameraId,
+        Size videoSize,
+        int32_t videoFrameRate,
+        const sp<Surface>& surface,
+        int64_t timeBetweenTimeLapseFrameCaptureUs) {
+
+    CameraSourceTimeLapse *source = new
+            CameraSourceTimeLapse(camera, cameraId,
+                videoSize, videoFrameRate, surface,
+                timeBetweenTimeLapseFrameCaptureUs);
+
+    if (source != NULL) {
+        if (source->initCheck() != OK) {
+            delete source;
+            return NULL;
+        }
+    }
+    return source;
+}
+
+CameraSourceTimeLapse::CameraSourceTimeLapse(
+        const sp<ICamera>& camera,
+        int32_t cameraId,
+        Size videoSize,
+        int32_t videoFrameRate,
+        const sp<Surface>& surface,
+        int64_t timeBetweenTimeLapseFrameCaptureUs)
+    : CameraSource(camera, cameraId, videoSize, videoFrameRate, surface, false),
+      mTimeBetweenTimeLapseFrameCaptureUs(timeBetweenTimeLapseFrameCaptureUs),
+      mTimeBetweenTimeLapseVideoFramesUs(1E6/videoFrameRate),
+      mLastTimeLapseFrameRealTimestampUs(0),
+      mSkipCurrentFrame(false) {
+
+    LOGD("starting time lapse mode: %lld us", mTimeBetweenTimeLapseFrameCaptureUs);
+    mVideoWidth = videoSize.width;
+    mVideoHeight = videoSize.height;
+
+    if (trySettingVideoSize(videoSize.width, videoSize.height)) {
+        mUseStillCameraForTimeLapse = false;
+    } else {
+        // TODO: Add a check to see that mTimeBetweenTimeLapseFrameCaptureUs is greater
+        // than the fastest rate at which the still camera can take pictures.
+        mUseStillCameraForTimeLapse = true;
+        CHECK(setPictureSizeToClosestSupported(videoSize.width, videoSize.height));
+        mNeedCropping = computeCropRectangleOffset();
+        mMeta->setInt32(kKeyWidth, videoSize.width);
+        mMeta->setInt32(kKeyHeight, videoSize.height);
+    }
+
+    // Initialize quick stop variables.
+    mQuickStop = false;
+    mForceRead = false;
+    mLastReadBufferCopy = NULL;
+    mStopWaitingForIdleCamera = false;
+}
+
+CameraSourceTimeLapse::~CameraSourceTimeLapse() {
+}
+
+void CameraSourceTimeLapse::startQuickReadReturns() {
+    Mutex::Autolock autoLock(mQuickStopLock);
+    LOGV("Enabling quick read returns");
+
+    // Enable quick stop mode.
+    mQuickStop = true;
+
+    if (mUseStillCameraForTimeLapse) {
+        // wake up the thread right away.
+        mTakePictureCondition.signal();
+    } else {
+        // Force dataCallbackTimestamp() coming from the video camera to not skip the
+        // next frame as we want read() to get a frame right away.
+        mForceRead = true;
+    }
+}
+
+bool CameraSourceTimeLapse::trySettingVideoSize(int32_t width, int32_t height) {
+    LOGV("trySettingVideoSize: %dx%d", width, height);
+    int64_t token = IPCThreadState::self()->clearCallingIdentity();
+    String8 s = mCamera->getParameters();
+
+    CameraParameters params(s);
+    Vector<Size> supportedSizes;
+    params.getSupportedVideoSizes(supportedSizes);
+    bool videoOutputSupported = false;
+    if (supportedSizes.size() == 0) {
+        params.getSupportedPreviewSizes(supportedSizes);
+    } else {
+        videoOutputSupported = true;
+    }
+
+    bool videoSizeSupported = false;
+    for (uint32_t i = 0; i < supportedSizes.size(); ++i) {
+        int32_t pictureWidth = supportedSizes[i].width;
+        int32_t pictureHeight = supportedSizes[i].height;
+
+        if ((pictureWidth == width) && (pictureHeight == height)) {
+            videoSizeSupported = true;
+        }
+    }
+
+    bool isSuccessful = false;
+    if (videoSizeSupported) {
+        LOGV("Video size (%d, %d) is supported", width, height);
+        if (videoOutputSupported) {
+            params.setVideoSize(width, height);
+        } else {
+            params.setPreviewSize(width, height);
+        }
+        if (mCamera->setParameters(params.flatten()) == OK) {
+            isSuccessful = true;
+        } else {
+            LOGE("Failed to set preview size to %dx%d", width, height);
+            isSuccessful = false;
+        }
+    }
+
+    IPCThreadState::self()->restoreCallingIdentity(token);
+    return isSuccessful;
+}
+
+bool CameraSourceTimeLapse::setPictureSizeToClosestSupported(int32_t width, int32_t height) {
+    LOGV("setPictureSizeToClosestSupported: %dx%d", width, height);
+    int64_t token = IPCThreadState::self()->clearCallingIdentity();
+    String8 s = mCamera->getParameters();
+    IPCThreadState::self()->restoreCallingIdentity(token);
+
+    CameraParameters params(s);
+    Vector<Size> supportedSizes;
+    params.getSupportedPictureSizes(supportedSizes);
+
+    int32_t minPictureSize = INT_MAX;
+    for (uint32_t i = 0; i < supportedSizes.size(); ++i) {
+        int32_t pictureWidth = supportedSizes[i].width;
+        int32_t pictureHeight = supportedSizes[i].height;
+
+        if ((pictureWidth >= width) && (pictureHeight >= height)) {
+            int32_t pictureSize = pictureWidth*pictureHeight;
+            if (pictureSize < minPictureSize) {
+                minPictureSize = pictureSize;
+                mPictureWidth = pictureWidth;
+                mPictureHeight = pictureHeight;
+            }
+        }
+    }
+    LOGV("Picture size = (%d, %d)", mPictureWidth, mPictureHeight);
+    return (minPictureSize != INT_MAX);
+}
+
+bool CameraSourceTimeLapse::computeCropRectangleOffset() {
+    if ((mPictureWidth == mVideoWidth) && (mPictureHeight == mVideoHeight)) {
+        return false;
+    }
+
+    CHECK((mPictureWidth > mVideoWidth) && (mPictureHeight > mVideoHeight));
+
+    int32_t widthDifference = mPictureWidth - mVideoWidth;
+    int32_t heightDifference = mPictureHeight - mVideoHeight;
+
+    mCropRectStartX = widthDifference/2;
+    mCropRectStartY = heightDifference/2;
+
+    LOGV("setting crop rectangle offset to (%d, %d)", mCropRectStartX, mCropRectStartY);
+
+    return true;
+}
+
+void CameraSourceTimeLapse::signalBufferReturned(MediaBuffer* buffer) {
+    Mutex::Autolock autoLock(mQuickStopLock);
+    if (mQuickStop && (buffer == mLastReadBufferCopy)) {
+        buffer->setObserver(NULL);
+        buffer->release();
+    } else {
+        return CameraSource::signalBufferReturned(buffer);
+    }
+}
+
+void createMediaBufferCopy(const MediaBuffer& sourceBuffer, int64_t frameTime, MediaBuffer **newBuffer) {
+    size_t sourceSize = sourceBuffer.size();
+    void* sourcePointer = sourceBuffer.data();
+
+    (*newBuffer) = new MediaBuffer(sourceSize);
+    memcpy((*newBuffer)->data(), sourcePointer, sourceSize);
+
+    (*newBuffer)->meta_data()->setInt64(kKeyTime, frameTime);
+}
+
+void CameraSourceTimeLapse::fillLastReadBufferCopy(MediaBuffer& sourceBuffer) {
+    int64_t frameTime;
+    CHECK(sourceBuffer.meta_data()->findInt64(kKeyTime, &frameTime));
+    createMediaBufferCopy(sourceBuffer, frameTime, &mLastReadBufferCopy);
+    mLastReadBufferCopy->add_ref();
+    mLastReadBufferCopy->setObserver(this);
+}
+
+status_t CameraSourceTimeLapse::read(
+        MediaBuffer **buffer, const ReadOptions *options) {
+    if (mLastReadBufferCopy == NULL) {
+        mLastReadStatus = CameraSource::read(buffer, options);
+
+        // mQuickStop may have turned to true while read was blocked. Make a copy of
+        // the buffer in that case.
+        Mutex::Autolock autoLock(mQuickStopLock);
+        if (mQuickStop && *buffer) {
+            fillLastReadBufferCopy(**buffer);
+        }
+        return mLastReadStatus;
+    } else {
+        (*buffer) = mLastReadBufferCopy;
+        (*buffer)->add_ref();
+        return mLastReadStatus;
+    }
+}
+
+// static
+void *CameraSourceTimeLapse::ThreadTimeLapseWrapper(void *me) {
+    CameraSourceTimeLapse *source = static_cast<CameraSourceTimeLapse *>(me);
+    source->threadTimeLapseEntry();
+    return NULL;
+}
+
+void CameraSourceTimeLapse::threadTimeLapseEntry() {
+    while (mStarted) {
+        {
+            Mutex::Autolock autoLock(mCameraIdleLock);
+            if (!mCameraIdle) {
+                mCameraIdleCondition.wait(mCameraIdleLock);
+            }
+            CHECK(mCameraIdle);
+            mCameraIdle = false;
+        }
+
+        // Even if mQuickStop == true we need to take one more picture
+        // as a read() may be blocked, waiting for a frame to become available.
+        // After this takePicture, if mQuickStop == true, we can safely exit
+        // this thread as read() will make a copy of this last frame and keep
+        // returning it in the quick stop mode.
+        Mutex::Autolock autoLock(mQuickStopLock);
+        CHECK_EQ(OK, mCamera->takePicture());
+        if (mQuickStop) {
+            LOGV("threadTimeLapseEntry: Exiting due to mQuickStop = true");
+            return;
+        }
+        mTakePictureCondition.waitRelative(mQuickStopLock,
+                mTimeBetweenTimeLapseFrameCaptureUs * 1000);
+    }
+    LOGV("threadTimeLapseEntry: Exiting due to mStarted = false");
+}
+
+void CameraSourceTimeLapse::startCameraRecording() {
+    if (mUseStillCameraForTimeLapse) {
+        LOGV("start time lapse recording using still camera");
+
+        int64_t token = IPCThreadState::self()->clearCallingIdentity();
+        String8 s = mCamera->getParameters();
+
+        CameraParameters params(s);
+        params.setPictureSize(mPictureWidth, mPictureHeight);
+        mCamera->setParameters(params.flatten());
+        mCameraIdle = true;
+        mStopWaitingForIdleCamera = false;
+
+        // disable shutter sound and play the recording sound.
+        mCamera->sendCommand(CAMERA_CMD_ENABLE_SHUTTER_SOUND, 0, 0);
+        mCamera->sendCommand(CAMERA_CMD_PLAY_RECORDING_SOUND, 0, 0);
+        IPCThreadState::self()->restoreCallingIdentity(token);
+
+        // create a thread which takes pictures in a loop
+        pthread_attr_t attr;
+        pthread_attr_init(&attr);
+        pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_JOINABLE);
+
+        pthread_create(&mThreadTimeLapse, &attr, ThreadTimeLapseWrapper, this);
+        pthread_attr_destroy(&attr);
+    } else {
+        LOGV("start time lapse recording using video camera");
+        CHECK_EQ(OK, mCamera->startRecording());
+    }
+}
+
+void CameraSourceTimeLapse::stopCameraRecording() {
+    if (mUseStillCameraForTimeLapse) {
+        void *dummy;
+        pthread_join(mThreadTimeLapse, &dummy);
+
+        // Last takePicture may still be underway. Wait for the camera to get
+        // idle.
+        Mutex::Autolock autoLock(mCameraIdleLock);
+        mStopWaitingForIdleCamera = true;
+        if (!mCameraIdle) {
+            mCameraIdleCondition.wait(mCameraIdleLock);
+        }
+        CHECK(mCameraIdle);
+        mCamera->setListener(NULL);
+
+        // play the recording sound.
+        mCamera->sendCommand(CAMERA_CMD_PLAY_RECORDING_SOUND, 0, 0);
+    } else {
+        mCamera->setListener(NULL);
+        mCamera->stopRecording();
+    }
+    if (mLastReadBufferCopy) {
+        mLastReadBufferCopy->release();
+        mLastReadBufferCopy = NULL;
+    }
+}
+
+void CameraSourceTimeLapse::releaseRecordingFrame(const sp<IMemory>& frame) {
+    if (!mUseStillCameraForTimeLapse &&
+        mCamera != NULL) {
+        mCamera->releaseRecordingFrame(frame);
+    }
+}
+
+sp<IMemory> CameraSourceTimeLapse::createIMemoryCopy(const sp<IMemory> &source_data) {
+    size_t source_size = source_data->size();
+    void* source_pointer = source_data->pointer();
+
+    sp<MemoryHeapBase> newMemoryHeap = new MemoryHeapBase(source_size);
+    sp<MemoryBase> newMemory = new MemoryBase(newMemoryHeap, 0, source_size);
+    memcpy(newMemory->pointer(), source_pointer, source_size);
+    return newMemory;
+}
+
+// Allocates IMemory of final type MemoryBase with the given size.
+sp<IMemory> allocateIMemory(size_t size) {
+    sp<MemoryHeapBase> newMemoryHeap = new MemoryHeapBase(size);
+    sp<MemoryBase> newMemory = new MemoryBase(newMemoryHeap, 0, size);
+    return newMemory;
+}
+
+// static
+void *CameraSourceTimeLapse::ThreadStartPreviewWrapper(void *me) {
+    CameraSourceTimeLapse *source = static_cast<CameraSourceTimeLapse *>(me);
+    source->threadStartPreview();
+    return NULL;
+}
+
+void CameraSourceTimeLapse::threadStartPreview() {
+    CHECK_EQ(OK, mCamera->startPreview());
+    Mutex::Autolock autoLock(mCameraIdleLock);
+    mCameraIdle = true;
+    mCameraIdleCondition.signal();
+}
+
+void CameraSourceTimeLapse::restartPreview() {
+    // Start this in a different thread, so that the dataCallback can return
+    LOGV("restartPreview");
+    pthread_attr_t attr;
+    pthread_attr_init(&attr);
+    pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_DETACHED);
+
+    pthread_t threadPreview;
+    pthread_create(&threadPreview, &attr, ThreadStartPreviewWrapper, this);
+    pthread_attr_destroy(&attr);
+}
+
+sp<IMemory> CameraSourceTimeLapse::cropYUVImage(const sp<IMemory> &source_data) {
+    // find the YUV format
+    int32_t srcFormat;
+    CHECK(mMeta->findInt32(kKeyColorFormat, &srcFormat));
+    YUVImage::YUVFormat yuvFormat;
+    if (srcFormat == OMX_COLOR_FormatYUV420SemiPlanar) {
+        yuvFormat = YUVImage::YUV420SemiPlanar;
+    } else {
+        CHECK_EQ(srcFormat, OMX_COLOR_FormatYUV420Planar);
+        yuvFormat = YUVImage::YUV420Planar;
+    }
+
+    // allocate memory for cropped image and set up a canvas using it.
+    sp<IMemory> croppedImageMemory = allocateIMemory(
+            YUVImage::bufferSize(yuvFormat, mVideoWidth, mVideoHeight));
+    YUVImage yuvImageCropped(yuvFormat,
+            mVideoWidth, mVideoHeight,
+            (uint8_t *)croppedImageMemory->pointer());
+    YUVCanvas yuvCanvasCrop(yuvImageCropped);
+
+    YUVImage yuvImageSource(yuvFormat,
+            mPictureWidth, mPictureHeight,
+            (uint8_t *)source_data->pointer());
+    yuvCanvasCrop.CopyImageRect(
+            Rect(mCropRectStartX, mCropRectStartY,
+                mCropRectStartX + mVideoWidth,
+                mCropRectStartY + mVideoHeight),
+            0, 0,
+            yuvImageSource);
+
+    return croppedImageMemory;
+}
+
+void CameraSourceTimeLapse::dataCallback(int32_t msgType, const sp<IMemory> &data) {
+    if (msgType == CAMERA_MSG_COMPRESSED_IMAGE) {
+        // takePicture will complete after this callback, so restart preview.
+        restartPreview();
+        return;
+    }
+    if (msgType != CAMERA_MSG_RAW_IMAGE) {
+        return;
+    }
+
+    LOGV("dataCallback for timelapse still frame");
+    CHECK_EQ(true, mUseStillCameraForTimeLapse);
+
+    int64_t timestampUs;
+    if (mNumFramesReceived == 0) {
+        timestampUs = mStartTimeUs;
+    } else {
+        timestampUs = mLastFrameTimestampUs + mTimeBetweenTimeLapseVideoFramesUs;
+    }
+
+    if (mNeedCropping) {
+        sp<IMemory> croppedImageData = cropYUVImage(data);
+        dataCallbackTimestamp(timestampUs, msgType, croppedImageData);
+    } else {
+        sp<IMemory> dataCopy = createIMemoryCopy(data);
+        dataCallbackTimestamp(timestampUs, msgType, dataCopy);
+    }
+}
+
+bool CameraSourceTimeLapse::skipCurrentFrame(int64_t timestampUs) {
+    if (mSkipCurrentFrame) {
+        mSkipCurrentFrame = false;
+        return true;
+    } else {
+        return false;
+    }
+}
+
+bool CameraSourceTimeLapse::skipFrameAndModifyTimeStamp(int64_t *timestampUs) {
+    if (!mUseStillCameraForTimeLapse) {
+        if (mLastTimeLapseFrameRealTimestampUs == 0) {
+            // First time lapse frame. Initialize mLastTimeLapseFrameRealTimestampUs
+            // to current time (timestampUs) and save frame data.
+            LOGV("dataCallbackTimestamp timelapse: initial frame");
+
+            mLastTimeLapseFrameRealTimestampUs = *timestampUs;
+            return false;
+        }
+
+        {
+            Mutex::Autolock autoLock(mQuickStopLock);
+
+            // mForceRead may be set to true by startQuickReadReturns(). In that
+            // case don't skip this frame.
+            if (mForceRead) {
+                LOGV("dataCallbackTimestamp timelapse: forced read");
+                mForceRead = false;
+                *timestampUs = mLastFrameTimestampUs;
+                return false;
+            }
+        }
+
+        if (*timestampUs <
+                (mLastTimeLapseFrameRealTimestampUs + mTimeBetweenTimeLapseFrameCaptureUs)) {
+            // Skip all frames from last encoded frame until
+            // sufficient time (mTimeBetweenTimeLapseFrameCaptureUs) has passed.
+            // Tell the camera to release its recording frame and return.
+            LOGV("dataCallbackTimestamp timelapse: skipping intermediate frame");
+            return true;
+        } else {
+            // Desired frame has arrived after mTimeBetweenTimeLapseFrameCaptureUs time:
+            // - Reset mLastTimeLapseFrameRealTimestampUs to current time.
+            // - Artificially modify timestampUs to be one frame time (1/framerate) ahead
+            // of the last encoded frame's time stamp.
+            LOGV("dataCallbackTimestamp timelapse: got timelapse frame");
+
+            mLastTimeLapseFrameRealTimestampUs = *timestampUs;
+            *timestampUs = mLastFrameTimestampUs + mTimeBetweenTimeLapseVideoFramesUs;
+            return false;
+        }
+    }
+    return false;
+}
+
+void CameraSourceTimeLapse::dataCallbackTimestamp(int64_t timestampUs, int32_t msgType,
+            const sp<IMemory> &data) {
+    if (!mUseStillCameraForTimeLapse) {
+        mSkipCurrentFrame = skipFrameAndModifyTimeStamp(&timestampUs);
+    } else {
+        Mutex::Autolock autoLock(mCameraIdleLock);
+        // If we are using the still camera and stop() has been called, it may
+        // be waiting for the camera to get idle. In that case return
+        // immediately. Calling CameraSource::dataCallbackTimestamp() will lead
+        // to a deadlock since it tries to access CameraSource::mLock which in
+        // this case is held by CameraSource::stop() currently waiting for the
+        // camera to get idle. And camera will not get idle until this call
+        // returns.
+        if (mStopWaitingForIdleCamera) {
+            return;
+        }
+    }
+    CameraSource::dataCallbackTimestamp(timestampUs, msgType, data);
+}
+
+}  // namespace android
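Illustrative only, not part of the commit: in the video-camera path,
skipFrameAndModifyTimeStamp() above reduces to a simple admission rule — drop
incoming frames until mTimeBetweenTimeLapseFrameCaptureUs of real time has
passed, then keep one frame and re-stamp it one video-frame interval after the
previous output frame. A standalone restatement of that rule, with hypothetical
names, for clarity:

    #include <stdint.h>

    struct TimeLapseFilter {
        int64_t captureIntervalUs;   // mTimeBetweenTimeLapseFrameCaptureUs
        int64_t videoFrameUs;        // mTimeBetweenTimeLapseVideoFramesUs (1e6 / fps)
        int64_t lastRealUs;          // last admitted frame's real timestamp (0 = none yet)
        int64_t lastOutputUs;        // last timestamp handed downstream

        // Returns true if the frame should be dropped; otherwise rewrites
        // *timestampUs to the artificial, time-compressed timestamp.
        bool filter(int64_t *timestampUs) {
            if (lastRealUs == 0) {                       // first frame: always keep
                lastRealUs = lastOutputUs = *timestampUs;
                return false;
            }
            if (*timestampUs < lastRealUs + captureIntervalUs) {
                return true;                             // too soon, skip it
            }
            lastRealUs = *timestampUs;                   // reset the real-time clock
            lastOutputUs += videoFrameUs;                // advance playback by one frame
            *timestampUs = lastOutputUs;
            return false;
        }
    };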
diff --git a/media/libstagefright/DRMExtractor.cpp b/media/libstagefright/DRMExtractor.cpp
index aa9ad23..3c98932 100644
--- a/media/libstagefright/DRMExtractor.cpp
+++ b/media/libstagefright/DRMExtractor.cpp
@@ -280,18 +280,23 @@
         if (gDrmManagerClient == NULL) {
             gDrmManagerClient = new DrmManagerClient();
         }
+
+        if (gDrmManagerClient == NULL) {
+            return false;
+        }
     }
 
     DecryptHandle *decryptHandle = source->DrmInitialization(gDrmManagerClient);
 
     if (decryptHandle != NULL) {
         if (decryptHandle->decryptApiType == DecryptApiType::CONTAINER_BASED) {
-            *mimeType = String8("drm+container_based+");
+            *mimeType = String8("drm+container_based+") + decryptHandle->mimeType;
         } else if (decryptHandle->decryptApiType == DecryptApiType::ELEMENTARY_STREAM_BASED) {
-            *mimeType = String8("drm+es_based+");
+            *mimeType = String8("drm+es_based+") + decryptHandle->mimeType;
+        } else if (decryptHandle->decryptApiType == DecryptApiType::WV_BASED) {
+            *mimeType = MEDIA_MIMETYPE_CONTAINER_WVM;
+            LOGW("SniffWVM: found match\n");
         }
-
-        *mimeType += decryptHandle->mimeType;
         *confidence = 10.0f;
 
         return true;
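Illustrative only, not part of the commit: the pattern visible in the hunk above
is that a sniffer inspects the DataSource, fills in *mimeType and *confidence,
and returns true on a match, with the DRM sniffer claiming a high confidence
(10.0f) so it wins over the ordinary container sniffers. A hypothetical,
simplified sniffer in the same spirit (the name SniffExample and its magic bytes
are made up; the real registration signature may take additional parameters):

    static bool SniffExample(
            const sp<DataSource> &source, String8 *mimeType, float *confidence) {
        uint8_t magic[4];
        if (source->readAt(0, magic, sizeof(magic)) < (ssize_t)sizeof(magic)) {
            return false;                  // can't even read the first four bytes
        }
        if (memcmp(magic, "EXMP", 4) != 0) {
            return false;                  // not our (made-up) container
        }
        *mimeType = String8("application/x-example");
        *confidence = 10.0f;               // same priority the DRM sniffer claims
        return true;
    }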
diff --git a/media/libstagefright/DataSource.cpp b/media/libstagefright/DataSource.cpp
index 0b8997c..ee0d792 100644
--- a/media/libstagefright/DataSource.cpp
+++ b/media/libstagefright/DataSource.cpp
@@ -36,7 +36,7 @@
 
 namespace android {
 
-bool DataSource::getUInt16(off_t offset, uint16_t *x) {
+bool DataSource::getUInt16(off64_t offset, uint16_t *x) {
     *x = 0;
 
     uint8_t byte[2];
@@ -49,7 +49,7 @@
     return true;
 }
 
-status_t DataSource::getSize(off_t *size) {
+status_t DataSource::getSize(off64_t *size) {
     *size = 0;
 
     return ERROR_UNSUPPORTED;
diff --git a/media/libstagefright/FileSource.cpp b/media/libstagefright/FileSource.cpp
index b46d8d0..98d5b50 100644
--- a/media/libstagefright/FileSource.cpp
+++ b/media/libstagefright/FileSource.cpp
@@ -16,12 +16,16 @@
 
 #include <media/stagefright/FileSource.h>
 #include <media/stagefright/MediaDebug.h>
+#include <sys/types.h>
+#include <unistd.h>
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <fcntl.h>
 
 namespace android {
 
 FileSource::FileSource(const char *filename)
-    : mFile(fopen(filename, "rb")),
-      mFd(fileno(mFile)),
+    : mFd(-1),
       mOffset(0),
       mLength(-1),
       mDecryptHandle(NULL),
@@ -29,11 +33,12 @@
       mDrmBufOffset(0),
       mDrmBufSize(0),
       mDrmBuf(NULL){
+
+    mFd = open(filename, O_LARGEFILE | O_RDONLY);
 }
 
 FileSource::FileSource(int fd, int64_t offset, int64_t length)
-    : mFile(fdopen(fd, "rb")),
-      mFd(fd),
+    : mFd(fd),
       mOffset(offset),
       mLength(length),
       mDecryptHandle(NULL),
@@ -46,26 +51,23 @@
 }
 
 FileSource::~FileSource() {
-    if (mFile != NULL) {
-        fclose(mFile);
-        mFile = NULL;
+    if (mFd >= 0) {
+        close(mFd);
+        mFd = -1;
     }
 
     if (mDrmBuf != NULL) {
         delete[] mDrmBuf;
         mDrmBuf = NULL;
     }
-    if (mDecryptHandle != NULL) {
-        mDrmManagerClient->closeDecryptSession(mDecryptHandle);
-    }
 }
 
 status_t FileSource::initCheck() const {
-    return mFile != NULL ? OK : NO_INIT;
+    return mFd >= 0 ? OK : NO_INIT;
 }
 
-ssize_t FileSource::readAt(off_t offset, void *data, size_t size) {
-    if (mFile == NULL) {
+ssize_t FileSource::readAt(off64_t offset, void *data, size_t size) {
+    if (mFd < 0) {
         return NO_INIT;
     }
 
@@ -85,18 +87,18 @@
             == mDecryptHandle->decryptApiType) {
         return readAtDRM(offset, data, size);
    } else {
-        int err = fseeko(mFile, offset + mOffset, SEEK_SET);
-        if (err < 0) {
+        off64_t result = lseek64(mFd, offset + mOffset, SEEK_SET);
+        if (result == -1) {
             LOGE("seek to %lld failed", offset + mOffset);
             return UNKNOWN_ERROR;
         }
 
-        return fread(data, 1, size, mFile);
+        return ::read(mFd, data, size);
     }
 }
 
-status_t FileSource::getSize(off_t *size) {
-    if (mFile == NULL) {
+status_t FileSource::getSize(off64_t *size) {
+    if (mFd < 0) {
         return NO_INIT;
     }
 
@@ -106,14 +108,17 @@
         return OK;
     }
 
-    fseek(mFile, 0, SEEK_END);
-    *size = ftello(mFile);
+    *size = lseek64(mFd, 0, SEEK_END);
 
     return OK;
 }
 
 DecryptHandle* FileSource::DrmInitialization(DrmManagerClient* client) {
+    if (client == NULL) {
+        return NULL;
+    }
     mDrmManagerClient = client;
+
     if (mDecryptHandle == NULL) {
         mDecryptHandle = mDrmManagerClient->openDecryptSession(
                 mFd, mOffset, mLength);
@@ -132,7 +137,7 @@
     *client = mDrmManagerClient;
 }
 
-ssize_t FileSource::readAtDRM(off_t offset, void *data, size_t size) {
+ssize_t FileSource::readAtDRM(off64_t offset, void *data, size_t size) {
     size_t DRM_CACHE_SIZE = 1024;
     if (mDrmBuf == NULL) {
         mDrmBuf = new unsigned char[DRM_CACHE_SIZE];
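Illustrative only, not part of the commit: FileSource now reads through a raw
file descriptor opened with O_LARGEFILE and seeks with lseek64(), so offsets
beyond 2 GB work on 32-bit builds. A minimal standalone version of that
read-at-offset pattern follows (the function name readAt64 is made up); pread64()
would do the same without moving the shared file position, which matters if
several readers ever share one fd:

    #include <fcntl.h>
    #include <sys/types.h>
    #include <unistd.h>

    // Read up to 'size' bytes at absolute 'offset'; returns bytes read or -1.
    static ssize_t readAt64(int fd, off64_t offset, void *data, size_t size) {
        if (lseek64(fd, offset, SEEK_SET) == (off64_t)-1) {
            return -1;                      // bad fd or unseekable offset
        }
        return read(fd, data, size);        // short reads are possible, as usual
    }

    // Usage sketch:
    //   int fd = open("/sdcard/clip.mp4", O_LARGEFILE | O_RDONLY);
    //   uint8_t header[16];
    //   ssize_t n = readAt64(fd, 0, header, sizeof(header));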
diff --git a/media/libstagefright/HTTPStream.cpp b/media/libstagefright/HTTPStream.cpp
index ccc6a34..e7f00aa 100644
--- a/media/libstagefright/HTTPStream.cpp
+++ b/media/libstagefright/HTTPStream.cpp
@@ -36,7 +36,7 @@
 namespace android {
 
 // static
-const char *HTTPStream::kStatusKey = ":status:";
+const char *HTTPStream::kStatusKey = ":status:";  // MUST be lowercase.
 
 HTTPStream::HTTPStream()
     : mState(READY),
@@ -220,7 +220,7 @@
         return err;
     }
 
-    mHeaders.add(string(kStatusKey), string(line));
+    mHeaders.add(AString(kStatusKey), AString(line));
 
     char *spacePos = strchr(line, ' ');
     if (spacePos == NULL) {
@@ -264,7 +264,10 @@
 
         char *colonPos = strchr(line, ':');
         if (colonPos == NULL) {
-            mHeaders.add(string(line), string());
+            AString key = line;
+            key.tolower();
+
+            mHeaders.add(key, AString());
         } else {
             char *end_of_key = colonPos;
             while (end_of_key > line && isspace(end_of_key[-1])) {
@@ -278,7 +281,10 @@
 
             *end_of_key = '\0';
 
-            mHeaders.add(string(line), string(start_of_value));
+            AString key = line;
+            key.tolower();
+
+            mHeaders.add(key, AString(start_of_value));
         }
     }
 
@@ -314,8 +320,11 @@
     return (ssize_t)total;
 }
 
-bool HTTPStream::find_header_value(const string &key, string *value) const {
-    ssize_t index = mHeaders.indexOfKey(key);
+bool HTTPStream::find_header_value(const AString &key, AString *value) const {
+    AString key_lower = key;
+    key_lower.tolower();
+
+    ssize_t index = mHeaders.indexOfKey(key_lower);
     if (index < 0) {
         value->clear();
         return false;
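Illustrative only, not part of the commit: header keys are now stored
lower-cased and the lookup key is lower-cased on the way in, so
find_header_value() matches regardless of how the server capitalizes its
headers. A usage sketch (variable names are made up, and the surrounding
connect/send/receive calls are assumed to have succeeded):

    AString contentLength;
    if (httpStream.find_header_value("Content-Length", &contentLength)) {
        // Also matches "content-length: 1234", "CONTENT-LENGTH: 1234", etc.
        long long size = strtoll(contentLength.c_str(), NULL, 10);
    }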
diff --git a/media/libstagefright/JPEGSource.cpp b/media/libstagefright/JPEGSource.cpp
index ec81097..e818115 100644
--- a/media/libstagefright/JPEGSource.cpp
+++ b/media/libstagefright/JPEGSource.cpp
@@ -142,7 +142,7 @@
     mWidth = 0;
     mHeight = 0;
 
-    off_t i = 0;
+    off64_t i = 0;
 
     uint16_t soi;
     if (!mSource->getUInt16(i, &soi)) {
diff --git a/media/libstagefright/MP3Extractor.cpp b/media/libstagefright/MP3Extractor.cpp
index b15c720..0bb3a86 100644
--- a/media/libstagefright/MP3Extractor.cpp
+++ b/media/libstagefright/MP3Extractor.cpp
@@ -21,6 +21,8 @@
 #include "include/MP3Extractor.h"
 
 #include "include/ID3.h"
+#include "include/VBRISeeker.h"
+#include "include/XINGSeeker.h"
 
 #include <media/stagefright/foundation/AMessage.h>
 #include <media/stagefright/DataSource.h>
@@ -42,10 +44,11 @@
 // Yes ... there are things that must indeed match...
 static const uint32_t kMask = 0xfffe0cc0;
 
-static bool get_mp3_frame_size(
+// static
+bool MP3Extractor::get_mp3_frame_size(
         uint32_t header, size_t *frame_size,
-        int *out_sampling_rate = NULL, int *out_channels = NULL,
-        int *out_bitrate = NULL) {
+        int *out_sampling_rate, int *out_channels,
+        int *out_bitrate) {
     *frame_size = 0;
 
     if (out_sampling_rate) {
@@ -178,136 +181,13 @@
     return true;
 }
 
-static bool parse_xing_header(
-        const sp<DataSource> &source, off_t first_frame_pos,
-        int32_t *frame_number = NULL, int32_t *byte_number = NULL,
-        char *table_of_contents = NULL, int32_t *quality_indicator = NULL,
-        int64_t *duration = NULL) {
-
-    if (frame_number) {
-        *frame_number = 0;
-    }
-    if (byte_number) {
-        *byte_number = 0;
-    }
-    if (table_of_contents) {
-        table_of_contents[0] = 0;
-    }
-    if (quality_indicator) {
-        *quality_indicator = 0;
-    }
-    if (duration) {
-        *duration = 0;
-    }
-
-    uint8_t buffer[4];
-    int offset = first_frame_pos;
-    if (source->readAt(offset, &buffer, 4) < 4) { // get header
-        return false;
-    }
-    offset += 4;
-
-    uint8_t id, layer, sr_index, mode;
-    layer = (buffer[1] >> 1) & 3;
-    id = (buffer[1] >> 3) & 3;
-    sr_index = (buffer[2] >> 2) & 3;
-    mode = (buffer[3] >> 6) & 3;
-    if (layer == 0) {
-        return false;
-    }
-    if (id == 1) {
-        return false;
-    }
-    if (sr_index == 3) {
-        return false;
-    }
-    // determine offset of XING header
-    if(id&1) { // mpeg1
-        if (mode != 3) offset += 32;
-        else offset += 17;
-    } else { // mpeg2
-        if (mode != 3) offset += 17;
-        else offset += 9;
-    }
-
-    if (source->readAt(offset, &buffer, 4) < 4) { // XING header ID
-        return false;
-    }
-    offset += 4;
-    // Check XING ID
-    if ((buffer[0] != 'X') || (buffer[1] != 'i')
-                || (buffer[2] != 'n') || (buffer[3] != 'g')) {
-        if ((buffer[0] != 'I') || (buffer[1] != 'n')
-                    || (buffer[2] != 'f') || (buffer[3] != 'o')) {
-            return false;
-        }
-    }
-
-    if (source->readAt(offset, &buffer, 4) < 4) { // flags
-        return false;
-    }
-    offset += 4;
-    uint32_t flags = U32_AT(buffer);
-
-    if (flags & 0x0001) {  // Frames field is present
-        if (source->readAt(offset, buffer, 4) < 4) {
-             return false;
-        }
-        if (frame_number) {
-           *frame_number = U32_AT(buffer);
-        }
-        int32_t frame = U32_AT(buffer);
-        // Samples per Frame: 1. index = MPEG Version ID, 2. index = Layer
-        const int samplesPerFrames[2][3] =
-        {
-            { 384, 1152, 576  }, // MPEG 2, 2.5: layer1, layer2, layer3
-            { 384, 1152, 1152 }, // MPEG 1: layer1, layer2, layer3
-        };
-        // sampling rates in hertz: 1. index = MPEG Version ID, 2. index = sampling rate index
-        const int samplingRates[4][3] =
-        {
-            { 11025, 12000, 8000,  },    // MPEG 2.5
-            { 0,     0,     0,     },    // reserved
-            { 22050, 24000, 16000, },    // MPEG 2
-            { 44100, 48000, 32000, }     // MPEG 1
-        };
-        if (duration) {
-            *duration = (int64_t)frame * samplesPerFrames[id&1][3-layer] * 1000000LL
-                / samplingRates[id][sr_index];
-        }
-        offset += 4;
-    }
-    if (flags & 0x0002) {  // Bytes field is present
-        if (byte_number) {
-            if (source->readAt(offset, buffer, 4) < 4) {
-                return false;
-            }
-            *byte_number = U32_AT(buffer);
-        }
-        offset += 4;
-    }
-    if (flags & 0x0004) {  // TOC field is present
-       if (table_of_contents) {
-            if (source->readAt(offset + 1, table_of_contents, 99) < 99) {
-                return false;
-            }
-        }
-        offset += 100;
-    }
-    if (flags & 0x0008) {  // Quality indicator field is present
-        if (quality_indicator) {
-            if (source->readAt(offset, buffer, 4) < 4) {
-                return false;
-            }
-            *quality_indicator = U32_AT(buffer);
-        }
-    }
-    return true;
-}
-
 static bool Resync(
         const sp<DataSource> &source, uint32_t match_header,
-        off_t *inout_pos, uint32_t *out_header) {
+        off64_t *inout_pos, off64_t *post_id3_pos, uint32_t *out_header) {
+    if (post_id3_pos != NULL) {
+        *post_id3_pos = 0;
+    }
+
     if (*inout_pos == 0) {
         // Skip an optional ID3 header if syncing at the very beginning
         // of the datasource.
@@ -340,9 +220,13 @@
             LOGV("skipped ID3 tag, new starting offset is %ld (0x%08lx)",
                  *inout_pos, *inout_pos);
         }
+
+        if (post_id3_pos != NULL) {
+            *post_id3_pos = *inout_pos;
+        }
     }
 
-    off_t pos = *inout_pos;
+    off64_t pos = *inout_pos;
     bool valid = false;
     do {
         if (pos >= *inout_pos + 128 * 1024) {
@@ -365,8 +249,9 @@
 
         size_t frame_size;
         int sample_rate, num_channels, bitrate;
-        if (!get_mp3_frame_size(header, &frame_size,
-                               &sample_rate, &num_channels, &bitrate)) {
+        if (!MP3Extractor::get_mp3_frame_size(
+                    header, &frame_size,
+                    &sample_rate, &num_channels, &bitrate)) {
             ++pos;
             continue;
         }
@@ -376,7 +261,7 @@
         // We found what looks like a valid frame,
         // now find its successors.
 
-        off_t test_pos = pos + frame_size;
+        off64_t test_pos = pos + frame_size;
 
         valid = true;
         for (int j = 0; j < 3; ++j) {
@@ -396,7 +281,8 @@
             }
 
             size_t test_frame_size;
-            if (!get_mp3_frame_size(test_header, &test_frame_size)) {
+            if (!MP3Extractor::get_mp3_frame_size(
+                        test_header, &test_frame_size)) {
                 valid = false;
                 break;
             }
@@ -426,8 +312,8 @@
 public:
     MP3Source(
             const sp<MetaData> &meta, const sp<DataSource> &source,
-            off_t first_frame_pos, uint32_t fixed_header,
-            int32_t byte_number, const char *table_of_contents);
+            off64_t first_frame_pos, uint32_t fixed_header,
+            const sp<MP3Seeker> &seeker);
 
     virtual status_t start(MetaData *params = NULL);
     virtual status_t stop();
@@ -443,14 +329,12 @@
 private:
     sp<MetaData> mMeta;
     sp<DataSource> mDataSource;
-    off_t mFirstFramePos;
+    off64_t mFirstFramePos;
     uint32_t mFixedHeader;
-    off_t mCurrentPos;
+    off64_t mCurrentPos;
     int64_t mCurrentTimeUs;
     bool mStarted;
-    int32_t mByteNumber; // total number of bytes in this MP3
-    // TOC entries in XING header. Skip the first one since it's always 0.
-    char mTableOfContents[99];
+    sp<MP3Seeker> mSeeker;
     MediaBufferGroup *mGroup;
 
     MP3Source(const MP3Source &);
@@ -462,25 +346,28 @@
     : mInitCheck(NO_INIT),
       mDataSource(source),
       mFirstFramePos(-1),
-      mFixedHeader(0),
-      mByteNumber(0) {
-    off_t pos = 0;
+      mFixedHeader(0) {
+    off64_t pos = 0;
+    off64_t post_id3_pos;
     uint32_t header;
     bool success;
 
     int64_t meta_offset;
     uint32_t meta_header;
+    int64_t meta_post_id3_offset;
     if (meta != NULL
             && meta->findInt64("offset", &meta_offset)
-            && meta->findInt32("header", (int32_t *)&meta_header)) {
+            && meta->findInt32("header", (int32_t *)&meta_header)
+            && meta->findInt64("post-id3-offset", &meta_post_id3_offset)) {
         // The sniffer has already done all the hard work for us, simply
         // accept its judgement.
-        pos = (off_t)meta_offset;
+        pos = (off64_t)meta_offset;
         header = meta_header;
+        post_id3_pos = (off64_t)meta_post_id3_offset;
 
         success = true;
     } else {
-        success = Resync(mDataSource, 0, &pos, &header);
+        success = Resync(mDataSource, 0, &pos, &post_id3_pos, &header);
     }
 
     if (!success) {
@@ -505,21 +392,27 @@
     mMeta->setInt32(kKeyBitRate, bitrate * 1000);
     mMeta->setInt32(kKeyChannelCount, num_channels);
 
-    int64_t duration;
-    parse_xing_header(
-            mDataSource, mFirstFramePos, NULL, &mByteNumber,
-            mTableOfContents, NULL, &duration);
-    if (duration > 0) {
-        mMeta->setInt64(kKeyDuration, duration);
-    } else {
-        off_t fileSize;
+    mSeeker = XINGSeeker::CreateFromSource(mDataSource, mFirstFramePos);
+
+    if (mSeeker == NULL) {
+        mSeeker = VBRISeeker::CreateFromSource(mDataSource, post_id3_pos);
+    }
+
+    int64_t durationUs;
+
+    if (mSeeker == NULL || !mSeeker->getDuration(&durationUs)) {
+        off64_t fileSize;
         if (mDataSource->getSize(&fileSize) == OK) {
-            mMeta->setInt64(
-                    kKeyDuration,
-                    8000LL * (fileSize - mFirstFramePos) / bitrate);
+            durationUs = 8000LL * (fileSize - mFirstFramePos) / bitrate;
+        } else {
+            durationUs = -1;
         }
     }
 
+    if (durationUs >= 0) {
+        mMeta->setInt64(kKeyDuration, durationUs);
+    }
+
     mInitCheck = OK;
 }
 
@@ -534,7 +427,7 @@
 
     return new MP3Source(
             mMeta, mDataSource, mFirstFramePos, mFixedHeader,
-            mByteNumber, mTableOfContents);
+            mSeeker);
 }
 
 sp<MetaData> MP3Extractor::getTrackMetaData(size_t index, uint32_t flags) {
@@ -549,8 +442,8 @@
 
 MP3Source::MP3Source(
         const sp<MetaData> &meta, const sp<DataSource> &source,
-        off_t first_frame_pos, uint32_t fixed_header,
-        int32_t byte_number, const char *table_of_contents)
+        off64_t first_frame_pos, uint32_t fixed_header,
+        const sp<MP3Seeker> &seeker)
     : mMeta(meta),
       mDataSource(source),
       mFirstFramePos(first_frame_pos),
@@ -558,9 +451,8 @@
       mCurrentPos(0),
       mCurrentTimeUs(0),
       mStarted(false),
-      mByteNumber(byte_number),
+      mSeeker(seeker),
       mGroup(NULL) {
-    memcpy (mTableOfContents, table_of_contents, sizeof(mTableOfContents));
 }
 
 MP3Source::~MP3Source() {
@@ -607,43 +499,21 @@
     int64_t seekTimeUs;
     ReadOptions::SeekMode mode;
     if (options != NULL && options->getSeekTo(&seekTimeUs, &mode)) {
-        int32_t bitrate;
-        if (!mMeta->findInt32(kKeyBitRate, &bitrate)) {
-            // bitrate is in bits/sec.
-            LOGI("no bitrate");
+        int64_t actualSeekTimeUs = seekTimeUs;
+        if (mSeeker == NULL
+                || !mSeeker->getOffsetForTime(&actualSeekTimeUs, &mCurrentPos)) {
+            int32_t bitrate;
+            if (!mMeta->findInt32(kKeyBitRate, &bitrate)) {
+                // bitrate is in bits/sec.
+                LOGI("no bitrate");
 
-            return ERROR_UNSUPPORTED;
-        }
-
-        mCurrentTimeUs = seekTimeUs;
-        // interpolate in TOC to get file seek point in bytes
-        int64_t duration;
-        if ((mByteNumber > 0) && (mTableOfContents[0] > 0)
-            && mMeta->findInt64(kKeyDuration, &duration)) {
-            float percent = (float)seekTimeUs * 100 / duration;
-            float fx;
-            if( percent <= 0.0f ) {
-                fx = 0.0f;
-            } else if( percent >= 100.0f ) {
-                fx = 256.0f;
-            } else {
-                int a = (int)percent;
-                float fa, fb;
-                if ( a == 0 ) {
-                    fa = 0.0f;
-                } else {
-                    fa = (float)mTableOfContents[a-1];
-                }
-                if ( a < 99 ) {
-                    fb = (float)mTableOfContents[a];
-                } else {
-                    fb = 256.0f;
-                }
-                fx = fa + (fb-fa)*(percent-a);
+                return ERROR_UNSUPPORTED;
             }
-            mCurrentPos = mFirstFramePos + (int)((1.0f/256.0f)*fx*mByteNumber);
-        } else {
+
+            mCurrentTimeUs = seekTimeUs;
             mCurrentPos = mFirstFramePos + seekTimeUs * bitrate / 8000000;
+        } else {
+            mCurrentTimeUs = actualSeekTimeUs;
         }
     }
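As a worked example of the linear fallback above: with kKeyBitRate reporting 128000 bits/sec, a seek to 10 seconds (10,000,000 us) maps to mFirstFramePos + 10,000,000 * 128000 / 8,000,000 = mFirstFramePos + 160,000 bytes, i.e. the byte position a constant-bitrate stream reaches at that time. The seeker path (XING/VBRI) is preferred because this estimate is only exact for true CBR streams.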
 
@@ -667,15 +537,16 @@
         uint32_t header = U32_AT((const uint8_t *)buffer->data());
 
         if ((header & kMask) == (mFixedHeader & kMask)
-            && get_mp3_frame_size(header, &frame_size, NULL, NULL, &bitrate)) {
+            && MP3Extractor::get_mp3_frame_size(
+                header, &frame_size, NULL, NULL, &bitrate)) {
             break;
         }
 
         // Lost sync.
         LOGV("lost sync! header = 0x%08x, old header = 0x%08x\n", header, mFixedHeader);
 
-        off_t pos = mCurrentPos;
-        if (!Resync(mDataSource, mFixedHeader, &pos, NULL)) {
+        off64_t pos = mCurrentPos;
+        if (!Resync(mDataSource, mFixedHeader, &pos, NULL, NULL)) {
             LOGE("Unable to resync. Signalling end of stream.");
 
             buffer->release();
@@ -781,15 +652,17 @@
 bool SniffMP3(
         const sp<DataSource> &source, String8 *mimeType,
         float *confidence, sp<AMessage> *meta) {
-    off_t pos = 0;
+    off64_t pos = 0;
+    off64_t post_id3_pos;
     uint32_t header;
-    if (!Resync(source, 0, &pos, &header)) {
+    if (!Resync(source, 0, &pos, &post_id3_pos, &header)) {
         return false;
     }
 
     *meta = new AMessage;
     (*meta)->setInt64("offset", pos);
     (*meta)->setInt32("header", header);
+    (*meta)->setInt64("post-id3-offset", post_id3_pos);
 
     *mimeType = MEDIA_MIMETYPE_AUDIO_MPEG;
     *confidence = 0.2f;
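The constant-bitrate duration fallback used in the extractor constructor above, 8000LL * (fileSize - mFirstFramePos) / bitrate, is simply bytes * 8 bits divided by (kbps * 1000 bits/sec), expressed in microseconds. A minimal standalone sketch of that computation (hypothetical helper name, not part of this patch):

    // Estimate the duration of a CBR MP3 from its size; bitrateKbps is the
    // value reported by get_mp3_frame_size (kilobits per second).
    static int64_t EstimateCbrDurationUs(
            off64_t fileSize, off64_t firstFramePos, int32_t bitrateKbps) {
        if (bitrateKbps <= 0 || fileSize <= firstFramePos) {
            return -1;  // nothing sensible to report
        }
        off64_t audioBytes = fileSize - firstFramePos;
        // bytes * 8 / (kbps * 1000) seconds == bytes * 8000 / kbps microseconds
        return 8000LL * audioBytes / bitrateKbps;
    }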
diff --git a/media/libstagefright/MPEG2TSWriter.cpp b/media/libstagefright/MPEG2TSWriter.cpp
index 81a2b0d..4e4f289 100644
--- a/media/libstagefright/MPEG2TSWriter.cpp
+++ b/media/libstagefright/MPEG2TSWriter.cpp
@@ -42,12 +42,21 @@
     unsigned streamType() const;
     unsigned incrementContinuityCounter();
 
+    void readMore();
+
     enum {
         kNotifyStartFailed,
         kNotifyBuffer,
         kNotifyReachedEOS,
     };
 
+    sp<ABuffer> lastAccessUnit();
+    int64_t lastAccessUnitTimeUs();
+    void setLastAccessUnit(const sp<ABuffer> &accessUnit);
+
+    void setEOSReceived();
+    bool eosReceived() const;
+
 protected:
     virtual void onMessageReceived(const sp<AMessage> &msg);
 
@@ -67,13 +76,16 @@
 
     sp<ABuffer> mAACBuffer;
 
+    sp<ABuffer> mLastAccessUnit;
+    bool mEOSReceived;
+
     unsigned mStreamType;
     unsigned mContinuityCounter;
 
     void extractCodecSpecificData();
 
-    void appendAACFrames(MediaBuffer *buffer);
-    void flushAACFrames();
+    bool appendAACFrames(MediaBuffer *buffer);
+    bool flushAACFrames();
 
     void postAVCFrame(MediaBuffer *buffer);
 
@@ -83,6 +95,7 @@
 MPEG2TSWriter::SourceInfo::SourceInfo(const sp<MediaSource> &source)
     : mSource(source),
       mLooper(new ALooper),
+      mEOSReceived(false),
       mStreamType(0),
       mContinuityCounter(0) {
     mLooper->setName("MPEG2TSWriter source");
@@ -232,6 +245,7 @@
     sp<AMessage> notify = mNotify->dup();
     notify->setInt32("what", kNotifyBuffer);
     notify->setObject("buffer", out);
+    notify->setInt32("oob", true);
     notify->post();
 }
 
@@ -260,11 +274,13 @@
     notify->post();
 }
 
-void MPEG2TSWriter::SourceInfo::appendAACFrames(MediaBuffer *buffer) {
+bool MPEG2TSWriter::SourceInfo::appendAACFrames(MediaBuffer *buffer) {
+    bool accessUnitPosted = false;
+
     if (mAACBuffer != NULL
             && mAACBuffer->size() + 7 + buffer->range_length()
                     > mAACBuffer->capacity()) {
-        flushAACFrames();
+        accessUnitPosted = flushAACFrames();
     }
 
     if (mAACBuffer == NULL) {
@@ -324,11 +340,13 @@
     ptr += buffer->range_length();
 
     mAACBuffer->setRange(0, ptr - mAACBuffer->data());
+
+    return accessUnitPosted;
 }
 
-void MPEG2TSWriter::SourceInfo::flushAACFrames() {
+bool MPEG2TSWriter::SourceInfo::flushAACFrames() {
     if (mAACBuffer == NULL) {
-        return;
+        return false;
     }
 
     sp<AMessage> notify = mNotify->dup();
@@ -337,6 +355,12 @@
     notify->post();
 
     mAACBuffer.clear();
+
+    return true;
+}
+
+void MPEG2TSWriter::SourceInfo::readMore() {
+    (new AMessage(kWhatRead, id()))->post();
 }
 
 void MPEG2TSWriter::SourceInfo::onMessageReceived(const sp<AMessage> &msg) {
@@ -353,7 +377,7 @@
 
             extractCodecSpecificData();
 
-            (new AMessage(kWhatRead, id()))->post();
+            readMore();
             break;
         }
 
@@ -388,7 +412,9 @@
                            buffer->range_length());
                 } else if (buffer->range_length() > 0) {
                     if (mStreamType == 0x0f) {
-                        appendAACFrames(buffer);
+                        if (!appendAACFrames(buffer)) {
+                            msg->post();
+                        }
                     } else {
                         postAVCFrame(buffer);
                     }
@@ -398,7 +424,7 @@
                 buffer = NULL;
             }
 
-            msg->post();
+            // Do not read more data until told to.
             break;
         }
 
@@ -407,10 +433,39 @@
     }
 }
 
+sp<ABuffer> MPEG2TSWriter::SourceInfo::lastAccessUnit() {
+    return mLastAccessUnit;
+}
+
+void MPEG2TSWriter::SourceInfo::setLastAccessUnit(
+        const sp<ABuffer> &accessUnit) {
+    mLastAccessUnit = accessUnit;
+}
+
+int64_t MPEG2TSWriter::SourceInfo::lastAccessUnitTimeUs() {
+    if (mLastAccessUnit == NULL) {
+        return -1;
+    }
+
+    int64_t timeUs;
+    CHECK(mLastAccessUnit->meta()->findInt64("timeUs", &timeUs));
+
+    return timeUs;
+}
+
+void MPEG2TSWriter::SourceInfo::setEOSReceived() {
+    CHECK(!mEOSReceived);
+    mEOSReceived = true;
+}
+
+bool MPEG2TSWriter::SourceInfo::eosReceived() const {
+    return mEOSReceived;
+}
+
 ////////////////////////////////////////////////////////////////////////////////
 
 MPEG2TSWriter::MPEG2TSWriter(int fd)
-    : mFile(fdopen(fd, "wb")),
+    : mFile(fdopen(dup(fd), "wb")),
       mStarted(false),
       mNumSourcesDone(0),
       mNumTSPacketsWritten(0),
@@ -527,15 +582,89 @@
 
             if (what == SourceInfo::kNotifyReachedEOS
                     || what == SourceInfo::kNotifyStartFailed) {
+                sp<SourceInfo> source = mSources.editItemAt(sourceIndex);
+                source->setEOSReceived();
+
+                sp<ABuffer> buffer = source->lastAccessUnit();
+                source->setLastAccessUnit(NULL);
+
+                if (buffer != NULL) {
+                    writeTS();
+                    writeAccessUnit(sourceIndex, buffer);
+                }
+
                 ++mNumSourcesDone;
             } else if (what == SourceInfo::kNotifyBuffer) {
                 sp<RefBase> obj;
                 CHECK(msg->findObject("buffer", &obj));
 
-                writeTS();
-
                 sp<ABuffer> buffer = static_cast<ABuffer *>(obj.get());
-                writeAccessUnit(sourceIndex, buffer);
+
+                int32_t oob;
+                if (msg->findInt32("oob", &oob) && oob) {
+                    // This is codec specific data delivered out of band.
+                    // It can be written out immediately.
+                    writeTS();
+                    writeAccessUnit(sourceIndex, buffer);
+                    break;
+                }
+
+                // We don't just write out data as we receive it from
+                // the various sources. That would essentially write them
+                // out in random order (as the thread scheduler determines
+                // how the messages are dispatched).
+                // Instead we gather an access unit for all tracks and
+                // write out the one with the smallest timestamp, then
+                // request more data for the written out track.
+                // Rinse, repeat.
+                // If any track is still missing data we don't write
+                // anything just yet.
+
+                sp<SourceInfo> source = mSources.editItemAt(sourceIndex);
+
+                CHECK(source->lastAccessUnit() == NULL);
+                source->setLastAccessUnit(buffer);
+
+                LOGV("lastAccessUnitTimeUs[%d] = %.2f secs",
+                     sourceIndex, source->lastAccessUnitTimeUs() / 1E6);
+
+                int64_t minTimeUs = -1;
+                size_t minIndex = 0;
+
+                for (size_t i = 0; i < mSources.size(); ++i) {
+                    const sp<SourceInfo> &source = mSources.editItemAt(i);
+
+                    if (source->eosReceived()) {
+                        continue;
+                    }
+
+                    int64_t timeUs = source->lastAccessUnitTimeUs();
+                    if (timeUs < 0) {
+                        minTimeUs = -1;
+                        break;
+                    } else if (minTimeUs < 0 || timeUs < minTimeUs) {
+                        minTimeUs = timeUs;
+                        minIndex = i;
+                    }
+                }
+
+                if (minTimeUs < 0) {
+                    LOGV("not all tracks have valid data.");
+                    break;
+                }
+
+                LOGV("writing access unit at time %.2f secs (index %d)",
+                     minTimeUs / 1E6, minIndex);
+
+                source = mSources.editItemAt(minIndex);
+
+                buffer = source->lastAccessUnit();
+                source->setLastAccessUnit(NULL);
+
+                writeTS();
+                writeAccessUnit(minIndex, buffer);
+
+                source->readMore();
             }
             break;
         }
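The comment above describes the interleaving policy: buffer one access unit per active track and emit only the one with the smallest timestamp, then ask that track for more data. A hedged standalone sketch of the selection step (hypothetical helper written as if it had access to SourceInfo; it mirrors, not replaces, the inline loop):

    // Returns the index of the source whose buffered access unit should be
    // written next, or -1 if some still-active source has nothing buffered yet.
    static ssize_t PickSourceToWrite(
            const Vector<sp<MPEG2TSWriter::SourceInfo> > &sources) {
        int64_t minTimeUs = -1;
        ssize_t minIndex = -1;
        for (size_t i = 0; i < sources.size(); ++i) {
            const sp<MPEG2TSWriter::SourceInfo> &info = sources.itemAt(i);
            if (info->eosReceived()) {
                continue;  // finished sources no longer gate the decision
            }
            int64_t timeUs = info->lastAccessUnitTimeUs();
            if (timeUs < 0) {
                return -1;  // wait until every active source has buffered data
            }
            if (minIndex < 0 || timeUs < minTimeUs) {
                minTimeUs = timeUs;
                minIndex = i;
            }
        }
        return minIndex;
    }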
diff --git a/media/libstagefright/MPEG4Extractor.cpp b/media/libstagefright/MPEG4Extractor.cpp
index 5497322..bafa243 100644
--- a/media/libstagefright/MPEG4Extractor.cpp
+++ b/media/libstagefright/MPEG4Extractor.cpp
@@ -98,11 +98,11 @@
     MPEG4DataSource(const sp<DataSource> &source);
 
     virtual status_t initCheck() const;
-    virtual ssize_t readAt(off_t offset, void *data, size_t size);
-    virtual status_t getSize(off_t *size);
+    virtual ssize_t readAt(off64_t offset, void *data, size_t size);
+    virtual status_t getSize(off64_t *size);
     virtual uint32_t flags();
 
-    status_t setCachedRange(off_t offset, size_t size);
+    status_t setCachedRange(off64_t offset, size_t size);
 
 protected:
     virtual ~MPEG4DataSource();
@@ -111,7 +111,7 @@
     Mutex mLock;
 
     sp<DataSource> mSource;
-    off_t mCachedOffset;
+    off64_t mCachedOffset;
     size_t mCachedSize;
     uint8_t *mCache;
 
@@ -146,7 +146,7 @@
     return mSource->initCheck();
 }
 
-ssize_t MPEG4DataSource::readAt(off_t offset, void *data, size_t size) {
+ssize_t MPEG4DataSource::readAt(off64_t offset, void *data, size_t size) {
     Mutex::Autolock autoLock(mLock);
 
     if (offset >= mCachedOffset
@@ -158,7 +158,7 @@
     return mSource->readAt(offset, data, size);
 }
 
-status_t MPEG4DataSource::getSize(off_t *size) {
+status_t MPEG4DataSource::getSize(off64_t *size) {
     return mSource->getSize(size);
 }
 
@@ -166,7 +166,7 @@
     return mSource->flags();
 }
 
-status_t MPEG4DataSource::setCachedRange(off_t offset, size_t size) {
+status_t MPEG4DataSource::setCachedRange(off64_t offset, size_t size) {
     Mutex::Autolock autoLock(mLock);
 
     clearCache();
@@ -247,6 +247,8 @@
             return MEDIA_MIMETYPE_VIDEO_MPEG4;
 
         case FOURCC('s', '2', '6', '3'):
+        case FOURCC('h', '2', '6', '3'):
+        case FOURCC('H', '2', '6', '3'):
             return MEDIA_MIMETYPE_VIDEO_H263;
 
         case FOURCC('a', 'v', 'c', '1'):
@@ -363,7 +365,7 @@
         return OK;
     }
 
-    off_t offset = 0;
+    off64_t offset = 0;
     status_t err;
     while ((err = parseChunk(&offset, 0)) == OK) {
     }
@@ -404,7 +406,7 @@
 }
 
 // Reads an encoded integer 7 bits at a time until it encounters the high bit clear.
-int32_t readSize(off_t offset,
+int32_t readSize(off64_t offset,
         const sp<DataSource> DataSource, uint8_t *numOfBytes) {
     uint32_t size = 0;
     uint8_t data;
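The comment above refers to the MPEG-4 "expandable" length encoding: seven payload bits per byte, with the high bit set on every byte except the last. A hedged sketch of the same decode against an in-memory buffer (illustrative helper, not the patched readSize):

    // Decode an expandable descriptor length; *numBytes receives how many
    // bytes of 'data' were consumed (typically at most four).
    static uint32_t DecodeExpandableLength(
            const uint8_t *data, size_t avail, size_t *numBytes) {
        uint32_t size = 0;
        size_t i = 0;
        while (i < avail) {
            uint8_t b = data[i++];
            size = (size << 7) | (b & 0x7f);
            if (!(b & 0x80)) {
                break;  // high bit clear: this was the last length byte
            }
        }
        *numBytes = i;
        return size;
    }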
@@ -424,7 +426,7 @@
     return size;
 }
 
-status_t MPEG4Extractor::parseDrmSINF(off_t *offset, off_t data_offset) {
+status_t MPEG4Extractor::parseDrmSINF(off64_t *offset, off64_t data_offset) {
     uint8_t updateIdTag;
     if (mDataSource->readAt(data_offset, &updateIdTag, 1) < 1) {
         return ERROR_IO;
@@ -596,14 +598,14 @@
     s->setTo(tmp);
 }
 
-status_t MPEG4Extractor::parseChunk(off_t *offset, int depth) {
+status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
     uint32_t hdr[2];
     if (mDataSource->readAt(*offset, hdr, 8) < 8) {
         return ERROR_IO;
     }
     uint64_t chunk_size = ntohl(hdr[0]);
     uint32_t chunk_type = ntohl(hdr[1]);
-    off_t data_offset = *offset + 8;
+    off64_t data_offset = *offset + 8;
 
     if (chunk_size == 1) {
         if (mDataSource->readAt(*offset + 8, &chunk_size, 8) < 8) {
@@ -644,11 +646,11 @@
 
     PathAdder autoAdder(&mPath, chunk_type);
 
-    off_t chunk_data_size = *offset + chunk_size - data_offset;
+    off64_t chunk_data_size = *offset + chunk_size - data_offset;
 
     if (chunk_type != FOURCC('c', 'p', 'r', 't')
             && mPath.size() == 5 && underMetaDataPath(mPath)) {
-        off_t stop_offset = *offset + chunk_size;
+        off64_t stop_offset = *offset + chunk_size;
         *offset = data_offset;
         while (*offset < stop_offset) {
             status_t err = parseChunk(offset, depth + 1);
@@ -715,7 +717,7 @@
                 track->meta->setCString(kKeyMIMEType, "application/octet-stream");
             }
 
-            off_t stop_offset = *offset + chunk_size;
+            off64_t stop_offset = *offset + chunk_size;
             *offset = data_offset;
             while (*offset < stop_offset) {
                 status_t err = parseChunk(offset, depth + 1);
@@ -788,7 +790,7 @@
                 return ERROR_IO;
             }
 
-            off_t timescale_offset;
+            off64_t timescale_offset;
 
             if (version == 1) {
                 timescale_offset = data_offset + 4 + 16;
@@ -838,7 +840,7 @@
             }
 
             uint8_t buffer[8];
-            if (chunk_data_size < (off_t)sizeof(buffer)) {
+            if (chunk_data_size < (off64_t)sizeof(buffer)) {
                 return ERROR_MALFORMED;
             }
 
@@ -862,7 +864,7 @@
                 break;
             }
 
-            off_t stop_offset = *offset + chunk_size;
+            off64_t stop_offset = *offset + chunk_size;
             *offset = data_offset + 8;
             for (uint32_t i = 0; i < entry_count; ++i) {
                 status_t err = parseChunk(offset, depth + 1);
@@ -919,7 +921,7 @@
             mLastTrack->meta->setInt32(kKeyChannelCount, num_channels);
             mLastTrack->meta->setInt32(kKeySampleRate, sample_rate);
 
-            off_t stop_offset = *offset + chunk_size;
+            off64_t stop_offset = *offset + chunk_size;
             *offset = data_offset + sizeof(buffer);
             while (*offset < stop_offset) {
                 status_t err = parseChunk(offset, depth + 1);
@@ -936,6 +938,8 @@
 
         case FOURCC('m', 'p', '4', 'v'):
         case FOURCC('s', '2', '6', '3'):
+        case FOURCC('H', '2', '6', '3'):
+        case FOURCC('h', '2', '6', '3'):
         case FOURCC('a', 'v', 'c', '1'):
         {
             mHasVideo = true;
@@ -955,6 +959,13 @@
             uint16_t width = U16_AT(&buffer[6 + 18]);
             uint16_t height = U16_AT(&buffer[6 + 20]);
 
+            // The video sample is not standard-compliant if it has invalid
+            // dimensions. Use default width and height values and let the
+            // decoder figure out the actual width and height (and thus be
+            // prepared for an INFO_FORMAT_CHANGED event).
+            if (width == 0)  width  = 352;
+            if (height == 0) height = 288;
+
             // printf("*** coding='%s' width=%d height=%d\n",
             //        chunk, width, height);
 
@@ -962,7 +973,7 @@
             mLastTrack->meta->setInt32(kKeyWidth, width);
             mLastTrack->meta->setInt32(kKeyHeight, height);
 
-            off_t stop_offset = *offset + chunk_size;
+            off64_t stop_offset = *offset + chunk_size;
             *offset = data_offset + sizeof(buffer);
             while (*offset < stop_offset) {
                 status_t err = parseChunk(offset, depth + 1);
@@ -1069,7 +1080,7 @@
             }
 
             uint8_t buffer[256];
-            if (chunk_data_size > (off_t)sizeof(buffer)) {
+            if (chunk_data_size > (off64_t)sizeof(buffer)) {
                 return ERROR_BUFFER_TOO_SMALL;
             }
 
@@ -1108,7 +1119,7 @@
         case FOURCC('a', 'v', 'c', 'C'):
         {
             char buffer[256];
-            if (chunk_data_size > (off_t)sizeof(buffer)) {
+            if (chunk_data_size > (off64_t)sizeof(buffer)) {
                 return ERROR_BUFFER_TOO_SMALL;
             }
 
@@ -1127,7 +1138,7 @@
         case FOURCC('m', 'e', 't', 'a'):
         {
             uint8_t buffer[4];
-            if (chunk_data_size < (off_t)sizeof(buffer)) {
+            if (chunk_data_size < (off64_t)sizeof(buffer)) {
                 return ERROR_MALFORMED;
             }
 
@@ -1147,7 +1158,7 @@
                 return OK;
             }
 
-            off_t stop_offset = *offset + chunk_size;
+            off64_t stop_offset = *offset + chunk_size;
             *offset = data_offset + sizeof(buffer);
             while (*offset < stop_offset) {
                 status_t err = parseChunk(offset, depth + 1);
@@ -1232,7 +1243,7 @@
 }
 
 status_t MPEG4Extractor::parseTrackHeader(
-        off_t data_offset, off_t data_size) {
+        off64_t data_offset, off64_t data_size) {
     if (data_size < 4) {
         return ERROR_MALFORMED;
     }
@@ -1246,7 +1257,7 @@
 
     uint8_t buffer[36 + 60];
 
-    if (data_size != (off_t)dynSize + 60) {
+    if (data_size != (off64_t)dynSize + 60) {
         return ERROR_MALFORMED;
     }
 
@@ -1263,7 +1274,9 @@
         mtime = U64_AT(&buffer[12]);
         id = U32_AT(&buffer[20]);
         duration = U64_AT(&buffer[28]);
-    } else if (version == 0) {
+    } else {
+        CHECK_EQ((unsigned)version, 0u);
+
         ctime = U32_AT(&buffer[4]);
         mtime = U32_AT(&buffer[8]);
         id = U32_AT(&buffer[12]);
@@ -1316,7 +1329,7 @@
     return OK;
 }
 
-status_t MPEG4Extractor::parseMetaData(off_t offset, size_t size) {
+status_t MPEG4Extractor::parseMetaData(off64_t offset, size_t size) {
     if (size < 4) {
         return ERROR_MALFORMED;
     }
@@ -1816,7 +1829,7 @@
         // fall through
     }
 
-    off_t offset;
+    off64_t offset;
     size_t size;
     uint32_t dts;
     bool isSyncSample;
diff --git a/media/libstagefright/MPEG4Writer.cpp b/media/libstagefright/MPEG4Writer.cpp
index 7eb7d46..06c4c98 100644
--- a/media/libstagefright/MPEG4Writer.cpp
+++ b/media/libstagefright/MPEG4Writer.cpp
@@ -33,6 +33,10 @@
 #include <media/stagefright/MediaSource.h>
 #include <media/stagefright/Utils.h>
 #include <media/mediarecorder.h>
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <fcntl.h>
+#include <unistd.h>
 
 #include "include/ESDS.h"
 
@@ -64,7 +68,7 @@
     bool isAvc() const { return mIsAvc; }
     bool isAudio() const { return mIsAudio; }
     bool isMPEG4() const { return mIsMPEG4; }
-    void addChunkOffset(off_t offset);
+    void addChunkOffset(off64_t offset);
     status_t dump(int fd, const Vector<String16>& args) const;
 
 private:
@@ -99,7 +103,7 @@
     List<MediaBuffer *> mChunkSamples;
 
     size_t              mNumStcoTableEntries;
-    List<off_t>         mChunkOffsets;
+    List<off64_t>         mChunkOffsets;
 
     size_t              mNumStscTableEntries;
     struct StscTableEntry {
@@ -214,7 +218,8 @@
 };
 
 MPEG4Writer::MPEG4Writer(const char *filename)
-    : mFile(fopen(filename, "wb")),
+    : mFd(-1),
+      mInitCheck(NO_INIT),
       mUse4ByteNalLength(true),
       mUse32BitOffset(true),
       mIsFileSizeLimitExplicitlyRequested(false),
@@ -224,11 +229,16 @@
       mMdatOffset(0),
       mEstimatedMoovBoxSize(0),
       mInterleaveDurationUs(1000000) {
-    CHECK(mFile != NULL);
+
+    mFd = open(filename, O_CREAT | O_LARGEFILE | O_TRUNC | O_RDWR, S_IRUSR | S_IWUSR);
+    if (mFd >= 0) {
+        mInitCheck = OK;
+    }
 }
 
 MPEG4Writer::MPEG4Writer(int fd)
-    : mFile(fdopen(fd, "wb")),
+    : mFd(dup(fd)),
+      mInitCheck(mFd < 0? NO_INIT: OK),
       mUse4ByteNalLength(true),
       mUse32BitOffset(true),
       mIsFileSizeLimitExplicitlyRequested(false),
@@ -238,7 +248,6 @@
       mMdatOffset(0),
       mEstimatedMoovBoxSize(0),
       mInterleaveDurationUs(1000000) {
-    CHECK(mFile != NULL);
 }
 
 MPEG4Writer::~MPEG4Writer() {
@@ -368,7 +377,7 @@
 }
 
 status_t MPEG4Writer::start(MetaData *param) {
-    if (mFile == NULL) {
+    if (mInitCheck != OK) {
         return UNKNOWN_ERROR;
     }
 
@@ -459,13 +468,13 @@
         mEstimatedMoovBoxSize = estimateMoovBoxSize(bitRate);
     }
     CHECK(mEstimatedMoovBoxSize >= 8);
-    fseeko(mFile, mFreeBoxOffset, SEEK_SET);
+    lseek64(mFd, mFreeBoxOffset, SEEK_SET);
     writeInt32(mEstimatedMoovBoxSize);
     write("free", 4);
 
     mMdatOffset = mFreeBoxOffset + mEstimatedMoovBoxSize;
     mOffset = mMdatOffset;
-    fseeko(mFile, mMdatOffset, SEEK_SET);
+    lseek64(mFd, mMdatOffset, SEEK_SET);
     if (mUse32BitOffset) {
         write("????mdat", 8);
     } else {
@@ -491,7 +500,7 @@
 }
 
 status_t MPEG4Writer::pause() {
-    if (mFile == NULL) {
+    if (mInitCheck != OK) {
         return OK;
     }
     mPaused = true;
@@ -507,7 +516,7 @@
 }
 
 void MPEG4Writer::stopWriterThread() {
-    LOGV("stopWriterThread");
+    LOGD("Stopping writer thread");
 
     {
         Mutex::Autolock autolock(mLock);
@@ -518,6 +527,7 @@
 
     void *dummy;
     pthread_join(mThread, &dummy);
+    LOGD("Writer thread stopped");
 }
 
 /*
@@ -572,8 +582,9 @@
     writeInt32(0x40000000);  // w
 }
 
+
 status_t MPEG4Writer::stop() {
-    if (mFile == NULL) {
+    if (mInitCheck != OK) {
         return OK;
     }
 
@@ -596,28 +607,28 @@
 
     // Do not write out movie header on error.
     if (err != OK) {
-        fflush(mFile);
-        fclose(mFile);
-        mFile = NULL;
+        close(mFd);
+        mFd = -1;
+        mInitCheck = NO_INIT;
         mStarted = false;
         return err;
     }
 
     // Fix up the size of the 'mdat' chunk.
     if (mUse32BitOffset) {
-        fseeko(mFile, mMdatOffset, SEEK_SET);
+        lseek64(mFd, mMdatOffset, SEEK_SET);
         int32_t size = htonl(static_cast<int32_t>(mOffset - mMdatOffset));
-        fwrite(&size, 1, 4, mFile);
+        ::write(mFd, &size, 4);
     } else {
-        fseeko(mFile, mMdatOffset + 8, SEEK_SET);
+        lseek64(mFd, mMdatOffset + 8, SEEK_SET);
         int64_t size = mOffset - mMdatOffset;
         size = hton64(size);
-        fwrite(&size, 1, 8, mFile);
+        ::write(mFd, &size, 8);
     }
-    fseeko(mFile, mOffset, SEEK_SET);
+    lseek64(mFd, mOffset, SEEK_SET);
 
     time_t now = time(NULL);
-    const off_t moovOffset = mOffset;
+    const off64_t moovOffset = mOffset;
     mWriteMoovBoxToMemory = true;
     mMoovBoxBuffer = (uint8_t *) malloc(mEstimatedMoovBoxSize);
     mMoovBoxBufferOffset = 0;
@@ -637,7 +648,7 @@
         writeInt16(0);             // reserved
         writeInt32(0);             // reserved
         writeInt32(0);             // reserved
-        writeCompositionMatrix(0);
+        writeCompositionMatrix(0); // matrix
         writeInt32(0);             // predefined
         writeInt32(0);             // predefined
         writeInt32(0);             // predefined
@@ -659,12 +670,12 @@
         CHECK(mMoovBoxBufferOffset + 8 <= mEstimatedMoovBoxSize);
 
         // Moov box
-        fseeko(mFile, mFreeBoxOffset, SEEK_SET);
+        lseek64(mFd, mFreeBoxOffset, SEEK_SET);
         mOffset = mFreeBoxOffset;
-        write(mMoovBoxBuffer, 1, mMoovBoxBufferOffset, mFile);
+        write(mMoovBoxBuffer, 1, mMoovBoxBufferOffset);
 
         // Free box
-        fseeko(mFile, mOffset, SEEK_SET);
+        lseek64(mFd, mOffset, SEEK_SET);
         writeInt32(mEstimatedMoovBoxSize - mMoovBoxBufferOffset);
         write("free", 4);
 
@@ -678,9 +689,9 @@
 
     CHECK(mBoxes.empty());
 
-    fflush(mFile);
-    fclose(mFile);
-    mFile = NULL;
+    close(mFd);
+    mFd = -1;
+    mInitCheck = NO_INIT;
     mStarted = false;
     return err;
 }
@@ -698,11 +709,12 @@
     mLock.unlock();
 }
 
-off_t MPEG4Writer::addSample_l(MediaBuffer *buffer) {
-    off_t old_offset = mOffset;
+off64_t MPEG4Writer::addSample_l(MediaBuffer *buffer) {
+    off64_t old_offset = mOffset;
 
-    fwrite((const uint8_t *)buffer->data() + buffer->range_offset(),
-           1, buffer->range_length(), mFile);
+    ::write(mFd,
+          (const uint8_t *)buffer->data() + buffer->range_offset(),
+          buffer->range_length());
 
     mOffset += buffer->range_length();
 
@@ -723,33 +735,34 @@
     }
 }
 
-off_t MPEG4Writer::addLengthPrefixedSample_l(MediaBuffer *buffer) {
-    off_t old_offset = mOffset;
+off64_t MPEG4Writer::addLengthPrefixedSample_l(MediaBuffer *buffer) {
+    off64_t old_offset = mOffset;
 
     size_t length = buffer->range_length();
 
     if (mUse4ByteNalLength) {
         uint8_t x = length >> 24;
-        fwrite(&x, 1, 1, mFile);
+        ::write(mFd, &x, 1);
         x = (length >> 16) & 0xff;
-        fwrite(&x, 1, 1, mFile);
+        ::write(mFd, &x, 1);
         x = (length >> 8) & 0xff;
-        fwrite(&x, 1, 1, mFile);
+        ::write(mFd, &x, 1);
         x = length & 0xff;
-        fwrite(&x, 1, 1, mFile);
+        ::write(mFd, &x, 1);
 
-        fwrite((const uint8_t *)buffer->data() + buffer->range_offset(),
-                1, length, mFile);
+        ::write(mFd,
+              (const uint8_t *)buffer->data() + buffer->range_offset(),
+              length);
+
         mOffset += length + 4;
     } else {
         CHECK(length < 65536);
 
         uint8_t x = length >> 8;
-        fwrite(&x, 1, 1, mFile);
+        ::write(mFd, &x, 1);
         x = length & 0xff;
-        fwrite(&x, 1, 1, mFile);
-        fwrite((const uint8_t *)buffer->data() + buffer->range_offset(),
-                1, length, mFile);
+        ::write(mFd, &x, 1);
+        ::write(mFd, (const uint8_t *)buffer->data() + buffer->range_offset(), length);
         mOffset += length + 2;
     }
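The byte-at-a-time writes above emit a big-endian 32-bit length prefix in front of each NAL unit. An equivalent hedged sketch that assembles the prefix first and issues one write per field (illustrative only; the patch keeps the explicit per-byte writes):

    // Write 'length' as a 4-byte big-endian prefix, followed by the NAL payload.
    static void WriteLengthPrefixedNal(int fd, const uint8_t *nal, size_t length) {
        uint8_t prefix[4] = {
            static_cast<uint8_t>((length >> 24) & 0xff),
            static_cast<uint8_t>((length >> 16) & 0xff),
            static_cast<uint8_t>((length >> 8) & 0xff),
            static_cast<uint8_t>(length & 0xff),
        };
        ::write(fd, prefix, sizeof(prefix));  // length field
        ::write(fd, nal, length);             // NAL unit bytes
    }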
 
@@ -757,19 +770,21 @@
 }
 
 size_t MPEG4Writer::write(
-        const void *ptr, size_t size, size_t nmemb, FILE *stream) {
+        const void *ptr, size_t size, size_t nmemb) {
 
     const size_t bytes = size * nmemb;
     if (mWriteMoovBoxToMemory) {
-        off_t moovBoxSize = 8 + mMoovBoxBufferOffset + bytes;
+        // This happens only when we write the moov box at the end of
+        // recording, not for each output video/audio frame we receive.
+        off64_t moovBoxSize = 8 + mMoovBoxBufferOffset + bytes;
         if (moovBoxSize > mEstimatedMoovBoxSize) {
-            for (List<off_t>::iterator it = mBoxes.begin();
+            for (List<off64_t>::iterator it = mBoxes.begin();
                  it != mBoxes.end(); ++it) {
                 (*it) += mOffset;
             }
-            fseeko(mFile, mOffset, SEEK_SET);
-            fwrite(mMoovBoxBuffer, 1, mMoovBoxBufferOffset, stream);
-            fwrite(ptr, size, nmemb, stream);
+            lseek64(mFd, mOffset, SEEK_SET);
+            ::write(mFd, mMoovBoxBuffer, mMoovBoxBufferOffset);
+            ::write(mFd, ptr, size * nmemb);
             mOffset += (bytes + mMoovBoxBufferOffset);
             free(mMoovBoxBuffer);
             mMoovBoxBuffer = NULL;
@@ -781,7 +796,7 @@
             mMoovBoxBufferOffset += bytes;
         }
     } else {
-        fwrite(ptr, size, nmemb, stream);
+        ::write(mFd, ptr, size * nmemb);
         mOffset += bytes;
     }
     return bytes;
@@ -800,51 +815,51 @@
 void MPEG4Writer::endBox() {
     CHECK(!mBoxes.empty());
 
-    off_t offset = *--mBoxes.end();
+    off64_t offset = *--mBoxes.end();
     mBoxes.erase(--mBoxes.end());
 
     if (mWriteMoovBoxToMemory) {
        int32_t x = htonl(mMoovBoxBufferOffset - offset);
        memcpy(mMoovBoxBuffer + offset, &x, 4);
     } else {
-        fseeko(mFile, offset, SEEK_SET);
+        lseek64(mFd, offset, SEEK_SET);
         writeInt32(mOffset - offset);
         mOffset -= 4;
-        fseeko(mFile, mOffset, SEEK_SET);
+        lseek64(mFd, mOffset, SEEK_SET);
     }
 }
 
 void MPEG4Writer::writeInt8(int8_t x) {
-    write(&x, 1, 1, mFile);
+    write(&x, 1, 1);
 }
 
 void MPEG4Writer::writeInt16(int16_t x) {
     x = htons(x);
-    write(&x, 1, 2, mFile);
+    write(&x, 1, 2);
 }
 
 void MPEG4Writer::writeInt32(int32_t x) {
     x = htonl(x);
-    write(&x, 1, 4, mFile);
+    write(&x, 1, 4);
 }
 
 void MPEG4Writer::writeInt64(int64_t x) {
     x = hton64(x);
-    write(&x, 1, 8, mFile);
+    write(&x, 1, 8);
 }
 
 void MPEG4Writer::writeCString(const char *s) {
     size_t n = strlen(s);
-    write(s, 1, n + 1, mFile);
+    write(s, 1, n + 1);
 }
 
 void MPEG4Writer::writeFourcc(const char *s) {
     CHECK_EQ(strlen(s), 4);
-    write(s, 1, 4, mFile);
+    write(s, 1, 4);
 }
 
 void MPEG4Writer::write(const void *data, size_t size) {
-    write(data, 1, size, mFile);
+    write(data, 1, size);
 }
 
 bool MPEG4Writer::isFileStreamable() const {
@@ -988,7 +1003,7 @@
     ++mNumSttsTableEntries;
 }
 
-void MPEG4Writer::Track::addChunkOffset(off_t offset) {
+void MPEG4Writer::Track::addChunkOffset(off64_t offset) {
     ++mNumStcoTableEntries;
     mChunkOffsets.push_back(offset);
 }
@@ -1103,7 +1118,7 @@
     for (List<MediaBuffer *>::iterator it = chunkIt->mSamples.begin();
          it != chunkIt->mSamples.end(); ++it) {
 
-        off_t offset = info->mTrack->isAvc()
+        off64_t offset = info->mTrack->isAvc()
                             ? addLengthPrefixedSample_l(*it)
                             : addSample_l(*it);
         if (it == chunkIt->mSamples.begin()) {
@@ -1228,7 +1243,7 @@
     }
 
     int32_t rotationDegrees;
-    if (!mIsAudio && params && params->findInt32(kKeyRotationDegree, &rotationDegrees)) {
+    if (!mIsAudio && params && params->findInt32(kKeyRotation, &rotationDegrees)) {
         mRotation = rotationDegrees;
     }
 
@@ -1282,6 +1297,7 @@
 }
 
 status_t MPEG4Writer::Track::stop() {
+    LOGD("Stopping %s track", mIsAudio? "Audio": "Video");
     if (mDone) {
         return OK;
     }
@@ -1293,6 +1309,7 @@
 
     status_t err = (status_t) dummy;
 
+    LOGD("Stopping %s track source", mIsAudio? "Audio": "Video");
     {
         status_t status = mSource->stop();
         if (err == OK && status != OK && status != ERROR_END_OF_STREAM) {
@@ -1300,6 +1317,7 @@
         }
     }
 
+    LOGD("%s track stopped", mIsAudio? "Audio": "Video");
     return err;
 }
 
@@ -1716,6 +1734,8 @@
     } else {
         prctl(PR_SET_NAME, (unsigned long)"VideoTrackEncoding", 0, 0, 0);
     }
+    setpriority(PRIO_PROCESS, 0, ANDROID_PRIORITY_AUDIO);
+
     sp<MetaData> meta_data;
 
     mNumSamples = 0;
@@ -1924,7 +1944,7 @@
             trackProgressStatus(timestampUs);
         }
         if (mOwner->numTracks() == 1) {
-            off_t offset = mIsAvc? mOwner->addLengthPrefixedSample_l(copy)
+            off64_t offset = mIsAvc? mOwner->addLengthPrefixedSample_l(copy)
                                  : mOwner->addSample_l(copy);
             if (mChunkOffsets.empty()) {
                 addChunkOffset(offset);
@@ -2125,7 +2145,7 @@
         mOwner->writeInt16(mIsAudio ? 0x100 : 0);  // volume
         mOwner->writeInt16(0);             // reserved
 
-        mOwner->writeCompositionMatrix(mRotation);
+        mOwner->writeCompositionMatrix(mRotation);       // matrix
 
         if (mIsAudio) {
             mOwner->writeInt32(0);
@@ -2263,6 +2283,9 @@
                         CHECK(mCodecSpecificData);
                         CHECK(mCodecSpecificDataSize > 0);
 
+                        // Make sure all sizes encode to a single byte.
+                        CHECK(mCodecSpecificDataSize + 23 < 128);
+
                         mOwner->writeInt32(0);     // version=0, flags=0
                         mOwner->writeInt8(0x03);   // ES_DescrTag
                         mOwner->writeInt8(23 + mCodecSpecificDataSize);
@@ -2472,7 +2495,7 @@
           mOwner->beginBox(use32BitOffset? "stco": "co64");
             mOwner->writeInt32(0);  // version=0, flags=0
             mOwner->writeInt32(mNumStcoTableEntries);
-            for (List<off_t>::iterator it = mChunkOffsets.begin();
+            for (List<off64_t>::iterator it = mChunkOffsets.begin();
                  it != mChunkOffsets.end(); ++it) {
                 if (use32BitOffset) {
                     mOwner->writeInt32(static_cast<int32_t>(*it));
diff --git a/media/libstagefright/MediaBuffer.cpp b/media/libstagefright/MediaBuffer.cpp
index b973745..cbccd31 100644
--- a/media/libstagefright/MediaBuffer.cpp
+++ b/media/libstagefright/MediaBuffer.cpp
@@ -25,6 +25,8 @@
 #include <media/stagefright/MediaDebug.h>
 #include <media/stagefright/MetaData.h>
 
+#include <ui/GraphicBuffer.h>
+
 namespace android {
 
 // XXX make this truly atomic.
@@ -61,6 +63,20 @@
       mOriginal(NULL) {
 }
 
+MediaBuffer::MediaBuffer(const sp<GraphicBuffer>& graphicBuffer)
+    : mObserver(NULL),
+      mNextBuffer(NULL),
+      mRefCount(0),
+      mData(NULL),
+      mSize(1),
+      mRangeOffset(0),
+      mRangeLength(mSize),
+      mGraphicBuffer(graphicBuffer),
+      mOwnsData(false),
+      mMetaData(new MetaData),
+      mOriginal(NULL) {
+}
+
 void MediaBuffer::release() {
     if (mObserver == NULL) {
         CHECK_EQ(mRefCount, 0);
@@ -92,10 +108,12 @@
 }
 
 void *MediaBuffer::data() const {
+    CHECK(mGraphicBuffer == NULL);
     return mData;
 }
 
 size_t MediaBuffer::size() const {
+    CHECK(mGraphicBuffer == NULL);
     return mSize;
 }
 
@@ -108,15 +126,19 @@
 }
 
 void MediaBuffer::set_range(size_t offset, size_t length) {
-    if (offset + length > mSize) {
+    if ((mGraphicBuffer == NULL) && (offset + length > mSize)) {
         LOGE("offset = %d, length = %d, mSize = %d", offset, length, mSize);
     }
-    CHECK(offset + length <= mSize);
+    CHECK((mGraphicBuffer != NULL) || (offset + length <= mSize));
 
     mRangeOffset = offset;
     mRangeLength = length;
 }
 
+sp<GraphicBuffer> MediaBuffer::graphicBuffer() const {
+    return mGraphicBuffer;
+}
+
 sp<MetaData> MediaBuffer::meta_data() {
     return mMetaData;
 }
@@ -158,6 +180,8 @@
 }
 
 MediaBuffer *MediaBuffer::clone() {
+    CHECK_EQ(mGraphicBuffer, NULL);
+
     MediaBuffer *buffer = new MediaBuffer(mData, mSize);
     buffer->set_range(mRangeOffset, mRangeLength);
     buffer->mMetaData = new MetaData(*mMetaData.get());
@@ -169,4 +193,3 @@
 }
 
 }  // namespace android
-
diff --git a/media/libstagefright/MediaDefs.cpp b/media/libstagefright/MediaDefs.cpp
index 7648d42..4599fca 100644
--- a/media/libstagefright/MediaDefs.cpp
+++ b/media/libstagefright/MediaDefs.cpp
@@ -42,4 +42,6 @@
 const char *MEDIA_MIMETYPE_CONTAINER_MATROSKA = "video/x-matroska";
 const char *MEDIA_MIMETYPE_CONTAINER_MPEG2TS = "video/mp2ts";
 
+const char *MEDIA_MIMETYPE_CONTAINER_WVM = "video/wvm";
+
 }  // namespace android
diff --git a/media/libstagefright/MediaExtractor.cpp b/media/libstagefright/MediaExtractor.cpp
index 965c370..d12ac64 100644
--- a/media/libstagefright/MediaExtractor.cpp
+++ b/media/libstagefright/MediaExtractor.cpp
@@ -25,6 +25,7 @@
 #include "include/OggExtractor.h"
 #include "include/MPEG2TSExtractor.h"
 #include "include/DRMExtractor.h"
+#include "include/WVMExtractor.h"
 
 #include "matroska/MatroskaExtractor.h"
 
@@ -92,6 +93,8 @@
         return new MatroskaExtractor(source);
     } else if (!strcasecmp(mime, MEDIA_MIMETYPE_CONTAINER_MPEG2TS)) {
         return new MPEG2TSExtractor(source);
+    } else if (!strcasecmp(mime, MEDIA_MIMETYPE_CONTAINER_WVM)) {
+        return new WVMExtractor(source);
     }
 
     return NULL;
diff --git a/media/libstagefright/MediaSource.cpp b/media/libstagefright/MediaSource.cpp
index b4ef338..fd0e79c 100644
--- a/media/libstagefright/MediaSource.cpp
+++ b/media/libstagefright/MediaSource.cpp
@@ -32,7 +32,6 @@
     mOptions = 0;
     mSeekTimeUs = 0;
     mLatenessUs = 0;
-    mSkipFrameUntilTimeUs = 0;
 }
 
 void MediaSource::ReadOptions::setSeekTo(int64_t time_us, SeekMode mode) {
@@ -54,21 +53,6 @@
     return (mOptions & kSeekTo_Option) != 0;
 }
 
-void MediaSource::ReadOptions::clearSkipFrame() {
-    mOptions &= ~kSkipFrame_Option;
-    mSkipFrameUntilTimeUs = 0;
-}
-
-void MediaSource::ReadOptions::setSkipFrame(int64_t timeUs) {
-    mOptions |= kSkipFrame_Option;
-    mSkipFrameUntilTimeUs = timeUs;
-}
-
-bool MediaSource::ReadOptions::getSkipFrame(int64_t *timeUs) const {
-    *timeUs = mSkipFrameUntilTimeUs;
-    return (mOptions & kSkipFrame_Option) != 0;
-}
-
 void MediaSource::ReadOptions::setLateBy(int64_t lateness_us) {
     mLatenessUs = lateness_us;
 }
diff --git a/media/libstagefright/MediaSourceSplitter.cpp b/media/libstagefright/MediaSourceSplitter.cpp
new file mode 100644
index 0000000..abc7012
--- /dev/null
+++ b/media/libstagefright/MediaSourceSplitter.cpp
@@ -0,0 +1,234 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "MediaSourceSplitter"
+#include <utils/Log.h>
+
+#include <media/stagefright/MediaSourceSplitter.h>
+#include <media/stagefright/MediaDebug.h>
+#include <media/stagefright/MediaBuffer.h>
+#include <media/stagefright/MetaData.h>
+
+namespace android {
+
+MediaSourceSplitter::MediaSourceSplitter(sp<MediaSource> mediaSource) {
+    mNumberOfClients = 0;
+    mSource = mediaSource;
+    mSourceStarted = false;
+
+    mNumberOfClientsStarted = 0;
+    mNumberOfCurrentReads = 0;
+    mCurrentReadBit = 0;
+    mLastReadCompleted = true;
+}
+
+MediaSourceSplitter::~MediaSourceSplitter() {
+}
+
+sp<MediaSource> MediaSourceSplitter::createClient() {
+    Mutex::Autolock autoLock(mLock);
+
+    sp<MediaSource> client = new Client(this, mNumberOfClients++);
+    mClientsStarted.push(false);
+    mClientsDesiredReadBit.push(0);
+    return client;
+}
+
+status_t MediaSourceSplitter::start(int clientId, MetaData *params) {
+    Mutex::Autolock autoLock(mLock);
+
+    LOGV("start client (%d)", clientId);
+    if (mClientsStarted[clientId]) {
+        return OK;
+    }
+
+    mNumberOfClientsStarted++;
+
+    if (!mSourceStarted) {
+        LOGV("Starting real source from client (%d)", clientId);
+        status_t err = mSource->start(params);
+
+        if (err == OK) {
+            mSourceStarted = true;
+            mClientsStarted.editItemAt(clientId) = true;
+            mClientsDesiredReadBit.editItemAt(clientId) = !mCurrentReadBit;
+        }
+
+        return err;
+    } else {
+        mClientsStarted.editItemAt(clientId) = true;
+        if (mLastReadCompleted) {
+            // The last read completed, so this client joins the next read round.
+            mClientsDesiredReadBit.editItemAt(clientId) = !mCurrentReadBit;
+        } else {
+            // The last read is still in progress, so this client joins the current read round.
+            mClientsDesiredReadBit.editItemAt(clientId) = mCurrentReadBit;
+        }
+        return OK;
+    }
+}
+
+status_t MediaSourceSplitter::stop(int clientId) {
+    Mutex::Autolock autoLock(mLock);
+
+    LOGV("stop client (%d)", clientId);
+    CHECK(clientId >= 0 && clientId < mNumberOfClients);
+    CHECK(mClientsStarted[clientId]);
+
+    if (--mNumberOfClientsStarted == 0) {
+        LOGV("Stopping real source from client (%d)", clientId);
+        status_t err = mSource->stop();
+        mSourceStarted = false;
+        mClientsStarted.editItemAt(clientId) = false;
+        return err;
+    } else {
+        mClientsStarted.editItemAt(clientId) = false;
+        if (!mLastReadCompleted && (mClientsDesiredReadBit[clientId] == mCurrentReadBit)) {
+            // !mLastReadCompleted implies that a buffer has been read from the
+            // source, but not all clients have read it yet.
+            // mClientsDesiredReadBit[clientId] == mCurrentReadBit implies that this
+            // client would have wanted to read from this buffer (i.e., it has not
+            // yet called read() for the current read buffer).
+            // Since other threads may be waiting for all the clients' reads to complete,
+            // signal that this read has been aborted.
+            signalReadComplete_lock(true);
+        }
+        return OK;
+    }
+}
+
+sp<MetaData> MediaSourceSplitter::getFormat(int clientId) {
+    Mutex::Autolock autoLock(mLock);
+
+    LOGV("getFormat client (%d)", clientId);
+    return mSource->getFormat();
+}
+
+status_t MediaSourceSplitter::read(int clientId,
+        MediaBuffer **buffer, const MediaSource::ReadOptions *options) {
+    Mutex::Autolock autoLock(mLock);
+
+    CHECK(clientId >= 0 && clientId < mNumberOfClients);
+
+    LOGV("read client (%d)", clientId);
+    *buffer = NULL;
+
+    if (!mClientsStarted[clientId]) {
+        return OK;
+    }
+
+    if (mCurrentReadBit != mClientsDesiredReadBit[clientId]) {
+        // Desired buffer has not been read from source yet.
+
+        // If the current client is the special client with clientId = 0
+        // then read from source, else wait until the client 0 has finished
+        // reading from source.
+        if (clientId == 0) {
+            // Wait for all clients' last reads to complete first so as not
+            // to corrupt the buffer at mLastReadMediaBuffer.
+            waitForAllClientsLastRead_lock(clientId);
+
+            readFromSource_lock(options);
+            *buffer = mLastReadMediaBuffer;
+        } else {
+            waitForReadFromSource_lock(clientId);
+
+            *buffer = mLastReadMediaBuffer;
+            (*buffer)->add_ref();
+        }
+        CHECK(mCurrentReadBit == mClientsDesiredReadBit[clientId]);
+    } else {
+        // Desired buffer has already been read from source. Use the cached data.
+        CHECK(clientId != 0);
+
+        *buffer = mLastReadMediaBuffer;
+        (*buffer)->add_ref();
+    }
+
+    mClientsDesiredReadBit.editItemAt(clientId) = !mClientsDesiredReadBit[clientId];
+    signalReadComplete_lock(false);
+
+    return mLastReadStatus;
+}
+
+void MediaSourceSplitter::readFromSource_lock(const MediaSource::ReadOptions *options) {
+    mLastReadStatus = mSource->read(&mLastReadMediaBuffer, options);
+
+    mCurrentReadBit = !mCurrentReadBit;
+    mLastReadCompleted = false;
+    mReadFromSourceCondition.broadcast();
+}
+
+void MediaSourceSplitter::waitForReadFromSource_lock(int32_t clientId) {
+    mReadFromSourceCondition.wait(mLock);
+}
+
+void MediaSourceSplitter::waitForAllClientsLastRead_lock(int32_t clientId) {
+    if (mLastReadCompleted) {
+        return;
+    }
+    mAllReadsCompleteCondition.wait(mLock);
+    CHECK(mLastReadCompleted);
+}
+
+void MediaSourceSplitter::signalReadComplete_lock(bool readAborted) {
+    if (!readAborted) {
+        mNumberOfCurrentReads++;
+    }
+
+    if (mNumberOfCurrentReads == mNumberOfClientsStarted) {
+        mLastReadCompleted = true;
+        mNumberOfCurrentReads = 0;
+        mAllReadsCompleteCondition.broadcast();
+    }
+}
+
+status_t MediaSourceSplitter::pause(int clientId) {
+    return ERROR_UNSUPPORTED;
+}
+
+// Client
+
+MediaSourceSplitter::Client::Client(
+        sp<MediaSourceSplitter> splitter,
+        int32_t clientId) {
+    mSplitter = splitter;
+    mClientId = clientId;
+}
+
+status_t MediaSourceSplitter::Client::start(MetaData *params) {
+    return mSplitter->start(mClientId, params);
+}
+
+status_t MediaSourceSplitter::Client::stop() {
+    return mSplitter->stop(mClientId);
+}
+
+sp<MetaData> MediaSourceSplitter::Client::getFormat() {
+    return mSplitter->getFormat(mClientId);
+}
+
+status_t MediaSourceSplitter::Client::read(
+        MediaBuffer **buffer, const ReadOptions *options) {
+    return mSplitter->read(mClientId, buffer, options);
+}
+
+status_t MediaSourceSplitter::Client::pause() {
+    return mSplitter->pause(mClientId);
+}
+
+}  // namespace android
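A hedged usage sketch for the new splitter (hypothetical function and variable names). Clients other than client 0 block until client 0 has performed the real read, so a single-threaded caller should read client 0 first; every client receives the same MediaBuffer and must release its own reference without modifying the data in place:

    // Fan one MediaSource out to two independent consumers.
    static void FanOutExample(const sp<MediaSource> &realSource) {
        sp<MediaSourceSplitter> splitter = new MediaSourceSplitter(realSource);

        sp<MediaSource> clientA = splitter->createClient();  // clientId 0
        sp<MediaSource> clientB = splitter->createClient();  // clientId 1

        // The underlying source is started only once, on the first start().
        CHECK_EQ(clientA->start(), (status_t)OK);
        CHECK_EQ(clientB->start(), (status_t)OK);

        MediaBuffer *bufferA = NULL;
        MediaBuffer *bufferB = NULL;
        if (clientA->read(&bufferA) == OK && clientB->read(&bufferB) == OK) {
            // bufferA and bufferB point at the same data; client B's copy
            // carries an extra reference, so both must be released.
            bufferA->release();
            bufferB->release();
        }

        clientA->stop();
        clientB->stop();
    }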
diff --git a/media/libstagefright/MetaData.cpp b/media/libstagefright/MetaData.cpp
index 63b476e..884f3b4 100644
--- a/media/libstagefright/MetaData.cpp
+++ b/media/libstagefright/MetaData.cpp
@@ -70,6 +70,19 @@
     return setData(key, TYPE_POINTER, &value, sizeof(value));
 }
 
+bool MetaData::setRect(
+        uint32_t key,
+        int32_t left, int32_t top,
+        int32_t right, int32_t bottom) {
+    Rect r;
+    r.mLeft = left;
+    r.mTop = top;
+    r.mRight = right;
+    r.mBottom = bottom;
+
+    return setData(key, TYPE_RECT, &r, sizeof(r));
+}
+
 bool MetaData::findCString(uint32_t key, const char **value) {
     uint32_t type;
     const void *data;
@@ -143,6 +156,28 @@
     return true;
 }
 
+bool MetaData::findRect(
+        uint32_t key,
+        int32_t *left, int32_t *top,
+        int32_t *right, int32_t *bottom) {
+    uint32_t type;
+    const void *data;
+    size_t size;
+    if (!findData(key, &type, &data, &size) || type != TYPE_RECT) {
+        return false;
+    }
+
+    CHECK_EQ(size, sizeof(Rect));
+
+    const Rect *r = (const Rect *)data;
+    *left = r->mLeft;
+    *top = r->mTop;
+    *right = r->mRight;
+    *bottom = r->mBottom;
+
+    return true;
+}
+
 bool MetaData::setData(
         uint32_t key, uint32_t type, const void *data, size_t size) {
     bool overwrote_existing = true;
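A hedged round-trip example for the new rect accessors (kMyCropRect is a hypothetical key value chosen for illustration, not one defined by this patch):

    static void RectMetaDataExample() {
        static const uint32_t kMyCropRect = 'crop';  // hypothetical key

        sp<MetaData> meta = new MetaData;
        meta->setRect(kMyCropRect, 0, 0, 639, 479);  // left, top, right, bottom

        int32_t left, top, right, bottom;
        if (meta->findRect(kMyCropRect, &left, &top, &right, &bottom)) {
            // Values come back exactly as stored; whether right/bottom are
            // inclusive or exclusive is the caller's convention.
        }
    }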
diff --git a/media/libstagefright/NuCachedSource2.cpp b/media/libstagefright/NuCachedSource2.cpp
index b67002d..110fb03 100644
--- a/media/libstagefright/NuCachedSource2.cpp
+++ b/media/libstagefright/NuCachedSource2.cpp
@@ -179,8 +179,7 @@
       mFinalStatus(OK),
       mLastAccessPos(0),
       mFetching(true),
-      mLastFetchTimeUs(-1),
-      mSuspended(false) {
+      mLastFetchTimeUs(-1) {
     mLooper->setName("NuCachedSource2");
     mLooper->registerHandler(mReflector);
     mLooper->start();
@@ -201,7 +200,7 @@
     return mSource->initCheck();
 }
 
-status_t NuCachedSource2::getSize(off_t *size) {
+status_t NuCachedSource2::getSize(off64_t *size) {
     return mSource->getSize(size);
 }
 
@@ -223,12 +222,6 @@
             break;
         }
 
-        case kWhatSuspend:
-        {
-            onSuspend();
-            break;
-        }
-
         default:
             TRESPASS();
     }
@@ -270,7 +263,6 @@
 
     bool keepAlive =
         !mFetching
-            && !mSuspended
             && mFinalStatus == OK
             && ALooper::GetNowUs() >= mLastFetchTimeUs + kKeepAliveIntervalUs;
 
@@ -287,7 +279,7 @@
             LOGI("Cache full, done prefetching for now");
             mFetching = false;
         }
-    } else if (!mSuspended) {
+    } else {
         Mutex::Autolock autoLock(mLock);
         restartPrefetcherIfNecessary_l();
     }
@@ -325,15 +317,17 @@
     mCondition.signal();
 }
 
-void NuCachedSource2::restartPrefetcherIfNecessary_l() {
+void NuCachedSource2::restartPrefetcherIfNecessary_l(
+        bool ignoreLowWaterThreshold) {
     static const size_t kGrayArea = 256 * 1024;
 
     if (mFetching || mFinalStatus != OK) {
         return;
     }
 
-    if (mCacheOffset + mCache->totalSize() - mLastAccessPos
-            >= kLowWaterThreshold) {
+    if (!ignoreLowWaterThreshold
+            && mCacheOffset + mCache->totalSize() - mLastAccessPos
+                >= kLowWaterThreshold) {
         return;
     }
 
@@ -351,7 +345,7 @@
     mFetching = true;
 }
 
-ssize_t NuCachedSource2::readAt(off_t offset, void *data, size_t size) {
+ssize_t NuCachedSource2::readAt(off64_t offset, void *data, size_t size) {
     Mutex::Autolock autoSerializer(mSerializer);
 
     LOGV("readAt offset %ld, size %d", offset, size);
@@ -406,27 +400,27 @@
 
 size_t NuCachedSource2::approxDataRemaining_l(bool *eos) {
     *eos = (mFinalStatus != OK);
-    off_t lastBytePosCached = mCacheOffset + mCache->totalSize();
+    off64_t lastBytePosCached = mCacheOffset + mCache->totalSize();
     if (mLastAccessPos < lastBytePosCached) {
         return lastBytePosCached - mLastAccessPos;
     }
     return 0;
 }
 
-ssize_t NuCachedSource2::readInternal(off_t offset, void *data, size_t size) {
+ssize_t NuCachedSource2::readInternal(off64_t offset, void *data, size_t size) {
     LOGV("readInternal offset %ld size %d", offset, size);
 
     Mutex::Autolock autoLock(mLock);
 
     if (offset < mCacheOffset
-            || offset >= (off_t)(mCacheOffset + mCache->totalSize())) {
-        static const off_t kPadding = 32768;
+            || offset >= (off64_t)(mCacheOffset + mCache->totalSize())) {
+        static const off64_t kPadding = 32768;
 
         // In the presence of multiple decoded streams, one of them will
         // trigger this seek request and the other one will request data "nearby"
         // soon; adjust the seek position so that the subsequent request
         // does not trigger another seek.
-        off_t seekOffset = (offset > kPadding) ? offset - kPadding : 0;
+        off64_t seekOffset = (offset > kPadding) ? offset - kPadding : 0;
 
         seekInternal_l(seekOffset);
     }
@@ -455,15 +449,15 @@
     return -EAGAIN;
 }
 
-status_t NuCachedSource2::seekInternal_l(off_t offset) {
+status_t NuCachedSource2::seekInternal_l(off64_t offset) {
     mLastAccessPos = offset;
 
     if (offset >= mCacheOffset
-            && offset <= (off_t)(mCacheOffset + mCache->totalSize())) {
+            && offset <= (off64_t)(mCacheOffset + mCache->totalSize())) {
         return OK;
     }
 
-    LOGI("new range: offset= %ld", offset);
+    LOGI("new range: offset= %lld", offset);
 
     mCacheOffset = offset;
 
@@ -476,39 +470,22 @@
     return OK;
 }
 
-void NuCachedSource2::clearCacheAndResume() {
-    LOGV("clearCacheAndResume");
-
+void NuCachedSource2::resumeFetchingIfNecessary() {
     Mutex::Autolock autoLock(mLock);
 
-    CHECK(mSuspended);
-
-    mCacheOffset = 0;
-    mFinalStatus = OK;
-    mLastAccessPos = 0;
-    mLastFetchTimeUs = -1;
-
-    size_t totalSize = mCache->totalSize();
-    CHECK_EQ(mCache->releaseFromStart(totalSize), totalSize);
-
-    mFetching = true;
-    mSuspended = false;
+    restartPrefetcherIfNecessary_l(true /* ignore low water threshold */);
 }
 
-void NuCachedSource2::suspend() {
-    (new AMessage(kWhatSuspend, mReflector->id()))->post();
-
-    while (!mSuspended) {
-        usleep(10000);
-    }
+DecryptHandle* NuCachedSource2::DrmInitialization(DrmManagerClient* client) {
+    return mSource->DrmInitialization(client);
 }
 
-void NuCachedSource2::onSuspend() {
-    Mutex::Autolock autoLock(mLock);
-
-    mFetching = false;
-    mSuspended = true;
+void NuCachedSource2::getDrmInfo(DecryptHandle **handle, DrmManagerClient **client) {
+    mSource->getDrmInfo(handle, client);
 }
 
+String8 NuCachedSource2::getUri() {
+    return mSource->getUri();
+}
 }  // namespace android
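Taken together, the changes in this file drop the suspend/resume machinery in favour of resumeFetchingIfNecessary(), which calls restartPrefetcherIfNecessary_l() with the low-water check bypassed. A minimal sketch of that decision, with kLowWaterThreshold as an assumed illustrative value (the real constant is defined elsewhere in NuCachedSource2.cpp):

    #include <cstddef>

    // Assumed illustrative value; the real kLowWaterThreshold is not shown here.
    static const size_t kLowWaterThreshold = 4 * 1024 * 1024;

    // Sketch of restartPrefetcherIfNecessary_l(): cachedAhead corresponds to
    // mCacheOffset + mCache->totalSize() - mLastAccessPos.
    static bool ShouldRestartPrefetch(
            bool fetching, bool eosOrError,
            size_t cachedAhead, bool ignoreLowWaterThreshold) {
        if (fetching || eosOrError) {
            return false;  // already fetching, or nothing more to fetch
        }
        if (!ignoreLowWaterThreshold && cachedAhead >= kLowWaterThreshold) {
            return false;  // still enough data buffered ahead of the reader
        }
        return true;       // restart fetching at the last access position
    }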
 
diff --git a/media/libstagefright/NuHTTPDataSource.cpp b/media/libstagefright/NuHTTPDataSource.cpp
index 04cca47..653c85e 100644
--- a/media/libstagefright/NuHTTPDataSource.cpp
+++ b/media/libstagefright/NuHTTPDataSource.cpp
@@ -5,6 +5,7 @@
 #include "include/NuHTTPDataSource.h"
 
 #include <cutils/properties.h>
+#include <media/stagefright/foundation/ALooper.h>
 #include <media/stagefright/MediaDebug.h>
 #include <media/stagefright/MediaErrors.h>
 
@@ -69,7 +70,12 @@
       mContentLength(0),
       mContentLengthValid(false),
       mHasChunkedTransferEncoding(false),
-      mChunkDataBytesLeft(0) {
+      mChunkDataBytesLeft(0),
+      mNumBandwidthHistoryItems(0),
+      mTotalTransferTimeUs(0),
+      mTotalTransferBytes(0),
+      mDecryptHandle(NULL),
+      mDrmManagerClient(NULL) {
 }
 
 NuHTTPDataSource::~NuHTTPDataSource() {
@@ -78,7 +84,7 @@
 status_t NuHTTPDataSource::connect(
         const char *uri,
         const KeyedVector<String8, String8> *overrides,
-        off_t offset) {
+        off64_t offset) {
     String8 headers;
     MakeFullHeaders(overrides, &headers);
 
@@ -88,9 +94,12 @@
 status_t NuHTTPDataSource::connect(
         const char *uri,
         const String8 &headers,
-        off_t offset) {
+        off64_t offset) {
     String8 host, path;
     unsigned port;
+
+    mUri = uri;
+
     if (!ParseURL(uri, &host, &port, &path)) {
         return ERROR_MALFORMED;
     }
@@ -106,8 +115,8 @@
 status_t NuHTTPDataSource::connect(
         const char *host, unsigned port, const char *path,
         const String8 &headers,
-        off_t offset) {
-    LOGI("connect to %s:%u%s @%ld", host, port, path, offset);
+        off64_t offset) {
+    LOGI("connect to %s:%u%s @%lld", host, port, path, offset);
 
     bool needsToReconnect = true;
 
@@ -149,11 +158,14 @@
         request.append(" HTTP/1.1\r\n");
         request.append("Host: ");
         request.append(mHost);
+        if (mPort != 80) {
+            request.append(StringPrintf(":%u", mPort).c_str());
+        }
         request.append("\r\n");
 
         if (offset != 0) {
             char rangeHeader[128];
-            sprintf(rangeHeader, "Range: bytes=%ld-\r\n", offset);
+            sprintf(rangeHeader, "Range: bytes=%lld-\r\n", offset);
             request.append(rangeHeader);
         }
 
@@ -169,7 +181,7 @@
         }
 
         if (IsRedirectStatusCode(httpStatus)) {
-            string value;
+            AString value;
             CHECK(mHTTP.find_header_value("Location", &value));
 
             mState = DISCONNECTED;
@@ -189,9 +201,8 @@
         mHasChunkedTransferEncoding = false;
 
         {
-            string value;
-            if (mHTTP.find_header_value("Transfer-Encoding", &value)
-                    || mHTTP.find_header_value("Transfer-encoding", &value)) {
+            AString value;
+            if (mHTTP.find_header_value("Transfer-Encoding", &value)) {
                 // We don't currently support any transfer encodings but
                 // chunked.
 
@@ -213,11 +224,11 @@
         applyTimeoutResponse();
 
         if (offset == 0) {
-            string value;
+            AString value;
             unsigned long x;
-            if (mHTTP.find_header_value(string("Content-Length"), &value)
+            if (mHTTP.find_header_value(AString("Content-Length"), &value)
                     && ParseSingleUnsignedLong(value.c_str(), &x)) {
-                mContentLength = (off_t)x;
+                mContentLength = (off64_t)x;
                 mContentLengthValid = true;
             } else {
                 LOGW("Server did not give us the content length!");
@@ -230,9 +241,9 @@
                 return ERROR_UNSUPPORTED;
             }
 
-            string value;
+            AString value;
             unsigned long x;
-            if (mHTTP.find_header_value(string("Content-Range"), &value)) {
+            if (mHTTP.find_header_value(AString("Content-Range"), &value)) {
                 const char *slashPos = strchr(value.c_str(), '/');
                 if (slashPos != NULL
                         && ParseSingleUnsignedLong(slashPos + 1, &x)) {
@@ -322,7 +333,7 @@
     return n;
 }
 
-ssize_t NuHTTPDataSource::readAt(off_t offset, void *data, size_t size) {
+ssize_t NuHTTPDataSource::readAt(off64_t offset, void *data, size_t size) {
     LOGV("readAt offset %ld, size %d", offset, size);
 
     Mutex::Autolock autoLock(mLock);
@@ -349,6 +360,8 @@
 
     size_t numBytesRead = 0;
     while (numBytesRead < size) {
+        int64_t startTimeUs = ALooper::GetNowUs();
+
         ssize_t n =
             internalRead((uint8_t *)data + numBytesRead, size - numBytesRead);
 
@@ -356,6 +369,9 @@
             return n;
         }
 
+        int64_t delayUs = ALooper::GetNowUs() - startTimeUs;
+        addBandwidthMeasurement_l(n, delayUs);
+
         numBytesRead += (size_t)n;
 
         if (n == 0) {
@@ -374,7 +390,7 @@
     return numBytesRead;
 }
 
-status_t NuHTTPDataSource::getSize(off_t *size) {
+status_t NuHTTPDataSource::getSize(off64_t *size) {
     *size = 0;
 
     if (mState != CONNECTED) {
@@ -425,7 +441,7 @@
 }
 
 void NuHTTPDataSource::applyTimeoutResponse() {
-    string timeout;
+    AString timeout;
     if (mHTTP.find_header_value("X-SocketTimeout", &timeout)) {
         const char *s = timeout.c_str();
         char *end;
@@ -440,4 +456,64 @@
     }
 }
 
+bool NuHTTPDataSource::estimateBandwidth(int32_t *bandwidth_bps) {
+    Mutex::Autolock autoLock(mLock);
+
+    if (mNumBandwidthHistoryItems < 2) {
+        return false;
+    }
+
+    *bandwidth_bps = ((double)mTotalTransferBytes * 8E6 / mTotalTransferTimeUs);
+
+    return true;
+}
+
+void NuHTTPDataSource::addBandwidthMeasurement_l(
+        size_t numBytes, int64_t delayUs) {
+    BandwidthEntry entry;
+    entry.mDelayUs = delayUs;
+    entry.mNumBytes = numBytes;
+    mTotalTransferTimeUs += delayUs;
+    mTotalTransferBytes += numBytes;
+
+    mBandwidthHistory.push_back(entry);
+    if (++mNumBandwidthHistoryItems > 100) {
+        BandwidthEntry *entry = &*mBandwidthHistory.begin();
+        mTotalTransferTimeUs -= entry->mDelayUs;
+        mTotalTransferBytes -= entry->mNumBytes;
+        mBandwidthHistory.erase(mBandwidthHistory.begin());
+        --mNumBandwidthHistoryItems;
+    }
+}
+
+DecryptHandle* NuHTTPDataSource::DrmInitialization(DrmManagerClient* client) {
+    if (client == NULL) {
+        return NULL;
+    }
+    mDrmManagerClient = client;
+
+    if (mDecryptHandle == NULL) {
+        /* Note that if a redirect occurred, mUri is the redirect URI
+         * rather than the original one.
+         */
+        mDecryptHandle = mDrmManagerClient->openDecryptSession(mUri);
+    }
+
+    if (mDecryptHandle == NULL) {
+        mDrmManagerClient = NULL;
+    }
+
+    return mDecryptHandle;
+}
+
+void NuHTTPDataSource::getDrmInfo(DecryptHandle **handle, DrmManagerClient **client) {
+    *handle = mDecryptHandle;
+
+    *client = mDrmManagerClient;
+}
+
+String8 NuHTTPDataSource::getUri() {
+    return mUri;
+}
+
 }  // namespace android
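The bandwidth-measurement additions above keep a sliding window of (bytes, transfer time) samples, capped at 100 entries, and report total_bytes * 8E6 / total_time_us as bits per second once at least two samples exist. A self-contained sketch of the same bookkeeping (class and member names here are illustrative, not the ones in NuHTTPDataSource.h):

    #include <cstddef>
    #include <cstdint>
    #include <cstdio>
    #include <list>

    // Sliding-window bandwidth estimator mirroring addBandwidthMeasurement_l()
    // and estimateBandwidth() above.
    class BandwidthEstimator {
    public:
        void addSample(size_t numBytes, int64_t delayUs) {
            mTotalBytes += numBytes;
            mTotalTimeUs += delayUs;
            mHistory.push_back({ numBytes, delayUs });
            if (mHistory.size() > 100) {          // drop the oldest sample
                mTotalBytes -= mHistory.front().bytes;
                mTotalTimeUs -= mHistory.front().delayUs;
                mHistory.pop_front();
            }
        }

        bool estimate(int32_t *bandwidthBps) const {
            if (mHistory.size() < 2) {
                return false;                     // not enough data yet
            }
            *bandwidthBps = (int32_t)((double)mTotalBytes * 8E6 / mTotalTimeUs);
            return true;
        }

    private:
        struct Sample { size_t bytes; int64_t delayUs; };
        std::list<Sample> mHistory;
        size_t mTotalBytes = 0;
        int64_t mTotalTimeUs = 0;
    };

    int main() {
        BandwidthEstimator est;
        est.addSample(64 * 1024, 50000);   // 64 KiB in 50 ms
        est.addSample(64 * 1024, 50000);
        int32_t bps;
        if (est.estimate(&bps)) {
            printf("~%d bps\n", bps);      // ~10.5 Mbps for these samples
        }
        return 0;
    }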
diff --git a/media/libstagefright/OMXCodec.cpp b/media/libstagefright/OMXCodec.cpp
index 980da77..e516cb4 100644
--- a/media/libstagefright/OMXCodec.cpp
+++ b/media/libstagefright/OMXCodec.cpp
@@ -38,10 +38,11 @@
 #include <binder/IServiceManager.h>
 #include <binder/MemoryDealer.h>
 #include <binder/ProcessState.h>
+#include <media/stagefright/foundation/ADebug.h>
 #include <media/IMediaPlayerService.h>
+#include <media/stagefright/HardwareAPI.h>
 #include <media/stagefright/MediaBuffer.h>
 #include <media/stagefright/MediaBufferGroup.h>
-#include <media/stagefright/MediaDebug.h>
 #include <media/stagefright/MediaDefs.h>
 #include <media/stagefright/MediaExtractor.h>
 #include <media/stagefright/MetaData.h>
@@ -53,6 +54,7 @@
 #include <OMX_Component.h>
 
 #include "include/ThreadedSource.h"
+#include "include/avc_utils.h"
 
 namespace android {
 
@@ -152,37 +154,37 @@
 
 static const CodecInfo kDecoderInfo[] = {
     { MEDIA_MIMETYPE_IMAGE_JPEG, "OMX.TI.JPEG.decode" },
+//    { MEDIA_MIMETYPE_AUDIO_MPEG, "OMX.Nvidia.mp3.decoder" },
 //    { MEDIA_MIMETYPE_AUDIO_MPEG, "OMX.TI.MP3.decode" },
     { MEDIA_MIMETYPE_AUDIO_MPEG, "MP3Decoder" },
-//    { MEDIA_MIMETYPE_AUDIO_MPEG, "OMX.PV.mp3dec" },
 //    { MEDIA_MIMETYPE_AUDIO_AMR_NB, "OMX.TI.AMR.decode" },
+//    { MEDIA_MIMETYPE_AUDIO_AMR_NB, "OMX.Nvidia.amr.decoder" },
     { MEDIA_MIMETYPE_AUDIO_AMR_NB, "AMRNBDecoder" },
-//    { MEDIA_MIMETYPE_AUDIO_AMR_NB, "OMX.PV.amrdec" },
+//    { MEDIA_MIMETYPE_AUDIO_AMR_NB, "OMX.Nvidia.amrwb.decoder" },
     { MEDIA_MIMETYPE_AUDIO_AMR_WB, "OMX.TI.WBAMR.decode" },
     { MEDIA_MIMETYPE_AUDIO_AMR_WB, "AMRWBDecoder" },
-//    { MEDIA_MIMETYPE_AUDIO_AMR_WB, "OMX.PV.amrdec" },
+//    { MEDIA_MIMETYPE_AUDIO_AAC, "OMX.Nvidia.aac.decoder" },
     { MEDIA_MIMETYPE_AUDIO_AAC, "OMX.TI.AAC.decode" },
     { MEDIA_MIMETYPE_AUDIO_AAC, "AACDecoder" },
-//    { MEDIA_MIMETYPE_AUDIO_AAC, "OMX.PV.aacdec" },
     { MEDIA_MIMETYPE_AUDIO_G711_ALAW, "G711Decoder" },
     { MEDIA_MIMETYPE_AUDIO_G711_MLAW, "G711Decoder" },
+    { MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.Nvidia.mp4.decode" },
     { MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.qcom.7x30.video.decoder.mpeg4" },
     { MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.qcom.video.decoder.mpeg4" },
     { MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.TI.Video.Decoder" },
     { MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.SEC.MPEG4.Decoder" },
     { MEDIA_MIMETYPE_VIDEO_MPEG4, "M4vH263Decoder" },
-//    { MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.PV.mpeg4dec" },
+    { MEDIA_MIMETYPE_VIDEO_H263, "OMX.Nvidia.h263.decode" },
     { MEDIA_MIMETYPE_VIDEO_H263, "OMX.qcom.7x30.video.decoder.h263" },
     { MEDIA_MIMETYPE_VIDEO_H263, "OMX.qcom.video.decoder.h263" },
     { MEDIA_MIMETYPE_VIDEO_H263, "OMX.SEC.H263.Decoder" },
     { MEDIA_MIMETYPE_VIDEO_H263, "M4vH263Decoder" },
-//    { MEDIA_MIMETYPE_VIDEO_H263, "OMX.PV.h263dec" },
+    { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.Nvidia.h264.decode" },
     { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.qcom.7x30.video.decoder.avc" },
     { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.qcom.video.decoder.avc" },
     { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.TI.Video.Decoder" },
     { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.SEC.AVC.Decoder" },
     { MEDIA_MIMETYPE_VIDEO_AVC, "AVCDecoder" },
-//    { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.PV.avcdec" },
     { MEDIA_MIMETYPE_AUDIO_VORBIS, "VorbisDecoder" },
     { MEDIA_MIMETYPE_VIDEO_VPX, "VPXDecoder" },
 };
@@ -194,25 +196,24 @@
     { MEDIA_MIMETYPE_AUDIO_AMR_WB, "AMRWBEncoder" },
     { MEDIA_MIMETYPE_AUDIO_AAC, "OMX.TI.AAC.encode" },
     { MEDIA_MIMETYPE_AUDIO_AAC, "AACEncoder" },
-//    { MEDIA_MIMETYPE_AUDIO_AAC, "OMX.PV.aacenc" },
     { MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.qcom.7x30.video.encoder.mpeg4" },
     { MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.qcom.video.encoder.mpeg4" },
     { MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.TI.Video.encoder" },
+    { MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.Nvidia.mp4.encoder" },
     { MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.SEC.MPEG4.Encoder" },
     { MEDIA_MIMETYPE_VIDEO_MPEG4, "M4vH263Encoder" },
-//    { MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.PV.mpeg4enc" },
     { MEDIA_MIMETYPE_VIDEO_H263, "OMX.qcom.7x30.video.encoder.h263" },
     { MEDIA_MIMETYPE_VIDEO_H263, "OMX.qcom.video.encoder.h263" },
     { MEDIA_MIMETYPE_VIDEO_H263, "OMX.TI.Video.encoder" },
+    { MEDIA_MIMETYPE_VIDEO_H263, "OMX.Nvidia.h263.encoder" },
     { MEDIA_MIMETYPE_VIDEO_H263, "OMX.SEC.H263.Encoder" },
     { MEDIA_MIMETYPE_VIDEO_H263, "M4vH263Encoder" },
-//    { MEDIA_MIMETYPE_VIDEO_H263, "OMX.PV.h263enc" },
     { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.qcom.7x30.video.encoder.avc" },
     { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.qcom.video.encoder.avc" },
     { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.TI.Video.encoder" },
+    { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.Nvidia.h264.encoder" },
     { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.SEC.AVC.Encoder" },
     { MEDIA_MIMETYPE_VIDEO_AVC, "AVCEncoder" },
-//    { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.PV.avcenc" },
 };
 
 #undef OPTIONAL
@@ -234,7 +235,9 @@
         sp<OMXCodec> codec = mTarget.promote();
 
         if (codec.get() != NULL) {
+            Mutex::Autolock autoLock(codec->mLock);
             codec->on_message(msg);
+            codec.clear();
         }
     }
 
@@ -264,39 +267,6 @@
     return NULL;
 }
 
-enum {
-    kAVCProfileBaseline      = 0x42,
-    kAVCProfileMain          = 0x4d,
-    kAVCProfileExtended      = 0x58,
-    kAVCProfileHigh          = 0x64,
-    kAVCProfileHigh10        = 0x6e,
-    kAVCProfileHigh422       = 0x7a,
-    kAVCProfileHigh444       = 0xf4,
-    kAVCProfileCAVLC444Intra = 0x2c
-};
-
-static const char *AVCProfileToString(uint8_t profile) {
-    switch (profile) {
-        case kAVCProfileBaseline:
-            return "Baseline";
-        case kAVCProfileMain:
-            return "Main";
-        case kAVCProfileExtended:
-            return "Extended";
-        case kAVCProfileHigh:
-            return "High";
-        case kAVCProfileHigh10:
-            return "High 10";
-        case kAVCProfileHigh422:
-            return "High 422";
-        case kAVCProfileHigh444:
-            return "High 444";
-        case kAVCProfileCAVLC444Intra:
-            return "CAVLC 444 Intra";
-        default:   return "Unknown";
-    }
-}
-
 template<class T>
 static void InitOMXParams(T *params) {
     params->nSize = sizeof(T);
@@ -307,16 +277,15 @@
 }
 
 static bool IsSoftwareCodec(const char *componentName) {
-    if (!strncmp("OMX.PV.", componentName, 7)) {
-        return true;
+    if (!strncmp("OMX.", componentName, 4)) {
+        return false;
     }
 
-    return false;
+    return true;
 }
 
 // A sort order in which non-OMX components are first,
-// followed by software codecs, i.e. OMX.PV.*, followed
-// by all the others.
+// followed by software codecs, then by all the others.
 static int CompareSoftwareCodecsFirst(
         const String8 *elem1, const String8 *elem2) {
     bool isNotOMX1 = strncmp(elem1->string(), "OMX.", 4);
@@ -350,9 +319,13 @@
         const char *componentName, bool isEncoder) {
     uint32_t quirks = 0;
 
-    if (!strcmp(componentName, "OMX.PV.avcdec")) {
-        quirks |= kWantsNALFragments;
+    if (!strcmp(componentName, "OMX.Nvidia.amr.decoder") ||
+         !strcmp(componentName, "OMX.Nvidia.amrwb.decoder") ||
+         !strcmp(componentName, "OMX.Nvidia.aac.decoder") ||
+         !strcmp(componentName, "OMX.Nvidia.mp3.decoder")) {
+        quirks |= kDecoderLiesAboutNumberOfChannels;
     }
+
     if (!strcmp(componentName, "OMX.TI.MP3.decode")) {
         quirks |= kNeedsFlushBeforeDisable;
         quirks |= kDecoderLiesAboutNumberOfChannels;
@@ -412,13 +385,6 @@
         quirks |= kOutputBuffersAreUnreadable;
     }
 
-    if (!strncmp(componentName, "OMX.SEC.", 8) && isEncoder) {
-        // These input buffers contain meta data (for instance,
-        // information helps locate the actual YUV data, or
-        // the physical address of the YUV data).
-        quirks |= kStoreMetaDataInInputVideoBuffers;
-    }
-
     return quirks;
 }
 
@@ -454,7 +420,16 @@
             continue;
         }
 
-        matchingCodecs->push(String8(componentName));
+        // When software-only codecs are requested, push only software codecs;
+        // when hardware-only codecs are requested, push only hardware codecs;
+        // when neither software-only nor hardware-only codecs are requested,
+        // push all codecs.
+        if (((flags & kSoftwareCodecsOnly) &&   IsSoftwareCodec(componentName)) ||
+            ((flags & kHardwareCodecsOnly) &&  !IsSoftwareCodec(componentName)) ||
+            (!(flags & (kSoftwareCodecsOnly | kHardwareCodecsOnly)))) {
+
+            matchingCodecs->push(String8(componentName));
+        }
     }
 
     if (flags & kPreferSoftwareCodecs) {
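The filter added in the hunk above reduces to a single predicate over the two flags and IsSoftwareCodec(). A compact restatement (the flag bit values are assumptions for illustration; the real constants come from OMXCodec.h):

    #include <cstdint>
    #include <cstring>

    // Assumed illustrative bit values for the two flags used in the hunk above.
    enum {
        kSoftwareCodecsOnly = 1,
        kHardwareCodecsOnly = 2,
    };

    // Same rule as the new IsSoftwareCodec(): anything not named "OMX.*" is a
    // software codec.
    static bool IsSoftwareCodec(const char *componentName) {
        return strncmp("OMX.", componentName, 4) != 0;
    }

    // True if the component should be pushed onto matchingCodecs for these flags.
    static bool ShouldPushComponent(uint32_t flags, const char *componentName) {
        bool isSoftware = IsSoftwareCodec(componentName);
        return ((flags & kSoftwareCodecsOnly) && isSoftware)
            || ((flags & kHardwareCodecsOnly) && !isSoftware)
            || !(flags & (kSoftwareCodecsOnly | kHardwareCodecsOnly));
    }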
@@ -468,7 +443,8 @@
         const sp<MetaData> &meta, bool createEncoder,
         const sp<MediaSource> &source,
         const char *matchComponentName,
-        uint32_t flags) {
+        uint32_t flags,
+        const sp<ANativeWindow> &nativeWindow) {
     const char *mime;
     bool success = meta->findCString(kKeyMIMEType, &mime);
     CHECK(success);
@@ -524,7 +500,7 @@
             sp<OMXCodec> codec = new OMXCodec(
                     omx, node, quirks,
                     createEncoder, mime, componentName,
-                    source);
+                    source, nativeWindow);
 
             observer->setCodec(codec);
 
@@ -542,13 +518,17 @@
 }
 
 status_t OMXCodec::configureCodec(const sp<MetaData> &meta, uint32_t flags) {
+    mIsMetaDataStoredInVideoBuffers = false;
+    if (flags & kStoreMetaDataInVideoBuffers) {
+        mIsMetaDataStoredInVideoBuffers = true;
+    }
     if (!(flags & kIgnoreCodecSpecificData)) {
         uint32_t type;
         const void *data;
         size_t size;
         if (meta->findData(kKeyESDS, &type, &data, &size)) {
             ESDS esds((const char *)data, size);
-            CHECK_EQ(esds.InitCheck(), OK);
+            CHECK_EQ(esds.InitCheck(), (status_t)OK);
 
             const void *codec_specific_data;
             size_t codec_specific_data_size;
@@ -563,7 +543,7 @@
             const uint8_t *ptr = (const uint8_t *)data;
 
             CHECK(size >= 7);
-            CHECK_EQ(ptr[0], 1);  // configurationVersion == 1
+            CHECK_EQ((unsigned)ptr[0], 1u);  // configurationVersion == 1
             uint8_t profile = ptr[1];
             uint8_t level = ptr[3];
 
@@ -617,7 +597,7 @@
                 size -= length;
             }
 
-            CODEC_LOGV(
+            CODEC_LOGI(
                     "AVC profile = %d (%s), level = %d",
                     (int)profile, AVCProfileToString(profile), level);
 
@@ -732,6 +712,16 @@
         mQuirks &= ~kOutputBuffersAreUnreadable;
     }
 
+    if (mNativeWindow != NULL
+        && !mIsEncoder
+        && !strncasecmp(mMIME, "video/", 6)
+        && !strncmp(mComponentName, "OMX.", 4)) {
+        status_t err = initNativeWindow();
+        if (err != OK) {
+            return err;
+        }
+    }
+
     return OK;
 }
 
@@ -742,7 +732,7 @@
 
     status_t err = mOMX->getParameter(
             mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
-    CHECK_EQ(err, OK);
+    CHECK_EQ(err, (status_t)OK);
 
     if ((portIndex == kPortIndexInput && (mQuirks & kInputBufferSizesAreBogus))
         || (def.nBufferSize < size)) {
@@ -751,11 +741,11 @@
 
     err = mOMX->setParameter(
             mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
-    CHECK_EQ(err, OK);
+    CHECK_EQ(err, (status_t)OK);
 
     err = mOMX->getParameter(
             mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
-    CHECK_EQ(err, OK);
+    CHECK_EQ(err, (status_t)OK);
 
     // Make sure the setting actually stuck.
     if (portIndex == kPortIndexInput
@@ -915,7 +905,7 @@
     int32_t width, height, frameRate, bitRate, stride, sliceHeight;
     bool success = meta->findInt32(kKeyWidth, &width);
     success = success && meta->findInt32(kKeyHeight, &height);
-    success = success && meta->findInt32(kKeySampleRate, &frameRate);
+    success = success && meta->findInt32(kKeyFrameRate, &frameRate);
     success = success && meta->findInt32(kKeyBitRate, &bitRate);
     success = success && meta->findInt32(kKeyStride, &stride);
     success = success && meta->findInt32(kKeySliceHeight, &sliceHeight);
@@ -935,7 +925,7 @@
     }
 
     OMX_COLOR_FORMATTYPE colorFormat;
-    CHECK_EQ(OK, findTargetColorFormat(meta, &colorFormat));
+    CHECK_EQ((status_t)OK, findTargetColorFormat(meta, &colorFormat));
 
     status_t err;
     OMX_PARAM_PORTDEFINITIONTYPE def;
@@ -944,19 +934,19 @@
     //////////////////////// Input port /////////////////////////
     CHECK_EQ(setVideoPortFormatType(
             kPortIndexInput, OMX_VIDEO_CodingUnused,
-            colorFormat), OK);
+            colorFormat), (status_t)OK);
 
     InitOMXParams(&def);
     def.nPortIndex = kPortIndexInput;
 
     err = mOMX->getParameter(
             mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
-    CHECK_EQ(err, OK);
+    CHECK_EQ(err, (status_t)OK);
 
     def.nBufferSize = getFrameSize(colorFormat,
             stride > 0? stride: -stride, sliceHeight);
 
-    CHECK_EQ(def.eDomain, OMX_PortDomainVideo);
+    CHECK_EQ((int)def.eDomain, (int)OMX_PortDomainVideo);
 
     video_def->nFrameWidth = width;
     video_def->nFrameHeight = height;
@@ -968,20 +958,20 @@
 
     err = mOMX->setParameter(
             mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
-    CHECK_EQ(err, OK);
+    CHECK_EQ(err, (status_t)OK);
 
     //////////////////////// Output port /////////////////////////
     CHECK_EQ(setVideoPortFormatType(
             kPortIndexOutput, compressionFormat, OMX_COLOR_FormatUnused),
-            OK);
+            (status_t)OK);
     InitOMXParams(&def);
     def.nPortIndex = kPortIndexOutput;
 
     err = mOMX->getParameter(
             mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
 
-    CHECK_EQ(err, OK);
-    CHECK_EQ(def.eDomain, OMX_PortDomainVideo);
+    CHECK_EQ(err, (status_t)OK);
+    CHECK_EQ((int)def.eDomain, (int)OMX_PortDomainVideo);
 
     video_def->nFrameWidth = width;
     video_def->nFrameHeight = height;
@@ -996,23 +986,23 @@
 
     err = mOMX->setParameter(
             mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
-    CHECK_EQ(err, OK);
+    CHECK_EQ(err, (status_t)OK);
 
     /////////////////// Codec-specific ////////////////////////
     switch (compressionFormat) {
         case OMX_VIDEO_CodingMPEG4:
         {
-            CHECK_EQ(setupMPEG4EncoderParameters(meta), OK);
+            CHECK_EQ(setupMPEG4EncoderParameters(meta), (status_t)OK);
             break;
         }
 
         case OMX_VIDEO_CodingH263:
-            CHECK_EQ(setupH263EncoderParameters(meta), OK);
+            CHECK_EQ(setupH263EncoderParameters(meta), (status_t)OK);
             break;
 
         case OMX_VIDEO_CodingAVC:
         {
-            CHECK_EQ(setupAVCEncoderParameters(meta), OK);
+            CHECK_EQ(setupAVCEncoderParameters(meta), (status_t)OK);
             break;
         }
 
@@ -1071,7 +1061,7 @@
     status_t err = mOMX->getParameter(
             mNode, OMX_IndexParamVideoBitrate,
             &bitrateType, sizeof(bitrateType));
-    CHECK_EQ(err, OK);
+    CHECK_EQ(err, (status_t)OK);
 
     bitrateType.eControlRate = OMX_Video_ControlRateVariable;
     bitrateType.nTargetBitrate = bitRate;
@@ -1079,7 +1069,7 @@
     err = mOMX->setParameter(
             mNode, OMX_IndexParamVideoBitrate,
             &bitrateType, sizeof(bitrateType));
-    CHECK_EQ(err, OK);
+    CHECK_EQ(err, (status_t)OK);
     return OK;
 }
 
@@ -1135,7 +1125,7 @@
 status_t OMXCodec::setupH263EncoderParameters(const sp<MetaData>& meta) {
     int32_t iFramesInterval, frameRate, bitRate;
     bool success = meta->findInt32(kKeyBitRate, &bitRate);
-    success = success && meta->findInt32(kKeySampleRate, &frameRate);
+    success = success && meta->findInt32(kKeyFrameRate, &frameRate);
     success = success && meta->findInt32(kKeyIFramesInterval, &iFramesInterval);
     CHECK(success);
     OMX_VIDEO_PARAM_H263TYPE h263type;
@@ -1144,7 +1134,7 @@
 
     status_t err = mOMX->getParameter(
             mNode, OMX_IndexParamVideoH263, &h263type, sizeof(h263type));
-    CHECK_EQ(err, OK);
+    CHECK_EQ(err, (status_t)OK);
 
     h263type.nAllowedPictureTypes =
         OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP;
@@ -1171,10 +1161,10 @@
 
     err = mOMX->setParameter(
             mNode, OMX_IndexParamVideoH263, &h263type, sizeof(h263type));
-    CHECK_EQ(err, OK);
+    CHECK_EQ(err, (status_t)OK);
 
-    CHECK_EQ(setupBitRate(bitRate), OK);
-    CHECK_EQ(setupErrorCorrectionParameters(), OK);
+    CHECK_EQ(setupBitRate(bitRate), (status_t)OK);
+    CHECK_EQ(setupErrorCorrectionParameters(), (status_t)OK);
 
     return OK;
 }
@@ -1182,7 +1172,7 @@
 status_t OMXCodec::setupMPEG4EncoderParameters(const sp<MetaData>& meta) {
     int32_t iFramesInterval, frameRate, bitRate;
     bool success = meta->findInt32(kKeyBitRate, &bitRate);
-    success = success && meta->findInt32(kKeySampleRate, &frameRate);
+    success = success && meta->findInt32(kKeyFrameRate, &frameRate);
     success = success && meta->findInt32(kKeyIFramesInterval, &iFramesInterval);
     CHECK(success);
     OMX_VIDEO_PARAM_MPEG4TYPE mpeg4type;
@@ -1191,7 +1181,7 @@
 
     status_t err = mOMX->getParameter(
             mNode, OMX_IndexParamVideoMpeg4, &mpeg4type, sizeof(mpeg4type));
-    CHECK_EQ(err, OK);
+    CHECK_EQ(err, (status_t)OK);
 
     mpeg4type.nSliceHeaderSpacing = 0;
     mpeg4type.bSVH = OMX_FALSE;
@@ -1223,10 +1213,10 @@
 
     err = mOMX->setParameter(
             mNode, OMX_IndexParamVideoMpeg4, &mpeg4type, sizeof(mpeg4type));
-    CHECK_EQ(err, OK);
+    CHECK_EQ(err, (status_t)OK);
 
-    CHECK_EQ(setupBitRate(bitRate), OK);
-    CHECK_EQ(setupErrorCorrectionParameters(), OK);
+    CHECK_EQ(setupBitRate(bitRate), (status_t)OK);
+    CHECK_EQ(setupErrorCorrectionParameters(), (status_t)OK);
 
     return OK;
 }
@@ -1234,7 +1224,7 @@
 status_t OMXCodec::setupAVCEncoderParameters(const sp<MetaData>& meta) {
     int32_t iFramesInterval, frameRate, bitRate;
     bool success = meta->findInt32(kKeyBitRate, &bitRate);
-    success = success && meta->findInt32(kKeySampleRate, &frameRate);
+    success = success && meta->findInt32(kKeyFrameRate, &frameRate);
     success = success && meta->findInt32(kKeyIFramesInterval, &iFramesInterval);
     CHECK(success);
 
@@ -1244,7 +1234,7 @@
 
     status_t err = mOMX->getParameter(
             mNode, OMX_IndexParamVideoAvc, &h264type, sizeof(h264type));
-    CHECK_EQ(err, OK);
+    CHECK_EQ(err, (status_t)OK);
 
     h264type.nAllowedPictureTypes =
         OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP;
@@ -1290,11 +1280,15 @@
     h264type.bMBAFF = OMX_FALSE;
     h264type.eLoopFilterMode = OMX_VIDEO_AVCLoopFilterEnable;
 
+    if (!strcasecmp("OMX.Nvidia.h264.encoder", mComponentName)) {
+        h264type.eLevel = OMX_VIDEO_AVCLevelMax;
+    }
+
     err = mOMX->setParameter(
             mNode, OMX_IndexParamVideoAvc, &h264type, sizeof(h264type));
-    CHECK_EQ(err, OK);
+    CHECK_EQ(err, (status_t)OK);
 
-    CHECK_EQ(setupBitRate(bitRate), OK);
+    CHECK_EQ(setupBitRate(bitRate), (status_t)OK);
 
     return OK;
 }
@@ -1332,8 +1326,8 @@
         status_t err = mOMX->getParameter(
                 mNode, OMX_IndexParamVideoPortFormat,
                 &format, sizeof(format));
-        CHECK_EQ(err, OK);
-        CHECK_EQ(format.eCompressionFormat, OMX_VIDEO_CodingUnused);
+        CHECK_EQ(err, (status_t)OK);
+        CHECK_EQ((int)format.eCompressionFormat, (int)OMX_VIDEO_CodingUnused);
 
         static const int OMX_QCOM_COLOR_FormatYVU420SemiPlanar = 0x7FA30C00;
 
@@ -1361,7 +1355,7 @@
     err = mOMX->getParameter(
             mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
 
-    CHECK_EQ(err, OK);
+    CHECK_EQ(err, (status_t)OK);
 
 #if 1
     // XXX Need a (much) better heuristic to compute input buffer sizes.
@@ -1371,7 +1365,7 @@
     }
 #endif
 
-    CHECK_EQ(def.eDomain, OMX_PortDomainVideo);
+    CHECK_EQ((int)def.eDomain, (int)OMX_PortDomainVideo);
 
     video_def->nFrameWidth = width;
     video_def->nFrameHeight = height;
@@ -1393,8 +1387,8 @@
 
     err = mOMX->getParameter(
             mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
-    CHECK_EQ(err, OK);
-    CHECK_EQ(def.eDomain, OMX_PortDomainVideo);
+    CHECK_EQ(err, (status_t)OK);
+    CHECK_EQ((int)def.eDomain, (int)OMX_PortDomainVideo);
 
 #if 0
     def.nBufferSize =
@@ -1415,7 +1409,8 @@
         bool isEncoder,
         const char *mime,
         const char *componentName,
-        const sp<MediaSource> &source)
+        const sp<MediaSource> &source,
+        const sp<ANativeWindow> &nativeWindow)
     : mOMX(omx),
       mOMXLivesLocally(omx->livesLocally(getpid())),
       mNode(node),
@@ -1433,9 +1428,9 @@
       mSeekTimeUs(-1),
       mSeekMode(ReadOptions::SEEK_CLOSEST_SYNC),
       mTargetTimeUs(-1),
-      mSkipTimeUs(-1),
       mLeftOverBuffer(NULL),
-      mPaused(false) {
+      mPaused(false),
+      mNativeWindow(nativeWindow) {
     mPortStatus[kPortIndexInput] = ENABLED;
     mPortStatus[kPortIndexOutput] = ENABLED;
 
@@ -1513,10 +1508,10 @@
 OMXCodec::~OMXCodec() {
     mSource.clear();
 
-    CHECK(mState == LOADED || mState == ERROR);
+    CHECK(mState == LOADED || mState == ERROR || mState == LOADED_TO_IDLE);
 
     status_t err = mOMX->freeNode(mNode);
-    CHECK_EQ(err, OK);
+    CHECK_EQ(err, (status_t)OK);
 
     mNode = NULL;
     setState(DEAD);
@@ -1533,21 +1528,23 @@
 status_t OMXCodec::init() {
     // mLock is held.
 
-    CHECK_EQ(mState, LOADED);
+    CHECK_EQ((int)mState, (int)LOADED);
 
     status_t err;
     if (!(mQuirks & kRequiresLoadedToIdleAfterAllocation)) {
         err = mOMX->sendCommand(mNode, OMX_CommandStateSet, OMX_StateIdle);
-        CHECK_EQ(err, OK);
+        CHECK_EQ(err, (status_t)OK);
         setState(LOADED_TO_IDLE);
     }
 
     err = allocateBuffers();
-    CHECK_EQ(err, OK);
+    if (err != (status_t)OK) {
+        return err;
+    }
 
     if (mQuirks & kRequiresLoadedToIdleAfterAllocation) {
         err = mOMX->sendCommand(mNode, OMX_CommandStateSet, OMX_StateIdle);
-        CHECK_EQ(err, OK);
+        CHECK_EQ(err, (status_t)OK);
 
         setState(LOADED_TO_IDLE);
     }
@@ -1579,6 +1576,10 @@
 }
 
 status_t OMXCodec::allocateBuffersOnPort(OMX_U32 portIndex) {
+    if (mNativeWindow != NULL && portIndex == kPortIndexOutput) {
+        return allocateOutputBuffersFromNativeWindow();
+    }
+
     OMX_PARAM_PORTDEFINITIONTYPE def;
     InitOMXParams(&def);
     def.nPortIndex = portIndex;
@@ -1590,6 +1591,14 @@
         return err;
     }
 
+    if (mIsMetaDataStoredInVideoBuffers && portIndex == kPortIndexInput) {
+        err = mOMX->storeMetaDataInBuffers(mNode, kPortIndexInput, OMX_TRUE);
+        if (err != OK) {
+            LOGE("Storing meta data in video buffers is not supported");
+            return err;
+        }
+    }
+
     CODEC_LOGI("allocating %lu buffers of size %lu on %s port",
             def.nBufferCountActual, def.nBufferSize,
             portIndex == kPortIndexInput ? "input" : "output");
@@ -1644,7 +1653,7 @@
         }
 
         info.mBuffer = buffer;
-        info.mOwnedByComponent = false;
+        info.mStatus = OWNED_BY_US;
         info.mMem = mem;
         info.mMediaBuffer = NULL;
 
@@ -1671,9 +1680,173 @@
     return OK;
 }
 
-void OMXCodec::on_message(const omx_message &msg) {
-    Mutex::Autolock autoLock(mLock);
+status_t OMXCodec::allocateOutputBuffersFromNativeWindow() {
+    // Get the number of buffers needed.
+    OMX_PARAM_PORTDEFINITIONTYPE def;
+    InitOMXParams(&def);
+    def.nPortIndex = kPortIndexOutput;
 
+    status_t err = mOMX->getParameter(
+            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+    if (err != OK) {
+        return err;
+    }
+
+    err = native_window_set_buffers_geometry(
+            mNativeWindow.get(),
+            def.format.video.nFrameWidth,
+            def.format.video.nFrameHeight,
+            def.format.video.eColorFormat);
+
+    if (err != 0) {
+        LOGE("native_window_set_buffers_geometry failed: %s (%d)",
+                strerror(-err), -err);
+        return err;
+    }
+
+    // Increase the buffer count by one to allow the ANativeWindow to hold
+    // on to one of the buffers.
+    def.nBufferCountActual++;
+    err = mOMX->setParameter(
+            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+    if (err != OK) {
+        return err;
+    }
+
+    // Set up the native window.
+    // XXX TODO: Get the gralloc usage flags from the OMX plugin!
+    err = native_window_set_usage(
+            mNativeWindow.get(), GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_EXTERNAL_DISP);
+    if (err != 0) {
+        LOGE("native_window_set_usage failed: %s (%d)", strerror(-err), -err);
+        return err;
+    }
+
+    err = native_window_set_buffer_count(
+            mNativeWindow.get(), def.nBufferCountActual);
+    if (err != 0) {
+        LOGE("native_window_set_buffer_count failed: %s (%d)", strerror(-err),
+                -err);
+        return err;
+    }
+
+    // XXX TODO: Do something so the ANativeWindow knows that we'll need to get
+    // the same set of buffers.
+
+    CODEC_LOGI("allocating %lu buffers from a native window of size %lu on "
+            "output port", def.nBufferCountActual, def.nBufferSize);
+
+    // Dequeue buffers and send them to OMX
+    OMX_U32 i;
+    for (i = 0; i < def.nBufferCountActual; i++) {
+        android_native_buffer_t* buf;
+        err = mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf);
+        if (err != 0) {
+            LOGE("dequeueBuffer failed: %s (%d)", strerror(-err), -err);
+            break;
+        }
+
+        sp<GraphicBuffer> graphicBuffer(new GraphicBuffer(buf, false));
+        IOMX::buffer_id bufferId;
+        err = mOMX->useGraphicBuffer(mNode, kPortIndexOutput, graphicBuffer,
+                &bufferId);
+        if (err != 0) {
+            break;
+        }
+
+        CODEC_LOGV("registered graphic buffer with ID %p (pointer = %p)",
+                bufferId, graphicBuffer.get());
+
+        BufferInfo info;
+        info.mData = NULL;
+        info.mSize = def.nBufferSize;
+        info.mBuffer = bufferId;
+        info.mStatus = OWNED_BY_US;
+        info.mMem = NULL;
+        info.mMediaBuffer = new MediaBuffer(graphicBuffer);
+        info.mMediaBuffer->setObserver(this);
+
+        mPortBuffers[kPortIndexOutput].push(info);
+    }
+
+    OMX_U32 cancelStart;
+    OMX_U32 cancelEnd;
+
+    if (err != 0) {
+        // If an error occurred while dequeuing we need to cancel any buffers
+        // that were dequeued.
+        cancelStart = 0;
+        cancelEnd = i;
+    } else {
+        // Return the last two buffers to the native window.
+        // XXX TODO: The number of buffers the native window owns should probably be
+        // queried from it when we put the native window in fixed buffer pool mode
+        // (which needs to be implemented).  Currently it's hard-coded to 2.
+        cancelStart = def.nBufferCountActual - 2;
+        cancelEnd = def.nBufferCountActual;
+    }
+
+    for (OMX_U32 i = cancelStart; i < cancelEnd; i++) {
+        BufferInfo *info = &mPortBuffers[kPortIndexOutput].editItemAt(i);
+        cancelBufferToNativeWindow(info);
+    }
+
+    return err;
+}
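One subtle part of allocateOutputBuffersFromNativeWindow() above is which dequeued buffers get cancelled back to the native window at the end. A small restatement of just that range computation (the hard-coded 2 matches the TODO comment in the hunk):

    #include <cstdint>
    #include <utility>

    // Returns the [start, end) range of output buffers to cancel back to the
    // native window: on a dequeue/registration failure after dequeuedSoFar
    // buffers, everything dequeued so far is returned; on success, the last two
    // buffers are handed back so the window keeps a couple of buffers of its own.
    static std::pair<uint32_t, uint32_t> CancelRange(
            bool failed, uint32_t dequeuedSoFar, uint32_t bufferCountActual) {
        if (failed) {
            return { 0, dequeuedSoFar };
        }
        return { bufferCountActual - 2, bufferCountActual };
    }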
+
+status_t OMXCodec::cancelBufferToNativeWindow(BufferInfo *info) {
+    CHECK_EQ((int)info->mStatus, (int)OWNED_BY_US);
+    CODEC_LOGV("Calling cancelBuffer on buffer %p", info->mBuffer);
+    int err = mNativeWindow->cancelBuffer(
+        mNativeWindow.get(), info->mMediaBuffer->graphicBuffer().get());
+    if (err != 0) {
+        CODEC_LOGE("cancelBuffer failed w/ error 0x%08x", err);
+
+        setState(ERROR);
+        return err;
+    }
+    info->mStatus = OWNED_BY_NATIVE_WINDOW;
+    return OK;
+}
+
+OMXCodec::BufferInfo* OMXCodec::dequeueBufferFromNativeWindow() {
+    // Dequeue the next buffer from the native window.
+    android_native_buffer_t* buf;
+    int err = mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf);
+    if (err != 0) {
+        CODEC_LOGE("dequeueBuffer failed w/ error 0x%08x", err);
+
+        setState(ERROR);
+        return 0;
+    }
+
+    // Determine which buffer we just dequeued.
+    Vector<BufferInfo> *buffers = &mPortBuffers[kPortIndexOutput];
+    BufferInfo *bufInfo = 0;
+    for (size_t i = 0; i < buffers->size(); i++) {
+        sp<GraphicBuffer> graphicBuffer = buffers->itemAt(i).
+            mMediaBuffer->graphicBuffer();
+        if (graphicBuffer->handle == buf->handle) {
+            bufInfo = &buffers->editItemAt(i);
+            break;
+        }
+    }
+
+    if (bufInfo == 0) {
+        CODEC_LOGE("dequeued unrecognized buffer: %p", buf);
+
+        setState(ERROR);
+        return 0;
+    }
+
+    // The native window no longer owns the buffer.
+    CHECK_EQ((int)bufInfo->mStatus, (int)OWNED_BY_NATIVE_WINDOW);
+    bufInfo->mStatus = OWNED_BY_US;
+
+    return bufInfo;
+}
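The boolean mOwnedByComponent is replaced throughout this file by a three-state mStatus. The states below are the ones introduced by the patch; the transition summary is an illustrative reading of the hunks in this file rather than an exhaustive state machine:

    // Buffer ownership states replacing the old mOwnedByComponent flag.
    enum BufferStatus {
        OWNED_BY_US,             // held by the OMXCodec wrapper
        OWNED_BY_COMPONENT,      // handed to the OMX component, awaiting *_BUFFER_DONE
        OWNED_BY_NATIVE_WINDOW,  // cancelled back to / queued on the ANativeWindow
    };

    // Typical transitions seen in this patch:
    //   drainInputBuffer() / fillOutputBuffer():  OWNED_BY_US -> OWNED_BY_COMPONENT
    //   EMPTY_BUFFER_DONE / FILL_BUFFER_DONE:     OWNED_BY_COMPONENT -> OWNED_BY_US
    //   cancelBufferToNativeWindow():             OWNED_BY_US -> OWNED_BY_NATIVE_WINDOW
    //   dequeueBufferFromNativeWindow():          OWNED_BY_NATIVE_WINDOW -> OWNED_BY_US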
+
+void OMXCodec::on_message(const omx_message &msg) {
     switch (msg.type) {
         case omx_message::EVENT:
         {
@@ -1697,32 +1870,35 @@
             }
 
             CHECK(i < buffers->size());
-            if (!(*buffers)[i].mOwnedByComponent) {
+            if ((*buffers)[i].mStatus != OWNED_BY_COMPONENT) {
                 LOGW("We already own input buffer %p, yet received "
                      "an EMPTY_BUFFER_DONE.", buffer);
             }
 
-            {
-                BufferInfo *info = &buffers->editItemAt(i);
-                info->mOwnedByComponent = false;
-                if (info->mMediaBuffer != NULL) {
-                    // It is time to release the media buffers storing meta data
-                    info->mMediaBuffer->release();
-                    info->mMediaBuffer = NULL;
+            BufferInfo* info = &buffers->editItemAt(i);
+            info->mStatus = OWNED_BY_US;
+
+            // Release of this buffer was deferred until EMPTY_BUFFER_DONE arrived.
+            if (info->mMediaBuffer != NULL) {
+                if (mIsEncoder &&
+                    (mQuirks & kAvoidMemcopyInputRecordingFrames)) {
+                    // If zero-copy mode is enabled this will send the
+                    // input buffer back to the upstream source.
+                    restorePatchedDataPointer(info);
                 }
+
+                info->mMediaBuffer->release();
+                info->mMediaBuffer = NULL;
             }
 
             if (mPortStatus[kPortIndexInput] == DISABLING) {
                 CODEC_LOGV("Port is disabled, freeing buffer %p", buffer);
 
-                status_t err =
-                    mOMX->freeBuffer(mNode, kPortIndexInput, buffer);
-                CHECK_EQ(err, OK);
-
-                buffers->removeAt(i);
+                status_t err = freeBuffer(kPortIndexInput, i);
+                CHECK_EQ(err, (status_t)OK);
             } else if (mState != ERROR
                     && mPortStatus[kPortIndexInput] != SHUTTING_DOWN) {
-                CHECK_EQ(mPortStatus[kPortIndexInput], ENABLED);
+                CHECK_EQ((int)mPortStatus[kPortIndexInput], (int)ENABLED);
                 drainInputBuffer(&buffers->editItemAt(i));
             }
             break;
@@ -1749,21 +1925,19 @@
             CHECK(i < buffers->size());
             BufferInfo *info = &buffers->editItemAt(i);
 
-            if (!info->mOwnedByComponent) {
+            if (info->mStatus != OWNED_BY_COMPONENT) {
                 LOGW("We already own output buffer %p, yet received "
                      "a FILL_BUFFER_DONE.", buffer);
             }
 
-            info->mOwnedByComponent = false;
+            info->mStatus = OWNED_BY_US;
 
             if (mPortStatus[kPortIndexOutput] == DISABLING) {
                 CODEC_LOGV("Port is disabled, freeing buffer %p", buffer);
 
-                status_t err =
-                    mOMX->freeBuffer(mNode, kPortIndexOutput, buffer);
-                CHECK_EQ(err, OK);
+                status_t err = freeBuffer(kPortIndexOutput, i);
+                CHECK_EQ(err, (status_t)OK);
 
-                buffers->removeAt(i);
 #if 0
             } else if (mPortStatus[kPortIndexOutput] == ENABLED
                        && (flags & OMX_BUFFERFLAG_EOS)) {
@@ -1772,7 +1946,7 @@
                 mBufferFilled.signal();
 #endif
             } else if (mPortStatus[kPortIndexOutput] != SHUTTING_DOWN) {
-                CHECK_EQ(mPortStatus[kPortIndexOutput], ENABLED);
+                CHECK_EQ((int)mPortStatus[kPortIndexOutput], (int)ENABLED);
 
                 if (info->mMediaBuffer == NULL) {
                     CHECK(mOMXLivesLocally);
@@ -1791,8 +1965,10 @@
                 }
 
                 MediaBuffer *buffer = info->mMediaBuffer;
+                bool isGraphicBuffer = buffer->graphicBuffer() != NULL;
 
-                if (msg.u.extended_buffer_data.range_offset
+                if (!isGraphicBuffer
+                    && msg.u.extended_buffer_data.range_offset
                         + msg.u.extended_buffer_data.range_length
                             > buffer->size()) {
                     CODEC_LOGE(
@@ -1816,7 +1992,7 @@
                     buffer->meta_data()->setInt32(kKeyIsCodecConfig, true);
                 }
 
-                if (mQuirks & kOutputBuffersAreUnreadable) {
+                if (isGraphicBuffer || mQuirks & kOutputBuffersAreUnreadable) {
                     buffer->meta_data()->setInt32(kKeyIsUnreadable, true);
                 }
 
@@ -1855,6 +2031,9 @@
 
                 mFilledBuffers.push_back(i);
                 mBufferFilled.signal();
+                if (mIsEncoder) {
+                    sched_yield();
+                }
             }
 
             break;
@@ -1868,48 +2047,6 @@
     }
 }
 
-void OMXCodec::onEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
-    switch (event) {
-        case OMX_EventCmdComplete:
-        {
-            onCmdComplete((OMX_COMMANDTYPE)data1, data2);
-            break;
-        }
-
-        case OMX_EventError:
-        {
-            CODEC_LOGE("ERROR(0x%08lx, %ld)", data1, data2);
-
-            setState(ERROR);
-            break;
-        }
-
-        case OMX_EventPortSettingsChanged:
-        {
-            onPortSettingsChanged(data1);
-            break;
-        }
-
-#if 0
-        case OMX_EventBufferFlag:
-        {
-            CODEC_LOGV("EVENT_BUFFER_FLAG(%ld)", data1);
-
-            if (data1 == kPortIndexOutput) {
-                mNoMoreOutputData = true;
-            }
-            break;
-        }
-#endif
-
-        default:
-        {
-            CODEC_LOGV("EVENT(%d, %ld, %ld)", event, data1, data2);
-            break;
-        }
-    }
-}
-
 // Has the format changed in any way that the client would have to be aware of?
 static bool formatHasNotablyChanged(
         const sp<MetaData> &from, const sp<MetaData> &to) {
@@ -1954,6 +2091,21 @@
         if (height_from != height_to) {
             return true;
         }
+
+        int32_t left_from, top_from, right_from, bottom_from;
+        CHECK(from->findRect(
+                    kKeyCropRect,
+                    &left_from, &top_from, &right_from, &bottom_from));
+
+        int32_t left_to, top_to, right_to, bottom_to;
+        CHECK(to->findRect(
+                    kKeyCropRect,
+                    &left_to, &top_to, &right_to, &bottom_to));
+
+        if (left_to != left_from || top_to != top_from
+                || right_to != right_from || bottom_to != bottom_from) {
+            return true;
+        }
     } else if (!strcasecmp(mime_from, MEDIA_MIMETYPE_AUDIO_RAW)) {
         int32_t numChannels_from, numChannels_to;
         CHECK(from->findInt32(kKeyChannelCount, &numChannels_from));
@@ -1975,6 +2127,78 @@
     return false;
 }
 
+void OMXCodec::onEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
+    switch (event) {
+        case OMX_EventCmdComplete:
+        {
+            onCmdComplete((OMX_COMMANDTYPE)data1, data2);
+            break;
+        }
+
+        case OMX_EventError:
+        {
+            CODEC_LOGE("ERROR(0x%08lx, %ld)", data1, data2);
+
+            setState(ERROR);
+            break;
+        }
+
+        case OMX_EventPortSettingsChanged:
+        {
+            CODEC_LOGV("OMX_EventPortSettingsChanged(port=%ld, data2=0x%08lx)",
+                       data1, data2);
+
+            if (data2 == 0 || data2 == OMX_IndexParamPortDefinition) {
+                onPortSettingsChanged(data1);
+            } else if (data1 == kPortIndexOutput
+                    && data2 == OMX_IndexConfigCommonOutputCrop) {
+
+                sp<MetaData> oldOutputFormat = mOutputFormat;
+                initOutputFormat(mSource->getFormat());
+
+                if (formatHasNotablyChanged(oldOutputFormat, mOutputFormat)) {
+                    mOutputPortSettingsHaveChanged = true;
+
+                    if (mNativeWindow != NULL) {
+                        int32_t left, top, right, bottom;
+                        CHECK(mOutputFormat->findRect(
+                                    kKeyCropRect,
+                                    &left, &top, &right, &bottom));
+
+                        android_native_rect_t crop;
+                        crop.left = left;
+                        crop.top = top;
+                        crop.right = right;
+                        crop.bottom = bottom;
+
+                        CHECK_EQ(0, native_window_set_crop(
+                                    mNativeWindow.get(), &crop));
+                    }
+                }
+            }
+            break;
+        }
+
+#if 0
+        case OMX_EventBufferFlag:
+        {
+            CODEC_LOGV("EVENT_BUFFER_FLAG(%ld)", data1);
+
+            if (data1 == kPortIndexOutput) {
+                mNoMoreOutputData = true;
+            }
+            break;
+        }
+#endif
+
+        default:
+        {
+            CODEC_LOGV("EVENT(%d, %ld, %ld)", event, data1, data2);
+            break;
+        }
+    }
+}
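The new event handling above treats a crop-only OMX_EventPortSettingsChanged (OMX_IndexConfigCommonOutputCrop) separately from a full port reconfiguration and, when the crop really changed, forwards it via native_window_set_crop(). A minimal sketch of the comparison, using a plain struct in place of android_native_rect_t:

    #include <cstdint>

    // Stand-in for the four values stored under kKeyCropRect.
    struct CropRect {
        int32_t left, top, right, bottom;
    };

    // Mirrors the extra check added to formatHasNotablyChanged(): only a real
    // change in the crop rectangle should mark the output format as changed
    // (and, with a native window attached, trigger native_window_set_crop()).
    static bool CropChanged(const CropRect &from, const CropRect &to) {
        return from.left != to.left || from.top != to.top
            || from.right != to.right || from.bottom != to.bottom;
    }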
+
 void OMXCodec::onCmdComplete(OMX_COMMANDTYPE cmd, OMX_U32 data) {
     switch (cmd) {
         case OMX_CommandStateSet:
@@ -1989,13 +2213,13 @@
             CODEC_LOGV("PORT_DISABLED(%ld)", portIndex);
 
             CHECK(mState == EXECUTING || mState == RECONFIGURING);
-            CHECK_EQ(mPortStatus[portIndex], DISABLING);
-            CHECK_EQ(mPortBuffers[portIndex].size(), 0);
+            CHECK_EQ((int)mPortStatus[portIndex], (int)DISABLING);
+            CHECK_EQ(mPortBuffers[portIndex].size(), 0u);
 
             mPortStatus[portIndex] = DISABLED;
 
             if (mState == RECONFIGURING) {
-                CHECK_EQ(portIndex, kPortIndexOutput);
+                CHECK_EQ(portIndex, (OMX_U32)kPortIndexOutput);
 
                 sp<MetaData> oldOutputFormat = mOutputFormat;
                 initOutputFormat(mSource->getFormat());
@@ -2009,7 +2233,7 @@
                 enablePortAsync(portIndex);
 
                 status_t err = allocateBuffersOnPort(portIndex);
-                CHECK_EQ(err, OK);
+                CHECK_EQ(err, (status_t)OK);
             }
             break;
         }
@@ -2020,12 +2244,12 @@
             CODEC_LOGV("PORT_ENABLED(%ld)", portIndex);
 
             CHECK(mState == EXECUTING || mState == RECONFIGURING);
-            CHECK_EQ(mPortStatus[portIndex], ENABLING);
+            CHECK_EQ((int)mPortStatus[portIndex], (int)ENABLING);
 
             mPortStatus[portIndex] = ENABLED;
 
             if (mState == RECONFIGURING) {
-                CHECK_EQ(portIndex, kPortIndexOutput);
+                CHECK_EQ(portIndex, (OMX_U32)kPortIndexOutput);
 
                 setState(EXECUTING);
 
@@ -2040,14 +2264,14 @@
 
             CODEC_LOGV("FLUSH_DONE(%ld)", portIndex);
 
-            CHECK_EQ(mPortStatus[portIndex], SHUTTING_DOWN);
+            CHECK_EQ((int)mPortStatus[portIndex], (int)SHUTTING_DOWN);
             mPortStatus[portIndex] = ENABLED;
 
             CHECK_EQ(countBuffersWeOwn(mPortBuffers[portIndex]),
                      mPortBuffers[portIndex].size());
 
             if (mState == RECONFIGURING) {
-                CHECK_EQ(portIndex, kPortIndexOutput);
+                CHECK_EQ(portIndex, (OMX_U32)kPortIndexOutput);
 
                 disablePortAsync(portIndex);
             } else if (mState == EXECUTING_TO_IDLE) {
@@ -2061,7 +2285,7 @@
 
                     status_t err =
                         mOMX->sendCommand(mNode, OMX_CommandStateSet, OMX_StateIdle);
-                    CHECK_EQ(err, OK);
+                    CHECK_EQ(err, (status_t)OK);
                 }
             } else {
                 // We're flushing both ports in preparation for seeking.
@@ -2101,11 +2325,11 @@
                 status_t err = mOMX->sendCommand(
                         mNode, OMX_CommandStateSet, OMX_StateExecuting);
 
-                CHECK_EQ(err, OK);
+                CHECK_EQ(err, (status_t)OK);
 
                 setState(IDLE_TO_EXECUTING);
             } else {
-                CHECK_EQ(mState, EXECUTING_TO_IDLE);
+                CHECK_EQ((int)mState, (int)EXECUTING_TO_IDLE);
 
                 CHECK_EQ(
                     countBuffersWeOwn(mPortBuffers[kPortIndexInput]),
@@ -2118,13 +2342,13 @@
                 status_t err = mOMX->sendCommand(
                         mNode, OMX_CommandStateSet, OMX_StateLoaded);
 
-                CHECK_EQ(err, OK);
+                CHECK_EQ(err, (status_t)OK);
 
                 err = freeBuffersOnPort(kPortIndexInput);
-                CHECK_EQ(err, OK);
+                CHECK_EQ(err, (status_t)OK);
 
                 err = freeBuffersOnPort(kPortIndexOutput);
-                CHECK_EQ(err, OK);
+                CHECK_EQ(err, (status_t)OK);
 
                 mPortStatus[kPortIndexInput] = ENABLED;
                 mPortStatus[kPortIndexOutput] = ENABLED;
@@ -2136,7 +2360,7 @@
 
         case OMX_StateExecuting:
         {
-            CHECK_EQ(mState, IDLE_TO_EXECUTING);
+            CHECK_EQ((int)mState, (int)IDLE_TO_EXECUTING);
 
             CODEC_LOGV("Now Executing.");
 
@@ -2152,7 +2376,7 @@
 
         case OMX_StateLoaded:
         {
-            CHECK_EQ(mState, IDLE_TO_LOADED);
+            CHECK_EQ((int)mState, (int)IDLE_TO_LOADED);
 
             CODEC_LOGV("Now Loaded.");
 
@@ -2178,7 +2402,7 @@
 size_t OMXCodec::countBuffersWeOwn(const Vector<BufferInfo> &buffers) {
     size_t n = 0;
     for (size_t i = 0; i < buffers.size(); ++i) {
-        if (!buffers[i].mOwnedByComponent) {
+        if (buffers[i].mStatus != OWNED_BY_COMPONENT) {
             ++n;
         }
     }
@@ -2195,32 +2419,21 @@
     for (size_t i = buffers->size(); i-- > 0;) {
         BufferInfo *info = &buffers->editItemAt(i);
 
-        if (onlyThoseWeOwn && info->mOwnedByComponent) {
+        if (onlyThoseWeOwn && info->mStatus == OWNED_BY_COMPONENT) {
             continue;
         }
 
-        CHECK_EQ(info->mOwnedByComponent, false);
+        CHECK(info->mStatus == OWNED_BY_US
+                || info->mStatus == OWNED_BY_NATIVE_WINDOW);
 
         CODEC_LOGV("freeing buffer %p on port %ld", info->mBuffer, portIndex);
 
-        status_t err =
-            mOMX->freeBuffer(mNode, portIndex, info->mBuffer);
+        status_t err = freeBuffer(portIndex, i);
 
         if (err != OK) {
             stickyErr = err;
         }
 
-        if (info->mMediaBuffer != NULL) {
-            info->mMediaBuffer->setObserver(NULL);
-
-            // Make sure nobody but us owns this buffer at this point.
-            CHECK_EQ(info->mMediaBuffer->refcount(), 0);
-
-            info->mMediaBuffer->release();
-            info->mMediaBuffer = NULL;
-        }
-
-        buffers->removeAt(i);
     }
 
     CHECK(onlyThoseWeOwn || buffers->isEmpty());
@@ -2228,11 +2441,42 @@
     return stickyErr;
 }
 
+status_t OMXCodec::freeBuffer(OMX_U32 portIndex, size_t bufIndex) {
+    Vector<BufferInfo> *buffers = &mPortBuffers[portIndex];
+
+    BufferInfo *info = &buffers->editItemAt(bufIndex);
+
+    status_t err = mOMX->freeBuffer(mNode, portIndex, info->mBuffer);
+
+    if (err == OK && info->mMediaBuffer != NULL) {
+        CHECK_EQ(portIndex, (OMX_U32)kPortIndexOutput);
+        info->mMediaBuffer->setObserver(NULL);
+
+        // Make sure nobody but us owns this buffer at this point.
+        CHECK_EQ(info->mMediaBuffer->refcount(), 0);
+
+        // Cancel the buffer if it belongs to an ANativeWindow.
+        sp<GraphicBuffer> graphicBuffer = info->mMediaBuffer->graphicBuffer();
+        if (info->mStatus == OWNED_BY_US && graphicBuffer != 0) {
+            err = cancelBufferToNativeWindow(info);
+        }
+
+        info->mMediaBuffer->release();
+        info->mMediaBuffer = NULL;
+    }
+
+    if (err == OK) {
+        buffers->removeAt(bufIndex);
+    }
+
+    return err;
+}
+
 void OMXCodec::onPortSettingsChanged(OMX_U32 portIndex) {
     CODEC_LOGV("PORT_SETTINGS_CHANGED(%ld)", portIndex);
 
-    CHECK_EQ(mState, EXECUTING);
-    CHECK_EQ(portIndex, kPortIndexOutput);
+    CHECK_EQ((int)mState, (int)EXECUTING);
+    CHECK_EQ(portIndex, (OMX_U32)kPortIndexOutput);
     setState(RECONFIGURING);
 
     if (mQuirks & kNeedsFlushBeforeDisable) {
@@ -2252,7 +2496,7 @@
          portIndex, countBuffersWeOwn(mPortBuffers[portIndex]),
          mPortBuffers[portIndex].size());
 
-    CHECK_EQ(mPortStatus[portIndex], ENABLED);
+    CHECK_EQ((int)mPortStatus[portIndex], (int)ENABLED);
     mPortStatus[portIndex] = SHUTTING_DOWN;
 
     if ((mQuirks & kRequiresFlushCompleteEmulation)
@@ -2266,7 +2510,7 @@
 
     status_t err =
         mOMX->sendCommand(mNode, OMX_CommandFlush, portIndex);
-    CHECK_EQ(err, OK);
+    CHECK_EQ(err, (status_t)OK);
 
     return true;
 }
@@ -2274,12 +2518,13 @@
 void OMXCodec::disablePortAsync(OMX_U32 portIndex) {
     CHECK(mState == EXECUTING || mState == RECONFIGURING);
 
-    CHECK_EQ(mPortStatus[portIndex], ENABLED);
+    CHECK_EQ((int)mPortStatus[portIndex], (int)ENABLED);
     mPortStatus[portIndex] = DISABLING;
 
+    CODEC_LOGV("sending OMX_CommandPortDisable(%ld)", portIndex);
     status_t err =
         mOMX->sendCommand(mNode, OMX_CommandPortDisable, portIndex);
-    CHECK_EQ(err, OK);
+    CHECK_EQ(err, (status_t)OK);
 
     freeBuffersOnPort(portIndex, true);
 }
@@ -2287,16 +2532,17 @@
 void OMXCodec::enablePortAsync(OMX_U32 portIndex) {
     CHECK(mState == EXECUTING || mState == RECONFIGURING);
 
-    CHECK_EQ(mPortStatus[portIndex], DISABLED);
+    CHECK_EQ((int)mPortStatus[portIndex], (int)DISABLED);
     mPortStatus[portIndex] = ENABLING;
 
+    CODEC_LOGV("sending OMX_CommandPortEnable(%ld)", portIndex);
     status_t err =
         mOMX->sendCommand(mNode, OMX_CommandPortEnable, portIndex);
-    CHECK_EQ(err, OK);
+    CHECK_EQ(err, (status_t)OK);
 }
 
 void OMXCodec::fillOutputBuffers() {
-    CHECK_EQ(mState, EXECUTING);
+    CHECK_EQ((int)mState, (int)EXECUTING);
 
     // This is a workaround for some decoders not properly reporting
     // end-of-output-stream. If we own all input buffers and also own
@@ -2315,7 +2561,10 @@
 
     Vector<BufferInfo> *buffers = &mPortBuffers[kPortIndexOutput];
     for (size_t i = 0; i < buffers->size(); ++i) {
-        fillOutputBuffer(&buffers->editItemAt(i));
+        BufferInfo *info = &buffers->editItemAt(i);
+        if (info->mStatus == OWNED_BY_US) {
+            fillOutputBuffer(&buffers->editItemAt(i));
+        }
     }
 }
 
@@ -2324,15 +2573,17 @@
 
     Vector<BufferInfo> *buffers = &mPortBuffers[kPortIndexInput];
     for (size_t i = 0; i < buffers->size(); ++i) {
-        drainInputBuffer(&buffers->editItemAt(i));
+        if (!drainInputBuffer(&buffers->editItemAt(i))) {
+            break;
+        }
     }
 }
 
-void OMXCodec::drainInputBuffer(BufferInfo *info) {
-    CHECK_EQ(info->mOwnedByComponent, false);
+bool OMXCodec::drainInputBuffer(BufferInfo *info) {
+    CHECK_EQ((int)info->mStatus, (int)OWNED_BY_US);
 
     if (mSignalledEOS) {
-        return;
+        return false;
     }
 
     if (mCodecSpecificDataIndex < mCodecSpecificData.size()) {
@@ -2366,16 +2617,16 @@
                 mNode, info->mBuffer, 0, size,
                 OMX_BUFFERFLAG_ENDOFFRAME | OMX_BUFFERFLAG_CODECCONFIG,
                 0);
-        CHECK_EQ(err, OK);
+        CHECK_EQ(err, (status_t)OK);
 
-        info->mOwnedByComponent = true;
+        info->mStatus = OWNED_BY_COMPONENT;
 
         ++mCodecSpecificDataIndex;
-        return;
+        return true;
     }
 
     if (mPaused) {
-        return;
+        return false;
     }
 
     status_t err;
@@ -2385,17 +2636,16 @@
 
     size_t offset = 0;
     int32_t n = 0;
+
     for (;;) {
         MediaBuffer *srcBuffer;
-        MediaSource::ReadOptions options;
-        if (mSkipTimeUs >= 0) {
-            options.setSkipFrame(mSkipTimeUs);
-        }
         if (mSeekTimeUs >= 0) {
             if (mLeftOverBuffer) {
                 mLeftOverBuffer->release();
                 mLeftOverBuffer = NULL;
             }
+
+            MediaSource::ReadOptions options;
             options.setSeekTo(mSeekTimeUs, mSeekMode);
 
             mSeekTimeUs = -1;
@@ -2420,13 +2670,14 @@
 
             err = OK;
         } else {
-            err = mSource->read(&srcBuffer, &options);
+            err = mSource->read(&srcBuffer);
         }
 
         if (err != OK) {
             signalEOS = true;
             mFinalStatus = err;
             mSignalledEOS = true;
+            mBufferFilled.signal();
             break;
         }
 
@@ -2443,28 +2694,35 @@
                 srcBuffer = NULL;
 
                 setState(ERROR);
-                return;
+                return false;
             }
 
             mLeftOverBuffer = srcBuffer;
             break;
         }
 
-        // Do not release the media buffer if it stores meta data
-        // instead of YUV data. The release is delayed until
-        // EMPTY_BUFFER_DONE callback is received.
         bool releaseBuffer = true;
         if (mIsEncoder && (mQuirks & kAvoidMemcopyInputRecordingFrames)) {
             CHECK(mOMXLivesLocally && offset == 0);
-            OMX_BUFFERHEADERTYPE *header = (OMX_BUFFERHEADERTYPE *) info->mBuffer;
-            header->pBuffer = (OMX_U8 *) srcBuffer->data() + srcBuffer->range_offset();
+
+            OMX_BUFFERHEADERTYPE *header =
+                (OMX_BUFFERHEADERTYPE *)info->mBuffer;
+
+            CHECK(header->pBuffer == info->mData);
+
+            header->pBuffer =
+                (OMX_U8 *)srcBuffer->data() + srcBuffer->range_offset();
+
+            releaseBuffer = false;
+            info->mMediaBuffer = srcBuffer;
         } else {
-            if (mQuirks & kStoreMetaDataInInputVideoBuffers) {
+            if (mIsMetaDataStoredInVideoBuffers) {
                 releaseBuffer = false;
                 info->mMediaBuffer = srcBuffer;
             }
             memcpy((uint8_t *)info->mData + offset,
-                    (const uint8_t *)srcBuffer->data() + srcBuffer->range_offset(),
+                    (const uint8_t *)srcBuffer->data()
+                        + srcBuffer->range_offset(),
                     srcBuffer->range_length());
         }
 
@@ -2520,10 +2778,10 @@
 
     if (err != OK) {
         setState(ERROR);
-        return;
+        return false;
     }
 
-    info->mOwnedByComponent = true;
+    info->mStatus = OWNED_BY_COMPONENT;
 
     // This component does not ever signal the EOS flag on output buffers,
     // Thanks for nothing.
@@ -2531,10 +2789,12 @@
         mNoMoreOutputData = true;
         mBufferFilled.signal();
     }
+
+    return true;
 }
 
 void OMXCodec::fillOutputBuffer(BufferInfo *info) {
-    CHECK_EQ(info->mOwnedByComponent, false);
+    CHECK_EQ((int)info->mStatus, (int)OWNED_BY_US);
 
     if (mNoMoreOutputData) {
         CODEC_LOGV("There is no more output data available, not "
@@ -2542,7 +2802,24 @@
         return;
     }
 
-    CODEC_LOGV("Calling fill_buffer on buffer %p", info->mBuffer);
+    if (info->mMediaBuffer != NULL) {
+        sp<GraphicBuffer> graphicBuffer = info->mMediaBuffer->graphicBuffer();
+        if (graphicBuffer != 0) {
+            // When using a native buffer we need to lock the buffer before
+            // giving it to OMX.
+            CODEC_LOGV("Calling lockBuffer on %p", info->mBuffer);
+            int err = mNativeWindow->lockBuffer(mNativeWindow.get(),
+                    graphicBuffer.get());
+            if (err != 0) {
+                CODEC_LOGE("lockBuffer failed w/ error 0x%08x", err);
+
+                setState(ERROR);
+                return;
+            }
+        }
+    }
+
+    CODEC_LOGV("Calling fillBuffer on buffer %p", info->mBuffer);
     status_t err = mOMX->fillBuffer(mNode, info->mBuffer);
 
     if (err != OK) {
@@ -2552,19 +2829,20 @@
         return;
     }
 
-    info->mOwnedByComponent = true;
+    info->mStatus = OWNED_BY_COMPONENT;
 }
 
-void OMXCodec::drainInputBuffer(IOMX::buffer_id buffer) {
+bool OMXCodec::drainInputBuffer(IOMX::buffer_id buffer) {
     Vector<BufferInfo> *buffers = &mPortBuffers[kPortIndexInput];
     for (size_t i = 0; i < buffers->size(); ++i) {
         if ((*buffers)[i].mBuffer == buffer) {
-            drainInputBuffer(&buffers->editItemAt(i));
-            return;
+            return drainInputBuffer(&buffers->editItemAt(i));
         }
     }
 
     CHECK(!"should not be here.");
+
+    return false;
 }
 
 void OMXCodec::fillOutputBuffer(IOMX::buffer_id buffer) {
@@ -2597,10 +2875,10 @@
     def.nPortIndex = portIndex;
     status_t err = mOMX->getParameter(
             mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
-    CHECK_EQ(err, OK);
+    CHECK_EQ(err, (status_t)OK);
     def.format.audio.eEncoding = OMX_AUDIO_CodingPCM;
     CHECK_EQ(mOMX->setParameter(mNode, OMX_IndexParamPortDefinition,
-            &def, sizeof(def)), OK);
+            &def, sizeof(def)), (status_t)OK);
 
     // pcm param
     OMX_AUDIO_PARAM_PCMMODETYPE pcmParams;
@@ -2610,7 +2888,7 @@
     err = mOMX->getParameter(
             mNode, OMX_IndexParamAudioPcm, &pcmParams, sizeof(pcmParams));
 
-    CHECK_EQ(err, OK);
+    CHECK_EQ(err, (status_t)OK);
 
     pcmParams.nChannels = numChannels;
     pcmParams.eNumData = OMX_NumericalDataSigned;
@@ -2631,7 +2909,7 @@
     err = mOMX->setParameter(
             mNode, OMX_IndexParamAudioPcm, &pcmParams, sizeof(pcmParams));
 
-    CHECK_EQ(err, OK);
+    CHECK_EQ(err, (status_t)OK);
 }
 
 static OMX_AUDIO_AMRBANDMODETYPE pickModeFromBitRate(bool isAMRWB, int32_t bps) {
@@ -2688,13 +2966,13 @@
     status_t err =
         mOMX->getParameter(mNode, OMX_IndexParamAudioAmr, &def, sizeof(def));
 
-    CHECK_EQ(err, OK);
+    CHECK_EQ(err, (status_t)OK);
 
     def.eAMRFrameFormat = OMX_AUDIO_AMRFrameFormatFSF;
 
     def.eAMRBandMode = pickModeFromBitRate(isWAMR, bitRate);
     err = mOMX->setParameter(mNode, OMX_IndexParamAudioAmr, &def, sizeof(def));
-    CHECK_EQ(err, OK);
+    CHECK_EQ(err, (status_t)OK);
 
     ////////////////////////
 
@@ -2723,33 +3001,33 @@
         status_t err = OMX_ErrorNone;
         while (OMX_ErrorNone == err) {
             CHECK_EQ(mOMX->getParameter(mNode, OMX_IndexParamAudioPortFormat,
-                    &format, sizeof(format)), OK);
+                    &format, sizeof(format)), (status_t)OK);
             if (format.eEncoding == OMX_AUDIO_CodingAAC) {
                 break;
             }
             format.nIndex++;
         }
-        CHECK_EQ(OK, err);
+        CHECK_EQ((status_t)OK, err);
         CHECK_EQ(mOMX->setParameter(mNode, OMX_IndexParamAudioPortFormat,
-                &format, sizeof(format)), OK);
+                &format, sizeof(format)), (status_t)OK);
 
         // port definition
         OMX_PARAM_PORTDEFINITIONTYPE def;
         InitOMXParams(&def);
         def.nPortIndex = kPortIndexOutput;
         CHECK_EQ(mOMX->getParameter(mNode, OMX_IndexParamPortDefinition,
-                &def, sizeof(def)), OK);
+                &def, sizeof(def)), (status_t)OK);
         def.format.audio.bFlagErrorConcealment = OMX_TRUE;
         def.format.audio.eEncoding = OMX_AUDIO_CodingAAC;
         CHECK_EQ(mOMX->setParameter(mNode, OMX_IndexParamPortDefinition,
-                &def, sizeof(def)), OK);
+                &def, sizeof(def)), (status_t)OK);
 
         // profile
         OMX_AUDIO_PARAM_AACPROFILETYPE profile;
         InitOMXParams(&profile);
         profile.nPortIndex = kPortIndexOutput;
         CHECK_EQ(mOMX->getParameter(mNode, OMX_IndexParamAudioAac,
-                &profile, sizeof(profile)), OK);
+                &profile, sizeof(profile)), (status_t)OK);
         profile.nChannels = numChannels;
         profile.eChannelMode = (numChannels == 1?
                 OMX_AUDIO_ChannelModeMono: OMX_AUDIO_ChannelModeStereo);
@@ -2762,7 +3040,7 @@
         profile.eAACProfile = OMX_AUDIO_AACObjectLC;
         profile.eAACStreamFormat = OMX_AUDIO_AACStreamFormatMP4FF;
         CHECK_EQ(mOMX->setParameter(mNode, OMX_IndexParamAudioAac,
-                &profile, sizeof(profile)), OK);
+                &profile, sizeof(profile)), (status_t)OK);
 
     } else {
         OMX_AUDIO_PARAM_AACPROFILETYPE profile;
@@ -2771,7 +3049,7 @@
 
         status_t err = mOMX->getParameter(
                 mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile));
-        CHECK_EQ(err, OK);
+        CHECK_EQ(err, (status_t)OK);
 
         profile.nChannels = numChannels;
         profile.nSampleRate = sampleRate;
@@ -2779,7 +3057,7 @@
 
         err = mOMX->setParameter(
                 mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile));
-        CHECK_EQ(err, OK);
+        CHECK_EQ(err, (status_t)OK);
     }
 }
 
@@ -2791,10 +3069,10 @@
     OMX_INDEXTYPE index;
     status_t err = mOMX->get_extension_index(
             mNode, "OMX.TI.JPEG.decode.Config.OutputColorFormat", &index);
-    CHECK_EQ(err, OK);
+    CHECK_EQ(err, (status_t)OK);
 
     err = mOMX->set_config(mNode, index, &format, sizeof(format));
-    CHECK_EQ(err, OK);
+    CHECK_EQ(err, (status_t)OK);
 #endif
 
     OMX_PARAM_PORTDEFINITIONTYPE def;
@@ -2803,13 +3081,13 @@
 
     status_t err = mOMX->getParameter(
             mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
-    CHECK_EQ(err, OK);
+    CHECK_EQ(err, (status_t)OK);
 
-    CHECK_EQ(def.eDomain, OMX_PortDomainImage);
+    CHECK_EQ((int)def.eDomain, (int)OMX_PortDomainImage);
 
     OMX_IMAGE_PORTDEFINITIONTYPE *imageDef = &def.format.image;
 
-    CHECK_EQ(imageDef->eCompressionFormat, OMX_IMAGE_CodingUnused);
+    CHECK_EQ((int)imageDef->eCompressionFormat, (int)OMX_IMAGE_CodingUnused);
     imageDef->eColorFormat = format;
     imageDef->nFrameWidth = width;
     imageDef->nFrameHeight = height;
@@ -2852,7 +3130,7 @@
 
     err = mOMX->setParameter(
             mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
-    CHECK_EQ(err, OK);
+    CHECK_EQ(err, (status_t)OK);
 }
 
 void OMXCodec::setJPEGInputFormat(
@@ -2863,12 +3141,12 @@
 
     status_t err = mOMX->getParameter(
             mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
-    CHECK_EQ(err, OK);
+    CHECK_EQ(err, (status_t)OK);
 
-    CHECK_EQ(def.eDomain, OMX_PortDomainImage);
+    CHECK_EQ((int)def.eDomain, (int)OMX_PortDomainImage);
     OMX_IMAGE_PORTDEFINITIONTYPE *imageDef = &def.format.image;
 
-    CHECK_EQ(imageDef->eCompressionFormat, OMX_IMAGE_CodingJPEG);
+    CHECK_EQ((int)imageDef->eCompressionFormat, (int)OMX_IMAGE_CodingJPEG);
     imageDef->nFrameWidth = width;
     imageDef->nFrameHeight = height;
 
@@ -2877,7 +3155,7 @@
 
     err = mOMX->setParameter(
             mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
-    CHECK_EQ(err, OK);
+    CHECK_EQ(err, (status_t)OK);
 }
 
 void OMXCodec::addCodecSpecificData(const void *data, size_t size) {
@@ -2978,7 +3256,7 @@
 
                 status_t err =
                     mOMX->sendCommand(mNode, OMX_CommandStateSet, OMX_StateIdle);
-                CHECK_EQ(err, OK);
+                CHECK_EQ(err, (status_t)OK);
             }
 
             while (mState != LOADED && mState != ERROR) {
@@ -3029,12 +3307,6 @@
     if (options && options->getSeekTo(&seekTimeUs, &seekMode)) {
         seeking = true;
     }
-    int64_t skipTimeUs;
-    if (options && options->getSkipFrame(&skipTimeUs)) {
-        mSkipTimeUs = skipTimeUs;
-    } else {
-        mSkipTimeUs = -1;
-    }
 
     if (mInitialBufferSubmit) {
         mInitialBufferSubmit = false;
@@ -3070,7 +3342,7 @@
 
         mFilledBuffers.clear();
 
-        CHECK_EQ(mState, EXECUTING);
+        CHECK_EQ((int)mState, (int)EXECUTING);
 
         bool emulateInputFlushCompletion = !flushPortAsync(kPortIndexInput);
         bool emulateOutputFlushCompletion = !flushPortAsync(kPortIndexOutput);
@@ -3089,7 +3361,15 @@
     }
 
     while (mState != ERROR && !mNoMoreOutputData && mFilledBuffers.empty()) {
-        mBufferFilled.wait(mLock);
+        if (mIsEncoder) {
+            if (NO_ERROR != mBufferFilled.waitRelative(mLock, 3000000000LL)) {
+                LOGW("Timed out waiting for buffers from video encoder: %d/%d",
+                    countBuffersWeOwn(mPortBuffers[kPortIndexInput]),
+                    countBuffersWeOwn(mPortBuffers[kPortIndexOutput]));
+            }
+        } else {
+            mBufferFilled.wait(mLock);
+        }
     }
 
     if (mState == ERROR) {
@@ -3110,6 +3390,9 @@
     mFilledBuffers.erase(mFilledBuffers.begin());
 
     BufferInfo *info = &mPortBuffers[kPortIndexOutput].editItemAt(index);
+    CHECK_EQ((int)info->mStatus, (int)OWNED_BY_US);
+    info->mStatus = OWNED_BY_CLIENT;
+
     info->mMediaBuffer->add_ref();
     *buffer = info->mMediaBuffer;
 
@@ -3124,8 +3407,37 @@
         BufferInfo *info = &buffers->editItemAt(i);
 
         if (info->mMediaBuffer == buffer) {
-            CHECK_EQ(mPortStatus[kPortIndexOutput], ENABLED);
-            fillOutputBuffer(info);
+            CHECK_EQ((int)mPortStatus[kPortIndexOutput], (int)ENABLED);
+            CHECK_EQ((int)info->mStatus, (int)OWNED_BY_CLIENT);
+
+            info->mStatus = OWNED_BY_US;
+
+            if (buffer->graphicBuffer() == 0) {
+                fillOutputBuffer(info);
+            } else {
+                sp<MetaData> metaData = info->mMediaBuffer->meta_data();
+                int32_t rendered = 0;
+                if (!metaData->findInt32(kKeyRendered, &rendered)) {
+                    rendered = 0;
+                }
+                if (!rendered) {
+                    status_t err = cancelBufferToNativeWindow(info);
+                    if (err < 0) {
+                        return;
+                    }
+                }
+
+                info->mStatus = OWNED_BY_NATIVE_WINDOW;
+
+                // Dequeue the next buffer from the native window.
+                BufferInfo *nextBufInfo = dequeueBufferFromNativeWindow();
+                if (nextBufInfo == 0) {
+                    return;
+                }
+
+                // Give the buffer to the OMX node to fill.
+                fillOutputBuffer(nextBufInfo);
+            }
             return;
         }
     }
@@ -3351,7 +3663,7 @@
 
     status_t err = mOMX->getParameter(
             mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
-    CHECK_EQ(err, OK);
+    CHECK_EQ(err, (status_t)OK);
 
     printf("%s Port = {\n", portIndex == kPortIndexInput ? "Input" : "Output");
 
@@ -3417,7 +3729,7 @@
 
                 err = mOMX->getParameter(
                         mNode, OMX_IndexParamAudioPcm, &params, sizeof(params));
-                CHECK_EQ(err, OK);
+                CHECK_EQ(err, (status_t)OK);
 
                 printf("  nSamplingRate = %ld\n", params.nSamplingRate);
                 printf("  nChannels = %ld\n", params.nChannels);
@@ -3436,7 +3748,7 @@
 
                 err = mOMX->getParameter(
                         mNode, OMX_IndexParamAudioAmr, &amr, sizeof(amr));
-                CHECK_EQ(err, OK);
+                CHECK_EQ(err, (status_t)OK);
 
                 printf("  nChannels = %ld\n", amr.nChannels);
                 printf("  eAMRBandMode = %s\n",
@@ -3458,6 +3770,18 @@
     printf("}\n");
 }
 
+status_t OMXCodec::initNativeWindow() {
+    // Enable use of a GraphicBuffer as the output for this node.  This must
+    // happen before getting the IndexParamPortDefinition parameter because it
+    // will affect the pixel format that the node reports.
+    status_t err = mOMX->enableGraphicBuffers(mNode, kPortIndexOutput, OMX_TRUE);
+    if (err != 0) {
+        return err;
+    }
+
+    return OK;
+}
+
 void OMXCodec::initOutputFormat(const sp<MetaData> &inputFormat) {
     mOutputFormat = new MetaData;
     mOutputFormat->setCString(kKeyDecoderComponent, mComponentName);
@@ -3474,13 +3798,14 @@
 
     status_t err = mOMX->getParameter(
             mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
-    CHECK_EQ(err, OK);
+    CHECK_EQ(err, (status_t)OK);
 
     switch (def.eDomain) {
         case OMX_PortDomainImage:
         {
             OMX_IMAGE_PORTDEFINITIONTYPE *imageDef = &def.format.image;
-            CHECK_EQ(imageDef->eCompressionFormat, OMX_IMAGE_CodingUnused);
+            CHECK_EQ((int)imageDef->eCompressionFormat,
+                     (int)OMX_IMAGE_CodingUnused);
 
             mOutputFormat->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_RAW);
             mOutputFormat->setInt32(kKeyColorFormat, imageDef->eColorFormat);
@@ -3500,11 +3825,11 @@
 
                 err = mOMX->getParameter(
                         mNode, OMX_IndexParamAudioPcm, &params, sizeof(params));
-                CHECK_EQ(err, OK);
+                CHECK_EQ(err, (status_t)OK);
 
-                CHECK_EQ(params.eNumData, OMX_NumericalDataSigned);
-                CHECK_EQ(params.nBitPerSample, 16);
-                CHECK_EQ(params.ePCMMode, OMX_AUDIO_PCMModeLinear);
+                CHECK_EQ((int)params.eNumData, (int)OMX_NumericalDataSigned);
+                CHECK_EQ(params.nBitPerSample, 16u);
+                CHECK_EQ((int)params.ePCMMode, (int)OMX_AUDIO_PCMModeLinear);
 
                 int32_t numChannels, sampleRate;
                 inputFormat->findInt32(kKeyChannelCount, &numChannels);
@@ -3538,9 +3863,9 @@
 
                 err = mOMX->getParameter(
                         mNode, OMX_IndexParamAudioAmr, &amr, sizeof(amr));
-                CHECK_EQ(err, OK);
+                CHECK_EQ(err, (status_t)OK);
 
-                CHECK_EQ(amr.nChannels, 1);
+                CHECK_EQ(amr.nChannels, 1u);
                 mOutputFormat->setInt32(kKeyChannelCount, 1);
 
                 if (amr.eAMRBandMode >= OMX_AUDIO_AMRBandModeNB0
@@ -3592,18 +3917,42 @@
                 CHECK(!"Unknown compression format.");
             }
 
-            if (!strcmp(mComponentName, "OMX.PV.avcdec")) {
-                // This component appears to be lying to me.
-                mOutputFormat->setInt32(
-                        kKeyWidth, (video_def->nFrameWidth + 15) & -16);
-                mOutputFormat->setInt32(
-                        kKeyHeight, (video_def->nFrameHeight + 15) & -16);
-            } else {
-                mOutputFormat->setInt32(kKeyWidth, video_def->nFrameWidth);
-                mOutputFormat->setInt32(kKeyHeight, video_def->nFrameHeight);
+            mOutputFormat->setInt32(kKeyWidth, video_def->nFrameWidth);
+            mOutputFormat->setInt32(kKeyHeight, video_def->nFrameHeight);
+            mOutputFormat->setInt32(kKeyColorFormat, video_def->eColorFormat);
+
+            if (!mIsEncoder) {
+                OMX_CONFIG_RECTTYPE rect;
+                InitOMXParams(&rect);
+                rect.nPortIndex = kPortIndexOutput;
+                status_t err =
+                        mOMX->getConfig(
+                            mNode, OMX_IndexConfigCommonOutputCrop,
+                            &rect, sizeof(rect));
+
+                if (err == OK) {
+                    CHECK_GE(rect.nLeft, 0);
+                    CHECK_GE(rect.nTop, 0);
+                    CHECK_GE(rect.nWidth, 0u);
+                    CHECK_GE(rect.nHeight, 0u);
+                    CHECK_LE(rect.nLeft + rect.nWidth - 1, video_def->nFrameWidth);
+                    CHECK_LE(rect.nTop + rect.nHeight - 1, video_def->nFrameHeight);
+
+                    mOutputFormat->setRect(
+                            kKeyCropRect,
+                            rect.nLeft,
+                            rect.nTop,
+                            rect.nLeft + rect.nWidth - 1,
+                            rect.nTop + rect.nHeight - 1);
+                } else {
+                    mOutputFormat->setRect(
+                            kKeyCropRect,
+                            0, 0,
+                            video_def->nFrameWidth - 1,
+                            video_def->nFrameHeight - 1);
+                }
             }
 
-            mOutputFormat->setInt32(kKeyColorFormat, video_def->eColorFormat);
             break;
         }
 
@@ -3693,8 +4042,30 @@
             caps->mProfileLevels.push(profileLevel);
         }
 
-        CHECK_EQ(omx->freeNode(node), OK);
+        // Color format query
+        OMX_VIDEO_PARAM_PORTFORMATTYPE portFormat;
+        InitOMXParams(&portFormat);
+        portFormat.nPortIndex = queryDecoders ? 1 : 0;
+        for (portFormat.nIndex = 0;; ++portFormat.nIndex)  {
+            err = omx->getParameter(
+                    node, OMX_IndexParamVideoPortFormat,
+                    &portFormat, sizeof(portFormat));
+            if (err != OK) {
+                break;
+            }
+            caps->mColorFormats.push(portFormat.eColorFormat);
+        }
+
+        CHECK_EQ(omx->freeNode(node), (status_t)OK);
     }
 }
 
+void OMXCodec::restorePatchedDataPointer(BufferInfo *info) {
+    CHECK(mIsEncoder && (mQuirks & kAvoidMemcopyInputRecordingFrames));
+    CHECK(mOMXLivesLocally);
+
+    OMX_BUFFERHEADERTYPE *header = (OMX_BUFFERHEADERTYPE *)info->mBuffer;
+    header->pBuffer = (OMX_U8 *)info->mData;
+}
+
 }  // namespace android
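
Note on the OMXCodec changes above: the boolean mOwnedByComponent is replaced by an explicit per-buffer status (OWNED_BY_US, OWNED_BY_COMPONENT, OWNED_BY_CLIENT, OWNED_BY_NATIVE_WINDOW), and drainInputBuffer() now returns false once EOS or pause stops further submissions. A minimal standalone sketch of that ownership model, using illustrative names rather than the real OMXCodec members:

#include <assert.h>
#include <stddef.h>

// Simplified sketch of the ownership tracking that replaces the old
// mOwnedByComponent flag. The real states also cover the client and the
// native window; only the component round-trip is shown here.
enum BufferOwner {
    OWNED_BY_US,
    OWNED_BY_COMPONENT,
    OWNED_BY_CLIENT,
    OWNED_BY_NATIVE_WINDOW,
};

struct BufferSlot {
    void *buffer;
    BufferOwner owner;
};

// A buffer may only be handed to the OMX component if we currently own it,
// mirroring the CHECK_EQ(info->mStatus, OWNED_BY_US) assertions above.
void submitToComponent(BufferSlot *slot) {
    assert(slot->owner == OWNED_BY_US);
    slot->owner = OWNED_BY_COMPONENT;
}

// EMPTY_BUFFER_DONE / FILL_BUFFER_DONE hand the buffer back to us.
void onComponentReturnedBuffer(BufferSlot *slot) {
    assert(slot->owner == OWNED_BY_COMPONENT);
    slot->owner = OWNED_BY_US;
}

// Same spirit as countBuffersWeOwn(): only buffers in the OWNED_BY_US
// state may be flushed, freed or refilled.
size_t countOwnedByUs(const BufferSlot *slots, size_t n) {
    size_t count = 0;
    for (size_t i = 0; i < n; ++i) {
        if (slots[i].owner == OWNED_BY_US) {
            ++count;
        }
    }
    return count;
}
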
diff --git a/media/libstagefright/OggExtractor.cpp b/media/libstagefright/OggExtractor.cpp
index 0368fb7..cf622af 100644
--- a/media/libstagefright/OggExtractor.cpp
+++ b/media/libstagefright/OggExtractor.cpp
@@ -73,7 +73,7 @@
     // Returns an approximate bitrate in bits per second.
     uint64_t approxBitrate();
 
-    status_t seekToOffset(off_t offset);
+    status_t seekToOffset(off64_t offset);
     status_t readNextPacket(MediaBuffer **buffer);
 
     status_t init();
@@ -91,7 +91,7 @@
     };
 
     sp<DataSource> mSource;
-    off_t mOffset;
+    off64_t mOffset;
     Page mCurrentPage;
     uint64_t mPrevGranulePosition;
     size_t mCurrentPageSize;
@@ -99,7 +99,7 @@
     uint64_t mCurrentPageSamples;
     size_t mNextLaceIndex;
 
-    off_t mFirstDataOffset;
+    off64_t mFirstDataOffset;
 
     vorbis_info mVi;
     vorbis_comment mVc;
@@ -107,8 +107,8 @@
     sp<MetaData> mMeta;
     sp<MetaData> mFileMeta;
 
-    ssize_t readPage(off_t offset, Page *page);
-    status_t findNextPage(off_t startOffset, off_t *pageOffset);
+    ssize_t readPage(off64_t offset, Page *page);
+    status_t findNextPage(off64_t startOffset, off64_t *pageOffset);
 
     status_t verifyHeader(
             MediaBuffer *buffer, uint8_t type);
@@ -116,7 +116,7 @@
     void parseFileMetaData();
     void extractAlbumArt(const void *data, size_t size);
 
-    uint64_t findPrevGranulePosition(off_t pageOffset);
+    uint64_t findPrevGranulePosition(off64_t pageOffset);
 
     MyVorbisExtractor(const MyVorbisExtractor &);
     MyVorbisExtractor &operator=(const MyVorbisExtractor &);
@@ -162,7 +162,7 @@
     int64_t seekTimeUs;
     ReadOptions::SeekMode mode;
     if (options && options->getSeekTo(&seekTimeUs, &mode)) {
-        off_t pos = seekTimeUs * mExtractor->mImpl->approxBitrate() / 8000000ll;
+        off64_t pos = seekTimeUs * mExtractor->mImpl->approxBitrate() / 8000000ll;
         LOGV("seeking to offset %ld", pos);
 
         if (mExtractor->mImpl->seekToOffset(pos) != OK) {
@@ -220,7 +220,7 @@
 }
 
 status_t MyVorbisExtractor::findNextPage(
-        off_t startOffset, off_t *pageOffset) {
+        off64_t startOffset, off64_t *pageOffset) {
     *pageOffset = startOffset;
 
     for (;;) {
@@ -250,9 +250,9 @@
 // it (if any) and return its granule position.
 // To do this we back up from the "current" page's offset until we find any
 // page preceding it and then scan forward to just before the current page.
-uint64_t MyVorbisExtractor::findPrevGranulePosition(off_t pageOffset) {
-    off_t prevPageOffset = 0;
-    off_t prevGuess = pageOffset;
+uint64_t MyVorbisExtractor::findPrevGranulePosition(off64_t pageOffset) {
+    off64_t prevPageOffset = 0;
+    off64_t prevGuess = pageOffset;
     for (;;) {
         if (prevGuess >= 5000) {
             prevGuess -= 5000;
@@ -292,14 +292,14 @@
     }
 }
 
-status_t MyVorbisExtractor::seekToOffset(off_t offset) {
+status_t MyVorbisExtractor::seekToOffset(off64_t offset) {
     if (mFirstDataOffset >= 0 && offset < mFirstDataOffset) {
         // Once we know where the actual audio data starts (past the headers)
         // don't ever seek to anywhere before that.
         offset = mFirstDataOffset;
     }
 
-    off_t pageOffset;
+    off64_t pageOffset;
     status_t err = findNextPage(offset, &pageOffset);
 
     if (err != OK) {
@@ -324,7 +324,7 @@
     return OK;
 }
 
-ssize_t MyVorbisExtractor::readPage(off_t offset, Page *page) {
+ssize_t MyVorbisExtractor::readPage(off64_t offset, Page *page) {
     uint8_t header[27];
     if (mSource->readAt(offset, header, sizeof(header))
             < (ssize_t)sizeof(header)) {
@@ -410,7 +410,7 @@
         }
 
         if (mNextLaceIndex < mCurrentPage.mNumSegments) {
-            off_t dataOffset = mOffset + 27 + mCurrentPage.mNumSegments;
+            off64_t dataOffset = mOffset + 27 + mCurrentPage.mNumSegments;
             for (size_t j = 0; j < mNextLaceIndex; ++j) {
                 dataOffset += mCurrentPage.mLace[j];
             }
@@ -609,7 +609,7 @@
             LOGV("nominal-bitrate = %ld", mVi.bitrate_nominal);
             LOGV("window-bitrate = %ld", mVi.bitrate_window);
 
-            off_t size;
+            off64_t size;
             if (mSource->getSize(&size) == OK) {
                 uint64_t bps = approxBitrate();
 
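
Note on the OggExtractor changes above: besides widening every offset to off64_t, the seek path estimates a byte position from the target time and the stream's approximate bitrate (pos = seekTimeUs * bitrate / 8000000). A hedged helper showing the same arithmetic, not part of the patch:

#include <stdint.h>

// bitrate is in bits per second, time in microseconds, so dividing by
// 8,000,000 converts (bits/second * microseconds) into bytes.
int64_t approxSeekOffset(int64_t seekTimeUs, uint64_t bitsPerSecond) {
    return seekTimeUs * (int64_t)bitsPerSecond / 8000000ll;
}

// Example: 10 minutes into a ~128 kbps stream is roughly byte 9,600,000.
// The off64_t widening matters once files grow past the 2 GiB range that
// a 32-bit off_t can address.
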
diff --git a/media/libstagefright/SampleIterator.cpp b/media/libstagefright/SampleIterator.cpp
index 7155c61..062ab9b 100644
--- a/media/libstagefright/SampleIterator.cpp
+++ b/media/libstagefright/SampleIterator.cpp
@@ -179,7 +179,7 @@
     return OK;
 }
 
-status_t SampleIterator::getChunkOffset(uint32_t chunk, off_t *offset) {
+status_t SampleIterator::getChunkOffset(uint32_t chunk, off64_t *offset) {
     *offset = 0;
 
     if (chunk >= mTable->mNumChunkOffsets) {
diff --git a/media/libstagefright/SampleTable.cpp b/media/libstagefright/SampleTable.cpp
index 27faf4f..a9163fc 100644
--- a/media/libstagefright/SampleTable.cpp
+++ b/media/libstagefright/SampleTable.cpp
@@ -76,7 +76,7 @@
 }
 
 status_t SampleTable::setChunkOffsetParams(
-        uint32_t type, off_t data_offset, size_t data_size) {
+        uint32_t type, off64_t data_offset, size_t data_size) {
     if (mChunkOffsetOffset >= 0) {
         return ERROR_MALFORMED;
     }
@@ -117,7 +117,7 @@
 }
 
 status_t SampleTable::setSampleToChunkParams(
-        off_t data_offset, size_t data_size) {
+        off64_t data_offset, size_t data_size) {
     if (mSampleToChunkOffset >= 0) {
         return ERROR_MALFORMED;
     }
@@ -168,7 +168,7 @@
 }
 
 status_t SampleTable::setSampleSizeParams(
-        uint32_t type, off_t data_offset, size_t data_size) {
+        uint32_t type, off64_t data_offset, size_t data_size) {
     if (mSampleSizeOffset >= 0) {
         return ERROR_MALFORMED;
     }
@@ -228,7 +228,7 @@
 }
 
 status_t SampleTable::setTimeToSampleParams(
-        off_t data_offset, size_t data_size) {
+        off64_t data_offset, size_t data_size) {
     if (mTimeToSample != NULL || data_size < 8) {
         return ERROR_MALFORMED;
     }
@@ -260,7 +260,7 @@
     return OK;
 }
 
-status_t SampleTable::setSyncSampleParams(off_t data_offset, size_t data_size) {
+status_t SampleTable::setSyncSampleParams(off64_t data_offset, size_t data_size) {
     if (mSyncSampleOffset >= 0 || data_size < 8) {
         return ERROR_MALFORMED;
     }
@@ -281,7 +281,7 @@
     mNumSyncSamples = U32_AT(&header[4]);
 
     if (mNumSyncSamples < 2) {
-        LOGW("Table of sync samples is empty or has only a single entry!");
+        LOGV("Table of sync samples is empty or has only a single entry!");
     }
 
     mSyncSamples = new uint32_t[mNumSyncSamples];
@@ -419,8 +419,10 @@
 
         ++left;
     }
+    if (left > 0) {
+        --left;
+    }
 
-    --left;
     uint32_t x;
     if (mDataSource->readAt(
                 mSyncSampleOffset + 8 + left * 4, &x, 4) != 4) {
@@ -557,7 +559,7 @@
 
 status_t SampleTable::getMetaDataForSample(
         uint32_t sampleIndex,
-        off_t *offset,
+        off64_t *offset,
         size_t *size,
         uint32_t *decodingTime,
         bool *isSyncSample) {
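
Note on the SampleTable changes above: besides the off64_t widening, the bare "--left" after the sync-sample scan is now guarded so it cannot step below zero when the target precedes the first sync sample. A hedged sketch of the lookup that guard protects, with illustrative names rather than the actual SampleTable code:

#include <stdint.h>
#include <vector>

// Pick the sync sample at or before 'target'. Without the "left > 0"
// check, a target earlier than the first sync sample would decrement
// 'left' past zero and read before the table.
uint32_t syncSampleAtOrBefore(const std::vector<uint32_t> &syncSamples,
                              uint32_t target) {
    if (syncSamples.empty()) {
        return 0;
    }

    size_t left = 0;
    while (left < syncSamples.size() && syncSamples[left] <= target) {
        ++left;  // advance past every entry at or before the target
    }
    if (left > 0) {
        --left;  // step back to the last entry that matched, if any
    }

    // If nothing matched, this clamps to the first sync sample instead of
    // indexing one element before the table.
    return syncSamples[left];
}
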
diff --git a/media/libstagefright/ShoutcastSource.cpp b/media/libstagefright/ShoutcastSource.cpp
index 23b7681..783f2d0 100644
--- a/media/libstagefright/ShoutcastSource.cpp
+++ b/media/libstagefright/ShoutcastSource.cpp
@@ -14,7 +14,6 @@
  * limitations under the License.
  */
 
-#include "include/stagefright_string.h"
 #include "include/HTTPStream.h"
 
 #include <stdlib.h>
@@ -34,7 +33,7 @@
       mBytesUntilMetaData(0),
       mGroup(NULL),
       mStarted(false) {
-    string metaint;
+    AString metaint;
     if (mHttp->find_header_value("icy-metaint", &metaint)) {
         char *end;
         const char *start = metaint.c_str();
diff --git a/media/libstagefright/StagefrightMediaScanner.cpp b/media/libstagefright/StagefrightMediaScanner.cpp
index 8580cc4..5d15246 100644
--- a/media/libstagefright/StagefrightMediaScanner.cpp
+++ b/media/libstagefright/StagefrightMediaScanner.cpp
@@ -39,7 +39,7 @@
         ".mp3", ".mp4", ".m4a", ".3gp", ".3gpp", ".3g2", ".3gpp2",
         ".mpeg", ".ogg", ".mid", ".smf", ".imy", ".wma", ".aac",
         ".wav", ".amr", ".midi", ".xmf", ".rtttl", ".rtx", ".ota",
-        ".mkv", ".mka", ".webm", ".ts"
+        ".mkv", ".mka", ".webm", ".ts", ".fl"
     };
     static const size_t kNumValidExtensions =
         sizeof(kValidExtensions) / sizeof(kValidExtensions[0]);
@@ -176,11 +176,11 @@
 char *StagefrightMediaScanner::extractAlbumArt(int fd) {
     LOGV("extractAlbumArt %d", fd);
 
-    off_t size = lseek(fd, 0, SEEK_END);
+    off64_t size = lseek64(fd, 0, SEEK_END);
     if (size < 0) {
         return NULL;
     }
-    lseek(fd, 0, SEEK_SET);
+    lseek64(fd, 0, SEEK_SET);
 
     if (mRetriever->setDataSource(fd, 0, size) == OK
             && mRetriever->setMode(
diff --git a/media/libstagefright/StagefrightMetadataRetriever.cpp b/media/libstagefright/StagefrightMetadataRetriever.cpp
index ac208cd..4f483ac 100644
--- a/media/libstagefright/StagefrightMetadataRetriever.cpp
+++ b/media/libstagefright/StagefrightMetadataRetriever.cpp
@@ -108,7 +108,10 @@
 static VideoFrame *extractVideoFrameWithCodecFlags(
         OMXClient *client,
         const sp<MetaData> &trackMeta,
-        const sp<MediaSource> &source, uint32_t flags) {
+        const sp<MediaSource> &source,
+        uint32_t flags,
+        int64_t frameTimeUs,
+        int seekMode) {
     sp<MediaSource> decoder =
         OMXCodec::Create(
                 client->interface(), source->getFormat(), false, source,
@@ -130,11 +133,25 @@
     // and spurious empty buffers.
 
     MediaSource::ReadOptions options;
+    if (seekMode < MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC ||
+        seekMode > MediaSource::ReadOptions::SEEK_CLOSEST) {
+
+        LOGE("Unknown seek mode: %d", seekMode);
+        return NULL;
+    }
+
+    MediaSource::ReadOptions::SeekMode mode =
+            static_cast<MediaSource::ReadOptions::SeekMode>(seekMode);
+
     int64_t thumbNailTime;
-    if (trackMeta->findInt64(kKeyThumbnailTime, &thumbNailTime)) {
-        options.setSeekTo(thumbNailTime);
+    if (frameTimeUs < 0) {
+        if (!trackMeta->findInt64(kKeyThumbnailTime, &thumbNailTime)) {
+            thumbNailTime = 0;
+        }
+        options.setSeekTo(thumbNailTime, mode);
     } else {
         thumbNailTime = -1;
+        options.setSeekTo(frameTimeUs, mode);
     }
 
     MediaBuffer *buffer = NULL;
@@ -191,17 +208,26 @@
     CHECK(meta->findInt32(kKeyWidth, &width));
     CHECK(meta->findInt32(kKeyHeight, &height));
 
+    int32_t crop_left, crop_top, crop_right, crop_bottom;
+    if (!meta->findRect(
+                kKeyCropRect,
+                &crop_left, &crop_top, &crop_right, &crop_bottom)) {
+        crop_left = crop_top = 0;
+        crop_right = width - 1;
+        crop_bottom = height - 1;
+    }
+
     int32_t rotationAngle;
     if (!trackMeta->findInt32(kKeyRotation, &rotationAngle)) {
         rotationAngle = 0;  // By default, no rotation
     }
 
     VideoFrame *frame = new VideoFrame;
-    frame->mWidth = width;
-    frame->mHeight = height;
-    frame->mDisplayWidth = width;
-    frame->mDisplayHeight = height;
-    frame->mSize = width * height * 2;
+    frame->mWidth = crop_right - crop_left + 1;
+    frame->mHeight = crop_bottom - crop_top + 1;
+    frame->mDisplayWidth = frame->mWidth;
+    frame->mDisplayHeight = frame->mHeight;
+    frame->mSize = frame->mWidth * frame->mHeight * 2;
     frame->mData = new uint8_t[frame->mSize];
     frame->mRotationAngle = rotationAngle;
 
@@ -212,25 +238,36 @@
             (OMX_COLOR_FORMATTYPE)srcFormat, OMX_COLOR_Format16bitRGB565);
     CHECK(converter.isValid());
 
-    converter.convert(
-            width, height,
+    err = converter.convert(
             (const uint8_t *)buffer->data() + buffer->range_offset(),
-            0,
-            frame->mData, width * 2);
+            width, height,
+            crop_left, crop_top, crop_right, crop_bottom,
+            frame->mData,
+            frame->mWidth,
+            frame->mHeight,
+            0, 0, frame->mWidth - 1, frame->mHeight - 1);
 
     buffer->release();
     buffer = NULL;
 
     decoder->stop();
 
+    if (err != OK) {
+        LOGE("Colorconverter failed to convert frame.");
+
+        delete frame;
+        frame = NULL;
+    }
+
     return frame;
 }
 
-VideoFrame *StagefrightMetadataRetriever::captureFrame() {
-    LOGV("captureFrame");
+VideoFrame *StagefrightMetadataRetriever::getFrameAtTime(
+        int64_t timeUs, int option) {
 
+    LOGV("getFrameAtTime: %lld us option: %d", timeUs, option);
     if (0 == (mMode & METADATA_MODE_FRAME_CAPTURE_ONLY)) {
-        LOGV("captureFrame disabled by mode (0x%08x)", mMode);
+        LOGV("getFrameAtTime disabled by mode (0x%08x)", mMode);
 
         return NULL;
     }
@@ -270,13 +307,15 @@
 
     VideoFrame *frame =
         extractVideoFrameWithCodecFlags(
-                &mClient, trackMeta, source, OMXCodec::kPreferSoftwareCodecs);
+                &mClient, trackMeta, source, OMXCodec::kPreferSoftwareCodecs,
+                timeUs, option);
 
     if (frame == NULL) {
         LOGV("Software decoder failed to extract thumbnail, "
              "trying hardware decoder.");
 
-        frame = extractVideoFrameWithCodecFlags(&mClient, trackMeta, source, 0);
+        frame = extractVideoFrameWithCodecFlags(&mClient, trackMeta, source, 0,
+                        timeUs, option);
     }
 
     return frame;
@@ -419,5 +458,4 @@
     }
 }
 
-
 }  // namespace android
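
Note on the StagefrightMetadataRetriever changes above: the extracted frame is now sized from the decoder's crop rectangle (kKeyCropRect) rather than the coded width/height, and the rectangle is inclusive on both ends. A small illustrative sketch of that arithmetic, not part of the patch:

#include <stdint.h>

// Inclusive crop bounds, so the visible size is (right - left + 1) by
// (bottom - top + 1), matching the new frame->mWidth / mHeight above.
struct CropRect {
    int32_t left, top, right, bottom;
};

int32_t visibleWidth(const CropRect &c)  { return c.right - c.left + 1; }
int32_t visibleHeight(const CropRect &c) { return c.bottom - c.top + 1; }

// Example: a 1080p clip is often decoded into 1920x1088 buffers with a
// crop of (0, 0, 1919, 1079); the thumbnail is then 1920x1080, and its
// RGB565 buffer is 1920 * 1080 * 2 bytes.
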
diff --git a/media/libstagefright/ThrottledSource.cpp b/media/libstagefright/ThrottledSource.cpp
index 4711f7c..88e07b0 100644
--- a/media/libstagefright/ThrottledSource.cpp
+++ b/media/libstagefright/ThrottledSource.cpp
@@ -41,7 +41,7 @@
     return mSource->initCheck();
 }
 
-ssize_t ThrottledSource::readAt(off_t offset, void *data, size_t size) {
+ssize_t ThrottledSource::readAt(off64_t offset, void *data, size_t size) {
     Mutex::Autolock autoLock(mLock);
 
     ssize_t n = mSource->readAt(offset, data, size);
@@ -72,7 +72,7 @@
     return n;
 }
 
-status_t ThrottledSource::getSize(off_t *size) {
+status_t ThrottledSource::getSize(off64_t *size) {
     return mSource->getSize(size);
 }
 
diff --git a/media/libstagefright/VBRISeeker.cpp b/media/libstagefright/VBRISeeker.cpp
new file mode 100644
index 0000000..48bddc2
--- /dev/null
+++ b/media/libstagefright/VBRISeeker.cpp
@@ -0,0 +1,164 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "VBRISeeker"
+#include <utils/Log.h>
+
+#include "include/VBRISeeker.h"
+
+#include "include/MP3Extractor.h"
+
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/DataSource.h>
+#include <media/stagefright/Utils.h>
+
+namespace android {
+
+static uint32_t U24_AT(const uint8_t *ptr) {
+    return ptr[0] << 16 | ptr[1] << 8 | ptr[2];
+}
+
+// static
+sp<VBRISeeker> VBRISeeker::CreateFromSource(
+        const sp<DataSource> &source, off64_t post_id3_pos) {
+    off64_t pos = post_id3_pos;
+
+    uint8_t header[4];
+    ssize_t n = source->readAt(pos, header, sizeof(header));
+    if (n < (ssize_t)sizeof(header)) {
+        return NULL;
+    }
+
+    uint32_t tmp = U32_AT(&header[0]);
+    size_t frameSize;
+    int sampleRate;
+    if (!MP3Extractor::get_mp3_frame_size(tmp, &frameSize, &sampleRate)) {
+        return NULL;
+    }
+
+    // VBRI header follows 32 bytes after the header _ends_.
+    pos += sizeof(header) + 32;
+
+    uint8_t vbriHeader[26];
+    n = source->readAt(pos, vbriHeader, sizeof(vbriHeader));
+    if (n < (ssize_t)sizeof(vbriHeader)) {
+        return NULL;
+    }
+
+    if (memcmp(vbriHeader, "VBRI", 4)) {
+        return NULL;
+    }
+
+    size_t numFrames = U32_AT(&vbriHeader[14]);
+
+    int64_t durationUs =
+        numFrames * 1000000ll * (sampleRate >= 32000 ? 1152 : 576) / sampleRate;
+
+    LOGV("duration = %.2f secs", durationUs / 1E6);
+
+    size_t numEntries = U16_AT(&vbriHeader[18]);
+    size_t entrySize = U16_AT(&vbriHeader[22]);
+    size_t scale = U16_AT(&vbriHeader[20]);
+
+    LOGV("%d entries, scale=%d, size_per_entry=%d",
+         numEntries,
+         scale,
+         entrySize);
+
+    size_t totalEntrySize = numEntries * entrySize;
+    uint8_t *buffer = new uint8_t[totalEntrySize];
+
+    n = source->readAt(pos + sizeof(vbriHeader), buffer, totalEntrySize);
+    if (n < (ssize_t)totalEntrySize) {
+        delete[] buffer;
+        buffer = NULL;
+
+        return NULL;
+    }
+
+    sp<VBRISeeker> seeker = new VBRISeeker;
+    seeker->mBasePos = post_id3_pos;
+    seeker->mDurationUs = durationUs;
+
+    off64_t offset = post_id3_pos;
+    for (size_t i = 0; i < numEntries; ++i) {
+        uint32_t numBytes;
+        switch (entrySize) {
+            case 1: numBytes = buffer[i]; break;
+            case 2: numBytes = U16_AT(buffer + 2 * i); break;
+            case 3: numBytes = U24_AT(buffer + 3 * i); break;
+            default:
+            {
+                CHECK_EQ(entrySize, 4u);
+                numBytes = U32_AT(buffer + 4 * i); break;
+            }
+        }
+
+        numBytes *= scale;
+
+        seeker->mSegments.push(numBytes);
+
+        LOGV("entry #%d: %d offset 0x%08lx", i, numBytes, offset);
+        offset += numBytes;
+    }
+
+    delete[] buffer;
+    buffer = NULL;
+
+    LOGI("Found VBRI header.");
+
+    return seeker;
+}
+
+VBRISeeker::VBRISeeker()
+    : mDurationUs(-1) {
+}
+
+bool VBRISeeker::getDuration(int64_t *durationUs) {
+    if (mDurationUs < 0) {
+        return false;
+    }
+
+    *durationUs = mDurationUs;
+
+    return true;
+}
+
+bool VBRISeeker::getOffsetForTime(int64_t *timeUs, off64_t *pos) {
+    if (mDurationUs < 0) {
+        return false;
+    }
+
+    int64_t segmentDurationUs = mDurationUs / mSegments.size();
+
+    int64_t nowUs = 0;
+    *pos = mBasePos;
+    size_t segmentIndex = 0;
+    while (segmentIndex < mSegments.size() && nowUs < *timeUs) {
+        nowUs += segmentDurationUs;
+        *pos += mSegments.itemAt(segmentIndex++);
+    }
+
+    LOGV("getOffsetForTime %lld us => 0x%08lx", *timeUs, *pos);
+
+    *timeUs = nowUs;
+
+    return true;
+}
+
+}  // namespace android
+
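
Note on the new VBRISeeker above: it parses the Fraunhofer VBRI header's segment table and walks it to map a seek time to a byte offset. A hedged sketch of how a caller such as an MP3 extractor might use it (the helper below is hypothetical, not part of the patch):

#include "include/VBRISeeker.h"

#include <media/stagefright/DataSource.h>

namespace android {

// Probe for a VBRI header right after the ID3 data, then translate a seek
// time into a byte offset. On success *timeUs is updated to the time
// actually reachable at *pos.
static bool seekWithVBRI(
        const sp<DataSource> &source, off64_t post_id3_pos,
        int64_t *timeUs, off64_t *pos) {
    sp<VBRISeeker> seeker = VBRISeeker::CreateFromSource(source, post_id3_pos);
    if (seeker == NULL) {
        return false;  // no VBRI header found
    }
    return seeker->getOffsetForTime(timeUs, pos);
}

}  // namespace android
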
diff --git a/media/libstagefright/VideoSourceDownSampler.cpp b/media/libstagefright/VideoSourceDownSampler.cpp
new file mode 100644
index 0000000..ea7b09a
--- /dev/null
+++ b/media/libstagefright/VideoSourceDownSampler.cpp
@@ -0,0 +1,142 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "VideoSourceDownSampler"
+
+#include <media/stagefright/VideoSourceDownSampler.h>
+#include <media/stagefright/MediaBuffer.h>
+#include <media/stagefright/MediaDebug.h>
+#include <media/stagefright/MetaData.h>
+#include <media/stagefright/YUVImage.h>
+#include <media/stagefright/YUVCanvas.h>
+#include "OMX_Video.h"
+
+namespace android {
+
+VideoSourceDownSampler::VideoSourceDownSampler(const sp<MediaSource> &videoSource,
+        int32_t width, int32_t height) {
+    LOGV("Construct VideoSourceDownSampler");
+    CHECK(width > 0);
+    CHECK(height > 0);
+
+    mRealVideoSource = videoSource;
+    mWidth = width;
+    mHeight = height;
+
+    mMeta = new MetaData(*(mRealVideoSource->getFormat()));
+    CHECK(mMeta->findInt32(kKeyWidth, &mRealSourceWidth));
+    CHECK(mMeta->findInt32(kKeyHeight, &mRealSourceHeight));
+
+    if ((mWidth != mRealSourceWidth) || (mHeight != mRealSourceHeight)) {
+        // Change meta data for width and height.
+        CHECK(mWidth <= mRealSourceWidth);
+        CHECK(mHeight <= mRealSourceHeight);
+
+        mNeedDownSampling = true;
+        computeDownSamplingParameters();
+        mMeta->setInt32(kKeyWidth, mWidth);
+        mMeta->setInt32(kKeyHeight, mHeight);
+    } else {
+        mNeedDownSampling = false;
+    }
+}
+
+VideoSourceDownSampler::~VideoSourceDownSampler() {
+}
+
+void VideoSourceDownSampler::computeDownSamplingParameters() {
+    mDownSampleSkipX = mRealSourceWidth / mWidth;
+    mDownSampleSkipY = mRealSourceHeight / mHeight;
+
+    mDownSampleOffsetX = mRealSourceWidth - mDownSampleSkipX * mWidth;
+    mDownSampleOffsetY = mRealSourceHeight - mDownSampleSkipY * mHeight;
+}
+
+void VideoSourceDownSampler::downSampleYUVImage(
+        const MediaBuffer &sourceBuffer, MediaBuffer **buffer) const {
+    // find the YUV format
+    int32_t srcFormat;
+    CHECK(mMeta->findInt32(kKeyColorFormat, &srcFormat));
+    YUVImage::YUVFormat yuvFormat;
+    if (srcFormat == OMX_COLOR_FormatYUV420SemiPlanar) {
+        yuvFormat = YUVImage::YUV420SemiPlanar;
+    } else if (srcFormat == OMX_COLOR_FormatYUV420Planar) {
+        yuvFormat = YUVImage::YUV420Planar;
+    }
+
+    // allocate mediaBuffer for down sampled image and setup a canvas.
+    *buffer = new MediaBuffer(YUVImage::bufferSize(yuvFormat, mWidth, mHeight));
+    YUVImage yuvDownSampledImage(yuvFormat,
+            mWidth, mHeight,
+            (uint8_t *)(*buffer)->data());
+    YUVCanvas yuvCanvasDownSample(yuvDownSampledImage);
+
+    YUVImage yuvImageSource(yuvFormat,
+            mRealSourceWidth, mRealSourceHeight,
+            (uint8_t *)sourceBuffer.data());
+    yuvCanvasDownSample.downsample(mDownSampleOffsetX, mDownSampleOffsetY,
+            mDownSampleSkipX, mDownSampleSkipY,
+            yuvImageSource);
+}
+
+status_t VideoSourceDownSampler::start(MetaData *params) {
+    LOGV("start");
+    return mRealVideoSource->start();
+}
+
+status_t VideoSourceDownSampler::stop() {
+    LOGV("stop");
+    return mRealVideoSource->stop();
+}
+
+sp<MetaData> VideoSourceDownSampler::getFormat() {
+    LOGV("getFormat");
+    return mMeta;
+}
+
+status_t VideoSourceDownSampler::read(
+        MediaBuffer **buffer, const ReadOptions *options) {
+    LOGV("read");
+    MediaBuffer *realBuffer;
+    status_t err = mRealVideoSource->read(&realBuffer, options);
+
+    if (mNeedDownSampling) {
+        downSampleYUVImage(*realBuffer, buffer);
+
+        int64_t frameTime;
+        realBuffer->meta_data()->findInt64(kKeyTime, &frameTime);
+        (*buffer)->meta_data()->setInt64(kKeyTime, frameTime);
+
+        // We just want this buffer to be deleted when the encoder releases it.
+        // So don't add a reference to it and set the observer to NULL.
+        (*buffer)->setObserver(NULL);
+
+        // The original buffer is no longer required. Release it.
+        realBuffer->release();
+    } else {
+        *buffer = realBuffer;
+    }
+
+    return err;
+}
+
+status_t VideoSourceDownSampler::pause() {
+    LOGV("pause");
+    return mRealVideoSource->pause();
+}
+
+}  // namespace android
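
Note on the new VideoSourceDownSampler above: computeDownSamplingParameters() derives an integer skip factor and a leftover offset from the source and target sizes. The same arithmetic pulled out as a tiny standalone program, purely to make the numbers easy to check (the sizes are assumptions, not taken from the patch):

#include <stdint.h>
#include <stdio.h>

int main() {
    int32_t srcW = 640, srcH = 480;   // assumed camera output size
    int32_t dstW = 176, dstH = 144;   // assumed requested recording size

    int32_t skipX = srcW / dstW;              // 3: keep every 3rd column
    int32_t skipY = srcH / dstH;              // 3: keep every 3rd row
    int32_t offsetX = srcW - skipX * dstW;    // 112: leftover columns
    int32_t offsetY = srcH - skipY * dstH;    // 48: leftover rows

    printf("skip=(%d,%d) offset=(%d,%d)\n", skipX, skipY, offsetX, offsetY);
    return 0;
}
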
diff --git a/media/libstagefright/WAVExtractor.cpp b/media/libstagefright/WAVExtractor.cpp
index aff06bc..9332120 100644
--- a/media/libstagefright/WAVExtractor.cpp
+++ b/media/libstagefright/WAVExtractor.cpp
@@ -51,7 +51,7 @@
             const sp<MetaData> &meta,
             uint16_t waveFormat,
             int32_t bitsPerSample,
-            off_t offset, size_t size);
+            off64_t offset, size_t size);
 
     virtual status_t start(MetaData *params = NULL);
     virtual status_t stop();
@@ -72,11 +72,11 @@
     int32_t mSampleRate;
     int32_t mNumChannels;
     int32_t mBitsPerSample;
-    off_t mOffset;
+    off64_t mOffset;
     size_t mSize;
     bool mStarted;
     MediaBufferGroup *mGroup;
-    off_t mCurrentPos;
+    off64_t mCurrentPos;
 
     WAVSource(const WAVSource &);
     WAVSource &operator=(const WAVSource &);
@@ -139,7 +139,7 @@
 
     size_t totalSize = U32_LE_AT(&header[4]);
 
-    off_t offset = 12;
+    off64_t offset = 12;
     size_t remainingSize = totalSize;
     while (remainingSize >= 8) {
         uint8_t chunkHeader[8];
@@ -251,7 +251,7 @@
         const sp<MetaData> &meta,
         uint16_t waveFormat,
         int32_t bitsPerSample,
-        off_t offset, size_t size)
+        off64_t offset, size_t size)
     : mDataSource(dataSource),
       mMeta(meta),
       mWaveFormat(waveFormat),
@@ -318,7 +318,7 @@
     int64_t seekTimeUs;
     ReadOptions::SeekMode mode;
     if (options != NULL && options->getSeekTo(&seekTimeUs, &mode)) {
-        int64_t pos = (seekTimeUs * mSampleRate) / 1000000 * mNumChannels * 2;
+        int64_t pos = (seekTimeUs * mSampleRate) / 1000000 * mNumChannels * (mBitsPerSample >> 3);
         if (pos > mSize) {
             pos = mSize;
         }
@@ -335,7 +335,7 @@
         mBitsPerSample == 8 ? kMaxFrameSize / 2 : kMaxFrameSize;
 
     size_t maxBytesAvailable =
-        (mCurrentPos - mOffset >= (off_t)mSize)
+        (mCurrentPos - mOffset >= (off64_t)mSize)
             ? 0 : mSize - (mCurrentPos - mOffset);
 
     if (maxBytesToRead > maxBytesAvailable) {
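
Note on the WAVExtractor seek fix above: the byte position is now scaled by the actual sample size (mBitsPerSample >> 3) instead of a hard-coded 2, so 8-bit PCM no longer seeks twice as far as intended. A hedged helper with the same formula, shown here only for illustration (the patch computes this inline in the seek handling):

#include <stdint.h>

// frames = seconds * sampleRate; bytes per frame = channels * bytes/sample.
int64_t pcmSeekOffset(int64_t seekTimeUs, int32_t sampleRate,
                      int32_t numChannels, int32_t bitsPerSample) {
    return (seekTimeUs * sampleRate) / 1000000 * numChannels
            * (bitsPerSample >> 3);
}

// Example: seeking 10 s into 8-bit mono 8 kHz audio now yields 80,000
// bytes; the old hard-coded "* 2" would have landed at 160,000 bytes,
// i.e. 20 seconds into the data.
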
diff --git a/media/libstagefright/WVMExtractor.cpp b/media/libstagefright/WVMExtractor.cpp
new file mode 100644
index 0000000..7c72852
--- /dev/null
+++ b/media/libstagefright/WVMExtractor.cpp
@@ -0,0 +1,104 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "WVMExtractor"
+#include <utils/Log.h>
+
+#include "include/WVMExtractor.h"
+
+#include <arpa/inet.h>
+#include <utils/String8.h>
+#include <media/stagefright/Utils.h>
+#include <media/stagefright/DataSource.h>
+#include <media/stagefright/MediaSource.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MetaData.h>
+#include <media/stagefright/MediaErrors.h>
+#include <media/stagefright/MediaBuffer.h>
+#include <media/stagefright/MediaDebug.h>
+#include <dlfcn.h>
+
+#include <utils/Errors.h>
+
+/* The extractor lifetime is short - just long enough to get
+ * the media sources constructed - so the shared lib needs to remain open
+ * beyond the lifetime of the extractor.  So keep the handle as a global
+ * rather than a member of the extractor
+ */
+void *gVendorLibHandle = NULL;
+
+namespace android {
+
+static Mutex gWVMutex;
+
+WVMExtractor::WVMExtractor(const sp<DataSource> &source)
+    : mDataSource(source) {
+    {
+        Mutex::Autolock autoLock(gWVMutex);
+        if (gVendorLibHandle == NULL) {
+            gVendorLibHandle = dlopen("libwvm.so", RTLD_NOW);
+        }
+
+        if (gVendorLibHandle == NULL) {
+            LOGE("Failed to open libwvm.so");
+            return;
+        }
+    }
+
+    typedef MediaExtractor *(*GetInstanceFunc)(sp<DataSource>);
+    GetInstanceFunc getInstanceFunc =
+        (GetInstanceFunc) dlsym(gVendorLibHandle,
+                "_ZN7android11GetInstanceENS_2spINS_10DataSourceEEE");
+
+    if (getInstanceFunc) {
+        LOGD("Calling GetInstanceFunc");
+        mImpl = (*getInstanceFunc)(source);
+        CHECK(mImpl != NULL);
+    } else {
+        LOGE("Failed to locate GetInstance in libwvm.so");
+    }
+}
+
+WVMExtractor::~WVMExtractor() {
+}
+
+size_t WVMExtractor::countTracks() {
+    return (mImpl != NULL) ? mImpl->countTracks() : 0;
+}
+
+sp<MediaSource> WVMExtractor::getTrack(size_t index) {
+    if (mImpl == NULL) {
+        return NULL;
+    }
+    return mImpl->getTrack(index);
+}
+
+sp<MetaData> WVMExtractor::getTrackMetaData(size_t index, uint32_t flags) {
+    if (mImpl == NULL) {
+        return NULL;
+    }
+    return mImpl->getTrackMetaData(index, flags);
+}
+
+sp<MetaData> WVMExtractor::getMetaData() {
+    if (mImpl == NULL) {
+        return NULL;
+    }
+    return mImpl->getMetaData();
+}
+
+} //namespace android
+
diff --git a/media/libstagefright/XINGSeeker.cpp b/media/libstagefright/XINGSeeker.cpp
new file mode 100644
index 0000000..616836c
--- /dev/null
+++ b/media/libstagefright/XINGSeeker.cpp
@@ -0,0 +1,223 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "include/XINGSeeker.h"
+
+#include <media/stagefright/DataSource.h>
+#include <media/stagefright/Utils.h>
+
+namespace android {
+
+static bool parse_xing_header(
+        const sp<DataSource> &source, off64_t first_frame_pos,
+        int32_t *frame_number = NULL, int32_t *byte_number = NULL,
+        char *table_of_contents = NULL, int32_t *quality_indicator = NULL,
+        int64_t *duration = NULL);
+
+// static
+sp<XINGSeeker> XINGSeeker::CreateFromSource(
+        const sp<DataSource> &source, off64_t first_frame_pos) {
+    sp<XINGSeeker> seeker = new XINGSeeker;
+
+    seeker->mFirstFramePos = first_frame_pos;
+
+    if (!parse_xing_header(
+                source, first_frame_pos,
+                NULL, &seeker->mSizeBytes, seeker->mTableOfContents,
+                NULL, &seeker->mDurationUs)) {
+        return NULL;
+    }
+
+    LOGI("Found XING header.");
+
+    return seeker;
+}
+
+XINGSeeker::XINGSeeker()
+    : mDurationUs(-1),
+      mSizeBytes(0) {
+}
+
+bool XINGSeeker::getDuration(int64_t *durationUs) {
+    if (mDurationUs < 0) {
+        return false;
+    }
+
+    *durationUs = mDurationUs;
+
+    return true;
+}
+
+bool XINGSeeker::getOffsetForTime(int64_t *timeUs, off64_t *pos) {
+    if (mSizeBytes == 0 || mTableOfContents[0] <= 0 || mDurationUs < 0) {
+        return false;
+    }
+
+    float percent = (float)(*timeUs) * 100 / mDurationUs;
+    float fx;
+    if( percent <= 0.0f ) {
+        fx = 0.0f;
+    } else if( percent >= 100.0f ) {
+        fx = 256.0f;
+    } else {
+        int a = (int)percent;
+        float fa, fb;
+        if ( a == 0 ) {
+            fa = 0.0f;
+        } else {
+            fa = (float)mTableOfContents[a-1];
+        }
+        if ( a < 99 ) {
+            fb = (float)mTableOfContents[a];
+        } else {
+            fb = 256.0f;
+        }
+        fx = fa + (fb-fa)*(percent-a);
+    }
+
+    *pos = (int)((1.0f/256.0f)*fx*mSizeBytes) + mFirstFramePos;
+
+    return true;
+}
+
+static bool parse_xing_header(
+        const sp<DataSource> &source, off64_t first_frame_pos,
+        int32_t *frame_number, int32_t *byte_number,
+        char *table_of_contents, int32_t *quality_indicator,
+        int64_t *duration) {
+    if (frame_number) {
+        *frame_number = 0;
+    }
+    if (byte_number) {
+        *byte_number = 0;
+    }
+    if (table_of_contents) {
+        table_of_contents[0] = 0;
+    }
+    if (quality_indicator) {
+        *quality_indicator = 0;
+    }
+    if (duration) {
+        *duration = 0;
+    }
+
+    uint8_t buffer[4];
+    int offset = first_frame_pos;
+    if (source->readAt(offset, &buffer, 4) < 4) { // get header
+        return false;
+    }
+    offset += 4;
+
+    uint8_t id, layer, sr_index, mode;
+    layer = (buffer[1] >> 1) & 3;
+    id = (buffer[1] >> 3) & 3;
+    sr_index = (buffer[2] >> 2) & 3;
+    mode = (buffer[3] >> 6) & 3;
+    if (layer == 0) {
+        return false;
+    }
+    if (id == 1) {
+        return false;
+    }
+    if (sr_index == 3) {
+        return false;
+    }
+    // determine offset of XING header
+    if(id&1) { // mpeg1
+        if (mode != 3) offset += 32;
+        else offset += 17;
+    } else { // mpeg2
+        if (mode != 3) offset += 17;
+        else offset += 9;
+    }
+
+    if (source->readAt(offset, &buffer, 4) < 4) { // XING header ID
+        return false;
+    }
+    offset += 4;
+    // Check XING ID
+    if ((buffer[0] != 'X') || (buffer[1] != 'i')
+                || (buffer[2] != 'n') || (buffer[3] != 'g')) {
+        if ((buffer[0] != 'I') || (buffer[1] != 'n')
+                    || (buffer[2] != 'f') || (buffer[3] != 'o')) {
+            return false;
+        }
+    }
+
+    if (source->readAt(offset, &buffer, 4) < 4) { // flags
+        return false;
+    }
+    offset += 4;
+    uint32_t flags = U32_AT(buffer);
+
+    if (flags & 0x0001) {  // Frames field is present
+        if (source->readAt(offset, buffer, 4) < 4) {
+             return false;
+        }
+        if (frame_number) {
+           *frame_number = U32_AT(buffer);
+        }
+        int32_t frame = U32_AT(buffer);
+        // Samples per Frame: 1. index = MPEG Version ID, 2. index = Layer
+        const int samplesPerFrames[2][3] =
+        {
+            { 384, 1152, 576  }, // MPEG 2, 2.5: layer1, layer2, layer3
+            { 384, 1152, 1152 }, // MPEG 1: layer1, layer2, layer3
+        };
+        // sampling rates in hertz: 1. index = MPEG Version ID, 2. index = sampling rate index
+        const int samplingRates[4][3] =
+        {
+            { 11025, 12000, 8000,  },    // MPEG 2.5
+            { 0,     0,     0,     },    // reserved
+            { 22050, 24000, 16000, },    // MPEG 2
+            { 44100, 48000, 32000, }     // MPEG 1
+        };
+        if (duration) {
+            *duration = (int64_t)frame * samplesPerFrames[id&1][3-layer] * 1000000LL
+                / samplingRates[id][sr_index];
+        }
+        offset += 4;
+    }
+    if (flags & 0x0002) {  // Bytes field is present
+        if (byte_number) {
+            if (source->readAt(offset, buffer, 4) < 4) {
+                return false;
+            }
+            *byte_number = U32_AT(buffer);
+        }
+        offset += 4;
+    }
+    if (flags & 0x0004) {  // TOC field is present
+       if (table_of_contents) {
+            if (source->readAt(offset + 1, table_of_contents, 99) < 99) {
+                return false;
+            }
+        }
+        offset += 100;
+    }
+    if (flags & 0x0008) {  // Quality indicator field is present
+        if (quality_indicator) {
+            if (source->readAt(offset, buffer, 4) < 4) {
+                return false;
+            }
+            *quality_indicator = U32_AT(buffer);
+        }
+    }
+    return true;
+}
+
+}  // namespace android
+
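
The duration computed from the Frames field above reduces to frames * samples-per-frame / sampling-rate, carried out in 64-bit arithmetic. A minimal standalone sketch of that math; the values are hypothetical, chosen for a 44.1 kHz MPEG-1 Layer III stream:

    #include <cstdint>
    #include <cstdio>

    int main() {
        // Hypothetical Xing "Frames" value for an MPEG-1 Layer III stream.
        const int32_t frames = 11295;
        const int32_t samplesPerFrame = 1152;  // MPEG-1, Layer III
        const int32_t samplingRate = 44100;    // Hz

        // Same arithmetic as the parser: promote to 64 bits before scaling.
        int64_t durationUs =
            (int64_t)frames * samplesPerFrame * 1000000LL / samplingRate;

        printf("duration: %lld us (~%.1f s)\n",
               (long long)durationUs, durationUs / 1e6);
        return 0;
    }
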
diff --git a/media/libstagefright/avc_utils.cpp b/media/libstagefright/avc_utils.cpp
index 478e40c..fa12cf0 100644
--- a/media/libstagefright/avc_utils.cpp
+++ b/media/libstagefright/avc_utils.cpp
@@ -14,6 +14,10 @@
  * limitations under the License.
  */
 
+//#define LOG_NDEBUG 0
+#define LOG_TAG "avc_utils"
+#include <utils/Log.h>
+
 #include "include/avc_utils.h"
 
 #include <media/stagefright/foundation/ABitReader.h>
@@ -218,6 +222,28 @@
     return NULL;
 }
 
+const char *AVCProfileToString(uint8_t profile) {
+    switch (profile) {
+        case kAVCProfileBaseline:
+            return "Baseline";
+        case kAVCProfileMain:
+            return "Main";
+        case kAVCProfileExtended:
+            return "Extended";
+        case kAVCProfileHigh:
+            return "High";
+        case kAVCProfileHigh10:
+            return "High 10";
+        case kAVCProfileHigh422:
+            return "High 422";
+        case kAVCProfileHigh444:
+            return "High 444";
+        case kAVCProfileCAVLC444Intra:
+            return "CAVLC 444 Intra";
+        default:   return "Unknown";
+    }
+}
+
 sp<MetaData> MakeAVCCodecSpecificData(const sp<ABuffer> &accessUnit) {
     const uint8_t *data = accessUnit->data();
     size_t size = accessUnit->size();
@@ -244,6 +270,10 @@
 
     *out++ = 0x01;  // configurationVersion
     memcpy(out, seqParamSet->data() + 1, 3);  // profile/level...
+
+    uint8_t profile = out[0];
+    uint8_t level = out[2];
+
     out += 3;
     *out++ = (0x3f << 2) | 1;  // lengthSize == 2 bytes
     *out++ = 0xe0 | 1;
@@ -271,7 +301,8 @@
     meta->setInt32(kKeyWidth, width);
     meta->setInt32(kKeyHeight, height);
 
-    LOGI("found AVC codec config (%d x %d)", width, height);
+    LOGI("found AVC codec config (%d x %d, %s-profile level %d.%d)",
+         width, height, AVCProfileToString(profile), level / 10, level % 10);
 
     return meta;
 }
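
The three bytes copied after the SPS NAL header are profile_idc, the constraint-set flags, and level_idc, which is what the new log line decodes. A tiny standalone sketch of that decoding, using hypothetical byte values (Baseline profile, level 3.0):

    #include <cstdint>
    #include <cstdio>

    int main() {
        // Hypothetical first three SPS payload bytes:
        // profile_idc = 66 (Baseline), constraint flags, level_idc = 30.
        const uint8_t sps[3] = { 66, 0xc0, 30 };

        uint8_t profile = sps[0];
        uint8_t level = sps[2];

        // level_idc encodes the level times ten, e.g. 30 -> level 3.0.
        printf("profile_idc=%d, level %d.%d\n", profile, level / 10, level % 10);
        return 0;
    }
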
diff --git a/media/libstagefright/codecs/aacdec/AACDecoder.cpp b/media/libstagefright/codecs/aacdec/AACDecoder.cpp
index f58c16d..208431c 100644
--- a/media/libstagefright/codecs/aacdec/AACDecoder.cpp
+++ b/media/libstagefright/codecs/aacdec/AACDecoder.cpp
@@ -21,8 +21,8 @@
 
 #include "pvmp4audiodecoder_api.h"
 
+#include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/MediaBufferGroup.h>
-#include <media/stagefright/MediaDebug.h>
 #include <media/stagefright/MediaDefs.h>
 #include <media/stagefright/MetaData.h>
 
@@ -84,7 +84,7 @@
     sp<MetaData> meta = mSource->getFormat();
     if (meta->findData(kKeyESDS, &type, &data, &size)) {
         ESDS esds((const char *)data, size);
-        CHECK_EQ(esds.InitCheck(), OK);
+        CHECK_EQ(esds.InitCheck(), (status_t)OK);
 
         const void *codec_specific_data;
         size_t codec_specific_data_size;
@@ -197,7 +197,7 @@
     }
 
     MediaBuffer *buffer;
-    CHECK_EQ(mBufferGroup->acquire_buffer(&buffer), OK);
+    CHECK_EQ(mBufferGroup->acquire_buffer(&buffer), (status_t)OK);
 
     mConfig->pInputBuffer =
         (UChar *)mInputBuffer->data() + mInputBuffer->range_offset();
@@ -308,7 +308,7 @@
             mAnchorTimeUs
                 + (mNumSamplesOutput * 1000000) / mConfig->samplingRate);
 
-    mNumSamplesOutput += mConfig->frameLength;
+    mNumSamplesOutput += mConfig->frameLength * mUpsamplingFactor;
 
     *out = buffer;
 
diff --git a/media/libstagefright/codecs/aacdec/Android.mk b/media/libstagefright/codecs/aacdec/Android.mk
index d5d8f3e..69e331f 100644
--- a/media/libstagefright/codecs/aacdec/Android.mk
+++ b/media/libstagefright/codecs/aacdec/Android.mk
@@ -149,6 +149,8 @@
 
 LOCAL_C_INCLUDES := frameworks/base/media/libstagefright/include
 
+LOCAL_ARM_MODE := arm
+
 LOCAL_MODULE := libstagefright_aacdec
 
 include $(BUILD_STATIC_LIBRARY)
diff --git a/media/libstagefright/codecs/aacenc/AACEncoder.cpp b/media/libstagefright/codecs/aacenc/AACEncoder.cpp
index df9f107..9524884 100644
--- a/media/libstagefright/codecs/aacenc/AACEncoder.cpp
+++ b/media/libstagefright/codecs/aacenc/AACEncoder.cpp
@@ -36,6 +36,7 @@
       mStarted(false),
       mBufferGroup(NULL),
       mInputBuffer(NULL),
+      mInputFrame(NULL),
       mEncoderHandle(NULL),
       mApiHandle(NULL),
       mMemOperator(NULL) {
@@ -45,6 +46,7 @@
     CHECK(mApiHandle == NULL && mEncoderHandle == NULL);
     CHECK(mMeta->findInt32(kKeySampleRate, &mSampleRate));
     CHECK(mMeta->findInt32(kKeyChannelCount, &mChannels));
+    CHECK(mChannels <= 2 && mChannels >= 1);
     CHECK(mMeta->findInt32(kKeyBitRate, &mBitRate));
 
     mApiHandle = new VO_AUDIO_CODECAPI;
@@ -145,6 +147,10 @@
     mNumInputSamples = 0;
     mAnchorTimeUs = 0;
     mFrameCount = 0;
+
+    mInputFrame = new int16_t[mChannels * kNumSamplesPerFrame];
+    CHECK(mInputFrame != NULL);
+
     mSource->start(params);
 
     mStarted = true;
@@ -176,6 +182,10 @@
     mApiHandle = NULL;
 
     mStarted = false;
+    if (mInputFrame) {
+        delete[] mInputFrame;
+        mInputFrame = NULL;
+    }
 
     return OK;
 }
@@ -222,7 +232,8 @@
         buffer->meta_data()->setInt32(kKeyIsCodecConfig, false);
     }
 
-    while (mNumInputSamples < kNumSamplesPerFrame) {
+    const int32_t nSamples = mChannels * kNumSamplesPerFrame;
+    while (mNumInputSamples < nSamples) {
         if (mInputBuffer == NULL) {
             if (mSource->read(&mInputBuffer, options) != OK) {
                 if (mNumInputSamples == 0) {
@@ -231,7 +242,7 @@
                 }
                 memset(&mInputFrame[mNumInputSamples],
                        0,
-                       sizeof(int16_t) * (kNumSamplesPerFrame - mNumInputSamples));
+                       sizeof(int16_t) * (nSamples - mNumInputSamples));
                 mNumInputSamples = 0;
                 break;
             }
@@ -250,8 +261,7 @@
         } else {
             readFromSource = false;
         }
-        size_t copy =
-            (kNumSamplesPerFrame - mNumInputSamples) * sizeof(int16_t);
+        size_t copy = (nSamples - mNumInputSamples) * sizeof(int16_t);
 
         if (copy > mInputBuffer->range_length()) {
             copy = mInputBuffer->range_length();
@@ -271,8 +281,8 @@
             mInputBuffer = NULL;
         }
         mNumInputSamples += copy / sizeof(int16_t);
-        if (mNumInputSamples >= kNumSamplesPerFrame) {
-            mNumInputSamples %= kNumSamplesPerFrame;
+        if (mNumInputSamples >= nSamples) {
+            mNumInputSamples %= nSamples;
             break;
         }
     }
@@ -280,7 +290,7 @@
     VO_CODECBUFFER inputData;
     memset(&inputData, 0, sizeof(inputData));
     inputData.Buffer = (unsigned char*) mInputFrame;
-    inputData.Length = kNumSamplesPerFrame * sizeof(int16_t);
+    inputData.Length = nSamples * sizeof(int16_t);
     CHECK(VO_ERR_NONE == mApiHandle->SetInputData(mEncoderHandle,&inputData));
 
     VO_CODECBUFFER outputData;
@@ -289,15 +299,21 @@
     memset(&outputInfo, 0, sizeof(outputInfo));
 
     VO_U32 ret = VO_ERR_NONE;
-    outputData.Buffer = outPtr;
-    outputData.Length = buffer->size();
-    ret = mApiHandle->GetOutputData(mEncoderHandle, &outputData, &outputInfo);
-    CHECK(ret == VO_ERR_NONE || ret == VO_ERR_INPUT_BUFFER_SMALL);
-    CHECK(outputData.Length != 0);
-    buffer->set_range(0, outputData.Length);
+    size_t nOutputBytes = 0;
+    do {
+        outputData.Buffer = outPtr;
+        outputData.Length = buffer->size() - nOutputBytes;
+        ret = mApiHandle->GetOutputData(mEncoderHandle, &outputData, &outputInfo);
+        if (ret == VO_ERR_NONE) {
+            outPtr += outputData.Length;
+            nOutputBytes += outputData.Length;
+        }
+    } while (ret != VO_ERR_INPUT_BUFFER_SMALL);
+    buffer->set_range(0, nOutputBytes);
 
     int64_t mediaTimeUs =
         ((mFrameCount - 1) * 1000000LL * kNumSamplesPerFrame) / mSampleRate;
+
     buffer->meta_data()->setInt64(kKeyTime, mAnchorTimeUs + mediaTimeUs);
     if (readFromSource && wallClockTimeUs != -1) {
         buffer->meta_data()->setInt64(kKeyDriftTime, mediaTimeUs - wallClockTimeUs);
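
The change above replaces a single GetOutputData call with a loop that keeps collecting encoded output until the codec reports it has consumed all of its input. A generic sketch of that drain pattern, written against a mock encoder rather than the real VisualOn API (all names here are illustrative only):

    #include <algorithm>
    #include <cstdint>
    #include <vector>

    // Mock stand-in for the encoder: it holds some pending output bytes and
    // hands back as many as the caller's buffer can take per call.
    struct MockEncoder {
        std::vector<uint8_t> pending;

        // Returns the number of bytes written; 0 means "need more input".
        size_t getOutput(uint8_t *dst, size_t capacity) {
            size_t n = std::min(pending.size(), capacity);
            std::copy(pending.begin(), pending.begin() + n, dst);
            pending.erase(pending.begin(), pending.begin() + n);
            return n;
        }
    };

    // Drain everything the encoder currently has, advancing the output
    // pointer each round, the same shape as the loop added above.
    size_t drain(MockEncoder &enc, uint8_t *out, size_t capacity) {
        size_t total = 0;
        for (;;) {
            size_t n = enc.getOutput(out + total, capacity - total);
            if (n == 0) {
                break;  // analogous to VO_ERR_INPUT_BUFFER_SMALL
            }
            total += n;
        }
        return total;  // the caller would then do buffer->set_range(0, total)
    }
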
diff --git a/media/libstagefright/codecs/avc/dec/AVCDecoder.cpp b/media/libstagefright/codecs/avc/dec/AVCDecoder.cpp
index 868c514..5bbba35 100644
--- a/media/libstagefright/codecs/avc/dec/AVCDecoder.cpp
+++ b/media/libstagefright/codecs/avc/dec/AVCDecoder.cpp
@@ -73,6 +73,7 @@
     CHECK(mSource->getFormat()->findInt32(kKeyHeight, &height));
     mFormat->setInt32(kKeyWidth, width);
     mFormat->setInt32(kKeyHeight, height);
+    mFormat->setRect(kKeyCropRect, 0, 0, width - 1, height - 1);
     mFormat->setInt32(kKeyColorFormat, OMX_COLOR_FormatYUV420Planar);
     mFormat->setCString(kKeyDecoderComponent, "AVCDecoder");
 
@@ -418,16 +419,32 @@
                     crop_top = crop_left = 0;
                 }
 
-                int32_t aligned_width = (crop_right - crop_left + 1 + 15) & ~15;
-                int32_t aligned_height = (crop_bottom - crop_top + 1 + 15) & ~15;
+                int32_t prevCropLeft, prevCropTop;
+                int32_t prevCropRight, prevCropBottom;
+                if (!mFormat->findRect(
+                            kKeyCropRect,
+                            &prevCropLeft, &prevCropTop,
+                            &prevCropRight, &prevCropBottom)) {
+                    prevCropLeft = prevCropTop = 0;
+                    prevCropRight = width - 1;
+                    prevCropBottom = height - 1;
+                }
 
                 int32_t oldWidth, oldHeight;
                 CHECK(mFormat->findInt32(kKeyWidth, &oldWidth));
                 CHECK(mFormat->findInt32(kKeyHeight, &oldHeight));
 
-                if (oldWidth != aligned_width || oldHeight != aligned_height) {
-                    mFormat->setInt32(kKeyWidth, aligned_width);
-                    mFormat->setInt32(kKeyHeight, aligned_height);
+                if (oldWidth != width || oldHeight != height
+                        || prevCropLeft != crop_left
+                        || prevCropTop != crop_top
+                        || prevCropRight != crop_right
+                        || prevCropBottom != crop_bottom) {
+                    mFormat->setRect(
+                            kKeyCropRect,
+                            crop_left, crop_top, crop_right, crop_bottom);
+
+                    mFormat->setInt32(kKeyWidth, width);
+                    mFormat->setInt32(kKeyHeight, height);
 
                     err = INFO_FORMAT_CHANGED;
                 } else {
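
With this change the decoder publishes the full buffer dimensions plus an inclusive crop rectangle instead of 16-aligned display dimensions; downstream code derives the visible size from the crop rect. A small sketch of that arithmetic with hypothetical values:

    #include <cstdio>

    int main() {
        // Hypothetical decoded-buffer geometry and crop rectangle.
        int width = 1280, height = 720;          // kKeyWidth / kKeyHeight
        int cropLeft = 0, cropTop = 0;           // kKeyCropRect (inclusive)
        int cropRight = 1279, cropBottom = 715;

        int displayWidth = cropRight - cropLeft + 1;
        int displayHeight = cropBottom - cropTop + 1;

        printf("buffer %dx%d, visible %dx%d\n",
               width, height, displayWidth, displayHeight);
        return 0;
    }
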
diff --git a/media/libstagefright/codecs/avc/enc/AVCEncoder.cpp b/media/libstagefright/codecs/avc/enc/AVCEncoder.cpp
index 52a391f..e3292e6 100644
--- a/media/libstagefright/codecs/avc/enc/AVCEncoder.cpp
+++ b/media/libstagefright/codecs/avc/enc/AVCEncoder.cpp
@@ -33,6 +33,80 @@
 
 namespace android {
 
+static status_t ConvertOmxAvcProfileToAvcSpecProfile(
+        int32_t omxProfile, AVCProfile* pvProfile) {
+    LOGV("ConvertOmxAvcProfileToAvcSpecProfile: %d", omxProfile);
+    switch (omxProfile) {
+        case OMX_VIDEO_AVCProfileBaseline:
+            *pvProfile = AVC_BASELINE;
+            return OK;
+        default:
+            LOGE("Unsupported omx profile: %d", omxProfile);
+    }
+    return BAD_VALUE;
+}
+
+static status_t ConvertOmxAvcLevelToAvcSpecLevel(
+        int32_t omxLevel, AVCLevel *pvLevel) {
+    LOGV("ConvertOmxAvcLevelToAvcSpecLevel: %d", omxLevel);
+    AVCLevel level = AVC_LEVEL5_1;
+    switch (omxLevel) {
+        case OMX_VIDEO_AVCLevel1:
+            level = AVC_LEVEL1_B;
+            break;
+        case OMX_VIDEO_AVCLevel1b:
+            level = AVC_LEVEL1;
+            break;
+        case OMX_VIDEO_AVCLevel11:
+            level = AVC_LEVEL1_1;
+            break;
+        case OMX_VIDEO_AVCLevel12:
+            level = AVC_LEVEL1_2;
+            break;
+        case OMX_VIDEO_AVCLevel13:
+            level = AVC_LEVEL1_3;
+            break;
+        case OMX_VIDEO_AVCLevel2:
+            level = AVC_LEVEL2;
+            break;
+        case OMX_VIDEO_AVCLevel21:
+            level = AVC_LEVEL2_1;
+            break;
+        case OMX_VIDEO_AVCLevel22:
+            level = AVC_LEVEL2_2;
+            break;
+        case OMX_VIDEO_AVCLevel3:
+            level = AVC_LEVEL3;
+            break;
+        case OMX_VIDEO_AVCLevel31:
+            level = AVC_LEVEL3_1;
+            break;
+        case OMX_VIDEO_AVCLevel32:
+            level = AVC_LEVEL3_2;
+            break;
+        case OMX_VIDEO_AVCLevel4:
+            level = AVC_LEVEL4;
+            break;
+        case OMX_VIDEO_AVCLevel41:
+            level = AVC_LEVEL4_1;
+            break;
+        case OMX_VIDEO_AVCLevel42:
+            level = AVC_LEVEL4_2;
+            break;
+        case OMX_VIDEO_AVCLevel5:
+            level = AVC_LEVEL5;
+            break;
+        case OMX_VIDEO_AVCLevel51:
+            level = AVC_LEVEL5_1;
+            break;
+        default:
+            LOGE("Unknown omx level: %d", omxLevel);
+            return BAD_VALUE;
+    }
+    *pvLevel = level;
+    return OK;
+}
+
 inline static void ConvertYUV420SemiPlanarToYUV420Planar(
         uint8_t *inyuv, uint8_t* outyuv,
         int32_t width, int32_t height) {
@@ -104,7 +178,7 @@
       mInputFrameData(NULL),
       mGroup(NULL) {
 
-    LOGV("Construct software AVCEncoder");
+    LOGI("Construct software AVCEncoder");
 
     mHandle = new tagAVCHandle;
     memset(mHandle, 0, sizeof(tagAVCHandle));
@@ -133,7 +207,7 @@
     LOGV("initCheck");
     CHECK(meta->findInt32(kKeyWidth, &mVideoWidth));
     CHECK(meta->findInt32(kKeyHeight, &mVideoHeight));
-    CHECK(meta->findInt32(kKeySampleRate, &mVideoFrameRate));
+    CHECK(meta->findInt32(kKeyFrameRate, &mVideoFrameRate));
     CHECK(meta->findInt32(kKeyBitRate, &mVideoBitRate));
 
     // XXX: Add more color format support
@@ -231,10 +305,16 @@
     mEncParams->level = AVC_LEVEL3_2;
     int32_t profile, level;
     if (meta->findInt32(kKeyVideoProfile, &profile)) {
-        mEncParams->profile = (AVCProfile) profile;
+        if (OK != ConvertOmxAvcProfileToAvcSpecProfile(
+                        profile, &mEncParams->profile)) {
+            return BAD_VALUE;
+        }
     }
     if (meta->findInt32(kKeyVideoLevel, &level)) {
-        mEncParams->level = (AVCLevel) level;
+        if (OK != ConvertOmxAvcLevelToAvcSpecLevel(
+                        level, &mEncParams->level)) {
+            return BAD_VALUE;
+        }
     }
 
 
@@ -242,7 +322,7 @@
     mFormat->setInt32(kKeyWidth, mVideoWidth);
     mFormat->setInt32(kKeyHeight, mVideoHeight);
     mFormat->setInt32(kKeyBitRate, mVideoBitRate);
-    mFormat->setInt32(kKeySampleRate, mVideoFrameRate);
+    mFormat->setInt32(kKeyFrameRate, mVideoFrameRate);
     mFormat->setInt32(kKeyColorFormat, mVideoColorFormat);
     mFormat->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_AVC);
     mFormat->setCString(kKeyDecoderComponent, "AVCEncoder");
diff --git a/media/libstagefright/codecs/m4v_h263/dec/M4vH263Decoder.cpp b/media/libstagefright/codecs/m4v_h263/dec/M4vH263Decoder.cpp
index 0f08f6e..38778fb 100644
--- a/media/libstagefright/codecs/m4v_h263/dec/M4vH263Decoder.cpp
+++ b/media/libstagefright/codecs/m4v_h263/dec/M4vH263Decoder.cpp
@@ -132,7 +132,10 @@
     }
 
     MP4DecodingMode actualMode = PVGetDecBitstreamMode(mHandle);
-    CHECK_EQ(mode, actualMode);
+    if (mode != actualMode) {
+        PVCleanUpVideoDecoder(mHandle);
+        return UNKNOWN_ERROR;
+    }
 
     PVSetPostProcType((VideoDecControls *) mHandle, 0);
 
diff --git a/media/libstagefright/codecs/m4v_h263/enc/M4vH263Encoder.cpp b/media/libstagefright/codecs/m4v_h263/enc/M4vH263Encoder.cpp
index a011137..15ed219 100644
--- a/media/libstagefright/codecs/m4v_h263/enc/M4vH263Encoder.cpp
+++ b/media/libstagefright/codecs/m4v_h263/enc/M4vH263Encoder.cpp
@@ -32,6 +32,109 @@
 
 namespace android {
 
+static status_t ConvertOmxProfileLevel(
+        MP4EncodingMode mode,
+        int32_t omxProfile,
+        int32_t omxLevel,
+        ProfileLevelType* pvProfileLevel) {
+    LOGV("ConvertOmxProfileLevel: %d/%d/%d", mode, omxProfile, omxLevel);
+    ProfileLevelType profileLevel;
+    if (mode == H263_MODE) {
+        switch (omxProfile) {
+            case OMX_VIDEO_H263ProfileBaseline:
+                if (omxLevel > OMX_VIDEO_H263Level45) {
+                    LOGE("Unsupported level (%d) for H263", omxLevel);
+                    return BAD_VALUE;
+                } else {
+                    LOGW("PV does not support level configuration for H263");
+                    profileLevel = CORE_PROFILE_LEVEL2;
+                    break;
+                }
+                break;
+            default:
+                LOGE("Unsupported profile (%d) for H263", omxProfile);
+                return BAD_VALUE;
+        }
+    } else {  // MPEG4
+        switch (omxProfile) {
+            case OMX_VIDEO_MPEG4ProfileSimple:
+                switch (omxLevel) {
+                    case OMX_VIDEO_MPEG4Level0b:
+                        profileLevel = SIMPLE_PROFILE_LEVEL0;
+                        break;
+                    case OMX_VIDEO_MPEG4Level1:
+                        profileLevel = SIMPLE_PROFILE_LEVEL1;
+                        break;
+                    case OMX_VIDEO_MPEG4Level2:
+                        profileLevel = SIMPLE_PROFILE_LEVEL2;
+                        break;
+                    case OMX_VIDEO_MPEG4Level3:
+                        profileLevel = SIMPLE_PROFILE_LEVEL3;
+                        break;
+                    default:
+                        LOGE("Unsupported level (%d) for MPEG4 simple profile",
+                            omxLevel);
+                        return BAD_VALUE;
+                }
+                break;
+            case OMX_VIDEO_MPEG4ProfileSimpleScalable:
+                switch (omxLevel) {
+                    case OMX_VIDEO_MPEG4Level0b:
+                        profileLevel = SIMPLE_SCALABLE_PROFILE_LEVEL0;
+                        break;
+                    case OMX_VIDEO_MPEG4Level1:
+                        profileLevel = SIMPLE_SCALABLE_PROFILE_LEVEL1;
+                        break;
+                    case OMX_VIDEO_MPEG4Level2:
+                        profileLevel = SIMPLE_SCALABLE_PROFILE_LEVEL2;
+                        break;
+                    default:
+                        LOGE("Unsupported level (%d) for MPEG4 simple "
+                             "scalable profile", omxLevel);
+                        return BAD_VALUE;
+                }
+                break;
+            case OMX_VIDEO_MPEG4ProfileCore:
+                switch (omxLevel) {
+                    case OMX_VIDEO_MPEG4Level1:
+                        profileLevel = CORE_PROFILE_LEVEL1;
+                        break;
+                    case OMX_VIDEO_MPEG4Level2:
+                        profileLevel = CORE_PROFILE_LEVEL2;
+                        break;
+                    default:
+                        LOGE("Unsupported level (%d) for MPEG4 core "
+                             "profile", omxLevel);
+                        return BAD_VALUE;
+                }
+                break;
+            case OMX_VIDEO_MPEG4ProfileCoreScalable:
+                switch (omxLevel) {
+                    case OMX_VIDEO_MPEG4Level1:
+                        profileLevel = CORE_SCALABLE_PROFILE_LEVEL1;
+                        break;
+                    case OMX_VIDEO_MPEG4Level2:
+                        profileLevel = CORE_SCALABLE_PROFILE_LEVEL2;
+                        break;
+                    case OMX_VIDEO_MPEG4Level3:
+                        profileLevel = CORE_SCALABLE_PROFILE_LEVEL3;
+                        break;
+                    default:
+                        LOGE("Unsupported level (%d) for MPEG4 core "
+                             "scalable profile", omxLevel);
+                        return BAD_VALUE;
+                }
+                break;
+            default:
+                LOGE("Unsupported MPEG4 profile (%d)", omxProfile);
+                return BAD_VALUE;
+        }
+    }
+
+    *pvProfileLevel = profileLevel;
+    return OK;
+}
+
 inline static void ConvertYUV420SemiPlanarToYUV420Planar(
         uint8_t *inyuv, uint8_t* outyuv,
         int32_t width, int32_t height) {
@@ -75,7 +178,7 @@
       mInputFrameData(NULL),
       mGroup(NULL) {
 
-    LOGV("Construct software M4vH263Encoder");
+    LOGI("Construct software M4vH263Encoder");
 
     mHandle = new tagvideoEncControls;
     memset(mHandle, 0, sizeof(tagvideoEncControls));
@@ -97,7 +200,7 @@
     LOGV("initCheck");
     CHECK(meta->findInt32(kKeyWidth, &mVideoWidth));
     CHECK(meta->findInt32(kKeyHeight, &mVideoHeight));
-    CHECK(meta->findInt32(kKeySampleRate, &mVideoFrameRate));
+    CHECK(meta->findInt32(kKeyFrameRate, &mVideoFrameRate));
     CHECK(meta->findInt32(kKeyBitRate, &mVideoBitRate));
 
     // XXX: Add more color format support
@@ -150,9 +253,14 @@
     // If profile and level setting is not correct, failure
     // is reported when the encoder is initialized.
     mEncParams->profile_level = CORE_PROFILE_LEVEL2;
-    int32_t profileLevel;
-    if (meta->findInt32(kKeyVideoLevel, &profileLevel)) {
-        mEncParams->profile_level = (ProfileLevelType)profileLevel;
+    int32_t profile, level;
+    if (meta->findInt32(kKeyVideoProfile, &profile) &&
+        meta->findInt32(kKeyVideoLevel, &level)) {
+        if (OK != ConvertOmxProfileLevel(
+                        mEncParams->encMode, profile, level,
+                        &mEncParams->profile_level)) {
+            return BAD_VALUE;
+        }
     }
 
     mEncParams->packetSize = 32;
@@ -191,7 +299,7 @@
     mFormat->setInt32(kKeyWidth, mVideoWidth);
     mFormat->setInt32(kKeyHeight, mVideoHeight);
     mFormat->setInt32(kKeyBitRate, mVideoBitRate);
-    mFormat->setInt32(kKeySampleRate, mVideoFrameRate);
+    mFormat->setInt32(kKeyFrameRate, mVideoFrameRate);
     mFormat->setInt32(kKeyColorFormat, mVideoColorFormat);
 
     mFormat->setCString(kKeyMIMEType, mime);
diff --git a/media/libstagefright/codecs/mp3dec/Android.mk b/media/libstagefright/codecs/mp3dec/Android.mk
index fb56a93..753500e 100644
--- a/media/libstagefright/codecs/mp3dec/Android.mk
+++ b/media/libstagefright/codecs/mp3dec/Android.mk
@@ -53,5 +53,7 @@
 
 LOCAL_MODULE := libstagefright_mp3dec
 
+LOCAL_ARM_MODE := arm
+
 include $(BUILD_STATIC_LIBRARY)
 
diff --git a/media/libstagefright/colorconversion/Android.mk b/media/libstagefright/colorconversion/Android.mk
index 0dcbd73..62ba40f 100644
--- a/media/libstagefright/colorconversion/Android.mk
+++ b/media/libstagefright/colorconversion/Android.mk
@@ -6,17 +6,9 @@
         SoftwareRenderer.cpp
 
 LOCAL_C_INCLUDES := \
-        $(TOP)/frameworks/base/include/media/stagefright/openmax
-
-LOCAL_SHARED_LIBRARIES :=       \
-        libbinder               \
-        libmedia                \
-        libutils                \
-        libui                   \
-        libcutils				\
-        libsurfaceflinger_client\
-        libcamera_client
+        $(TOP)/frameworks/base/include/media/stagefright/openmax \
+        $(TOP)/hardware/msm7k
 
 LOCAL_MODULE:= libstagefright_color_conversion
 
-include $(BUILD_SHARED_LIBRARY)
+include $(BUILD_STATIC_LIBRARY)
diff --git a/media/libstagefright/colorconversion/ColorConverter.cpp b/media/libstagefright/colorconversion/ColorConverter.cpp
index 5b16997..d518c97 100644
--- a/media/libstagefright/colorconversion/ColorConverter.cpp
+++ b/media/libstagefright/colorconversion/ColorConverter.cpp
@@ -16,6 +16,7 @@
 
 #include <media/stagefright/ColorConverter.h>
 #include <media/stagefright/MediaDebug.h>
+#include <media/stagefright/MediaErrors.h>
 
 namespace android {
 
@@ -50,31 +51,68 @@
     }
 }
 
-void ColorConverter::convert(
+ColorConverter::BitmapParams::BitmapParams(
+        void *bits,
         size_t width, size_t height,
-        const void *srcBits, size_t srcSkip,
-        void *dstBits, size_t dstSkip) {
-    CHECK_EQ(mDstFormat, OMX_COLOR_Format16bitRGB565);
+        size_t cropLeft, size_t cropTop,
+        size_t cropRight, size_t cropBottom)
+    : mBits(bits),
+      mWidth(width),
+      mHeight(height),
+      mCropLeft(cropLeft),
+      mCropTop(cropTop),
+      mCropRight(cropRight),
+      mCropBottom(cropBottom) {
+}
+
+size_t ColorConverter::BitmapParams::cropWidth() const {
+    return mCropRight - mCropLeft + 1;
+}
+
+size_t ColorConverter::BitmapParams::cropHeight() const {
+    return mCropBottom - mCropTop + 1;
+}
+
+status_t ColorConverter::convert(
+        const void *srcBits,
+        size_t srcWidth, size_t srcHeight,
+        size_t srcCropLeft, size_t srcCropTop,
+        size_t srcCropRight, size_t srcCropBottom,
+        void *dstBits,
+        size_t dstWidth, size_t dstHeight,
+        size_t dstCropLeft, size_t dstCropTop,
+        size_t dstCropRight, size_t dstCropBottom) {
+    if (mDstFormat != OMX_COLOR_Format16bitRGB565) {
+        return ERROR_UNSUPPORTED;
+    }
+
+    BitmapParams src(
+            const_cast<void *>(srcBits),
+            srcWidth, srcHeight,
+            srcCropLeft, srcCropTop, srcCropRight, srcCropBottom);
+
+    BitmapParams dst(
+            dstBits,
+            dstWidth, dstHeight,
+            dstCropLeft, dstCropTop, dstCropRight, dstCropBottom);
+
+    status_t err;
 
     switch (mSrcFormat) {
         case OMX_COLOR_FormatYUV420Planar:
-            convertYUV420Planar(
-                    width, height, srcBits, srcSkip, dstBits, dstSkip);
+            err = convertYUV420Planar(src, dst);
             break;
 
         case OMX_COLOR_FormatCbYCrY:
-            convertCbYCrY(
-                    width, height, srcBits, srcSkip, dstBits, dstSkip);
+            err = convertCbYCrY(src, dst);
             break;
 
         case OMX_QCOM_COLOR_FormatYVU420SemiPlanar:
-            convertQCOMYUV420SemiPlanar(
-                    width, height, srcBits, srcSkip, dstBits, dstSkip);
+            err = convertQCOMYUV420SemiPlanar(src, dst);
             break;
 
         case OMX_COLOR_FormatYUV420SemiPlanar:
-            convertYUV420SemiPlanar(
-                    width, height, srcBits, srcSkip, dstBits, dstSkip);
+            err = convertYUV420SemiPlanar(src, dst);
             break;
 
         default:
@@ -83,28 +121,34 @@
             break;
         }
     }
+
+    return err;
 }
 
-void ColorConverter::convertCbYCrY(
-        size_t width, size_t height,
-        const void *srcBits, size_t srcSkip,
-        void *dstBits, size_t dstSkip) {
-    CHECK_EQ(srcSkip, 0);  // Doesn't really make sense for YUV formats.
-    CHECK(dstSkip >= width * 2);
-    CHECK((dstSkip & 3) == 0);
+status_t ColorConverter::convertCbYCrY(
+        const BitmapParams &src, const BitmapParams &dst) {
+    // XXX Untested
 
     uint8_t *kAdjustedClip = initClip();
 
-    uint32_t *dst_ptr = (uint32_t *)dstBits;
+    if (!((src.mCropLeft & 1) == 0
+        && src.cropWidth() == dst.cropWidth()
+        && src.cropHeight() == dst.cropHeight())) {
+        return ERROR_UNSUPPORTED;
+    }
 
-    const uint8_t *src = (const uint8_t *)srcBits;
+    uint32_t *dst_ptr = (uint32_t *)dst.mBits
+        + (dst.mCropTop * dst.mWidth + dst.mCropLeft) / 2;
 
-    for (size_t y = 0; y < height; ++y) {
-        for (size_t x = 0; x < width; x += 2) {
-            signed y1 = (signed)src[2 * x + 1] - 16;
-            signed y2 = (signed)src[2 * x + 3] - 16;
-            signed u = (signed)src[2 * x] - 128;
-            signed v = (signed)src[2 * x + 2] - 128;
+    const uint8_t *src_ptr = (const uint8_t *)src.mBits
+        + (src.mCropTop * src.mWidth + src.mCropLeft) * 2;
+
+    for (size_t y = 0; y < src.cropHeight(); ++y) {
+        for (size_t x = 0; x < src.cropWidth(); x += 2) {
+            signed y1 = (signed)src_ptr[2 * x + 1] - 16;
+            signed y2 = (signed)src_ptr[2 * x + 3] - 16;
+            signed u = (signed)src_ptr[2 * x] - 128;
+            signed v = (signed)src_ptr[2 * x + 2] - 128;
 
             signed u_b = u * 517;
             signed u_g = -u * 100;
@@ -134,32 +178,39 @@
             dst_ptr[x / 2] = (rgb2 << 16) | rgb1;
         }
 
-        src += width * 2;
-        dst_ptr += dstSkip / 4;
+        src_ptr += src.mWidth * 2;
+        dst_ptr += dst.mWidth / 2;
     }
+
+    return OK;
 }
 
-void ColorConverter::convertYUV420Planar(
-        size_t width, size_t height,
-        const void *srcBits, size_t srcSkip,
-        void *dstBits, size_t dstSkip) {
-    CHECK_EQ(srcSkip, 0);  // Doesn't really make sense for YUV formats.
-    CHECK(dstSkip >= width * 2);
-    CHECK((dstSkip & 3) == 0);
+status_t ColorConverter::convertYUV420Planar(
+        const BitmapParams &src, const BitmapParams &dst) {
+    if (!((dst.mWidth & 3) == 0
+            && (src.mCropLeft & 1) == 0
+            && src.cropWidth() == dst.cropWidth()
+            && src.cropHeight() == dst.cropHeight())) {
+        return ERROR_UNSUPPORTED;
+    }
 
     uint8_t *kAdjustedClip = initClip();
 
-    uint32_t *dst_ptr = (uint32_t *)dstBits;
-    const uint8_t *src_y = (const uint8_t *)srcBits;
+    uint32_t *dst_ptr = (uint32_t *)dst.mBits
+        + (dst.mCropTop * dst.mWidth + dst.mCropLeft) / 2;
+
+    const uint8_t *src_y =
+        (const uint8_t *)src.mBits + src.mCropTop * src.mWidth + src.mCropLeft;
 
     const uint8_t *src_u =
-        (const uint8_t *)src_y + width * height;
+        (const uint8_t *)src_y + src.mWidth * src.mHeight
+        + src.mCropTop * (src.mWidth / 2) + src.mCropLeft / 2;
 
     const uint8_t *src_v =
-        (const uint8_t *)src_u + (width / 2) * (height / 2);
+        src_u + (src.mWidth / 2) * (src.mHeight / 2);
 
-    for (size_t y = 0; y < height; ++y) {
-        for (size_t x = 0; x < width; x += 2) {
+    for (size_t y = 0; y < src.cropHeight(); ++y) {
+        for (size_t x = 0; x < src.cropWidth(); x += 2) {
             // B = 1.164 * (Y - 16) + 2.018 * (U - 128)
             // G = 1.164 * (Y - 16) - 0.813 * (V - 128) - 0.391 * (U - 128)
             // R = 1.164 * (Y - 16) + 1.596 * (V - 128)
@@ -212,35 +263,42 @@
             dst_ptr[x / 2] = (rgb2 << 16) | rgb1;
         }
 
-        src_y += width;
+        src_y += src.mWidth;
 
         if (y & 1) {
-            src_u += width / 2;
-            src_v += width / 2;
+            src_u += src.mWidth / 2;
+            src_v += src.mWidth / 2;
         }
 
-        dst_ptr += dstSkip / 4;
+        dst_ptr += dst.mWidth / 2;
     }
+
+    return OK;
 }
 
-void ColorConverter::convertQCOMYUV420SemiPlanar(
-        size_t width, size_t height,
-        const void *srcBits, size_t srcSkip,
-        void *dstBits, size_t dstSkip) {
-    CHECK_EQ(srcSkip, 0);  // Doesn't really make sense for YUV formats.
-    CHECK(dstSkip >= width * 2);
-    CHECK((dstSkip & 3) == 0);
-
+status_t ColorConverter::convertQCOMYUV420SemiPlanar(
+        const BitmapParams &src, const BitmapParams &dst) {
     uint8_t *kAdjustedClip = initClip();
 
-    uint32_t *dst_ptr = (uint32_t *)dstBits;
-    const uint8_t *src_y = (const uint8_t *)srcBits;
+    if (!((dst.mWidth & 3) == 0
+            && (src.mCropLeft & 1) == 0
+            && src.cropWidth() == dst.cropWidth()
+            && src.cropHeight() == dst.cropHeight())) {
+        return ERROR_UNSUPPORTED;
+    }
+
+    uint32_t *dst_ptr = (uint32_t *)dst.mBits
+        + (dst.mCropTop * dst.mWidth + dst.mCropLeft) / 2;
+
+    const uint8_t *src_y =
+        (const uint8_t *)src.mBits + src.mCropTop * src.mWidth + src.mCropLeft;
 
     const uint8_t *src_u =
-        (const uint8_t *)src_y + width * height;
+        (const uint8_t *)src_y + src.mWidth * src.mHeight
+        + src.mCropTop * src.mWidth + src.mCropLeft;
 
-    for (size_t y = 0; y < height; ++y) {
-        for (size_t x = 0; x < width; x += 2) {
+    for (size_t y = 0; y < src.cropHeight(); ++y) {
+        for (size_t x = 0; x < src.cropWidth(); x += 2) {
             signed y1 = (signed)src_y[x] - 16;
             signed y2 = (signed)src_y[x + 1] - 16;
 
@@ -275,34 +333,43 @@
             dst_ptr[x / 2] = (rgb2 << 16) | rgb1;
         }
 
-        src_y += width;
+        src_y += src.mWidth;
 
         if (y & 1) {
-            src_u += width;
+            src_u += src.mWidth;
         }
 
-        dst_ptr += dstSkip / 4;
+        dst_ptr += dst.mWidth / 2;
     }
+
+    return OK;
 }
 
-void ColorConverter::convertYUV420SemiPlanar(
-        size_t width, size_t height,
-        const void *srcBits, size_t srcSkip,
-        void *dstBits, size_t dstSkip) {
-    CHECK_EQ(srcSkip, 0);  // Doesn't really make sense for YUV formats.
-    CHECK(dstSkip >= width * 2);
-    CHECK((dstSkip & 3) == 0);
+status_t ColorConverter::convertYUV420SemiPlanar(
+        const BitmapParams &src, const BitmapParams &dst) {
+    // XXX Untested
 
     uint8_t *kAdjustedClip = initClip();
 
-    uint32_t *dst_ptr = (uint32_t *)dstBits;
-    const uint8_t *src_y = (const uint8_t *)srcBits;
+    if (!((dst.mWidth & 3) == 0
+            && (src.mCropLeft & 1) == 0
+            && src.cropWidth() == dst.cropWidth()
+            && src.cropHeight() == dst.cropHeight())) {
+        return ERROR_UNSUPPORTED;
+    }
+
+    uint32_t *dst_ptr = (uint32_t *)dst.mBits
+        + (dst.mCropTop * dst.mWidth + dst.mCropLeft) / 2;
+
+    const uint8_t *src_y =
+        (const uint8_t *)src.mBits + src.mCropTop * src.mWidth + src.mCropLeft;
 
     const uint8_t *src_u =
-        (const uint8_t *)src_y + width * height;
+        (const uint8_t *)src_y + src.mWidth * src.mHeight
+        + src.mCropTop * src.mWidth + src.mCropLeft;
 
-    for (size_t y = 0; y < height; ++y) {
-        for (size_t x = 0; x < width; x += 2) {
+    for (size_t y = 0; y < src.cropHeight(); ++y) {
+        for (size_t x = 0; x < src.cropWidth(); x += 2) {
             signed y1 = (signed)src_y[x] - 16;
             signed y2 = (signed)src_y[x + 1] - 16;
 
@@ -337,14 +404,16 @@
             dst_ptr[x / 2] = (rgb2 << 16) | rgb1;
         }
 
-        src_y += width;
+        src_y += src.mWidth;
 
         if (y & 1) {
-            src_u += width;
+            src_u += src.mWidth;
         }
 
-        dst_ptr += dstSkip / 4;
+        dst_ptr += dst.mWidth / 2;
     }
+
+    return OK;
 }
 
 uint8_t *ColorConverter::initClip() {
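
All of the converters above share the same fixed-point YUV-to-RGB565 math (the floating-point form is spelled out in the comments of convertYUV420Planar). A one-pixel standalone sketch of that math, with the coefficients pre-scaled by 256 as in the code; the sample values are hypothetical:

    #include <cstdint>
    #include <cstdio>

    static int clamp8(int v) {
        return v < 0 ? 0 : (v > 255 ? 255 : v);
    }

    int main() {
        int Y = 120, U = 90, V = 200;  // hypothetical YUV sample

        int y1 = Y - 16, u = U - 128, v = V - 128;
        int tmp = y1 * 298;                              // 1.164 * 256
        int b = clamp8((tmp + 517 * u) / 256);           // + 2.018 * (U - 128)
        int g = clamp8((tmp - 208 * v - 100 * u) / 256); // - 0.813 V' - 0.391 U'
        int r = clamp8((tmp + 409 * v) / 256);           // + 1.596 * (V - 128)

        // Pack into RGB565: 5 bits red, 6 bits green, 5 bits blue.
        uint16_t rgb565 =
            (uint16_t)(((r >> 3) << 11) | ((g >> 2) << 5) | (b >> 3));
        printf("R=%d G=%d B=%d -> 0x%04x\n", r, g, b, rgb565);
        return 0;
    }
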
diff --git a/media/libstagefright/colorconversion/SoftwareRenderer.cpp b/media/libstagefright/colorconversion/SoftwareRenderer.cpp
index 93ec79d..70408d7 100644
--- a/media/libstagefright/colorconversion/SoftwareRenderer.cpp
+++ b/media/libstagefright/colorconversion/SoftwareRenderer.cpp
@@ -21,91 +21,129 @@
 
 #include <binder/MemoryHeapBase.h>
 #include <binder/MemoryHeapPmem.h>
-#include <media/stagefright/MediaDebug.h>
-#include <surfaceflinger/ISurface.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/MetaData.h>
+#include <surfaceflinger/Surface.h>
+#include <ui/android_native_buffer.h>
+#include <ui/GraphicBufferMapper.h>
 
 namespace android {
 
 SoftwareRenderer::SoftwareRenderer(
-        OMX_COLOR_FORMATTYPE colorFormat,
-        const sp<ISurface> &surface,
-        size_t displayWidth, size_t displayHeight,
-        size_t decodedWidth, size_t decodedHeight,
-        int32_t rotationDegrees)
-    : mInitCheck(NO_INIT),
-      mColorFormat(colorFormat),
-      mConverter(colorFormat, OMX_COLOR_Format16bitRGB565),
-      mISurface(surface),
-      mDisplayWidth(displayWidth),
-      mDisplayHeight(displayHeight),
-      mDecodedWidth(decodedWidth),
-      mDecodedHeight(decodedHeight),
-      mFrameSize(mDecodedWidth * mDecodedHeight * 2),  // RGB565
-      mIndex(0) {
-    mMemoryHeap = new MemoryHeapBase("/dev/pmem_adsp", 2 * mFrameSize);
-    if (mMemoryHeap->heapID() < 0) {
-        LOGI("Creating physical memory heap failed, reverting to regular heap.");
-        mMemoryHeap = new MemoryHeapBase(2 * mFrameSize);
-    } else {
-        sp<MemoryHeapPmem> pmemHeap = new MemoryHeapPmem(mMemoryHeap);
-        pmemHeap->slap();
-        mMemoryHeap = pmemHeap;
+        const sp<Surface> &surface, const sp<MetaData> &meta)
+    : mConverter(NULL),
+      mYUVMode(None),
+      mSurface(surface) {
+    int32_t tmp;
+    CHECK(meta->findInt32(kKeyColorFormat, &tmp));
+    mColorFormat = (OMX_COLOR_FORMATTYPE)tmp;
+
+    CHECK(meta->findInt32(kKeyWidth, &mWidth));
+    CHECK(meta->findInt32(kKeyHeight, &mHeight));
+
+    if (!meta->findRect(
+                kKeyCropRect,
+                &mCropLeft, &mCropTop, &mCropRight, &mCropBottom)) {
+        mCropLeft = mCropTop = 0;
+        mCropRight = mWidth - 1;
+        mCropBottom = mHeight - 1;
     }
 
-    CHECK(mISurface.get() != NULL);
-    CHECK(mDecodedWidth > 0);
-    CHECK(mDecodedHeight > 0);
-    CHECK(mMemoryHeap->heapID() >= 0);
-    CHECK(mConverter.isValid());
+    int32_t rotationDegrees;
+    if (!meta->findInt32(kKeyRotation, &rotationDegrees)) {
+        rotationDegrees = 0;
+    }
 
-    uint32_t orientation;
+    int halFormat;
+    switch (mColorFormat) {
+        default:
+            halFormat = HAL_PIXEL_FORMAT_RGB_565;
+
+            mConverter = new ColorConverter(
+                    mColorFormat, OMX_COLOR_Format16bitRGB565);
+            CHECK(mConverter->isValid());
+            break;
+    }
+
+    CHECK(mSurface.get() != NULL);
+    CHECK(mWidth > 0);
+    CHECK(mHeight > 0);
+    CHECK(mConverter == NULL || mConverter->isValid());
+
+    CHECK_EQ(0,
+            native_window_set_usage(
+            mSurface.get(),
+            GRALLOC_USAGE_SW_READ_NEVER | GRALLOC_USAGE_SW_WRITE_OFTEN
+            | GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_EXTERNAL_DISP));
+
+    CHECK_EQ(0, native_window_set_buffer_count(mSurface.get(), 2));
+
+    // Width must be multiple of 32???
+    CHECK_EQ(0, native_window_set_buffers_geometry(
+                mSurface.get(),
+                mCropRight - mCropLeft + 1,
+                mCropBottom - mCropTop + 1,
+                halFormat));
+
+    uint32_t transform;
     switch (rotationDegrees) {
-        case 0: orientation = ISurface::BufferHeap::ROT_0; break;
-        case 90: orientation = ISurface::BufferHeap::ROT_90; break;
-        case 180: orientation = ISurface::BufferHeap::ROT_180; break;
-        case 270: orientation = ISurface::BufferHeap::ROT_270; break;
-        default: orientation = ISurface::BufferHeap::ROT_0; break;
+        case 0: transform = 0; break;
+        case 90: transform = HAL_TRANSFORM_ROT_90; break;
+        case 180: transform = HAL_TRANSFORM_ROT_180; break;
+        case 270: transform = HAL_TRANSFORM_ROT_270; break;
+        default: transform = 0; break;
     }
 
-    ISurface::BufferHeap bufferHeap(
-            mDisplayWidth, mDisplayHeight,
-            mDecodedWidth, mDecodedHeight,
-            PIXEL_FORMAT_RGB_565,
-            orientation, 0,
-            mMemoryHeap);
-
-    status_t err = mISurface->registerBuffers(bufferHeap);
-
-    if (err != OK) {
-        LOGW("ISurface failed to register buffers (0x%08x)", err);
+    if (transform) {
+        CHECK_EQ(0, native_window_set_buffers_transform(
+                    mSurface.get(), transform));
     }
-
-    mInitCheck = err;
 }
 
 SoftwareRenderer::~SoftwareRenderer() {
-    mISurface->unregisterBuffers();
-}
-
-status_t SoftwareRenderer::initCheck() const {
-    return mInitCheck;
+    delete mConverter;
+    mConverter = NULL;
 }
 
 void SoftwareRenderer::render(
         const void *data, size_t size, void *platformPrivate) {
-    if (mInitCheck != OK) {
+    android_native_buffer_t *buf;
+    int err;
+    if ((err = mSurface->dequeueBuffer(mSurface.get(), &buf)) != 0) {
+        LOGW("Surface::dequeueBuffer returned error %d", err);
         return;
     }
 
-    size_t offset = mIndex * mFrameSize;
-    void *dst = (uint8_t *)mMemoryHeap->getBase() + offset;
+    CHECK_EQ(0, mSurface->lockBuffer(mSurface.get(), buf));
 
-    mConverter.convert(
-            mDecodedWidth, mDecodedHeight,
-            data, 0, dst, 2 * mDecodedWidth);
+    GraphicBufferMapper &mapper = GraphicBufferMapper::get();
 
-    mISurface->postBuffer(offset);
-    mIndex = 1 - mIndex;
+    Rect bounds(mWidth, mHeight);
+
+    void *dst;
+    CHECK_EQ(0, mapper.lock(
+                buf->handle, GRALLOC_USAGE_SW_WRITE_OFTEN, bounds, &dst));
+
+    if (mConverter) {
+        mConverter->convert(
+                data,
+                mWidth, mHeight,
+                mCropLeft, mCropTop, mCropRight, mCropBottom,
+                dst,
+                buf->stride, buf->height,
+                0, 0,
+                mCropRight - mCropLeft,
+                mCropBottom - mCropTop);
+    } else {
+        TRESPASS();
+    }
+
+    CHECK_EQ(0, mapper.unlock(buf->handle));
+
+    if ((err = mSurface->queueBuffer(mSurface.get(), buf)) != 0) {
+        LOGW("Surface::queueBuffer returned error %d", err);
+    }
+    buf = NULL;
 }
 
 }  // namespace android
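
The rewritten renderer follows the native-window buffer cycle: dequeue a buffer, lock it, convert the decoded frame into it, unlock, and queue it for display. The sketch below shows that ordering against a deliberately simplified, hypothetical Window/Buffer interface (not the real ANativeWindow API), purely to make the flow explicit:

    // Hypothetical stand-ins for ANativeWindow and its graphic buffers.
    struct Buffer {
        void *bits;
        int stride;   // destination pixels per row
        int height;
    };

    struct Window {
        virtual int dequeue(Buffer **buf) = 0;           // get a free buffer
        virtual int lock(Buffer *buf, void **bits) = 0;  // map it for CPU writes
        virtual int unlockAndQueue(Buffer *buf) = 0;     // hand it to the display
        virtual ~Window() {}
    };

    // One frame: dequeue -> lock -> convert -> unlock/queue.
    // 'convert' stands in for ColorConverter::convert with the crop applied.
    bool renderFrame(Window *win, const void *decodedFrame,
                     void (*convert)(const void *src, void *dst, int dstStride)) {
        Buffer *buf = 0;
        if (win->dequeue(&buf) != 0) {
            return false;  // no buffer available; drop the frame
        }

        void *dst = 0;
        if (win->lock(buf, &dst) != 0) {
            return false;
        }

        convert(decodedFrame, dst, buf->stride);

        return win->unlockAndQueue(buf) == 0;
    }
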
diff --git a/media/libstagefright/foundation/AHierarchicalStateMachine.cpp b/media/libstagefright/foundation/AHierarchicalStateMachine.cpp
new file mode 100644
index 0000000..30286d8
--- /dev/null
+++ b/media/libstagefright/foundation/AHierarchicalStateMachine.cpp
@@ -0,0 +1,97 @@
+#include <media/stagefright/foundation/AHierarchicalStateMachine.h>
+
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <utils/Vector.h>
+
+namespace android {
+
+AState::AState(const sp<AState> &parentState)
+    : mParentState(parentState) {
+}
+
+AState::~AState() {
+}
+
+sp<AState> AState::parentState() {
+    return mParentState;
+}
+
+void AState::stateEntered() {
+}
+
+void AState::stateExited() {
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+AHierarchicalStateMachine::AHierarchicalStateMachine() {
+}
+
+AHierarchicalStateMachine::~AHierarchicalStateMachine() {
+}
+
+void AHierarchicalStateMachine::onMessageReceived(const sp<AMessage> &msg) {
+    sp<AState> save = mState;
+
+    sp<AState> cur = mState;
+    while (cur != NULL && !cur->onMessageReceived(msg)) {
+        // If you claim not to have handled the message you shouldn't
+        // have called changeState()...
+        CHECK(save == mState);
+
+        cur = cur->parentState();
+    }
+
+    if (cur != NULL) {
+        return;
+    }
+
+    LOGW("Warning message %s unhandled in root state.",
+         msg->debugString().c_str());
+}
+
+void AHierarchicalStateMachine::changeState(const sp<AState> &state) {
+    if (state == mState) {
+        // Quick exit for the easy case.
+        return;
+    }
+
+    Vector<sp<AState> > A;
+    sp<AState> cur = mState;
+    for (;;) {
+        A.push(cur);
+        if (cur == NULL) {
+            break;
+        }
+        cur = cur->parentState();
+    }
+
+    Vector<sp<AState> > B;
+    cur = state;
+    for (;;) {
+        B.push(cur);
+        if (cur == NULL) {
+            break;
+        }
+        cur = cur->parentState();
+    }
+
+    // Remove the common tail.
+    while (A.size() > 0 && B.size() > 0 && A.top() == B.top()) {
+        A.pop();
+        B.pop();
+    }
+
+    mState = state;
+
+    for (size_t i = 0; i < A.size(); ++i) {
+        A.editItemAt(i)->stateExited();
+    }
+
+    for (size_t i = B.size(); i-- > 0;) {
+        B.editItemAt(i)->stateEntered();
+    }
+}
+
+}  // namespace android
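
changeState() above works by building the ancestor chains of the current and target states, discarding their common tail, exiting the remaining old states leaf-first, and entering the remaining new states outside-in. A standalone sketch of that algorithm with simplified types (plain pointers rather than the real AState/sp<> machinery):

    #include <vector>

    struct State {
        State *parent;
        explicit State(State *p = 0) : parent(p) {}
        virtual void stateEntered() {}
        virtual void stateExited() {}
        virtual ~State() {}
    };

    // Leaf-to-root ancestor chain, including the state itself.
    static std::vector<State *> chainOf(State *s) {
        std::vector<State *> chain;
        for (; s != 0; s = s->parent) {
            chain.push_back(s);
        }
        return chain;
    }

    void changeState(State *&current, State *target) {
        if (current == target) {
            return;  // nothing to do
        }

        std::vector<State *> oldChain = chainOf(current);
        std::vector<State *> newChain = chainOf(target);

        // Strip the shared ancestors from the root end of both chains.
        while (!oldChain.empty() && !newChain.empty()
                && oldChain.back() == newChain.back()) {
            oldChain.pop_back();
            newChain.pop_back();
        }

        current = target;

        // Exit old states from the leaf up...
        for (size_t i = 0; i < oldChain.size(); ++i) {
            oldChain[i]->stateExited();
        }

        // ...then enter new states from the outermost down to the leaf.
        for (size_t i = newChain.size(); i-- > 0;) {
            newChain[i]->stateEntered();
        }
    }
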
diff --git a/media/libstagefright/foundation/AMessage.cpp b/media/libstagefright/foundation/AMessage.cpp
index 26c6d42..b592c3f 100644
--- a/media/libstagefright/foundation/AMessage.cpp
+++ b/media/libstagefright/foundation/AMessage.cpp
@@ -23,6 +23,8 @@
 #include "ALooperRoster.h"
 #include "AString.h"
 
+#include <binder/Parcel.h>
+
 namespace android {
 
 AMessage::AMessage(uint32_t what, ALooper::handler_id target)
@@ -169,6 +171,18 @@
     item->u.refValue = obj.get();
 }
 
+void AMessage::setRect(
+        const char *name,
+        int32_t left, int32_t top, int32_t right, int32_t bottom) {
+    Item *item = allocateItem(name);
+    item->mType = kTypeRect;
+
+    item->u.rectValue.mLeft = left;
+    item->u.rectValue.mTop = top;
+    item->u.rectValue.mRight = right;
+    item->u.rectValue.mBottom = bottom;
+}
+
 bool AMessage::findString(const char *name, AString *value) const {
     const Item *item = findItem(name, kTypeString);
     if (item) {
@@ -196,6 +210,22 @@
     return false;
 }
 
+bool AMessage::findRect(
+        const char *name,
+        int32_t *left, int32_t *top, int32_t *right, int32_t *bottom) const {
+    const Item *item = findItem(name, kTypeRect);
+    if (item == NULL) {
+        return false;
+    }
+
+    *left = item->u.rectValue.mLeft;
+    *top = item->u.rectValue.mTop;
+    *right = item->u.rectValue.mRight;
+    *bottom = item->u.rectValue.mBottom;
+
+    return true;
+}
+
 void AMessage::post(int64_t delayUs) {
     extern ALooperRoster gLooperRoster;
 
@@ -222,13 +252,22 @@
             }
 
             case kTypeObject:
-            case kTypeMessage:
             {
                 to->u.refValue = from->u.refValue;
                 to->u.refValue->incStrong(msg.get());
                 break;
             }
 
+            case kTypeMessage:
+            {
+                sp<AMessage> copy =
+                    static_cast<AMessage *>(from->u.refValue)->dup();
+
+                to->u.refValue = copy.get();
+                to->u.refValue->incStrong(msg.get());
+                break;
+            }
+
             default:
             {
                 to->u = from->u;
@@ -341,4 +380,136 @@
     return s;
 }
 
+// static
+sp<AMessage> AMessage::FromParcel(const Parcel &parcel) {
+    int32_t what = parcel.readInt32();
+    sp<AMessage> msg = new AMessage(what);
+
+    msg->mNumItems = static_cast<size_t>(parcel.readInt32());
+
+    for (size_t i = 0; i < msg->mNumItems; ++i) {
+        Item *item = &msg->mItems[i];
+
+        item->mName = AAtomizer::Atomize(parcel.readCString());
+        item->mType = static_cast<Type>(parcel.readInt32());
+
+        switch (item->mType) {
+            case kTypeInt32:
+            {
+                item->u.int32Value = parcel.readInt32();
+                break;
+            }
+
+            case kTypeInt64:
+            {
+                item->u.int64Value = parcel.readInt64();
+                break;
+            }
+
+            case kTypeSize:
+            {
+                item->u.sizeValue = static_cast<size_t>(parcel.readInt32());
+                break;
+            }
+
+            case kTypeFloat:
+            {
+                item->u.floatValue = parcel.readFloat();
+                break;
+            }
+
+            case kTypeDouble:
+            {
+                item->u.doubleValue = parcel.readDouble();
+                break;
+            }
+
+            case kTypeString:
+            {
+                item->u.stringValue = new AString(parcel.readCString());
+                break;
+            }
+
+            case kTypeMessage:
+            {
+                sp<AMessage> subMsg = AMessage::FromParcel(parcel);
+                subMsg->incStrong(msg.get());
+
+                item->u.refValue = subMsg.get();
+                break;
+            }
+
+            default:
+            {
+                LOGE("This type of object cannot cross process boundaries.");
+                TRESPASS();
+            }
+        }
+    }
+
+    return msg;
+}
+
+void AMessage::writeToParcel(Parcel *parcel) const {
+    parcel->writeInt32(static_cast<int32_t>(mWhat));
+    parcel->writeInt32(static_cast<int32_t>(mNumItems));
+
+    for (size_t i = 0; i < mNumItems; ++i) {
+        const Item &item = mItems[i];
+
+        parcel->writeCString(item.mName);
+        parcel->writeInt32(static_cast<int32_t>(item.mType));
+
+        switch (item.mType) {
+            case kTypeInt32:
+            {
+                parcel->writeInt32(item.u.int32Value);
+                break;
+            }
+
+            case kTypeInt64:
+            {
+                parcel->writeInt64(item.u.int64Value);
+                break;
+            }
+
+            case kTypeSize:
+            {
+                parcel->writeInt32(static_cast<int32_t>(item.u.sizeValue));
+                break;
+            }
+
+            case kTypeFloat:
+            {
+                parcel->writeFloat(item.u.floatValue);
+                break;
+            }
+
+            case kTypeDouble:
+            {
+                parcel->writeDouble(item.u.doubleValue);
+                break;
+            }
+
+            case kTypeString:
+            {
+                parcel->writeCString(item.u.stringValue->c_str());
+                break;
+            }
+
+            case kTypeMessage:
+            {
+                static_cast<AMessage *>(item.u.refValue)->writeToParcel(parcel);
+                break;
+            }
+
+            default:
+            {
+                LOGE("This type of object cannot cross process boundaries.");
+                TRESPASS();
+            }
+        }
+    }
+}
+
 }  // namespace android
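
A hedged usage sketch of the new Parcel support: flatten a message, rewind the parcel, and reconstruct it. It assumes the AMessage and Parcel headers from this tree are on the include path and that the message only holds parcelable item types (no objects, buffers, or rects):

    #include <binder/Parcel.h>
    #include <media/stagefright/foundation/AMessage.h>

    using namespace android;

    static sp<AMessage> roundTrip(const sp<AMessage> &msg) {
        Parcel parcel;
        msg->writeToParcel(&parcel);   // flattens what() and every item
        parcel.setDataPosition(0);     // rewind before reading back
        return AMessage::FromParcel(parcel);
    }
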
diff --git a/media/libstagefright/foundation/Android.mk b/media/libstagefright/foundation/Android.mk
index ffa7db0..4e07f6f 100644
--- a/media/libstagefright/foundation/Android.mk
+++ b/media/libstagefright/foundation/Android.mk
@@ -1,16 +1,17 @@
 LOCAL_PATH:= $(call my-dir)
 include $(CLEAR_VARS)
 
-LOCAL_SRC_FILES:=               \
-    AAtomizer.cpp               \
-    ABitReader.cpp              \
-    ABuffer.cpp                 \
-    AHandler.cpp                \
-    ALooper.cpp                 \
-    ALooperRoster.cpp           \
-    AMessage.cpp                \
-    AString.cpp                 \
-    base64.cpp                  \
+LOCAL_SRC_FILES:=                 \
+    AAtomizer.cpp                 \
+    ABitReader.cpp                \
+    ABuffer.cpp                   \
+    AHandler.cpp                  \
+    AHierarchicalStateMachine.cpp \
+    ALooper.cpp                   \
+    ALooperRoster.cpp             \
+    AMessage.cpp                  \
+    AString.cpp                   \
+    base64.cpp                    \
     hexdump.cpp
 
 LOCAL_C_INCLUDES:= \
@@ -18,14 +19,7 @@
 
 LOCAL_SHARED_LIBRARIES := \
         libbinder         \
-        libmedia          \
         libutils          \
-        libcutils         \
-        libui             \
-        libsonivox        \
-        libvorbisidec     \
-        libsurfaceflinger_client \
-        libcamera_client
 
 LOCAL_CFLAGS += -Wno-multichar
 
diff --git a/media/libstagefright/httplive/Android.mk b/media/libstagefright/httplive/Android.mk
index cc7dd4f..9225e41 100644
--- a/media/libstagefright/httplive/Android.mk
+++ b/media/libstagefright/httplive/Android.mk
@@ -2,14 +2,16 @@
 
 include $(CLEAR_VARS)
 
-LOCAL_SRC_FILES:=       \
-        LiveSource.cpp  \
-        M3UParser.cpp   \
+LOCAL_SRC_FILES:=               \
+        LiveDataSource.cpp      \
+        LiveSession.cpp         \
+        M3UParser.cpp           \
 
 LOCAL_C_INCLUDES:= \
 	$(JNI_H_INCLUDE) \
 	$(TOP)/frameworks/base/include/media/stagefright/openmax \
-        $(TOP)/frameworks/base/media/libstagefright
+        $(TOP)/frameworks/base/media/libstagefright \
+        $(TOP)/external/openssl/include
 
 LOCAL_MODULE:= libstagefright_httplive
 
diff --git a/media/libstagefright/httplive/LiveDataSource.cpp b/media/libstagefright/httplive/LiveDataSource.cpp
new file mode 100644
index 0000000..5f5c6d4
--- /dev/null
+++ b/media/libstagefright/httplive/LiveDataSource.cpp
@@ -0,0 +1,173 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "LiveDataSource"
+#include <utils/Log.h>
+
+#include "LiveDataSource.h"
+
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ADebug.h>
+
+#define SAVE_BACKUP     0
+
+namespace android {
+
+LiveDataSource::LiveDataSource()
+    : mOffset(0),
+      mFinalResult(OK),
+      mBackupFile(NULL) {
+#if SAVE_BACKUP
+    mBackupFile = fopen("/data/misc/backup.ts", "wb");
+    CHECK(mBackupFile != NULL);
+#endif
+}
+
+LiveDataSource::~LiveDataSource() {
+    if (mBackupFile != NULL) {
+        fclose(mBackupFile);
+        mBackupFile = NULL;
+    }
+}
+
+status_t LiveDataSource::initCheck() const {
+    return OK;
+}
+
+size_t LiveDataSource::countQueuedBuffers() {
+    Mutex::Autolock autoLock(mLock);
+
+    return mBufferQueue.size();
+}
+
+ssize_t LiveDataSource::readAtNonBlocking(
+        off64_t offset, void *data, size_t size) {
+    Mutex::Autolock autoLock(mLock);
+
+    if (offset != mOffset) {
+        LOGE("Attempt at reading non-sequentially from LiveDataSource.");
+        return -EPIPE;
+    }
+
+    size_t totalAvailable = 0;
+    for (List<sp<ABuffer> >::iterator it = mBufferQueue.begin();
+         it != mBufferQueue.end(); ++it) {
+        sp<ABuffer> buffer = *it;
+
+        totalAvailable += buffer->size();
+
+        if (totalAvailable >= size) {
+            break;
+        }
+    }
+
+    if (totalAvailable < size) {
+        return mFinalResult == OK ? -EWOULDBLOCK : mFinalResult;
+    }
+
+    return readAt_l(offset, data, size);
+}
+
+ssize_t LiveDataSource::readAt(off64_t offset, void *data, size_t size) {
+    Mutex::Autolock autoLock(mLock);
+    return readAt_l(offset, data, size);
+}
+
+ssize_t LiveDataSource::readAt_l(off64_t offset, void *data, size_t size) {
+    if (offset != mOffset) {
+        LOGE("Attempt at reading non-sequentially from LiveDataSource.");
+        return -EPIPE;
+    }
+
+    size_t sizeDone = 0;
+
+    while (sizeDone < size) {
+        while (mBufferQueue.empty() && mFinalResult == OK) {
+            mCondition.wait(mLock);
+        }
+
+        if (mBufferQueue.empty()) {
+            if (sizeDone > 0) {
+                mOffset += sizeDone;
+                return sizeDone;
+            }
+
+            return mFinalResult;
+        }
+
+        sp<ABuffer> buffer = *mBufferQueue.begin();
+
+        size_t copy = size - sizeDone;
+
+        if (copy > buffer->size()) {
+            copy = buffer->size();
+        }
+
+        memcpy((uint8_t *)data + sizeDone, buffer->data(), copy);
+
+        sizeDone += copy;
+
+        buffer->setRange(buffer->offset() + copy, buffer->size() - copy);
+
+        if (buffer->size() == 0) {
+            mBufferQueue.erase(mBufferQueue.begin());
+        }
+    }
+
+    mOffset += sizeDone;
+
+    return sizeDone;
+}
+
+void LiveDataSource::queueBuffer(const sp<ABuffer> &buffer) {
+    Mutex::Autolock autoLock(mLock);
+
+    if (mFinalResult != OK) {
+        return;
+    }
+
+#if SAVE_BACKUP
+    if (mBackupFile != NULL) {
+        CHECK_EQ(fwrite(buffer->data(), 1, buffer->size(), mBackupFile),
+                 buffer->size());
+    }
+#endif
+
+    mBufferQueue.push_back(buffer);
+    mCondition.broadcast();
+}
+
+void LiveDataSource::queueEOS(status_t finalResult) {
+    CHECK_NE(finalResult, (status_t)OK);
+
+    Mutex::Autolock autoLock(mLock);
+
+    mFinalResult = finalResult;
+    mCondition.broadcast();
+}
+
+void LiveDataSource::reset() {
+    Mutex::Autolock autoLock(mLock);
+
+    // XXX FIXME: If we've done a partial read and are waiting for more
+    // buffers, we'll mix old and new data...
+
+    mFinalResult = OK;
+    mBufferQueue.clear();
+}
+
+}  // namespace android
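
A hedged usage sketch of the queue-and-read contract implemented above: a producer pushes ABuffers, the consumer reads strictly sequentially, and readAtNonBlocking() reports -EWOULDBLOCK until enough bytes have been queued. Include paths follow this tree; the 188-byte transport-stream packet size is just an example:

    #include "LiveDataSource.h"

    #include <media/stagefright/foundation/ABuffer.h>
    #include <media/stagefright/MediaErrors.h>

    #include <stdint.h>
    #include <string.h>

    using namespace android;

    static void example() {
        sp<LiveDataSource> source = new LiveDataSource;

        // Producer side: queue one 188-byte chunk (e.g. a single TS packet).
        sp<ABuffer> chunk = new ABuffer(188);
        memset(chunk->data(), 0x47, chunk->size());
        source->queueBuffer(chunk);

        // Consumer side: reads must start at the current sequential offset.
        uint8_t out[188];
        ssize_t n = source->readAtNonBlocking(0, out, sizeof(out));
        // n == 188 here; with less data queued it would be -EWOULDBLOCK.

        // Signal that no more data will arrive.
        source->queueEOS(ERROR_END_OF_STREAM);
    }
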
diff --git a/media/libstagefright/httplive/LiveDataSource.h b/media/libstagefright/httplive/LiveDataSource.h
new file mode 100644
index 0000000..b7be637
--- /dev/null
+++ b/media/libstagefright/httplive/LiveDataSource.h
@@ -0,0 +1,64 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef LIVE_DATA_SOURCE_H_
+
+#define LIVE_DATA_SOURCE_H_
+
+#include <media/stagefright/foundation/ABase.h>
+#include <media/stagefright/DataSource.h>
+#include <utils/threads.h>
+#include <utils/List.h>
+
+namespace android {
+
+struct ABuffer;
+
+struct LiveDataSource : public DataSource {
+    LiveDataSource();
+
+    virtual status_t initCheck() const;
+
+    virtual ssize_t readAt(off64_t offset, void *data, size_t size);
+    ssize_t readAtNonBlocking(off64_t offset, void *data, size_t size);
+
+    void queueBuffer(const sp<ABuffer> &buffer);
+    void queueEOS(status_t finalResult);
+    void reset();
+
+    size_t countQueuedBuffers();
+
+protected:
+    virtual ~LiveDataSource();
+
+private:
+    Mutex mLock;
+    Condition mCondition;
+
+    off64_t mOffset;
+    List<sp<ABuffer> > mBufferQueue;
+    status_t mFinalResult;
+
+    FILE *mBackupFile;
+
+    ssize_t readAt_l(off64_t offset, void *data, size_t size);
+
+    DISALLOW_EVIL_CONSTRUCTORS(LiveDataSource);
+};
+
+}  // namespace android
+
+#endif  // LIVE_DATA_SOURCE_H_
diff --git a/media/libstagefright/httplive/LiveSession.cpp b/media/libstagefright/httplive/LiveSession.cpp
new file mode 100644
index 0000000..5979be6
--- /dev/null
+++ b/media/libstagefright/httplive/LiveSession.cpp
@@ -0,0 +1,703 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "LiveSession"
+#include <utils/Log.h>
+
+#include "include/LiveSession.h"
+
+#include "LiveDataSource.h"
+
+#include "include/M3UParser.h"
+#include "include/NuHTTPDataSource.h"
+
+#include <cutils/properties.h>
+#include <media/stagefright/foundation/hexdump.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/DataSource.h>
+#include <media/stagefright/FileSource.h>
+#include <media/stagefright/MediaErrors.h>
+
+#include <ctype.h>
+#include <openssl/aes.h>
+
+namespace android {
+
+const int64_t LiveSession::kMaxPlaylistAgeUs = 15000000ll;
+
+LiveSession::LiveSession()
+    : mDataSource(new LiveDataSource),
+      mHTTPDataSource(new NuHTTPDataSource),
+      mPrevBandwidthIndex(-1),
+      mLastPlaylistFetchTimeUs(-1),
+      mSeqNumber(-1),
+      mSeekTimeUs(-1),
+      mNumRetries(0),
+      mDurationUs(-1),
+      mSeekDone(false),
+      mMonitorQueueGeneration(0) {
+}
+
+LiveSession::~LiveSession() {
+}
+
+sp<DataSource> LiveSession::getDataSource() {
+    return mDataSource;
+}
+
+void LiveSession::connect(const char *url) {
+    sp<AMessage> msg = new AMessage(kWhatConnect, id());
+    msg->setString("url", url);
+    msg->post();
+}
+
+void LiveSession::disconnect() {
+    (new AMessage(kWhatDisconnect, id()))->post();
+}
+
+void LiveSession::seekTo(int64_t timeUs) {
+    Mutex::Autolock autoLock(mLock);
+    mSeekDone = false;
+
+    sp<AMessage> msg = new AMessage(kWhatSeek, id());
+    msg->setInt64("timeUs", timeUs);
+    msg->post();
+
+    while (!mSeekDone) {
+        mCondition.wait(mLock);
+    }
+}
+
+void LiveSession::onMessageReceived(const sp<AMessage> &msg) {
+    switch (msg->what()) {
+        case kWhatConnect:
+            onConnect(msg);
+            break;
+
+        case kWhatDisconnect:
+            onDisconnect();
+            break;
+
+        case kWhatMonitorQueue:
+        {
+            int32_t generation;
+            CHECK(msg->findInt32("generation", &generation));
+
+            if (generation != mMonitorQueueGeneration) {
+                // Stale event
+                break;
+            }
+
+            onMonitorQueue();
+            break;
+        }
+
+        case kWhatSeek:
+            onSeek(msg);
+            break;
+
+        default:
+            TRESPASS();
+            break;
+    }
+}
+
+// static
+int LiveSession::SortByBandwidth(const BandwidthItem *a, const BandwidthItem *b) {
+    if (a->mBandwidth < b->mBandwidth) {
+        return -1;
+    } else if (a->mBandwidth == b->mBandwidth) {
+        return 0;
+    }
+
+    return 1;
+}
+
+void LiveSession::onConnect(const sp<AMessage> &msg) {
+    AString url;
+    CHECK(msg->findString("url", &url));
+
+    LOGI("onConnect '%s'", url.c_str());
+
+    mMasterURL = url;
+
+    sp<M3UParser> playlist = fetchPlaylist(url.c_str());
+    CHECK(playlist != NULL);
+
+    if (playlist->isVariantPlaylist()) {
+        for (size_t i = 0; i < playlist->size(); ++i) {
+            BandwidthItem item;
+
+            sp<AMessage> meta;
+            playlist->itemAt(i, &item.mURI, &meta);
+
+            unsigned long bandwidth;
+            CHECK(meta->findInt32("bandwidth", (int32_t *)&item.mBandwidth));
+
+            mBandwidthItems.push(item);
+        }
+
+        CHECK_GT(mBandwidthItems.size(), 0u);
+
+        mBandwidthItems.sort(SortByBandwidth);
+
+        char value[PROPERTY_VALUE_MAX];
+        if (property_get("media.httplive.disable-nuplayer", value, NULL)
+                && (!strcasecmp(value, "true") || !strcmp(value, "1"))) {
+            // The "legacy" player cannot deal with audio format changes;
+            // some streams use different audio encoding parameters for
+            // their lowest bandwidth stream.
+            if (mBandwidthItems.size() > 1) {
+                // XXX Remove the lowest bitrate stream for now...
+                mBandwidthItems.removeAt(0);
+            }
+        }
+    }
+
+    postMonitorQueue();
+}
+
+void LiveSession::onDisconnect() {
+    LOGI("onDisconnect");
+
+    mDataSource->queueEOS(ERROR_END_OF_STREAM);
+}
+
+status_t LiveSession::fetchFile(const char *url, sp<ABuffer> *out) {
+    *out = NULL;
+
+    sp<DataSource> source;
+
+    if (!strncasecmp(url, "file://", 7)) {
+        source = new FileSource(url + 7);
+    } else if (strncasecmp(url, "http://", 7)) {
+        return ERROR_UNSUPPORTED;
+    } else {
+        status_t err = mHTTPDataSource->connect(url);
+
+        if (err != OK) {
+            return err;
+        }
+
+        source = mHTTPDataSource;
+    }
+
+    off64_t size;
+    status_t err = source->getSize(&size);
+
+    if (err != OK) {
+        size = 65536;
+    }
+
+    sp<ABuffer> buffer = new ABuffer(size);
+    buffer->setRange(0, 0);
+
+    for (;;) {
+        size_t bufferRemaining = buffer->capacity() - buffer->size();
+
+        if (bufferRemaining == 0) {
+            bufferRemaining = 32768;
+
+            LOGV("increasing download buffer to %d bytes",
+                 buffer->size() + bufferRemaining);
+
+            sp<ABuffer> copy = new ABuffer(buffer->size() + bufferRemaining);
+            memcpy(copy->data(), buffer->data(), buffer->size());
+            copy->setRange(0, buffer->size());
+
+            buffer = copy;
+        }
+
+        ssize_t n = source->readAt(
+                buffer->size(), buffer->data() + buffer->size(),
+                bufferRemaining);
+
+        if (n < 0) {
+            return n;
+        }
+
+        if (n == 0) {
+            break;
+        }
+
+        buffer->setRange(0, buffer->size() + (size_t)n);
+    }
+
+    *out = buffer;
+
+    return OK;
+}
+
+sp<M3UParser> LiveSession::fetchPlaylist(const char *url) {
+    sp<ABuffer> buffer;
+    status_t err = fetchFile(url, &buffer);
+
+    if (err != OK) {
+        return NULL;
+    }
+
+    sp<M3UParser> playlist =
+        new M3UParser(url, buffer->data(), buffer->size());
+
+    if (playlist->initCheck() != OK) {
+        return NULL;
+    }
+
+    return playlist;
+}
+
+static double uniformRand() {
+    return (double)rand() / RAND_MAX;
+}
+
+size_t LiveSession::getBandwidthIndex() {
+    if (mBandwidthItems.size() == 0) {
+        return 0;
+    }
+
+#if 1
+    int32_t bandwidthBps;
+    if (mHTTPDataSource != NULL
+            && mHTTPDataSource->estimateBandwidth(&bandwidthBps)) {
+        LOGV("bandwidth estimated at %.2f kbps", bandwidthBps / 1024.0f);
+    } else {
+        LOGV("no bandwidth estimate.");
+        return 0;  // Pick the lowest bandwidth stream by default.
+    }
+
+    char value[PROPERTY_VALUE_MAX];
+    if (property_get("media.httplive.max-bw", value, NULL)) {
+        char *end;
+        long maxBw = strtoul(value, &end, 10);
+        if (end > value && *end == '\0') {
+            if (maxBw > 0 && bandwidthBps > maxBw) {
+                LOGV("bandwidth capped to %ld bps", maxBw);
+                bandwidthBps = maxBw;
+            }
+        }
+    }
+
+    // Consider only 80% of the available bandwidth usable.
+    bandwidthBps = (bandwidthBps * 8) / 10;
+
+    // Pick the highest bandwidth stream below or equal to estimated bandwidth.
+
+    size_t index = mBandwidthItems.size() - 1;
+    while (index > 0 && mBandwidthItems.itemAt(index).mBandwidth
+                            > (size_t)bandwidthBps) {
+        --index;
+    }
+#elif 0
+    // Change the bandwidth at random.
+    size_t index = uniformRand() * mBandwidthItems.size();
+#elif 0
+    // There's a 50% chance to stay on the current bandwidth and
+    // a 50% chance to switch to the next higher bandwidth (wrapping around
+    // to lowest)
+    const size_t kMinIndex = 0;
+
+    size_t index;
+    if (mPrevBandwidthIndex < 0) {
+        index = kMinIndex;
+    } else if (uniformRand() < 0.5) {
+        index = (size_t)mPrevBandwidthIndex;
+    } else {
+        index = mPrevBandwidthIndex + 1;
+        if (index == mBandwidthItems.size()) {
+            index = kMinIndex;
+        }
+    }
+#elif 0
+    // Pick the highest bandwidth stream below or equal to 1.2 Mbit/sec
+
+    size_t index = mBandwidthItems.size() - 1;
+    while (index > 0 && mBandwidthItems.itemAt(index).mBandwidth > 1200000) {
+        --index;
+    }
+#else
+    size_t index = mBandwidthItems.size() - 1;  // Highest bandwidth stream
+#endif
+
+    return index;
+}
+
+void LiveSession::onDownloadNext() {
+    size_t bandwidthIndex = getBandwidthIndex();
+
+rinse_repeat:
+    int64_t nowUs = ALooper::GetNowUs();
+
+    if (mLastPlaylistFetchTimeUs < 0
+            || (ssize_t)bandwidthIndex != mPrevBandwidthIndex
+            || (!mPlaylist->isComplete()
+                && mLastPlaylistFetchTimeUs + kMaxPlaylistAgeUs <= nowUs)) {
+        AString url;
+        if (mBandwidthItems.size() > 0) {
+            url = mBandwidthItems.editItemAt(bandwidthIndex).mURI;
+        } else {
+            url = mMasterURL;
+        }
+
+        bool firstTime = (mPlaylist == NULL);
+
+        mPlaylist = fetchPlaylist(url.c_str());
+        if (mPlaylist == NULL) {
+            LOGE("failed to load playlist at url '%s'", url.c_str());
+            mDataSource->queueEOS(ERROR_IO);
+            return;
+        }
+
+        if (firstTime) {
+            Mutex::Autolock autoLock(mLock);
+
+            int32_t targetDuration;
+            if (!mPlaylist->isComplete()
+                    || !mPlaylist->meta()->findInt32(
+                    "target-duration", &targetDuration)) {
+                mDurationUs = -1;
+            } else {
+                mDurationUs = 1000000ll * targetDuration * mPlaylist->size();
+            }
+        }
+
+        mLastPlaylistFetchTimeUs = ALooper::GetNowUs();
+    }
+
+    int32_t firstSeqNumberInPlaylist;
+    if (mPlaylist->meta() == NULL || !mPlaylist->meta()->findInt32(
+                "media-sequence", &firstSeqNumberInPlaylist)) {
+        firstSeqNumberInPlaylist = 0;
+    }
+
+    bool explicitDiscontinuity = false;
+    bool bandwidthChanged = false;
+
+    if (mSeekTimeUs >= 0) {
+        int32_t targetDuration;
+        if (mPlaylist->isComplete() &&
+                mPlaylist->meta()->findInt32(
+                    "target-duration", &targetDuration)) {
+            int64_t seekTimeSecs = (mSeekTimeUs + 500000ll) / 1000000ll;
+            int64_t index = seekTimeSecs / targetDuration;
+
+            if (index >= 0 && index < mPlaylist->size()) {
+                int32_t newSeqNumber = firstSeqNumberInPlaylist + index;
+
+                if (newSeqNumber != mSeqNumber) {
+                    LOGI("seeking to seq no %d", newSeqNumber);
+
+                    mSeqNumber = newSeqNumber;
+
+                    mDataSource->reset();
+
+                    // Resetting the data source will have had the
+                    // side effect of discarding any previously queued
+                    // bandwidth-change discontinuity.
+                    // Therefore we'll need to treat these explicit
+                    // discontinuities as involving a bandwidth change
+                    // even if they don't directly.
+                    explicitDiscontinuity = true;
+                    bandwidthChanged = true;
+                }
+            }
+        }
+
+        mSeekTimeUs = -1;
+
+        Mutex::Autolock autoLock(mLock);
+        mSeekDone = true;
+        mCondition.broadcast();
+    }
+
+    if (mSeqNumber < 0) {
+        if (mPlaylist->isComplete()) {
+            mSeqNumber = firstSeqNumberInPlaylist;
+        } else {
+            mSeqNumber = firstSeqNumberInPlaylist + mPlaylist->size() / 2;
+        }
+    }
+
+    int32_t lastSeqNumberInPlaylist =
+        firstSeqNumberInPlaylist + (int32_t)mPlaylist->size() - 1;
+
+    if (mSeqNumber < firstSeqNumberInPlaylist
+            || mSeqNumber > lastSeqNumberInPlaylist) {
+        if (mSeqNumber < firstSeqNumberInPlaylist
+                && mPrevBandwidthIndex != (ssize_t)bandwidthIndex) {
+            // Go back to the previous bandwidth.
+
+            LOGI("new bandwidth does not have the sequence number "
+                 "we're looking for, switching back to previous bandwidth");
+
+            mLastPlaylistFetchTimeUs = -1;
+            bandwidthIndex = mPrevBandwidthIndex;
+            goto rinse_repeat;
+        }
+
+        if (!mPlaylist->isComplete()
+                && mSeqNumber > lastSeqNumberInPlaylist
+                && mNumRetries < kMaxNumRetries) {
+            ++mNumRetries;
+
+            mLastPlaylistFetchTimeUs = -1;
+            postMonitorQueue(3000000ll);
+            return;
+        }
+
+        LOGE("Cannot find sequence number %d in playlist "
+             "(contains %d - %d)",
+             mSeqNumber, firstSeqNumberInPlaylist,
+             firstSeqNumberInPlaylist + mPlaylist->size() - 1);
+
+        mDataSource->queueEOS(ERROR_END_OF_STREAM);
+        return;
+    }
+
+    mNumRetries = 0;
+
+    AString uri;
+    sp<AMessage> itemMeta;
+    CHECK(mPlaylist->itemAt(
+                mSeqNumber - firstSeqNumberInPlaylist,
+                &uri,
+                &itemMeta));
+
+    int32_t val;
+    if (itemMeta->findInt32("discontinuity", &val) && val != 0) {
+        explicitDiscontinuity = true;
+    }
+
+    sp<ABuffer> buffer;
+    status_t err = fetchFile(uri.c_str(), &buffer);
+    if (err != OK) {
+        LOGE("failed to fetch .ts segment at url '%s'", uri.c_str());
+        mDataSource->queueEOS(err);
+        return;
+    }
+
+    CHECK(buffer != NULL);
+
+    CHECK_EQ((status_t)OK,
+             decryptBuffer(mSeqNumber - firstSeqNumberInPlaylist, buffer));
+
+    if (buffer->size() == 0 || buffer->data()[0] != 0x47) {
+        // Not a transport stream???
+
+        LOGE("This doesn't look like a transport stream...");
+
+        mDataSource->queueEOS(ERROR_UNSUPPORTED);
+        return;
+    }
+
+    if ((size_t)mPrevBandwidthIndex != bandwidthIndex) {
+        bandwidthChanged = true;
+    }
+
+    if (mPrevBandwidthIndex < 0) {
+        // Don't signal a bandwidth change at the very beginning of
+        // playback.
+        bandwidthChanged = false;
+    }
+
+    if (explicitDiscontinuity || bandwidthChanged) {
+        // Signal discontinuity.
+
+        LOGI("queueing discontinuity (explicit=%d, bandwidthChanged=%d)",
+             explicitDiscontinuity, bandwidthChanged);
+
+        sp<ABuffer> tmp = new ABuffer(188);
+        memset(tmp->data(), 0, tmp->size());
+        tmp->data()[1] = bandwidthChanged;
+
+        mDataSource->queueBuffer(tmp);
+    }
+
+    mDataSource->queueBuffer(buffer);
+
+    mPrevBandwidthIndex = bandwidthIndex;
+    ++mSeqNumber;
+
+    postMonitorQueue();
+}
+
+void LiveSession::onMonitorQueue() {
+    if (mSeekTimeUs >= 0
+            || mDataSource->countQueuedBuffers() < kMaxNumQueuedFragments) {
+        onDownloadNext();
+    } else {
+        postMonitorQueue(1000000ll);
+    }
+}
+
+status_t LiveSession::decryptBuffer(
+        size_t playlistIndex, const sp<ABuffer> &buffer) {
+    sp<AMessage> itemMeta;
+    bool found = false;
+    AString method;
+
+    for (ssize_t i = playlistIndex; i >= 0; --i) {
+        AString uri;
+        CHECK(mPlaylist->itemAt(i, &uri, &itemMeta));
+
+        if (itemMeta->findString("cipher-method", &method)) {
+            found = true;
+            break;
+        }
+    }
+
+    if (!found) {
+        method = "NONE";
+    }
+
+    if (method == "NONE") {
+        return OK;
+    } else if (!(method == "AES-128")) {
+        LOGE("Unsupported cipher method '%s'", method.c_str());
+        return ERROR_UNSUPPORTED;
+    }
+
+    AString keyURI;
+    if (!itemMeta->findString("cipher-uri", &keyURI)) {
+        LOGE("Missing key uri");
+        return ERROR_MALFORMED;
+    }
+
+    ssize_t index = mAESKeyForURI.indexOfKey(keyURI);
+
+    sp<ABuffer> key;
+    if (index >= 0) {
+        key = mAESKeyForURI.valueAt(index);
+    } else {
+        key = new ABuffer(16);
+
+        sp<NuHTTPDataSource> keySource = new NuHTTPDataSource;
+        status_t err = keySource->connect(keyURI.c_str());
+
+        if (err == OK) {
+            size_t offset = 0;
+            while (offset < 16) {
+                ssize_t n = keySource->readAt(
+                        offset, key->data() + offset, 16 - offset);
+                if (n <= 0) {
+                    err = ERROR_IO;
+                    break;
+                }
+
+                offset += n;
+            }
+        }
+
+        if (err != OK) {
+            LOGE("failed to fetch cipher key from '%s'.", keyURI.c_str());
+            return ERROR_IO;
+        }
+
+        mAESKeyForURI.add(keyURI, key);
+    }
+
+    AES_KEY aes_key;
+    if (AES_set_decrypt_key(key->data(), 128, &aes_key) != 0) {
+        LOGE("failed to set AES decryption key.");
+        return UNKNOWN_ERROR;
+    }
+
+    unsigned char aes_ivec[16];
+
+    AString iv;
+    if (itemMeta->findString("cipher-iv", &iv)) {
+        if ((!iv.startsWith("0x") && !iv.startsWith("0X"))
+                || iv.size() != 16 * 2 + 2) {
+            LOGE("malformed cipher IV '%s'.", iv.c_str());
+            return ERROR_MALFORMED;
+        }
+
+        memset(aes_ivec, 0, sizeof(aes_ivec));
+        for (size_t i = 0; i < 16; ++i) {
+            char c1 = tolower(iv.c_str()[2 + 2 * i]);
+            char c2 = tolower(iv.c_str()[3 + 2 * i]);
+            if (!isxdigit(c1) || !isxdigit(c2)) {
+                LOGE("malformed cipher IV '%s'.", iv.c_str());
+                return ERROR_MALFORMED;
+            }
+            uint8_t nibble1 = isdigit(c1) ? c1 - '0' : c1 - 'a' + 10;
+            uint8_t nibble2 = isdigit(c2) ? c2 - '0' : c2 - 'a' + 10;
+
+            aes_ivec[i] = nibble1 << 4 | nibble2;
+        }
+    } else {
+        memset(aes_ivec, 0, sizeof(aes_ivec));
+        aes_ivec[15] = mSeqNumber & 0xff;
+        aes_ivec[14] = (mSeqNumber >> 8) & 0xff;
+        aes_ivec[13] = (mSeqNumber >> 16) & 0xff;
+        aes_ivec[12] = (mSeqNumber >> 24) & 0xff;
+    }
+
+    AES_cbc_encrypt(
+            buffer->data(), buffer->data(), buffer->size(),
+            &aes_key, aes_ivec, AES_DECRYPT);
+
+    // hexdump(buffer->data(), buffer->size());
+
+    size_t n = buffer->size();
+    CHECK_GT(n, 0u);
+
+    size_t pad = buffer->data()[n - 1];
+
+    CHECK_GT(pad, 0u);
+    CHECK_LE(pad, 16u);
+    CHECK_GE((size_t)n, pad);
+    for (size_t i = 0; i < pad; ++i) {
+        CHECK_EQ((unsigned)buffer->data()[n - 1 - i], pad);
+    }
+
+    n -= pad;
+
+    buffer->setRange(buffer->offset(), n);
+
+    return OK;
+}
+
+void LiveSession::postMonitorQueue(int64_t delayUs) {
+    sp<AMessage> msg = new AMessage(kWhatMonitorQueue, id());
+    msg->setInt32("generation", ++mMonitorQueueGeneration);
+    msg->post(delayUs);
+}
+
+void LiveSession::onSeek(const sp<AMessage> &msg) {
+    int64_t timeUs;
+    CHECK(msg->findInt64("timeUs", &timeUs));
+
+    mSeekTimeUs = timeUs;
+    postMonitorQueue();
+}
+
+status_t LiveSession::getDuration(int64_t *durationUs) {
+    Mutex::Autolock autoLock(mLock);
+    *durationUs = mDurationUs;
+
+    return OK;
+}
+
+bool LiveSession::isSeekable() {
+    int64_t durationUs;
+    return getDuration(&durationUs) == OK && durationUs >= 0;
+}
+
+}  // namespace android
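
The adaptive part of getBandwidthIndex() above boils down to: take the estimate from NuHTTPDataSource, optionally clamp it to the media.httplive.max-bw property, treat only 80% of it as usable, then walk the bandwidth-sorted variant list down from the top until an entry fits. The same walk isolated as a standalone sketch (the example bitrates are made up):

    #include <stdint.h>
    #include <stddef.h>

    // 'bandwidths' is assumed sorted ascending, as mBandwidthItems is after
    // SortByBandwidth. Mirrors the loop in LiveSession::getBandwidthIndex().
    static size_t pickBandwidthIndex(
            const unsigned long *bandwidths, size_t count, int32_t estimatedBps) {
        if (count == 0) {
            return 0;
        }

        int32_t usableBps = (estimatedBps * 8) / 10;  // keep 20% headroom

        size_t index = count - 1;                     // start at the highest variant
        while (index > 0 && bandwidths[index] > (size_t)usableBps) {
            --index;
        }
        return index;
    }

    // With variants of 200 kbps, 800 kbps and 1.5 Mbps and a 1 Mbps estimate,
    // 80% usable = 800 kbps, so the 800 kbps variant (index 1) is chosen.
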
+
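
For AES-128 segments, decryptBuffer() above needs an IV and a padding strip: if the playlist carries no IV attribute, the IV is the media sequence number written big-endian into the low bytes of a zeroed 16-byte vector, and after AES_cbc_encrypt(..., AES_DECRYPT) the trailing PKCS#7-style padding is removed. A sketch of those two steps, returning errors where the code above uses CHECKs:

    #include <stdint.h>
    #include <string.h>
    #include <sys/types.h>

    // Default IV: the media sequence number as a big-endian 128-bit value
    // (only the low 32 bits are non-zero, matching decryptBuffer()).
    static void makeDefaultIV(int32_t seqNumber, unsigned char ivec[16]) {
        memset(ivec, 0, 16);
        ivec[15] = seqNumber & 0xff;
        ivec[14] = (seqNumber >> 8) & 0xff;
        ivec[13] = (seqNumber >> 16) & 0xff;
        ivec[12] = (seqNumber >> 24) & 0xff;
    }

    // After decryption, the last byte gives the pad length (1..16) and every
    // pad byte must repeat it. Returns the payload size, or -1 if malformed.
    static ssize_t stripPadding(const uint8_t *data, size_t n) {
        if (n == 0) {
            return -1;
        }
        size_t pad = data[n - 1];
        if (pad == 0 || pad > 16 || pad > n) {
            return -1;
        }
        for (size_t i = 0; i < pad; ++i) {
            if (data[n - 1 - i] != pad) {
                return -1;
            }
        }
        return n - pad;
    }
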
diff --git a/media/libstagefright/httplive/LiveSource.cpp b/media/libstagefright/httplive/LiveSource.cpp
deleted file mode 100644
index 29c7b04..0000000
--- a/media/libstagefright/httplive/LiveSource.cpp
+++ /dev/null
@@ -1,428 +0,0 @@
-/*
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "LiveSource"
-#include <utils/Log.h>
-
-#include "include/LiveSource.h"
-#include "include/M3UParser.h"
-#include "include/NuHTTPDataSource.h"
-
-#include <media/stagefright/foundation/ABuffer.h>
-#include <media/stagefright/FileSource.h>
-#include <media/stagefright/MediaDebug.h>
-
-namespace android {
-
-LiveSource::LiveSource(const char *url)
-    : mMasterURL(url),
-      mInitCheck(NO_INIT),
-      mDurationUs(-1),
-      mPlaylistIndex(0),
-      mLastFetchTimeUs(-1),
-      mSource(new NuHTTPDataSource),
-      mSourceSize(0),
-      mOffsetBias(0),
-      mSignalDiscontinuity(false),
-      mPrevBandwidthIndex(-1) {
-    if (switchToNext()) {
-        mInitCheck = OK;
-
-        determineSeekability();
-    }
-}
-
-LiveSource::~LiveSource() {
-}
-
-status_t LiveSource::initCheck() const {
-    return mInitCheck;
-}
-
-// static
-int LiveSource::SortByBandwidth(const BandwidthItem *a, const BandwidthItem *b) {
-    if (a->mBandwidth < b->mBandwidth) {
-        return -1;
-    } else if (a->mBandwidth == b->mBandwidth) {
-        return 0;
-    }
-
-    return 1;
-}
-
-static double uniformRand() {
-    return (double)rand() / RAND_MAX;
-}
-
-bool LiveSource::loadPlaylist(bool fetchMaster) {
-    mSignalDiscontinuity = false;
-
-    mPlaylist.clear();
-    mPlaylistIndex = 0;
-
-    if (fetchMaster) {
-        mPrevBandwidthIndex = -1;
-
-        sp<ABuffer> buffer;
-        status_t err = fetchM3U(mMasterURL.c_str(), &buffer);
-
-        if (err != OK) {
-            return false;
-        }
-
-        mPlaylist = new M3UParser(
-                mMasterURL.c_str(), buffer->data(), buffer->size());
-
-        if (mPlaylist->initCheck() != OK) {
-            return false;
-        }
-
-        if (mPlaylist->isVariantPlaylist()) {
-            for (size_t i = 0; i < mPlaylist->size(); ++i) {
-                BandwidthItem item;
-
-                sp<AMessage> meta;
-                mPlaylist->itemAt(i, &item.mURI, &meta);
-
-                unsigned long bandwidth;
-                CHECK(meta->findInt32("bandwidth", (int32_t *)&item.mBandwidth));
-
-                mBandwidthItems.push(item);
-            }
-            mPlaylist.clear();
-
-            // fall through
-            if (mBandwidthItems.size() == 0) {
-                return false;
-            }
-
-            mBandwidthItems.sort(SortByBandwidth);
-
-            for (size_t i = 0; i < mBandwidthItems.size(); ++i) {
-                const BandwidthItem &item = mBandwidthItems.itemAt(i);
-                LOGV("item #%d: %s", i, item.mURI.c_str());
-            }
-        }
-    }
-
-    if (mBandwidthItems.size() > 0) {
-#if 0
-        // Change bandwidth at random()
-        size_t index = uniformRand() * mBandwidthItems.size();
-#elif 0
-        // There's a 50% chance to stay on the current bandwidth and
-        // a 50% chance to switch to the next higher bandwidth (wrapping around
-        // to lowest)
-        size_t index;
-        if (uniformRand() < 0.5) {
-            index = mPrevBandwidthIndex < 0 ? 0 : (size_t)mPrevBandwidthIndex;
-        } else {
-            if (mPrevBandwidthIndex < 0) {
-                index = 0;
-            } else {
-                index = mPrevBandwidthIndex + 1;
-                if (index == mBandwidthItems.size()) {
-                    index = 0;
-                }
-            }
-        }
-#else
-        // Stay on the lowest bandwidth available.
-        size_t index = mBandwidthItems.size() - 1;  // Highest bandwidth stream
-#endif
-
-        mURL = mBandwidthItems.editItemAt(index).mURI;
-
-        if (mPrevBandwidthIndex >= 0 && (size_t)mPrevBandwidthIndex != index) {
-            // If we switched streams because of bandwidth changes,
-            // we'll signal this discontinuity by inserting a
-            // special transport stream packet into the stream.
-            mSignalDiscontinuity = true;
-        }
-
-        mPrevBandwidthIndex = index;
-    } else {
-        mURL = mMasterURL;
-    }
-
-    if (mPlaylist == NULL) {
-        sp<ABuffer> buffer;
-        status_t err = fetchM3U(mURL.c_str(), &buffer);
-
-        if (err != OK) {
-            return false;
-        }
-
-        mPlaylist = new M3UParser(mURL.c_str(), buffer->data(), buffer->size());
-
-        if (mPlaylist->initCheck() != OK) {
-            return false;
-        }
-
-        if (mPlaylist->isVariantPlaylist()) {
-            return false;
-        }
-    }
-
-    if (!mPlaylist->meta()->findInt32(
-                "media-sequence", &mFirstItemSequenceNumber)) {
-        mFirstItemSequenceNumber = 0;
-    }
-
-    return true;
-}
-
-static int64_t getNowUs() {
-    struct timeval tv;
-    gettimeofday(&tv, NULL);
-
-    return (int64_t)tv.tv_usec + tv.tv_sec * 1000000ll;
-}
-
-bool LiveSource::switchToNext() {
-    mSignalDiscontinuity = false;
-
-    mOffsetBias += mSourceSize;
-    mSourceSize = 0;
-
-    if (mLastFetchTimeUs < 0 || getNowUs() >= mLastFetchTimeUs + 15000000ll
-        || mPlaylistIndex == mPlaylist->size()) {
-        int32_t nextSequenceNumber =
-            mPlaylistIndex + mFirstItemSequenceNumber;
-
-        if (!loadPlaylist(mLastFetchTimeUs < 0)) {
-            LOGE("failed to reload playlist");
-            return false;
-        }
-
-        if (mLastFetchTimeUs < 0) {
-            mPlaylistIndex = 0;
-        } else {
-            if (nextSequenceNumber < mFirstItemSequenceNumber
-                    || nextSequenceNumber
-                            >= mFirstItemSequenceNumber + (int32_t)mPlaylist->size()) {
-                LOGE("Cannot find sequence number %d in new playlist",
-                     nextSequenceNumber);
-
-                return false;
-            }
-
-            mPlaylistIndex = nextSequenceNumber - mFirstItemSequenceNumber;
-        }
-
-        mLastFetchTimeUs = getNowUs();
-    }
-
-    AString uri;
-    sp<AMessage> itemMeta;
-    CHECK(mPlaylist->itemAt(mPlaylistIndex, &uri, &itemMeta));
-    LOGV("switching to %s", uri.c_str());
-
-    if (mSource->connect(uri.c_str()) != OK
-            || mSource->getSize(&mSourceSize) != OK) {
-        return false;
-    }
-
-    int32_t val;
-    if (itemMeta->findInt32("discontinuity", &val) && val != 0) {
-        mSignalDiscontinuity = true;
-    }
-
-    mPlaylistIndex++;
-    return true;
-}
-
-static const ssize_t kHeaderSize = 188;
-
-ssize_t LiveSource::readAt(off_t offset, void *data, size_t size) {
-    CHECK(offset >= mOffsetBias);
-    offset -= mOffsetBias;
-
-    off_t delta = mSignalDiscontinuity ? kHeaderSize : 0;
-
-    if (offset >= mSourceSize + delta) {
-        CHECK_EQ(offset, mSourceSize + delta);
-
-        offset -= mSourceSize + delta;
-        if (!switchToNext()) {
-            return ERROR_END_OF_STREAM;
-        }
-
-        if (mSignalDiscontinuity) {
-            LOGV("switchToNext changed streams");
-        } else {
-            LOGV("switchToNext stayed within the same stream");
-        }
-
-        mOffsetBias += delta;
-
-        delta = mSignalDiscontinuity ? kHeaderSize : 0;
-    }
-
-    if (offset < delta) {
-        size_t avail = delta - offset;
-        memset(data, 0, avail);
-        return avail;
-    }
-
-    size_t numRead = 0;
-    while (numRead < size) {
-        ssize_t n = mSource->readAt(
-                offset + numRead - delta,
-                (uint8_t *)data + numRead, size - numRead);
-
-        if (n <= 0) {
-            break;
-        }
-
-        numRead += n;
-    }
-
-    return numRead;
-}
-
-status_t LiveSource::fetchM3U(const char *url, sp<ABuffer> *out) {
-    *out = NULL;
-
-    sp<DataSource> source;
-
-    if (!strncasecmp(url, "file://", 7)) {
-        source = new FileSource(url + 7);
-    } else {
-        CHECK(!strncasecmp(url, "http://", 7));
-
-        status_t err = mSource->connect(url);
-
-        if (err != OK) {
-            return err;
-        }
-
-        source = mSource;
-    }
-
-    off_t size;
-    status_t err = source->getSize(&size);
-
-    if (err != OK) {
-        size = 65536;
-    }
-
-    sp<ABuffer> buffer = new ABuffer(size);
-    buffer->setRange(0, 0);
-
-    for (;;) {
-        size_t bufferRemaining = buffer->capacity() - buffer->size();
-
-        if (bufferRemaining == 0) {
-            bufferRemaining = 32768;
-
-            LOGV("increasing download buffer to %d bytes",
-                 buffer->size() + bufferRemaining);
-
-            sp<ABuffer> copy = new ABuffer(buffer->size() + bufferRemaining);
-            memcpy(copy->data(), buffer->data(), buffer->size());
-            copy->setRange(0, buffer->size());
-
-            buffer = copy;
-        }
-
-        ssize_t n = source->readAt(
-                buffer->size(), buffer->data() + buffer->size(),
-                bufferRemaining);
-
-        if (n < 0) {
-            return err;
-        }
-
-        if (n == 0) {
-            break;
-        }
-
-        buffer->setRange(0, buffer->size() + (size_t)n);
-    }
-
-    *out = buffer;
-
-    return OK;
-}
-
-bool LiveSource::seekTo(int64_t seekTimeUs) {
-    LOGV("seek to %lld us", seekTimeUs);
-
-    if (!mPlaylist->isComplete()) {
-        return false;
-    }
-
-    int32_t targetDuration;
-    if (!mPlaylist->meta()->findInt32("target-duration", &targetDuration)) {
-        return false;
-    }
-
-    int64_t seekTimeSecs = (seekTimeUs + 500000ll) / 1000000ll;
-
-    int64_t index = seekTimeSecs / targetDuration;
-
-    if (index < 0 || index >= mPlaylist->size()) {
-        return false;
-    }
-
-    size_t newPlaylistIndex = mFirstItemSequenceNumber + index;
-
-    if (newPlaylistIndex == mPlaylistIndex) {
-        return false;
-    }
-
-    mPlaylistIndex = newPlaylistIndex;
-
-    switchToNext();
-    mOffsetBias = 0;
-
-    LOGV("seeking to index %lld", index);
-
-    return true;
-}
-
-bool LiveSource::getDuration(int64_t *durationUs) const {
-    if (mDurationUs >= 0) {
-        *durationUs = mDurationUs;
-        return true;
-    }
-
-    *durationUs = 0;
-    return false;
-}
-
-bool LiveSource::isSeekable() const {
-    return mDurationUs >= 0;
-}
-
-void LiveSource::determineSeekability() {
-    mDurationUs = -1;
-
-    if (!mPlaylist->isComplete()) {
-        return;
-    }
-
-    int32_t targetDuration;
-    if (!mPlaylist->meta()->findInt32("target-duration", &targetDuration)) {
-        return;
-    }
-
-    mDurationUs = targetDuration * 1000000ll * mPlaylist->size();
-}
-
-}  // namespace android
diff --git a/media/libstagefright/httplive/M3UParser.cpp b/media/libstagefright/httplive/M3UParser.cpp
index 90f3d6d..95f6741 100644
--- a/media/libstagefright/httplive/M3UParser.cpp
+++ b/media/libstagefright/httplive/M3UParser.cpp
@@ -14,6 +14,10 @@
  * limitations under the License.
  */
 
+//#define LOG_NDEBUG 0
+#define LOG_TAG "M3UParser"
+#include <utils/Log.h>
+
 #include "include/M3UParser.h"
 
 #include <media/stagefright/foundation/AMessage.h>
@@ -80,14 +84,18 @@
     out->clear();
 
     if (strncasecmp("http://", baseURL, 7)
+            && strncasecmp("https://", baseURL, 8)
             && strncasecmp("file://", baseURL, 7)) {
         // Base URL must be absolute
         return false;
     }
 
-    if (!strncasecmp("http://", url, 7)) {
+    if (!strncasecmp("http://", url, 7) || !strncasecmp("https://", url, 8)) {
         // "url" is already an absolute URL, ignore base URL.
         out->setTo(url);
+
+        LOGV("base:'%s', url:'%s' => '%s'", baseURL, url, out->c_str());
+
         return true;
     }
 
@@ -108,6 +116,8 @@
         out->append(url);
     }
 
+    LOGV("base:'%s', url:'%s' => '%s'", baseURL, url, out->c_str());
+
     return true;
 }
 
@@ -158,6 +168,11 @@
                     return ERROR_MALFORMED;
                 }
                 err = parseMetaData(line, &mMeta, "media-sequence");
+            } else if (line.startsWith("#EXT-X-KEY")) {
+                if (mIsVariantPlaylist) {
+                    return ERROR_MALFORMED;
+                }
+                err = parseCipherInfo(line, &itemMeta, mBaseURI);
             } else if (line.startsWith("#EXT-X-ENDLIST")) {
                 mIsComplete = true;
             } else if (line.startsWith("#EXTINF")) {
@@ -292,6 +307,75 @@
 }
 
 // static
+status_t M3UParser::parseCipherInfo(
+        const AString &line, sp<AMessage> *meta, const AString &baseURI) {
+    ssize_t colonPos = line.find(":");
+
+    if (colonPos < 0) {
+        return ERROR_MALFORMED;
+    }
+
+    size_t offset = colonPos + 1;
+
+    while (offset < line.size()) {
+        ssize_t end = line.find(",", offset);
+        if (end < 0) {
+            end = line.size();
+        }
+
+        AString attr(line, offset, end - offset);
+        attr.trim();
+
+        offset = end + 1;
+
+        ssize_t equalPos = attr.find("=");
+        if (equalPos < 0) {
+            continue;
+        }
+
+        AString key(attr, 0, equalPos);
+        key.trim();
+
+        AString val(attr, equalPos + 1, attr.size() - equalPos - 1);
+        val.trim();
+
+        LOGV("key=%s value=%s", key.c_str(), val.c_str());
+
+        key.tolower();
+
+        if (key == "method" || key == "uri" || key == "iv") {
+            if (meta->get() == NULL) {
+                *meta = new AMessage;
+            }
+
+            if (key == "uri") {
+                if (val.size() >= 2
+                        && val.c_str()[0] == '"'
+                        && val.c_str()[val.size() - 1] == '"') {
+                    // Remove surrounding quotes.
+                    AString tmp(val, 1, val.size() - 2);
+                    val = tmp;
+                }
+
+                AString absURI;
+                if (MakeURL(baseURI.c_str(), val.c_str(), &absURI)) {
+                    val = absURI;
+                } else {
+                    LOGE("failed to make absolute url for '%s'.",
+                         val.c_str());
+                }
+            }
+
+            key.insert(AString("cipher-"), 0);
+
+            (*meta)->setString(key.c_str(), val.c_str(), val.size());
+        }
+    }
+
+    return OK;
+}
+
+// static
 status_t M3UParser::ParseInt32(const char *s, int32_t *x) {
     char *end;
     long lval = strtol(s, &end, 10);
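
As a concrete illustration of what parseCipherInfo() above produces (the URL and IV are made up), a line such as

    #EXT-X-KEY:METHOD=AES-128,URI="keys/segment.key",IV=0x000102030405060708090a0b0c0d0e0f

parsed against a playlist at http://example.com/live/prog.m3u8 would leave roughly the following entries on the item's meta message:

    cipher-method = "AES-128"
    cipher-uri    = "http://example.com/live/keys/segment.key"   (quotes stripped, made absolute via MakeURL)
    cipher-iv     = "0x000102030405060708090a0b0c0d0e0f"

which are exactly the keys LiveSession::decryptBuffer() looks up.
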
diff --git a/media/libstagefright/id3/ID3.cpp b/media/libstagefright/id3/ID3.cpp
index da340f7..45e018d 100644
--- a/media/libstagefright/id3/ID3.cpp
+++ b/media/libstagefright/id3/ID3.cpp
@@ -149,7 +149,25 @@
     }
 
     if (header.version_major == 4) {
-        if (!removeUnsynchronizationV2_4()) {
+        void *copy = malloc(size);
+        memcpy(copy, mData, size);
+
+        bool success = removeUnsynchronizationV2_4(false /* iTunesHack */);
+        if (!success) {
+            memcpy(mData, copy, size);
+            mSize = size;
+
+            success = removeUnsynchronizationV2_4(true /* iTunesHack */);
+
+            if (success) {
+                LOGV("Had to apply the iTunes hack to parse this ID3 tag");
+            }
+        }
+
+        free(copy);
+        copy = NULL;
+
+        if (!success) {
             free(mData);
             mData = NULL;
 
@@ -261,7 +279,7 @@
     }
 }
 
-bool ID3::removeUnsynchronizationV2_4() {
+bool ID3::removeUnsynchronizationV2_4(bool iTunesHack) {
     size_t oldSize = mSize;
 
     size_t offset = 0;
@@ -271,7 +289,9 @@
         }
 
         size_t dataSize;
-        if (!ParseSyncsafeInteger(&mData[offset + 4], &dataSize)) {
+        if (iTunesHack) {
+            dataSize = U32_AT(&mData[offset + 4]);
+        } else if (!ParseSyncsafeInteger(&mData[offset + 4], &dataSize)) {
             return false;
         }
 
@@ -308,7 +328,7 @@
             flags &= ~2;
         }
 
-        if (flags != prevFlags) {
+        if (flags != prevFlags || iTunesHack) {
             WriteSyncsafeInteger(&mData[offset + 4], dataSize);
             mData[offset + 8] = flags >> 8;
             mData[offset + 9] = flags & 0xff;
@@ -769,8 +789,8 @@
 bool ID3::parseV1(const sp<DataSource> &source) {
     const size_t V1_TAG_SIZE = 128;
 
-    off_t size;
-    if (source->getSize(&size) != OK || size < (off_t)V1_TAG_SIZE) {
+    off64_t size;
+    if (source->getSize(&size) != OK || size < (off64_t)V1_TAG_SIZE) {
         return false;
     }
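
The only difference the iTunesHack flag introduces is how the four size bytes of each v2.4 frame header are read: a spec-compliant size is a syncsafe integer (7 significant bits per byte, high bit clear), while the files the hack targets store a plain 32-bit big-endian value, which is what U32_AT() reads. A small sketch of the two decodings:

    #include <stdint.h>
    #include <stddef.h>

    // Spec-compliant ID3v2.4 frame size: 4 bytes, 7 significant bits each.
    static bool parseSyncsafe(const uint8_t enc[4], size_t *x) {
        *x = 0;
        for (int i = 0; i < 4; ++i) {
            if (enc[i] & 0x80) {
                return false;  // the high bit of every byte must be clear
            }
            *x = (*x << 7) | enc[i];
        }
        return true;
    }

    // The non-conforming variant: an ordinary 32-bit big-endian integer.
    static uint32_t parsePlainU32(const uint8_t enc[4]) {
        return ((uint32_t)enc[0] << 24) | ((uint32_t)enc[1] << 16)
                | ((uint32_t)enc[2] << 8) | enc[3];
    }

    // e.g. the bytes 00 00 02 01 decode to 257 as a syncsafe integer,
    // but to 513 when read as a plain 32-bit integer.
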
 
diff --git a/media/libstagefright/include/AACEncoder.h b/media/libstagefright/include/AACEncoder.h
index ecc533f..3d5fc60 100644
--- a/media/libstagefright/include/AACEncoder.h
+++ b/media/libstagefright/include/AACEncoder.h
@@ -60,7 +60,7 @@
             kNumSamplesPerFrame = 1024,
         };
 
-        int16_t           mInputFrame[kNumSamplesPerFrame];
+        int16_t           *mInputFrame;
 
         uint8_t           mAudioSpecificConfigData[2]; // auido specific data
         void             *mEncoderHandle;
diff --git a/media/libstagefright/include/AwesomePlayer.h b/media/libstagefright/include/AwesomePlayer.h
index e352928..17b83c1 100644
--- a/media/libstagefright/include/AwesomePlayer.h
+++ b/media/libstagefright/include/AwesomePlayer.h
@@ -45,10 +45,11 @@
 class DrmManagerClinet;
 class DecryptHandle;
 
+struct LiveSession;
+
 struct AwesomeRenderer : public RefBase {
     AwesomeRenderer() {}
 
-    virtual status_t initCheck() const = 0;
     virtual void render(MediaBuffer *buffer) = 0;
 
 private:
@@ -68,6 +69,8 @@
 
     status_t setDataSource(int fd, int64_t offset, int64_t length);
 
+    status_t setDataSource(const sp<IStreamSource> &source);
+
     void reset();
 
     status_t prepare();
@@ -80,7 +83,7 @@
 
     bool isPlaying() const;
 
-    void setISurface(const sp<ISurface> &isurface);
+    void setSurface(const sp<Surface> &surface);
     void setAudioSink(const sp<MediaPlayerBase::AudioSink> &audioSink);
     status_t setLooping(bool shouldLoop);
 
@@ -89,11 +92,6 @@
 
     status_t seekTo(int64_t timeUs);
 
-    status_t getVideoDimensions(int32_t *width, int32_t *height) const;
-
-    status_t suspend();
-    status_t resume();
-
     // This is a mask of MediaExtractor::Flags.
     uint32_t flags() const;
 
@@ -115,6 +113,11 @@
         AUDIO_AT_EOS        = 256,
         VIDEO_AT_EOS        = 512,
         AUTO_LOOPING        = 1024,
+
+        // We are basically done preparing but are currently buffering
+        // sufficient data to begin playback and finish the preparation phase
+        // for good.
+        PREPARING_CONNECTED = 2048,
     };
 
     mutable Mutex mLock;
@@ -125,7 +128,7 @@
     bool mQueueStarted;
     wp<MediaPlayerBase> mListener;
 
-    sp<ISurface> mISurface;
+    sp<Surface> mSurface;
     sp<MediaPlayerBase::AudioSink> mAudioSink;
 
     SystemTimeSource mSystemTimeSource;
@@ -148,8 +151,8 @@
 
     uint32_t mFlags;
     uint32_t mExtractorFlags;
+    uint32_t mSinceLastDropped;
 
-    int32_t mVideoWidth, mVideoHeight;
     int64_t mTimeSourceDeltaUs;
     int64_t mVideoTimeUs;
 
@@ -183,7 +186,6 @@
     void postCheckAudioStatusEvent_l();
     status_t play_l();
 
-    MediaBuffer *mLastVideoBuffer;
     MediaBuffer *mVideoBuffer;
 
     sp<NuHTTPDataSource> mConnectingDataSource;
@@ -194,31 +196,7 @@
     sp<ARTPSession> mRTPSession;
     sp<UDPPusher> mRTPPusher, mRTCPPusher;
 
-    struct SuspensionState {
-        String8 mUri;
-        KeyedVector<String8, String8> mUriHeaders;
-        sp<DataSource> mFileSource;
-
-        uint32_t mFlags;
-        int64_t mPositionUs;
-
-        void *mLastVideoFrame;
-        size_t mLastVideoFrameSize;
-        int32_t mColorFormat;
-        int32_t mVideoWidth, mVideoHeight;
-        int32_t mDecodedWidth, mDecodedHeight;
-
-        SuspensionState()
-            : mLastVideoFrame(NULL) {
-        }
-
-        ~SuspensionState() {
-            if (mLastVideoFrame) {
-                free(mLastVideoFrame);
-                mLastVideoFrame = NULL;
-            }
-        }
-    } *mSuspensionState;
+    sp<LiveSession> mLiveSession;
 
     DrmManagerClient *mDrmManagerClient;
     DecryptHandle *mDecryptHandle;
@@ -233,7 +211,8 @@
     void partial_reset_l();
     status_t seekTo_l(int64_t timeUs);
     status_t pause_l(bool at_eos = false);
-    status_t initRenderer_l();
+    void initRenderer_l();
+    void notifyVideoSize_l();
     void seekAudioIfNecessary_l();
 
     void cancelPlayerEvents(bool keepBufferingGoing = false);
@@ -267,6 +246,7 @@
     bool getBitrate(int64_t *bitrate);
 
     void finishSeekIfNecessary(int64_t videoTimeUs);
+    void ensureCacheIsFetching_l();
 
     AwesomePlayer(const AwesomePlayer &);
     AwesomePlayer &operator=(const AwesomePlayer &);
diff --git a/media/libstagefright/include/HTTPStream.h b/media/libstagefright/include/HTTPStream.h
index 793798f..545cd0c 100644
--- a/media/libstagefright/include/HTTPStream.h
+++ b/media/libstagefright/include/HTTPStream.h
@@ -18,10 +18,9 @@
 
 #define HTTP_STREAM_H_
 
-#include "stagefright_string.h"
-
 #include <sys/types.h>
 
+#include <media/stagefright/foundation/AString.h>
 #include <media/stagefright/MediaErrors.h>
 #include <utils/KeyedVector.h>
 #include <utils/threads.h>
@@ -50,7 +49,7 @@
     static const char *kStatusKey;
 
     bool find_header_value(
-            const string &key, string *value) const;
+            const AString &key, AString *value) const;
 
     // Pass a negative value to disable the timeout.
     void setReceiveTimeout(int seconds);
@@ -70,7 +69,7 @@
     Mutex mLock;
     int mSocket;
 
-    KeyedVector<string, string> mHeaders;
+    KeyedVector<AString, AString> mHeaders;
 
     HTTPStream(const HTTPStream &);
     HTTPStream &operator=(const HTTPStream &);
diff --git a/media/libstagefright/include/ID3.h b/media/libstagefright/include/ID3.h
index 7ddbb41..98c82a4 100644
--- a/media/libstagefright/include/ID3.h
+++ b/media/libstagefright/include/ID3.h
@@ -80,7 +80,7 @@
     bool parseV1(const sp<DataSource> &source);
     bool parseV2(const sp<DataSource> &source);
     void removeUnsynchronization();
-    bool removeUnsynchronizationV2_4();
+    bool removeUnsynchronizationV2_4(bool iTunesHack);
 
     static bool ParseSyncsafeInteger(const uint8_t encoded[4], size_t *x);
 
diff --git a/media/libstagefright/include/LiveSession.h b/media/libstagefright/include/LiveSession.h
new file mode 100644
index 0000000..41f5ad0
--- /dev/null
+++ b/media/libstagefright/include/LiveSession.h
@@ -0,0 +1,115 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef LIVE_SESSION_H_
+
+#define LIVE_SESSION_H_
+
+#include <media/stagefright/foundation/AHandler.h>
+
+namespace android {
+
+struct ABuffer;
+struct DataSource;
+struct LiveDataSource;
+struct M3UParser;
+struct NuHTTPDataSource;
+
+struct LiveSession : public AHandler {
+    LiveSession();
+
+    sp<DataSource> getDataSource();
+
+    void connect(const char *url);
+    void disconnect();
+
+    // Blocks until seek is complete.
+    void seekTo(int64_t timeUs);
+
+    status_t getDuration(int64_t *durationUs);
+    bool isSeekable();
+
+protected:
+    virtual ~LiveSession();
+
+    virtual void onMessageReceived(const sp<AMessage> &msg);
+
+private:
+    enum {
+        kMaxNumQueuedFragments = 3,
+        kMaxNumRetries         = 5,
+    };
+
+    static const int64_t kMaxPlaylistAgeUs;
+
+    enum {
+        kWhatConnect        = 'conn',
+        kWhatDisconnect     = 'disc',
+        kWhatMonitorQueue   = 'moni',
+        kWhatSeek           = 'seek',
+    };
+
+    struct BandwidthItem {
+        AString mURI;
+        unsigned long mBandwidth;
+    };
+
+    sp<LiveDataSource> mDataSource;
+
+    sp<NuHTTPDataSource> mHTTPDataSource;
+
+    AString mMasterURL;
+    Vector<BandwidthItem> mBandwidthItems;
+
+    KeyedVector<AString, sp<ABuffer> > mAESKeyForURI;
+
+    ssize_t mPrevBandwidthIndex;
+    int64_t mLastPlaylistFetchTimeUs;
+    sp<M3UParser> mPlaylist;
+    int32_t mSeqNumber;
+    int64_t mSeekTimeUs;
+    int32_t mNumRetries;
+
+    Mutex mLock;
+    Condition mCondition;
+    int64_t mDurationUs;
+    bool mSeekDone;
+
+    int32_t mMonitorQueueGeneration;
+
+    void onConnect(const sp<AMessage> &msg);
+    void onDisconnect();
+    void onDownloadNext();
+    void onMonitorQueue();
+    void onSeek(const sp<AMessage> &msg);
+
+    status_t fetchFile(const char *url, sp<ABuffer> *out);
+    sp<M3UParser> fetchPlaylist(const char *url);
+    size_t getBandwidthIndex();
+
+    status_t decryptBuffer(
+            size_t playlistIndex, const sp<ABuffer> &buffer);
+
+    void postMonitorQueue(int64_t delayUs = 0);
+
+    static int SortByBandwidth(const BandwidthItem *, const BandwidthItem *);
+
+    DISALLOW_EVIL_CONSTRUCTORS(LiveSession);
+};
+
+}  // namespace android
+
+#endif  // LIVE_SESSION_H_
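
Since LiveSession is an AHandler, it has to be registered with a running ALooper before connect()/seekTo() can post their messages; the player then only ever reads from the LiveDataSource returned by getDataSource(). A hedged sketch of that wiring (AwesomePlayer does roughly this; names and the URL are illustrative):

    #include <media/stagefright/foundation/ALooper.h>
    #include <media/stagefright/DataSource.h>

    #include "include/LiveSession.h"

    using namespace android;

    static void startLiveStream() {
        sp<ALooper> looper = new ALooper;
        looper->setName("httplive");
        looper->start();

        sp<LiveSession> session = new LiveSession;
        looper->registerHandler(session);

        session->connect("http://example.com/playlist.m3u8");

        // Hand this to an MPEG2TSExtractor (see setLiveSession below).
        sp<DataSource> source = session->getDataSource();

        if (session->isSeekable()) {
            session->seekTo(30 * 1000000ll);  // blocks until the seek completes
        }

        session->disconnect();
        looper->unregisterHandler(session->id());
        looper->stop();
    }
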
diff --git a/media/libstagefright/include/LiveSource.h b/media/libstagefright/include/LiveSource.h
deleted file mode 100644
index 55dd45e..0000000
--- a/media/libstagefright/include/LiveSource.h
+++ /dev/null
@@ -1,88 +0,0 @@
-/*
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef LIVE_SOURCE_H_
-
-#define LIVE_SOURCE_H_
-
-#include <media/stagefright/foundation/ABase.h>
-#include <media/stagefright/foundation/AString.h>
-#include <media/stagefright/DataSource.h>
-#include <utils/Vector.h>
-
-namespace android {
-
-struct ABuffer;
-struct NuHTTPDataSource;
-struct M3UParser;
-
-struct LiveSource : public DataSource {
-    LiveSource(const char *url);
-
-    virtual status_t initCheck() const;
-
-    virtual ssize_t readAt(off_t offset, void *data, size_t size);
-
-    virtual uint32_t flags() {
-        return kWantsPrefetching;
-    }
-
-    bool getDuration(int64_t *durationUs) const;
-
-    bool isSeekable() const;
-    bool seekTo(int64_t seekTimeUs);
-
-protected:
-    virtual ~LiveSource();
-
-private:
-    struct BandwidthItem {
-        AString mURI;
-        unsigned long mBandwidth;
-    };
-    Vector<BandwidthItem> mBandwidthItems;
-
-    AString mMasterURL;
-    AString mURL;
-    status_t mInitCheck;
-    int64_t mDurationUs;
-
-    sp<M3UParser> mPlaylist;
-    int32_t mFirstItemSequenceNumber;
-    size_t mPlaylistIndex;
-    int64_t mLastFetchTimeUs;
-
-    sp<NuHTTPDataSource> mSource;
-    off_t mSourceSize;
-    off_t mOffsetBias;
-
-    bool mSignalDiscontinuity;
-    ssize_t mPrevBandwidthIndex;
-
-    status_t fetchM3U(const char *url, sp<ABuffer> *buffer);
-
-    static int SortByBandwidth(const BandwidthItem *a, const BandwidthItem *b);
-
-    bool switchToNext();
-    bool loadPlaylist(bool fetchMaster);
-    void determineSeekability();
-
-    DISALLOW_EVIL_CONSTRUCTORS(LiveSource);
-};
-
-}  // namespace android
-
-#endif  // LIVE_SOURCE_H_
diff --git a/media/libstagefright/include/M3UParser.h b/media/libstagefright/include/M3UParser.h
index bd9eebe..63895b4 100644
--- a/media/libstagefright/include/M3UParser.h
+++ b/media/libstagefright/include/M3UParser.h
@@ -66,6 +66,9 @@
     static status_t parseStreamInf(
             const AString &line, sp<AMessage> *meta);
 
+    static status_t parseCipherInfo(
+            const AString &line, sp<AMessage> *meta, const AString &baseURI);
+
     static status_t ParseInt32(const char *s, int32_t *x);
 
     DISALLOW_EVIL_CONSTRUCTORS(M3UParser);
diff --git a/media/libstagefright/include/MP3Extractor.h b/media/libstagefright/include/MP3Extractor.h
index 30136e7..728980e 100644
--- a/media/libstagefright/include/MP3Extractor.h
+++ b/media/libstagefright/include/MP3Extractor.h
@@ -24,6 +24,7 @@
 
 struct AMessage;
 class DataSource;
+struct MP3Seeker;
 class String8;
 
 class MP3Extractor : public MediaExtractor {
@@ -37,15 +38,19 @@
 
     virtual sp<MetaData> getMetaData();
 
+    static bool get_mp3_frame_size(
+            uint32_t header, size_t *frame_size,
+            int *out_sampling_rate = NULL, int *out_channels = NULL,
+            int *out_bitrate = NULL);
+
 private:
     status_t mInitCheck;
 
     sp<DataSource> mDataSource;
-    off_t mFirstFramePos;
+    off64_t mFirstFramePos;
     sp<MetaData> mMeta;
     uint32_t mFixedHeader;
-    int32_t mByteNumber; // total number of bytes in this MP3
-    char mTableOfContents[99]; // TOC entries in XING header
+    sp<MP3Seeker> mSeeker;
 
     MP3Extractor(const MP3Extractor &);
     MP3Extractor &operator=(const MP3Extractor &);
diff --git a/media/libstagefright/include/MP3Seeker.h b/media/libstagefright/include/MP3Seeker.h
new file mode 100644
index 0000000..599542e
--- /dev/null
+++ b/media/libstagefright/include/MP3Seeker.h
@@ -0,0 +1,46 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef MP3_SEEKER_H_
+
+#define MP3_SEEKER_H_
+
+#include <media/stagefright/foundation/ABase.h>
+#include <utils/RefBase.h>
+
+namespace android {
+
+struct MP3Seeker : public RefBase {
+    MP3Seeker() {}
+
+    virtual bool getDuration(int64_t *durationUs) = 0;
+
+    // Given a requested seek time in "*timeUs", find the byte offset closest
+    // to that position and return it in "*pos". Update "*timeUs" to reflect
+    // the actual time that seek point represents.
+    virtual bool getOffsetForTime(int64_t *timeUs, off64_t *pos) = 0;
+
+protected:
+    virtual ~MP3Seeker() {}
+
+private:
+    DISALLOW_EVIL_CONSTRUCTORS(MP3Seeker);
+};
+
+}  // namespace android
+
+#endif  // MP3_SEEKER_H_
+
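
MP3Seeker only defines the seek contract; the concrete seekers the extractor installs build their tables from headers in the stream. Purely as an illustration of the interface (not part of the patch), a trivial constant-bitrate seeker could look like:

    #include <sys/types.h>

    #include "include/MP3Seeker.h"

    namespace android {

    // Hypothetical CBR seeker: position is proportional to time.
    struct CBRSeeker : public MP3Seeker {
        CBRSeeker(off64_t firstFramePos, int64_t durationUs, off64_t numBytes)
            : mFirstFramePos(firstFramePos),
              mDurationUs(durationUs),
              mNumBytes(numBytes) {
        }

        virtual bool getDuration(int64_t *durationUs) {
            *durationUs = mDurationUs;
            return mDurationUs > 0;
        }

        virtual bool getOffsetForTime(int64_t *timeUs, off64_t *pos) {
            if (mDurationUs <= 0) {
                return false;
            }
            if (*timeUs > mDurationUs) {
                *timeUs = mDurationUs;
            }
            // Linear interpolation; for CBR every offset is an equally good
            // estimate, so *timeUs is otherwise left unchanged.
            *pos = mFirstFramePos + (*timeUs * mNumBytes) / mDurationUs;
            return true;
        }

    private:
        off64_t mFirstFramePos;
        int64_t mDurationUs;
        off64_t mNumBytes;
    };

    }  // namespace android
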
diff --git a/media/libstagefright/include/MPEG2TSExtractor.h b/media/libstagefright/include/MPEG2TSExtractor.h
index d83b538..efe7496 100644
--- a/media/libstagefright/include/MPEG2TSExtractor.h
+++ b/media/libstagefright/include/MPEG2TSExtractor.h
@@ -15,7 +15,7 @@
 struct DataSource;
 struct MPEG2TSSource;
 struct String8;
-struct LiveSource;
+struct LiveSession;
 
 struct MPEG2TSExtractor : public MediaExtractor {
     MPEG2TSExtractor(const sp<DataSource> &source);
@@ -28,7 +28,7 @@
 
     virtual uint32_t flags() const;
 
-    void setLiveSource(const sp<LiveSource> &liveSource);
+    void setLiveSession(const sp<LiveSession> &liveSession);
     void seekTo(int64_t seekTimeUs);
 
 private:
@@ -37,13 +37,13 @@
     mutable Mutex mLock;
 
     sp<DataSource> mDataSource;
-    sp<LiveSource> mLiveSource;
+    sp<LiveSession> mLiveSession;
 
     sp<ATSParser> mParser;
 
     Vector<sp<AnotherPacketSource> > mSourceImpls;
 
-    off_t mOffset;
+    off64_t mOffset;
 
     void init();
     status_t feedMore();
diff --git a/media/libstagefright/include/MPEG4Extractor.h b/media/libstagefright/include/MPEG4Extractor.h
index bc2e4dc..04e8a6a 100644
--- a/media/libstagefright/include/MPEG4Extractor.h
+++ b/media/libstagefright/include/MPEG4Extractor.h
@@ -67,8 +67,8 @@
     Vector<uint32_t> mPath;
 
     status_t readMetaData();
-    status_t parseChunk(off_t *offset, int depth);
-    status_t parseMetaData(off_t offset, size_t size);
+    status_t parseChunk(off64_t *offset, int depth);
+    status_t parseMetaData(off64_t offset, size_t size);
 
     status_t updateAudioTrackInfoFromESDS_MPEG4Audio(
             const void *esds_data, size_t esds_size);
@@ -86,9 +86,9 @@
     SINF *mFirstSINF;
 
     bool mIsDrm;
-    status_t parseDrmSINF(off_t *offset, off_t data_offset);
+    status_t parseDrmSINF(off64_t *offset, off64_t data_offset);
 
-    status_t parseTrackHeader(off_t data_offset, off_t data_size);
+    status_t parseTrackHeader(off64_t data_offset, off64_t data_size);
 
     MPEG4Extractor(const MPEG4Extractor &);
     MPEG4Extractor &operator=(const MPEG4Extractor &);
diff --git a/media/libstagefright/include/NuCachedSource2.h b/media/libstagefright/include/NuCachedSource2.h
index 1fb2088..78719c1 100644
--- a/media/libstagefright/include/NuCachedSource2.h
+++ b/media/libstagefright/include/NuCachedSource2.h
@@ -32,18 +32,20 @@
 
     virtual status_t initCheck() const;
 
-    virtual ssize_t readAt(off_t offset, void *data, size_t size);
+    virtual ssize_t readAt(off64_t offset, void *data, size_t size);
 
-    virtual status_t getSize(off_t *size);
+    virtual status_t getSize(off64_t *size);
     virtual uint32_t flags();
 
+    virtual DecryptHandle* DrmInitialization(DrmManagerClient *client);
+    virtual void getDrmInfo(DecryptHandle **handle, DrmManagerClient **client);
+    virtual String8 getUri();
     ////////////////////////////////////////////////////////////////////////////
 
     size_t cachedSize();
     size_t approxDataRemaining(bool *eos);
 
-    void suspend();
-    void clearCacheAndResume();
+    void resumeFetchingIfNecessary();
 
 protected:
     virtual ~NuCachedSource2();
@@ -54,7 +56,7 @@
     enum {
         kPageSize            = 65536,
         kHighWaterThreshold  = 5 * 1024 * 1024,
-        kLowWaterThreshold   = 512 * 1024,
+        kLowWaterThreshold   = 1024 * 1024,
 
         // Read data after a 15 sec timeout whether we're actively
         // fetching or not.
@@ -64,7 +66,6 @@
     enum {
         kWhatFetchMore  = 'fetc',
         kWhatRead       = 'read',
-        kWhatSuspend    = 'susp',
     };
 
     sp<DataSource> mSource;
@@ -76,25 +77,23 @@
     Condition mCondition;
 
     PageCache *mCache;
-    off_t mCacheOffset;
+    off64_t mCacheOffset;
     status_t mFinalStatus;
-    off_t mLastAccessPos;
+    off64_t mLastAccessPos;
     sp<AMessage> mAsyncResult;
     bool mFetching;
     int64_t mLastFetchTimeUs;
-    bool mSuspended;
 
     void onMessageReceived(const sp<AMessage> &msg);
     void onFetch();
     void onRead(const sp<AMessage> &msg);
-    void onSuspend();
 
     void fetchInternal();
-    ssize_t readInternal(off_t offset, void *data, size_t size);
-    status_t seekInternal_l(off_t offset);
+    ssize_t readInternal(off64_t offset, void *data, size_t size);
+    status_t seekInternal_l(off64_t offset);
 
     size_t approxDataRemaining_l(bool *eos);
-    void restartPrefetcherIfNecessary_l();
+    void restartPrefetcherIfNecessary_l(bool ignoreLowWaterThreshold = false);
 
     DISALLOW_EVIL_CONSTRUCTORS(NuCachedSource2);
 };
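
The threshold changes above implement a simple watermark scheme: fetching pauses once a large cushion of data is cached ahead of the reader and resumes once the cushion shrinks, with the new ignoreLowWaterThreshold flag forcing an immediate refill (e.g. right after a seek). A sketch of that decision logic only, not the NuCachedSource2 implementation:

    // Watermark policy sketch; constants mirror the header above.
    static const size_t kHighWaterThreshold = 5 * 1024 * 1024;
    static const size_t kLowWaterThreshold  = 1024 * 1024;

    bool shouldResumeFetching(size_t bytesCachedAhead, bool ignoreLowWaterThreshold) {
        if (ignoreLowWaterThreshold) {
            return true;  // e.g. after a seek, start refilling right away
        }
        return bytesCachedAhead < kLowWaterThreshold;
    }

    bool shouldPauseFetching(size_t bytesCachedAhead) {
        return bytesCachedAhead >= kHighWaterThreshold;
    }
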
diff --git a/media/libstagefright/include/NuHTTPDataSource.h b/media/libstagefright/include/NuHTTPDataSource.h
index 3c88cc2..c8e93be 100644
--- a/media/libstagefright/include/NuHTTPDataSource.h
+++ b/media/libstagefright/include/NuHTTPDataSource.h
@@ -3,6 +3,7 @@
 #define NU_HTTP_DATA_SOURCE_H_
 
 #include <media/stagefright/DataSource.h>
+#include <utils/List.h>
 #include <utils/String8.h>
 #include <utils/threads.h>
 
@@ -16,16 +17,24 @@
     status_t connect(
             const char *uri,
             const KeyedVector<String8, String8> *headers = NULL,
-            off_t offset = 0);
+            off64_t offset = 0);
 
     void disconnect();
 
     virtual status_t initCheck() const;
 
-    virtual ssize_t readAt(off_t offset, void *data, size_t size);
-    virtual status_t getSize(off_t *size);
+    virtual ssize_t readAt(off64_t offset, void *data, size_t size);
+    virtual status_t getSize(off64_t *size);
     virtual uint32_t flags();
 
+    // Returns true if bandwidth could successfully be estimated,
+    // false otherwise.
+    bool estimateBandwidth(int32_t *bandwidth_bps);
+
+    virtual DecryptHandle* DrmInitialization(DrmManagerClient *client);
+    virtual void getDrmInfo(DecryptHandle **handle, DrmManagerClient **client);
+    virtual String8 getUri();
+
 protected:
     virtual ~NuHTTPDataSource();
 
@@ -36,6 +45,11 @@
         CONNECTED
     };
 
+    struct BandwidthEntry {
+        int64_t mDelayUs;
+        size_t mNumBytes;
+    };
+
     Mutex mLock;
 
     State mState;
@@ -44,10 +58,11 @@
     unsigned mPort;
     String8 mPath;
     String8 mHeaders;
+    String8 mUri;
 
     HTTPStream mHTTP;
-    off_t mOffset;
-    off_t mContentLength;
+    off64_t mOffset;
+    off64_t mContentLength;
     bool mContentLengthValid;
     bool mHasChunkedTransferEncoding;
 
@@ -55,18 +70,27 @@
     // chunk header (or -1 if no more chunks).
     ssize_t mChunkDataBytesLeft;
 
+    List<BandwidthEntry> mBandwidthHistory;
+    size_t mNumBandwidthHistoryItems;
+    int64_t mTotalTransferTimeUs;
+    size_t mTotalTransferBytes;
+
+    DecryptHandle *mDecryptHandle;
+    DrmManagerClient *mDrmManagerClient;
+
     status_t connect(
-            const char *uri, const String8 &headers, off_t offset);
+            const char *uri, const String8 &headers, off64_t offset);
 
     status_t connect(
             const char *host, unsigned port, const char *path,
             const String8 &headers,
-            off_t offset);
+            off64_t offset);
 
     // Read up to "size" bytes of data, respect transfer encoding.
     ssize_t internalRead(void *data, size_t size);
 
     void applyTimeoutResponse();
+    void addBandwidthMeasurement_l(size_t numBytes, int64_t delayUs);
 
     static void MakeFullHeaders(
             const KeyedVector<String8, String8> *overrides,
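
The BandwidthEntry list and estimateBandwidth() above imply sliding-window bookkeeping: each transfer records how many bytes arrived and how long they took, and the estimate is bits per second over the retained window. A hedged, illustrative version of that bookkeeping (not the NuHTTPDataSource code; the window size of 100 entries is an assumption):

    #include <stdint.h>
    #include <utils/List.h>

    struct BandwidthEstimator {
        struct Entry {
            int64_t mDelayUs;
            size_t mNumBytes;
        };

        android::List<Entry> mHistory;
        int64_t mTotalTransferTimeUs;
        size_t mTotalTransferBytes;

        BandwidthEstimator()
            : mTotalTransferTimeUs(0),
              mTotalTransferBytes(0) {
        }

        void addMeasurement(size_t numBytes, int64_t delayUs) {
            Entry entry;
            entry.mDelayUs = delayUs;
            entry.mNumBytes = numBytes;
            mHistory.push_back(entry);

            mTotalTransferTimeUs += delayUs;
            mTotalTransferBytes += numBytes;

            while (mHistory.size() > 100) {  // keep a bounded, recent window
                Entry *oldest = &*mHistory.begin();
                mTotalTransferTimeUs -= oldest->mDelayUs;
                mTotalTransferBytes -= oldest->mNumBytes;
                mHistory.erase(mHistory.begin());
            }
        }

        // Returns false until at least one measurement has been recorded.
        bool estimateBandwidth(int32_t *bandwidth_bps) const {
            if (mTotalTransferTimeUs <= 0) {
                return false;
            }
            *bandwidth_bps = (int32_t)(
                    ((int64_t)mTotalTransferBytes * 8000000ll) / mTotalTransferTimeUs);
            return true;
        }
    };
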
diff --git a/media/libstagefright/include/OMX.h b/media/libstagefright/include/OMX.h
index 72ab5aa..5fed98a 100644
--- a/media/libstagefright/include/OMX.h
+++ b/media/libstagefright/include/OMX.h
@@ -59,10 +59,20 @@
             node_id node, OMX_INDEXTYPE index,
             const void *params, size_t size);
 
+    virtual status_t enableGraphicBuffers(
+            node_id node, OMX_U32 port_index, OMX_BOOL enable);
+
+    virtual status_t storeMetaDataInBuffers(
+            node_id node, OMX_U32 port_index, OMX_BOOL enable);
+
     virtual status_t useBuffer(
             node_id node, OMX_U32 port_index, const sp<IMemory> &params,
             buffer_id *buffer);
 
+    virtual status_t useGraphicBuffer(
+            node_id node, OMX_U32 port_index,
+            const sp<GraphicBuffer> &graphicBuffer, buffer_id *buffer);
+
     virtual status_t allocateBuffer(
             node_id node, OMX_U32 port_index, size_t size,
             buffer_id *buffer, void **buffer_data);
@@ -87,14 +97,6 @@
             const char *parameter_name,
             OMX_INDEXTYPE *index);
 
-    virtual sp<IOMXRenderer> createRenderer(
-            const sp<ISurface> &surface,
-            const char *componentName,
-            OMX_COLOR_FORMATTYPE colorFormat,
-            size_t encodedWidth, size_t encodedHeight,
-            size_t displayWidth, size_t displayHeight,
-            int32_t rotationDegrees);
-
     virtual void binderDied(const wp<IBinder> &the_late_who);
 
     OMX_ERRORTYPE OnEvent(
diff --git a/media/libstagefright/include/OMXNodeInstance.h b/media/libstagefright/include/OMXNodeInstance.h
index b5b31ac..86c102c 100644
--- a/media/libstagefright/include/OMXNodeInstance.h
+++ b/media/libstagefright/include/OMXNodeInstance.h
@@ -49,10 +49,17 @@
     status_t getConfig(OMX_INDEXTYPE index, void *params, size_t size);
     status_t setConfig(OMX_INDEXTYPE index, const void *params, size_t size);
 
+    status_t enableGraphicBuffers(OMX_U32 portIndex, OMX_BOOL enable);
+    status_t storeMetaDataInBuffers(OMX_U32 portIndex, OMX_BOOL enable);
+
     status_t useBuffer(
             OMX_U32 portIndex, const sp<IMemory> &params,
             OMX::buffer_id *buffer);
 
+    status_t useGraphicBuffer(
+            OMX_U32 portIndex, const sp<GraphicBuffer> &graphicBuffer,
+            OMX::buffer_id *buffer);
+
     status_t allocateBuffer(
             OMX_U32 portIndex, size_t size, OMX::buffer_id *buffer,
             void **buffer_data);
@@ -125,4 +132,3 @@
 }  // namespace android
 
 #endif  // OMX_NODE_INSTANCE_H_
-
diff --git a/media/libstagefright/include/SampleIterator.h b/media/libstagefright/include/SampleIterator.h
index a5eaed9..b5a043c 100644
--- a/media/libstagefright/include/SampleIterator.h
+++ b/media/libstagefright/include/SampleIterator.h
@@ -27,7 +27,7 @@
 
     uint32_t getChunkIndex() const { return mCurrentChunkIndex; }
     uint32_t getDescIndex() const { return mChunkDesc; }
-    off_t getSampleOffset() const { return mCurrentSampleOffset; }
+    off64_t getSampleOffset() const { return mCurrentSampleOffset; }
     size_t getSampleSize() const { return mCurrentSampleSize; }
     uint32_t getSampleTime() const { return mCurrentSampleTime; }
 
@@ -48,7 +48,7 @@
     uint32_t mChunkDesc;
 
     uint32_t mCurrentChunkIndex;
-    off_t mCurrentChunkOffset;
+    off64_t mCurrentChunkOffset;
     Vector<size_t> mCurrentChunkSampleSizes;
 
     uint32_t mTimeToSampleIndex;
@@ -58,13 +58,13 @@
     uint32_t mTTSDuration;
 
     uint32_t mCurrentSampleIndex;
-    off_t mCurrentSampleOffset;
+    off64_t mCurrentSampleOffset;
     size_t mCurrentSampleSize;
     uint32_t mCurrentSampleTime;
 
     void reset();
     status_t findChunkRange(uint32_t sampleIndex);
-    status_t getChunkOffset(uint32_t chunk, off_t *offset);
+    status_t getChunkOffset(uint32_t chunk, off64_t *offset);
     status_t findSampleTime(uint32_t sampleIndex, uint32_t *time);
 
     SampleIterator(const SampleIterator &);
diff --git a/media/libstagefright/include/SampleTable.h b/media/libstagefright/include/SampleTable.h
index f830690..c5e8136 100644
--- a/media/libstagefright/include/SampleTable.h
+++ b/media/libstagefright/include/SampleTable.h
@@ -36,17 +36,17 @@
 
     // type can be 'stco' or 'co64'.
     status_t setChunkOffsetParams(
-            uint32_t type, off_t data_offset, size_t data_size);
+            uint32_t type, off64_t data_offset, size_t data_size);
 
-    status_t setSampleToChunkParams(off_t data_offset, size_t data_size);
+    status_t setSampleToChunkParams(off64_t data_offset, size_t data_size);
 
     // type can be 'stsz' or 'stz2'.
     status_t setSampleSizeParams(
-            uint32_t type, off_t data_offset, size_t data_size);
+            uint32_t type, off64_t data_offset, size_t data_size);
 
-    status_t setTimeToSampleParams(off_t data_offset, size_t data_size);
+    status_t setTimeToSampleParams(off64_t data_offset, size_t data_size);
 
-    status_t setSyncSampleParams(off_t data_offset, size_t data_size);
+    status_t setSyncSampleParams(off64_t data_offset, size_t data_size);
 
     ////////////////////////////////////////////////////////////////////////////
 
@@ -58,7 +58,7 @@
 
     status_t getMetaDataForSample(
             uint32_t sampleIndex,
-            off_t *offset,
+            off64_t *offset,
             size_t *size,
             uint32_t *decodingTime,
             bool *isSyncSample = NULL);
@@ -89,14 +89,14 @@
     sp<DataSource> mDataSource;
     Mutex mLock;
 
-    off_t mChunkOffsetOffset;
+    off64_t mChunkOffsetOffset;
     uint32_t mChunkOffsetType;
     uint32_t mNumChunkOffsets;
 
-    off_t mSampleToChunkOffset;
+    off64_t mSampleToChunkOffset;
     uint32_t mNumSampleToChunkOffsets;
 
-    off_t mSampleSizeOffset;
+    off64_t mSampleSizeOffset;
     uint32_t mSampleSizeFieldSize;
     uint32_t mDefaultSampleSize;
     uint32_t mNumSampleSizes;
@@ -104,7 +104,7 @@
     uint32_t mTimeToSampleCount;
     uint32_t *mTimeToSample;
 
-    off_t mSyncSampleOffset;
+    off64_t mSyncSampleOffset;
     uint32_t mNumSyncSamples;
     uint32_t *mSyncSamples;
     size_t mLastSyncSampleIndex;
diff --git a/media/libstagefright/include/SoftwareRenderer.h b/media/libstagefright/include/SoftwareRenderer.h
index 89d7cc4..90d3fe1 100644
--- a/media/libstagefright/include/SoftwareRenderer.h
+++ b/media/libstagefright/include/SoftwareRenderer.h
@@ -19,40 +19,34 @@
 #define SOFTWARE_RENDERER_H_
 
 #include <media/stagefright/ColorConverter.h>
-#include <media/stagefright/VideoRenderer.h>
 #include <utils/RefBase.h>
 
 namespace android {
 
-class ISurface;
-class MemoryHeapBase;
+struct MetaData;
+class Surface;
 
-class SoftwareRenderer : public VideoRenderer {
+class SoftwareRenderer {
 public:
     SoftwareRenderer(
-            OMX_COLOR_FORMATTYPE colorFormat,
-            const sp<ISurface> &surface,
-            size_t displayWidth, size_t displayHeight,
-            size_t decodedWidth, size_t decodedHeight,
-            int32_t rotationDegrees = 0);
+            const sp<Surface> &surface, const sp<MetaData> &meta);
 
-    virtual ~SoftwareRenderer();
+    ~SoftwareRenderer();
 
-    status_t initCheck() const;
-
-    virtual void render(
+    void render(
             const void *data, size_t size, void *platformPrivate);
 
 private:
-    status_t mInitCheck;
+    enum YUVMode {
+        None,
+    };
+
     OMX_COLOR_FORMATTYPE mColorFormat;
-    ColorConverter mConverter;
-    sp<ISurface> mISurface;
-    size_t mDisplayWidth, mDisplayHeight;
-    size_t mDecodedWidth, mDecodedHeight;
-    size_t mFrameSize;
-    sp<MemoryHeapBase> mMemoryHeap;
-    int mIndex;
+    ColorConverter *mConverter;
+    YUVMode mYUVMode;
+    sp<Surface> mSurface;
+    int32_t mWidth, mHeight;
+    int32_t mCropLeft, mCropTop, mCropRight, mCropBottom;
 
     SoftwareRenderer(const SoftwareRenderer &);
     SoftwareRenderer &operator=(const SoftwareRenderer &);
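
With the new constructor, the renderer derives its geometry and color format from the decoder's output MetaData rather than taking them as explicit arguments. A caller-side sketch only; "decoder", "surface", "frameData" and "frameSize" are placeholders, and the kKey* names follow stagefright's usual MetaData conventions:

    sp<MetaData> meta = decoder->getFormat();

    // These are the fields the renderer is expected to look up itself
    // from the same MetaData; the CHECKs just show what must be present.
    int32_t width, height, colorFormat;
    CHECK(meta->findInt32(kKeyWidth, &width));
    CHECK(meta->findInt32(kKeyHeight, &height));
    CHECK(meta->findInt32(kKeyColorFormat, &colorFormat));

    SoftwareRenderer renderer(surface, meta);
    renderer.render(frameData, frameSize, NULL);  // one decoded frame
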
diff --git a/media/libstagefright/include/StagefrightMetadataRetriever.h b/media/libstagefright/include/StagefrightMetadataRetriever.h
index b80387f..07b1ec8 100644
--- a/media/libstagefright/include/StagefrightMetadataRetriever.h
+++ b/media/libstagefright/include/StagefrightMetadataRetriever.h
@@ -35,7 +35,7 @@
     virtual status_t setDataSource(const char *url);
     virtual status_t setDataSource(int fd, int64_t offset, int64_t length);
 
-    virtual VideoFrame *captureFrame();
+    virtual VideoFrame *getFrameAtTime(int64_t timeUs, int option);
     virtual MediaAlbumArt *extractAlbumArt();
     virtual const char *extractMetadata(int keyCode);
 
diff --git a/media/libstagefright/include/ThrottledSource.h b/media/libstagefright/include/ThrottledSource.h
index 88164b3..8928a4a 100644
--- a/media/libstagefright/include/ThrottledSource.h
+++ b/media/libstagefright/include/ThrottledSource.h
@@ -30,9 +30,9 @@
 
     virtual status_t initCheck() const;
 
-    virtual ssize_t readAt(off_t offset, void *data, size_t size);
+    virtual ssize_t readAt(off64_t offset, void *data, size_t size);
 
-    virtual status_t getSize(off_t *size);
+    virtual status_t getSize(off64_t *size);
     virtual uint32_t flags();
 
 private:
diff --git a/media/libstagefright/include/VBRISeeker.h b/media/libstagefright/include/VBRISeeker.h
new file mode 100644
index 0000000..1a2bf9f
--- /dev/null
+++ b/media/libstagefright/include/VBRISeeker.h
@@ -0,0 +1,50 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef VBRI_SEEKER_H_
+
+#define VBRI_SEEKER_H_
+
+#include "include/MP3Seeker.h"
+
+#include <utils/Vector.h>
+
+namespace android {
+
+struct DataSource;
+
+struct VBRISeeker : public MP3Seeker {
+    static sp<VBRISeeker> CreateFromSource(
+            const sp<DataSource> &source, off64_t post_id3_pos);
+
+    virtual bool getDuration(int64_t *durationUs);
+    virtual bool getOffsetForTime(int64_t *timeUs, off64_t *pos);
+
+private:
+    off64_t mBasePos;
+    int64_t mDurationUs;
+    Vector<uint32_t> mSegments;
+
+    VBRISeeker();
+
+    DISALLOW_EVIL_CONSTRUCTORS(VBRISeeker);
+};
+
+}  // namespace android
+
+#endif  // VBRI_SEEKER_H_
+
+
diff --git a/media/libstagefright/include/WAVExtractor.h b/media/libstagefright/include/WAVExtractor.h
index df6d3e7..9de197f 100644
--- a/media/libstagefright/include/WAVExtractor.h
+++ b/media/libstagefright/include/WAVExtractor.h
@@ -48,7 +48,7 @@
     uint16_t mNumChannels;
     uint32_t mSampleRate;
     uint16_t mBitsPerSample;
-    off_t mDataOffset;
+    off64_t mDataOffset;
     size_t mDataSize;
     sp<MetaData> mTrackMeta;
 
diff --git a/media/libstagefright/include/WVMExtractor.h b/media/libstagefright/include/WVMExtractor.h
new file mode 100644
index 0000000..0da45a8
--- /dev/null
+++ b/media/libstagefright/include/WVMExtractor.h
@@ -0,0 +1,51 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef WVM_EXTRACTOR_H_
+
+#define WVM_EXTRACTOR_H_
+
+#include <media/stagefright/MediaExtractor.h>
+
+namespace android {
+
+class DataSource;
+
+class WVMExtractor : public MediaExtractor {
+public:
+    WVMExtractor(const sp<DataSource> &source);
+
+    virtual size_t countTracks();
+    virtual sp<MediaSource> getTrack(size_t index);
+    virtual sp<MetaData> getTrackMetaData(size_t index, uint32_t flags);
+    virtual sp<MetaData> getMetaData();
+
+protected:
+    virtual ~WVMExtractor();
+
+private:
+    sp<DataSource> mDataSource;
+    sp<MediaExtractor> mImpl;
+
+    WVMExtractor(const WVMExtractor &);
+    WVMExtractor &operator=(const WVMExtractor &);
+
+};
+
+}  // namespace android
+
+#endif  // WVM_EXTRACTOR_H_
+
diff --git a/media/libstagefright/include/XINGSeeker.h b/media/libstagefright/include/XINGSeeker.h
new file mode 100644
index 0000000..d5a484e
--- /dev/null
+++ b/media/libstagefright/include/XINGSeeker.h
@@ -0,0 +1,50 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef XING_SEEKER_H_
+
+#define XING_SEEKER_H_
+
+#include "include/MP3Seeker.h"
+
+namespace android {
+
+struct DataSource;
+
+struct XINGSeeker : public MP3Seeker {
+    static sp<XINGSeeker> CreateFromSource(
+            const sp<DataSource> &source, off64_t first_frame_pos);
+
+    virtual bool getDuration(int64_t *durationUs);
+    virtual bool getOffsetForTime(int64_t *timeUs, off64_t *pos);
+
+private:
+    int64_t mFirstFramePos;
+    int64_t mDurationUs;
+    int32_t mSizeBytes;
+
+    // TOC entries in XING header. Skip the first one since it's always 0.
+    char mTableOfContents[99];
+
+    XINGSeeker();
+
+    DISALLOW_EVIL_CONSTRUCTORS(XINGSeeker);
+};
+
+}  // namespace android
+
+#endif  // XING_SEEKER_H_
+
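
Both seekers implement the same MP3Seeker interface, so an MP3 extractor can try one and fall back to the other. A sketch of that selection, assuming only the declarations above; "source", "firstFramePos" and "postID3Pos" are placeholders:

    sp<MP3Seeker> seeker = XINGSeeker::CreateFromSource(source, firstFramePos);
    if (seeker == NULL) {
        seeker = VBRISeeker::CreateFromSource(source, postID3Pos);
    }

    if (seeker != NULL) {
        int64_t durationUs;
        if (seeker->getDuration(&durationUs)) {
            // expose durationUs via the track's MetaData
        }

        int64_t seekTimeUs = 30000000ll;  // e.g. seek to 0:30
        off64_t pos;
        if (seeker->getOffsetForTime(&seekTimeUs, &pos)) {
            // resume reading frames at byte offset "pos"; seekTimeUs is
            // adjusted to the time of the frame actually landed on
        }
    }
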
diff --git a/media/libstagefright/include/avc_utils.h b/media/libstagefright/include/avc_utils.h
index 62cfc36..3aeb07f 100644
--- a/media/libstagefright/include/avc_utils.h
+++ b/media/libstagefright/include/avc_utils.h
@@ -24,6 +24,17 @@
 
 struct ABitReader;
 
+enum {
+    kAVCProfileBaseline      = 0x42,
+    kAVCProfileMain          = 0x4d,
+    kAVCProfileExtended      = 0x58,
+    kAVCProfileHigh          = 0x64,
+    kAVCProfileHigh10        = 0x6e,
+    kAVCProfileHigh422       = 0x7a,
+    kAVCProfileHigh444       = 0xf4,
+    kAVCProfileCAVLC444Intra = 0x2c
+};
+
 void FindAVCDimensions(
         const sp<ABuffer> &seqParamSet, int32_t *width, int32_t *height);
 
@@ -39,6 +50,8 @@
 
 bool IsIDR(const sp<ABuffer> &accessUnit);
 
+const char *AVCProfileToString(uint8_t profile);
+
 }  // namespace android
 
 #endif  // AVC_UTILS_H_
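
A plausible shape for AVCProfileToString(), mapping the profile_idc constants declared above to readable names; the actual implementation may differ, this is a sketch for illustration:

    const char *AVCProfileToString(uint8_t profile) {
        switch (profile) {
            case kAVCProfileBaseline:      return "Baseline";
            case kAVCProfileMain:          return "Main";
            case kAVCProfileExtended:      return "Extended";
            case kAVCProfileHigh:          return "High";
            case kAVCProfileHigh10:        return "High 10";
            case kAVCProfileHigh422:       return "High 4:2:2";
            case kAVCProfileHigh444:       return "High 4:4:4";
            case kAVCProfileCAVLC444Intra: return "CAVLC 4:4:4 Intra";
            default:                       return "Unknown";
        }
    }
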
diff --git a/media/libstagefright/include/stagefright_string.h b/media/libstagefright/include/stagefright_string.h
deleted file mode 100644
index 5dc7116..0000000
--- a/media/libstagefright/include/stagefright_string.h
+++ /dev/null
@@ -1,54 +0,0 @@
-/*
- * Copyright (C) 2009 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef STRING_H_
-
-#define STRING_H_
-
-#include <utils/String8.h>
-
-namespace android {
-
-class string {
-public:
-    typedef size_t size_type;
-    static size_type npos;
-
-    string();
-    string(const char *s);
-    string(const char *s, size_t length);
-    string(const string &from, size_type start, size_type length = npos);
-
-    const char *c_str() const;
-    size_type size() const;
-
-    void clear();
-    void erase(size_type from, size_type length);
-
-    size_type find(char c) const;
-
-    bool operator<(const string &other) const;
-    bool operator==(const string &other) const;
-
-    string &operator+=(char c);
-
-private:
-    String8 mString;
-};
-
-}  // namespace android
-
-#endif  // STRING_H_
diff --git a/media/libstagefright/matroska/MatroskaExtractor.cpp b/media/libstagefright/matroska/MatroskaExtractor.cpp
index d16476d..e0ac49f 100644
--- a/media/libstagefright/matroska/MatroskaExtractor.cpp
+++ b/media/libstagefright/matroska/MatroskaExtractor.cpp
@@ -22,13 +22,15 @@
 
 #include "mkvparser.hpp"
 
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/hexdump.h>
 #include <media/stagefright/DataSource.h>
 #include <media/stagefright/MediaBuffer.h>
-#include <media/stagefright/MediaDebug.h>
 #include <media/stagefright/MediaDefs.h>
 #include <media/stagefright/MediaErrors.h>
 #include <media/stagefright/MediaSource.h>
 #include <media/stagefright/MetaData.h>
+#include <media/stagefright/Utils.h>
 #include <utils/String8.h>
 
 namespace android {
@@ -56,7 +58,7 @@
     }
 
     virtual int Length(long long* total, long long* available) {
-        off_t size;
+        off64_t size;
         if (mSource->getSize(&size) != OK) {
             return -1;
         }
@@ -81,46 +83,6 @@
 
 ////////////////////////////////////////////////////////////////////////////////
 
-#include <ctype.h>
-static void hexdump(const void *_data, size_t size) {
-    const uint8_t *data = (const uint8_t *)_data;
-    size_t offset = 0;
-    while (offset < size) {
-        printf("0x%04x  ", offset);
-
-        size_t n = size - offset;
-        if (n > 16) {
-            n = 16;
-        }
-
-        for (size_t i = 0; i < 16; ++i) {
-            if (i == 8) {
-                printf(" ");
-            }
-
-            if (offset + i < size) {
-                printf("%02x ", data[offset + i]);
-            } else {
-                printf("   ");
-            }
-        }
-
-        printf(" ");
-
-        for (size_t i = 0; i < n; ++i) {
-            if (isprint(data[offset + i])) {
-                printf("%c", data[offset + i]);
-            } else {
-                printf(".");
-            }
-        }
-
-        printf("\n");
-
-        offset += 16;
-    }
-}
-
 struct BlockIterator {
     BlockIterator(mkvparser::Segment *segment, unsigned long trackNum);
 
@@ -167,6 +129,7 @@
     size_t mTrackIndex;
     Type mType;
     BlockIterator mBlockIter;
+    size_t mNALSizeLen;  // for type AVC
 
     status_t advance();
 
@@ -180,13 +143,26 @@
       mTrackIndex(index),
       mType(OTHER),
       mBlockIter(mExtractor->mSegment,
-                 mExtractor->mTracks.itemAt(index).mTrackNum) {
+                 mExtractor->mTracks.itemAt(index).mTrackNum),
+      mNALSizeLen(0) {
+    sp<MetaData> meta = mExtractor->mTracks.itemAt(index).mMeta;
+
     const char *mime;
-    CHECK(mExtractor->mTracks.itemAt(index).mMeta->
-            findCString(kKeyMIMEType, &mime));
+    CHECK(meta->findCString(kKeyMIMEType, &mime));
 
     if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AVC)) {
         mType = AVC;
+
+        uint32_t dummy;
+        const uint8_t *avcc;
+        size_t avccSize;
+        CHECK(meta->findData(
+                    kKeyAVCC, &dummy, (const void **)&avcc, &avccSize));
+
+        CHECK_GE(avccSize, 5u);
+
+        mNALSizeLen = 1 + (avcc[4] & 3);
+        LOGV("mNALSizeLen = %d", mNALSizeLen);
     } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AAC)) {
         mType = AAC;
     }
@@ -276,6 +252,10 @@
 
 ////////////////////////////////////////////////////////////////////////////////
 
+static unsigned U24_AT(const uint8_t *ptr) {
+    return ptr[0] << 16 | ptr[1] << 8 | ptr[2];
+}
+
 status_t MatroskaSource::read(
         MediaBuffer **out, const ReadOptions *options) {
     *out = NULL;
@@ -286,6 +266,7 @@
         mBlockIter.seek(seekTimeUs);
     }
 
+again:
     if (mBlockIter.eos()) {
         return ERROR_END_OF_STREAM;
     }
@@ -294,38 +275,70 @@
     size_t size = block->GetSize();
     int64_t timeUs = mBlockIter.blockTimeUs();
 
-    MediaBuffer *buffer = new MediaBuffer(size + 2);
+    // In the case of AVC content, each NAL unit is prefixed by
+    // mNALSizeLen bytes of length. We want to prefix the data with
+    // a four-byte 0x00000001 startcode instead of the length prefix.
+    // mNALSizeLen ranges from 1 through 4 bytes, so add an extra
+    // 3 bytes of padding to the buffer start.
+    static const size_t kPadding = 3;
+
+    MediaBuffer *buffer = new MediaBuffer(size + kPadding);
     buffer->meta_data()->setInt64(kKeyTime, timeUs);
     buffer->meta_data()->setInt32(kKeyIsSyncFrame, block->IsKey());
 
     long res = block->Read(
-            mExtractor->mReader, (unsigned char *)buffer->data() + 2);
+            mExtractor->mReader, (unsigned char *)buffer->data() + kPadding);
 
     if (res != 0) {
         return ERROR_END_OF_STREAM;
     }
 
-    buffer->set_range(2, size);
+    buffer->set_range(kPadding, size);
 
     if (mType == AVC) {
-        CHECK(size >= 2);
+        CHECK_GE(size, mNALSizeLen);
 
         uint8_t *data = (uint8_t *)buffer->data();
 
-        unsigned NALsize = data[2] << 8 | data[3];
-        CHECK_EQ(size, NALsize + 2);
+        size_t NALsize;
+        switch (mNALSizeLen) {
+            case 1: NALsize = data[kPadding]; break;
+            case 2: NALsize = U16_AT(&data[kPadding]); break;
+            case 3: NALsize = U24_AT(&data[kPadding]); break;
+            case 4: NALsize = U32_AT(&data[kPadding]); break;
+            default:
+                TRESPASS();
+        }
 
-        memcpy(data, "\x00\x00\x00\x01", 4);
-        buffer->set_range(0, size + 2);
+        CHECK_GE(size, NALsize + mNALSizeLen);
+        if (size > NALsize + mNALSizeLen) {
+            LOGW("discarding %d bytes of data.", size - NALsize - mNALSizeLen);
+        }
+
+        // actual data starts at &data[kPadding + mNALSizeLen]
+
+        memcpy(&data[mNALSizeLen - 1], "\x00\x00\x00\x01", 4);
+        buffer->set_range(mNALSizeLen - 1, NALsize + 4);
     } else if (mType == AAC) {
         // There's strange junk at the beginning...
 
-        const uint8_t *data = (const uint8_t *)buffer->data() + 2;
+        const uint8_t *data = (const uint8_t *)buffer->data() + kPadding;
+
+        // hexdump(data, size);
+
         size_t offset = 0;
         while (offset < size && data[offset] != 0x21) {
             ++offset;
         }
-        buffer->set_range(2 + offset, size - offset);
+
+        if (size == offset) {
+            buffer->release();
+
+            mBlockIter.advance();
+            goto again;
+        }
+
+        buffer->set_range(kPadding + offset, size - offset);
     }
 
     *out = buffer;
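
A distilled, standalone version of the AVC rewrite performed in the hunk above (sketch only): read the 1..4 byte big-endian NAL length that Matroska stores in front of each NAL unit, then overwrite it, plus part of the 3-byte padding reserved at the buffer start, with the 4-byte Annex B startcode the decoder expects.

    #include <stddef.h>
    #include <stdint.h>
    #include <string.h>

    static const size_t kPadding = 3;  // buffer layout: [pad][length][payload]

    // Returns the payload size and writes the startcode at
    // &data[kPadding + nalSizeLen - 4]; the caller sets the buffer range
    // to start there with length (payload size + 4).
    static size_t rewriteToAnnexB(uint8_t *data, size_t nalSizeLen) {
        size_t nalSize = 0;
        for (size_t i = 0; i < nalSizeLen; ++i) {
            nalSize = (nalSize << 8) | data[kPadding + i];
        }

        memcpy(&data[kPadding + nalSizeLen - 4], "\x00\x00\x00\x01", 4);
        return nalSize;
    }
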
diff --git a/media/libstagefright/mpeg2ts/ATSParser.cpp b/media/libstagefright/mpeg2ts/ATSParser.cpp
index c88c6c1..d8ab080 100644
--- a/media/libstagefright/mpeg2ts/ATSParser.cpp
+++ b/media/libstagefright/mpeg2ts/ATSParser.cpp
@@ -43,19 +43,25 @@
 static const size_t kTSPacketSize = 188;
 
 struct ATSParser::Program : public RefBase {
-    Program(unsigned programMapPID);
+    Program(ATSParser *parser, unsigned programMapPID);
 
     bool parsePID(
             unsigned pid, unsigned payload_unit_start_indicator,
             ABitReader *br);
 
-    void signalDiscontinuity(bool isASeek);
+    void signalDiscontinuity(DiscontinuityType type);
+    void signalEOS(status_t finalResult);
 
     sp<MediaSource> getSource(SourceType type);
 
     int64_t convertPTSToTimestamp(uint64_t PTS);
 
+    bool PTSTimeDeltaEstablished() const {
+        return mFirstPTSValid;
+    }
+
 private:
+    ATSParser *mParser;
     unsigned mProgramMapPID;
     KeyedVector<unsigned, sp<Stream> > mStreams;
     bool mFirstPTSValid;
@@ -73,7 +79,8 @@
             unsigned payload_unit_start_indicator,
             ABitReader *br);
 
-    void signalDiscontinuity(bool isASeek);
+    void signalDiscontinuity(DiscontinuityType type);
+    void signalEOS(status_t finalResult);
 
     sp<MediaSource> getSource(SourceType type);
 
@@ -105,8 +112,9 @@
 
 ////////////////////////////////////////////////////////////////////////////////
 
-ATSParser::Program::Program(unsigned programMapPID)
-    : mProgramMapPID(programMapPID),
+ATSParser::Program::Program(ATSParser *parser, unsigned programMapPID)
+    : mParser(parser),
+      mProgramMapPID(programMapPID),
       mFirstPTSValid(false),
       mFirstPTS(0) {
 }
@@ -135,9 +143,15 @@
     return true;
 }
 
-void ATSParser::Program::signalDiscontinuity(bool isASeek) {
+void ATSParser::Program::signalDiscontinuity(DiscontinuityType type) {
     for (size_t i = 0; i < mStreams.size(); ++i) {
-        mStreams.editValueAt(i)->signalDiscontinuity(isASeek);
+        mStreams.editValueAt(i)->signalDiscontinuity(type);
+    }
+}
+
+void ATSParser::Program::signalEOS(status_t finalResult) {
+    for (size_t i = 0; i < mStreams.size(); ++i) {
+        mStreams.editValueAt(i)->signalEOS(finalResult);
     }
 }
 
@@ -155,7 +169,7 @@
 
     unsigned section_length = br->getBits(12);
     LOGV("  section_length = %u", section_length);
-    CHECK((section_length & 0xc00) == 0);
+    CHECK_EQ(section_length & 0xc00, 0u);
     CHECK_LE(section_length, 1021u);
 
     MY_LOGV("  program_number = %u", br->getBits(16));
@@ -170,7 +184,7 @@
 
     unsigned program_info_length = br->getBits(12);
     LOGV("  program_info_length = %u", program_info_length);
-    CHECK((program_info_length & 0xc00) == 0);
+    CHECK_EQ(program_info_length & 0xc00, 0u);
 
     br->skipBits(program_info_length * 8);  // skip descriptors
 
@@ -194,7 +208,7 @@
 
         unsigned ES_info_length = br->getBits(12);
         LOGV("    ES_info_length = %u", ES_info_length);
-        CHECK((ES_info_length & 0xc00) == 0);
+        CHECK_EQ(ES_info_length & 0xc00, 0u);
 
         CHECK_GE(infoBytesRemaining - 5, ES_info_length);
 
@@ -238,10 +252,15 @@
 }
 
 sp<MediaSource> ATSParser::Program::getSource(SourceType type) {
+    size_t index = (type == MPEG2ADTS_AUDIO) ? 0 : 0;
+
     for (size_t i = 0; i < mStreams.size(); ++i) {
         sp<MediaSource> source = mStreams.editValueAt(i)->getSource(type);
         if (source != NULL) {
-            return source;
+            if (index == 0) {
+                return source;
+            }
+            --index;
         }
     }
 
@@ -269,11 +288,13 @@
     : mProgram(program),
       mElementaryPID(elementaryPID),
       mStreamType(streamType),
-      mBuffer(new ABuffer(128 * 1024)),
+      mBuffer(new ABuffer(192 * 1024)),
       mPayloadStarted(false),
       mQueue(streamType == 0x1b
               ? ElementaryStreamQueue::H264 : ElementaryStreamQueue::AAC) {
     mBuffer->setRange(0, 0);
+
+    LOGV("new stream PID 0x%02x, type 0x%02x", elementaryPID, streamType);
 }
 
 ATSParser::Stream::~Stream() {
@@ -298,7 +319,7 @@
     }
 
     size_t payloadSizeBits = br->numBitsLeft();
-    CHECK((payloadSizeBits % 8) == 0);
+    CHECK_EQ(payloadSizeBits % 8, 0u);
 
     CHECK_LE(mBuffer->size() + payloadSizeBits / 8, mBuffer->capacity());
 
@@ -306,24 +327,44 @@
     mBuffer->setRange(0, mBuffer->size() + payloadSizeBits / 8);
 }
 
-void ATSParser::Stream::signalDiscontinuity(bool isASeek) {
-    LOGV("Stream discontinuity");
+void ATSParser::Stream::signalDiscontinuity(DiscontinuityType type) {
     mPayloadStarted = false;
     mBuffer->setRange(0, 0);
 
-    mQueue.clear();
+    switch (type) {
+        case DISCONTINUITY_HTTPLIVE:
+        {
+            mQueue.clear(true);
 
-    if (isASeek) {
-        // This is only a "minor" discontinuity, we stay within the same
-        // bitstream.
+            if (mStreamType == 0x1b && mSource != NULL) {
+                // Don't signal discontinuities on audio streams.
+                mSource->queueDiscontinuity(type);
+            }
+            break;
+        }
 
-        mSource->clear();
-        return;
+        case DISCONTINUITY_SEEK:
+        case DISCONTINUITY_FORMATCHANGE:
+        {
+            bool isASeek = (type == DISCONTINUITY_SEEK);
+
+            mQueue.clear(!isASeek);
+
+            if (mSource != NULL) {
+                mSource->queueDiscontinuity(type);
+            }
+            break;
+        }
+
+        default:
+            TRESPASS();
+            break;
     }
+}
 
-    if (mStreamType == 0x1b && mSource != NULL) {
-        // Don't signal discontinuities on audio streams.
-        mSource->queueDiscontinuity();
+void ATSParser::Stream::signalEOS(status_t finalResult) {
+    if (mSource != NULL) {
+        mSource->signalEOS(finalResult);
     }
 }
 
@@ -468,7 +509,7 @@
                     br->data(), br->numBitsLeft() / 8);
 
             size_t payloadSizeBits = br->numBitsLeft();
-            CHECK((payloadSizeBits % 8) == 0);
+            CHECK_EQ(payloadSizeBits % 8, 0u);
 
             LOGV("There's %d bytes of payload.", payloadSizeBits / 8);
         }
@@ -503,7 +544,10 @@
     int64_t timeUs = mProgram->convertPTSToTimestamp(PTS);
 
     status_t err = mQueue.appendData(data, size, timeUs);
-    CHECK_EQ(err, (status_t)OK);
+
+    if (err != OK) {
+        return;
+    }
 
     sp<ABuffer> accessUnit;
     while ((accessUnit = mQueue.dequeueAccessUnit()) != NULL) {
@@ -513,12 +557,17 @@
             if (meta != NULL) {
                 LOGV("created source!");
                 mSource = new AnotherPacketSource(meta);
+
                 mSource->queueAccessUnit(accessUnit);
             }
         } else if (mQueue.getFormat() != NULL) {
             // After a discontinuity we invalidate the queue's format
             // and won't enqueue any access units to the source until
             // the queue has reestablished the new format.
+
+            if (mSource->getFormat() == NULL) {
+                mSource->setFormat(mQueue.getFormat());
+            }
             mSource->queueAccessUnit(accessUnit);
         }
     }
@@ -548,9 +597,17 @@
     parseTS(&br);
 }
 
-void ATSParser::signalDiscontinuity(bool isASeek) {
+void ATSParser::signalDiscontinuity(DiscontinuityType type) {
     for (size_t i = 0; i < mPrograms.size(); ++i) {
-        mPrograms.editItemAt(i)->signalDiscontinuity(isASeek);
+        mPrograms.editItemAt(i)->signalDiscontinuity(type);
+    }
+}
+
+void ATSParser::signalEOS(status_t finalResult) {
+    CHECK_NE(finalResult, (status_t)OK);
+
+    for (size_t i = 0; i < mPrograms.size(); ++i) {
+        mPrograms.editItemAt(i)->signalEOS(finalResult);
     }
 }
 
@@ -568,7 +625,7 @@
 
     unsigned section_length = br->getBits(12);
     LOGV("  section_length = %u", section_length);
-    CHECK((section_length & 0xc00) == 0);
+    CHECK_EQ(section_length & 0xc00, 0u);
 
     MY_LOGV("  transport_stream_id = %u", br->getBits(16));
     MY_LOGV("  reserved = %u", br->getBits(2));
@@ -593,7 +650,7 @@
 
             LOGV("    program_map_PID = 0x%04x", programMapPID);
 
-            mPrograms.push(new Program(programMapPID));
+            mPrograms.push(new Program(this, programMapPID));
         }
     }
 
@@ -680,4 +737,12 @@
     return NULL;
 }
 
+bool ATSParser::PTSTimeDeltaEstablished() {
+    if (mPrograms.isEmpty()) {
+        return false;
+    }
+
+    return mPrograms.editItemAt(0)->PTSTimeDeltaEstablished();
+}
+
 }  // namespace android
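
A sketch of driving the parser, using only the interface shown in these hunks; "dataSource", "offset" and "switchedVariants" are placeholders:

    sp<ATSParser> parser = new ATSParser;

    uint8_t packet[188];
    ssize_t n;
    while ((n = dataSource->readAt(offset, packet, sizeof(packet)))
            == (ssize_t)sizeof(packet)) {
        if (switchedVariants) {
            // e.g. an HTTP live stream jumped to a different bandwidth variant
            parser->signalDiscontinuity(ATSParser::DISCONTINUITY_HTTPLIVE);
            switchedVariants = false;
        }
        parser->feedTSPacket(packet, sizeof(packet));
        offset += n;
    }

    // signalEOS requires a non-OK status (see the CHECK_NE above).
    parser->signalEOS(n < 0 ? (status_t)n : ERROR_END_OF_STREAM);

    sp<MediaSource> video = parser->getSource(ATSParser::AVC_VIDEO);
    sp<MediaSource> audio = parser->getSource(ATSParser::MPEG2ADTS_AUDIO);
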
diff --git a/media/libstagefright/mpeg2ts/ATSParser.h b/media/libstagefright/mpeg2ts/ATSParser.h
index 11b1de4..fe31981 100644
--- a/media/libstagefright/mpeg2ts/ATSParser.h
+++ b/media/libstagefright/mpeg2ts/ATSParser.h
@@ -21,19 +21,28 @@
 #include <sys/types.h>
 
 #include <media/stagefright/foundation/ABase.h>
+#include <media/stagefright/foundation/AMessage.h>
 #include <utils/Vector.h>
 #include <utils/RefBase.h>
 
 namespace android {
 
 struct ABitReader;
+struct ABuffer;
 struct MediaSource;
 
 struct ATSParser : public RefBase {
+    enum DiscontinuityType {
+        DISCONTINUITY_HTTPLIVE,
+        DISCONTINUITY_SEEK,
+        DISCONTINUITY_FORMATCHANGE
+    };
+
     ATSParser();
 
     void feedTSPacket(const void *data, size_t size);
-    void signalDiscontinuity(bool isASeek = false);
+    void signalDiscontinuity(DiscontinuityType type);
+    void signalEOS(status_t finalResult);
 
     enum SourceType {
         AVC_VIDEO,
@@ -41,6 +50,8 @@
     };
     sp<MediaSource> getSource(SourceType type);
 
+    bool PTSTimeDeltaEstablished();
+
 protected:
     virtual ~ATSParser();
 
diff --git a/media/libstagefright/mpeg2ts/AnotherPacketSource.cpp b/media/libstagefright/mpeg2ts/AnotherPacketSource.cpp
index ea747c8..0ad883b 100644
--- a/media/libstagefright/mpeg2ts/AnotherPacketSource.cpp
+++ b/media/libstagefright/mpeg2ts/AnotherPacketSource.cpp
@@ -33,6 +33,11 @@
       mEOSResult(OK) {
 }
 
+void AnotherPacketSource::setFormat(const sp<MetaData> &meta) {
+    CHECK(mFormat == NULL);
+    mFormat = meta;
+}
+
 AnotherPacketSource::~AnotherPacketSource() {
 }
 
@@ -48,6 +53,34 @@
     return mFormat;
 }
 
+status_t AnotherPacketSource::dequeueAccessUnit(sp<ABuffer> *buffer) {
+    buffer->clear();
+
+    Mutex::Autolock autoLock(mLock);
+    while (mEOSResult == OK && mBuffers.empty()) {
+        mCondition.wait(mLock);
+    }
+
+    if (!mBuffers.empty()) {
+        *buffer = *mBuffers.begin();
+        mBuffers.erase(mBuffers.begin());
+
+        int32_t discontinuity;
+        if ((*buffer)->meta()->findInt32("discontinuity", &discontinuity)) {
+
+            if (discontinuity == ATSParser::DISCONTINUITY_FORMATCHANGE) {
+                mFormat.clear();
+            }
+
+            return INFO_DISCONTINUITY;
+        }
+
+        return OK;
+    }
+
+    return mEOSResult;
+}
+
 status_t AnotherPacketSource::read(
         MediaBuffer **out, const ReadOptions *) {
     *out = NULL;
@@ -62,13 +95,15 @@
         mBuffers.erase(mBuffers.begin());
 
         int32_t discontinuity;
-        if (buffer->meta()->findInt32("discontinuity", &discontinuity)
-                && discontinuity) {
+        if (buffer->meta()->findInt32("discontinuity", &discontinuity)) {
+            if (discontinuity == ATSParser::DISCONTINUITY_FORMATCHANGE) {
+                mFormat.clear();
+            }
+
             return INFO_DISCONTINUITY;
         } else {
-            uint64_t timeUs;
-            CHECK(buffer->meta()->findInt64(
-                        "time", (int64_t *)&timeUs));
+            int64_t timeUs;
+            CHECK(buffer->meta()->findInt64("timeUs", &timeUs));
 
             MediaBuffer *mediaBuffer = new MediaBuffer(buffer->size());
             mediaBuffer->meta_data()->setInt64(kKeyTime, timeUs);
@@ -92,7 +127,7 @@
     }
 
     int64_t timeUs;
-    CHECK(buffer->meta()->findInt64("time", &timeUs));
+    CHECK(buffer->meta()->findInt64("timeUs", &timeUs));
     LOGV("queueAccessUnit timeUs=%lld us (%.2f secs)", timeUs, timeUs / 1E6);
 
     Mutex::Autolock autoLock(mLock);
@@ -100,22 +135,30 @@
     mCondition.signal();
 }
 
-void AnotherPacketSource::queueDiscontinuity() {
+void AnotherPacketSource::queueDiscontinuity(
+        ATSParser::DiscontinuityType type) {
     sp<ABuffer> buffer = new ABuffer(0);
-    buffer->meta()->setInt32("discontinuity", true);
+    buffer->meta()->setInt32("discontinuity", static_cast<int32_t>(type));
 
     Mutex::Autolock autoLock(mLock);
 
+#if 0
+    if (type == ATSParser::DISCONTINUITY_SEEK
+            || type == ATSParser::DISCONTINUITY_FORMATCHANGE) {
+        // XXX Fix this: This will also clear any pending discontinuities,
+        // If there's a pending DISCONTINUITY_FORMATCHANGE and the new
+        // discontinuity is "just" a DISCONTINUITY_SEEK, this will effectively
+        // downgrade the type of discontinuity received by the client.
+
+        mBuffers.clear();
+        mEOSResult = OK;
+    }
+#endif
+
     mBuffers.push_back(buffer);
     mCondition.signal();
 }
 
-void AnotherPacketSource::clear() {
-    Mutex::Autolock autoLock(mLock);
-    mBuffers.clear();
-    mEOSResult = OK;
-}
-
 void AnotherPacketSource::signalEOS(status_t result) {
     CHECK(result != OK);
 
@@ -134,4 +177,19 @@
     return false;
 }
 
+status_t AnotherPacketSource::nextBufferTime(int64_t *timeUs) {
+    *timeUs = 0;
+
+    Mutex::Autolock autoLock(mLock);
+
+    if (mBuffers.empty()) {
+        return mEOSResult != OK ? mEOSResult : -EWOULDBLOCK;
+    }
+
+    sp<ABuffer> buffer = *mBuffers.begin();
+    CHECK(buffer->meta()->findInt64("timeUs", timeUs));
+
+    return OK;
+}
+
 }  // namespace android
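
A consumer-side sketch, assuming the AnotherPacketSource API above: dequeueAccessUnit() blocks until data or EOS arrives, returns INFO_DISCONTINUITY for the zero-length marker buffers queued by queueDiscontinuity(), and otherwise hands back one access unit whose "timeUs" metadata carries the presentation time. "source" is a placeholder sp<AnotherPacketSource>.

    for (;;) {
        sp<ABuffer> accessUnit;
        status_t err = source->dequeueAccessUnit(&accessUnit);

        if (err == INFO_DISCONTINUITY) {
            // flush the decoder; a format change additionally requires
            // re-fetching source->getFormat() once it is re-established
            continue;
        }

        if (err != OK) {
            break;  // final status, e.g. ERROR_END_OF_STREAM
        }

        int64_t timeUs;
        CHECK(accessUnit->meta()->findInt64("timeUs", &timeUs));
        // feed accessUnit->data()/size() to the decoder at timeUs
    }
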
diff --git a/media/libstagefright/mpeg2ts/AnotherPacketSource.h b/media/libstagefright/mpeg2ts/AnotherPacketSource.h
index 6999175..6fe93f8 100644
--- a/media/libstagefright/mpeg2ts/AnotherPacketSource.h
+++ b/media/libstagefright/mpeg2ts/AnotherPacketSource.h
@@ -23,6 +23,8 @@
 #include <utils/threads.h>
 #include <utils/List.h>
 
+#include "ATSParser.h"
+
 namespace android {
 
 struct ABuffer;
@@ -30,6 +32,8 @@
 struct AnotherPacketSource : public MediaSource {
     AnotherPacketSource(const sp<MetaData> &meta);
 
+    void setFormat(const sp<MetaData> &meta);
+
     virtual status_t start(MetaData *params = NULL);
     virtual status_t stop();
     virtual sp<MetaData> getFormat();
@@ -39,11 +43,13 @@
 
     bool hasBufferAvailable(status_t *finalResult);
 
+    status_t nextBufferTime(int64_t *timeUs);
+
     void queueAccessUnit(const sp<ABuffer> &buffer);
-    void queueDiscontinuity();
+    void queueDiscontinuity(ATSParser::DiscontinuityType type);
     void signalEOS(status_t result);
 
-    void clear();
+    status_t dequeueAccessUnit(sp<ABuffer> *buffer);
 
 protected:
     virtual ~AnotherPacketSource();
diff --git a/media/libstagefright/mpeg2ts/ESQueue.cpp b/media/libstagefright/mpeg2ts/ESQueue.cpp
index b0b9e66..73efdfe 100644
--- a/media/libstagefright/mpeg2ts/ESQueue.cpp
+++ b/media/libstagefright/mpeg2ts/ESQueue.cpp
@@ -40,10 +40,43 @@
     return mFormat;
 }
 
-void ElementaryStreamQueue::clear() {
-    mBuffer->setRange(0, 0);
-    mTimestamps.clear();
-    mFormat.clear();
+void ElementaryStreamQueue::clear(bool clearFormat) {
+    if (mBuffer != NULL) {
+        mBuffer->setRange(0, 0);
+    }
+
+    mRangeInfos.clear();
+
+    if (clearFormat) {
+        mFormat.clear();
+    }
+}
+
+static bool IsSeeminglyValidADTSHeader(const uint8_t *ptr, size_t size) {
+    if (size < 3) {
+        // Not enough data to verify header.
+        return false;
+    }
+
+    if (ptr[0] != 0xff || (ptr[1] >> 4) != 0x0f) {
+        return false;
+    }
+
+    unsigned layer = (ptr[1] >> 1) & 3;
+
+    if (layer != 0) {
+        return false;
+    }
+
+    unsigned ID = (ptr[1] >> 3) & 1;
+    unsigned profile_ObjectType = ptr[2] >> 6;
+
+    if (ID == 1 && profile_ObjectType == 3) {
+        // MPEG-2 profile 3 is reserved.
+        return false;
+    }
+
+    return true;
 }
 
 status_t ElementaryStreamQueue::appendData(
@@ -52,9 +85,34 @@
         switch (mMode) {
             case H264:
             {
+#if 0
                 if (size < 4 || memcmp("\x00\x00\x00\x01", data, 4)) {
                     return ERROR_MALFORMED;
                 }
+#else
+                uint8_t *ptr = (uint8_t *)data;
+
+                ssize_t startOffset = -1;
+                for (size_t i = 0; i + 3 < size; ++i) {
+                    if (!memcmp("\x00\x00\x00\x01", &ptr[i], 4)) {
+                        startOffset = i;
+                        break;
+                    }
+                }
+
+                if (startOffset < 0) {
+                    return ERROR_MALFORMED;
+                }
+
+                if (startOffset > 0) {
+                    LOGI("found something resembling an H.264 syncword at "
+                         "offset %ld",
+                         startOffset);
+                }
+
+                data = &ptr[startOffset];
+                size -= startOffset;
+#endif
                 break;
             }
 
@@ -62,9 +120,31 @@
             {
                 uint8_t *ptr = (uint8_t *)data;
 
+#if 0
                 if (size < 2 || ptr[0] != 0xff || (ptr[1] >> 4) != 0x0f) {
                     return ERROR_MALFORMED;
                 }
+#else
+                ssize_t startOffset = -1;
+                for (size_t i = 0; i < size; ++i) {
+                    if (IsSeeminglyValidADTSHeader(&ptr[i], size - i)) {
+                        startOffset = i;
+                        break;
+                    }
+                }
+
+                if (startOffset < 0) {
+                    return ERROR_MALFORMED;
+                }
+
+                if (startOffset > 0) {
+                    LOGI("found something resembling an AAC syncword at offset %ld",
+                         startOffset);
+                }
+
+                data = &ptr[startOffset];
+                size -= startOffset;
+#endif
                 break;
             }
 
@@ -94,7 +174,17 @@
     memcpy(mBuffer->data() + mBuffer->size(), data, size);
     mBuffer->setRange(0, mBuffer->size() + size);
 
-    mTimestamps.push_back(timeUs);
+    RangeInfo info;
+    info.mLength = size;
+    info.mTimestampUs = timeUs;
+    mRangeInfos.push_back(info);
+
+#if 0
+    if (mMode == AAC) {
+        LOGI("size = %d, timeUs = %.2f secs", size, timeUs / 1E6);
+        hexdump(data, size);
+    }
+#endif
 
     return OK;
 }
@@ -109,6 +199,7 @@
 }
 
 sp<ABuffer> ElementaryStreamQueue::dequeueAccessUnitAAC() {
+    Vector<size_t> ranges;
     Vector<size_t> frameOffsets;
     Vector<size_t> frameSizes;
     size_t auSize = 0;
@@ -134,6 +225,14 @@
 
             mFormat = MakeAACCodecSpecificData(
                     profile, sampling_freq_index, channel_configuration);
+
+            int32_t sampleRate;
+            int32_t numChannels;
+            CHECK(mFormat->findInt32(kKeySampleRate, &sampleRate));
+            CHECK(mFormat->findInt32(kKeyChannelCount, &numChannels));
+
+            LOGI("found AAC codec config (%d Hz, %d channels)",
+                 sampleRate, numChannels);
         } else {
             // profile_ObjectType, sampling_frequency_index, private_bits,
             // channel_configuration, original_copy, home
@@ -162,6 +261,7 @@
 
         size_t headerSize = protection_absent ? 7 : 9;
 
+        ranges.push(aac_frame_length);
         frameOffsets.push(offset + headerSize);
         frameSizes.push(aac_frame_length - headerSize);
         auSize += aac_frame_length - headerSize;
@@ -173,11 +273,23 @@
         return NULL;
     }
 
+    int64_t timeUs = -1;
+
+    for (size_t i = 0; i < ranges.size(); ++i) {
+        int64_t tmpUs = fetchTimestamp(ranges.itemAt(i));
+
+        if (i == 0) {
+            timeUs = tmpUs;
+        }
+    }
+
     sp<ABuffer> accessUnit = new ABuffer(auSize);
     size_t dstOffset = 0;
     for (size_t i = 0; i < frameOffsets.size(); ++i) {
+        size_t frameOffset = frameOffsets.itemAt(i);
+
         memcpy(accessUnit->data() + dstOffset,
-               mBuffer->data() + frameOffsets.itemAt(i),
+               mBuffer->data() + frameOffset,
                frameSizes.itemAt(i));
 
         dstOffset += frameSizes.itemAt(i);
@@ -187,15 +299,48 @@
             mBuffer->size() - offset);
     mBuffer->setRange(0, mBuffer->size() - offset);
 
-    CHECK_GT(mTimestamps.size(), 0u);
-    int64_t timeUs = *mTimestamps.begin();
-    mTimestamps.erase(mTimestamps.begin());
-
-    accessUnit->meta()->setInt64("time", timeUs);
+    if (timeUs >= 0) {
+        accessUnit->meta()->setInt64("timeUs", timeUs);
+    } else {
+        LOGW("no time for AAC access unit");
+    }
 
     return accessUnit;
 }
 
+int64_t ElementaryStreamQueue::fetchTimestamp(size_t size) {
+    int64_t timeUs = -1;
+    bool first = true;
+
+    while (size > 0) {
+        CHECK(!mRangeInfos.empty());
+
+        RangeInfo *info = &*mRangeInfos.begin();
+
+        if (first) {
+            timeUs = info->mTimestampUs;
+            first = false;
+        }
+
+        if (info->mLength > size) {
+            info->mLength -= size;
+
+            if (first) {
+                info->mTimestampUs = -1;
+            }
+
+            size = 0;
+        } else {
+            size -= info->mLength;
+
+            mRangeInfos.erase(mRangeInfos.begin());
+            info = NULL;
+        }
+    }
+
+    return timeUs;
+}
+
 // static
 sp<MetaData> ElementaryStreamQueue::MakeAACCodecSpecificData(
         unsigned profile, unsigned sampling_freq_index,
@@ -333,11 +478,10 @@
 
             mBuffer->setRange(0, mBuffer->size() - nextScan);
 
-            CHECK_GT(mTimestamps.size(), 0u);
-            int64_t timeUs = *mTimestamps.begin();
-            mTimestamps.erase(mTimestamps.begin());
+            int64_t timeUs = fetchTimestamp(nextScan);
+            CHECK_GE(timeUs, 0ll);
 
-            accessUnit->meta()->setInt64("time", timeUs);
+            accessUnit->meta()->setInt64("timeUs", timeUs);
 
             if (mFormat == NULL) {
                 mFormat = MakeAVCCodecSpecificData(accessUnit);
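
A worked illustration of the RangeInfo bookkeeping introduced above; "payload0" and "payload1" are placeholder PES payload buffers. Each appendData() call records a (length, timestamp) range, and an access unit dequeued later is stamped via fetchTimestamp() with the timestamp of the range containing its first byte.

    ElementaryStreamQueue queue(ElementaryStreamQueue::AAC);
    queue.appendData(payload0, 512, 0);       // PES payload at t = 0 us
    queue.appendData(payload1, 512, 23220);   // next payload, ~one AAC frame later

    sp<ABuffer> unit;
    while ((unit = queue.dequeueAccessUnit()) != NULL) {
        int64_t timeUs;
        if (unit->meta()->findInt64("timeUs", &timeUs)) {
            // e.g. a 700-byte unit consumes all of the first range plus
            // 188 bytes of the second and is stamped with t = 0 us
        }
    }
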
diff --git a/media/libstagefright/mpeg2ts/ESQueue.h b/media/libstagefright/mpeg2ts/ESQueue.h
index 9eaf834..5b7957e 100644
--- a/media/libstagefright/mpeg2ts/ESQueue.h
+++ b/media/libstagefright/mpeg2ts/ESQueue.h
@@ -35,23 +35,32 @@
     ElementaryStreamQueue(Mode mode);
 
     status_t appendData(const void *data, size_t size, int64_t timeUs);
-    void clear();
+    void clear(bool clearFormat);
 
     sp<ABuffer> dequeueAccessUnit();
 
     sp<MetaData> getFormat();
 
 private:
+    struct RangeInfo {
+        int64_t mTimestampUs;
+        size_t mLength;
+    };
+
     Mode mMode;
 
     sp<ABuffer> mBuffer;
-    List<int64_t> mTimestamps;
+    List<RangeInfo> mRangeInfos;
 
     sp<MetaData> mFormat;
 
     sp<ABuffer> dequeueAccessUnitH264();
     sp<ABuffer> dequeueAccessUnitAAC();
 
+    // consume a logical (compressed) access unit of size "size",
+    // returns its timestamp in us (or -1 if no time information).
+    int64_t fetchTimestamp(size_t size);
+
     static sp<MetaData> MakeAACCodecSpecificData(
             unsigned profile, unsigned sampling_freq_index,
             unsigned channel_configuration);
diff --git a/media/libstagefright/mpeg2ts/MPEG2TSExtractor.cpp b/media/libstagefright/mpeg2ts/MPEG2TSExtractor.cpp
index 3176810..a1f0796 100644
--- a/media/libstagefright/mpeg2ts/MPEG2TSExtractor.cpp
+++ b/media/libstagefright/mpeg2ts/MPEG2TSExtractor.cpp
@@ -19,7 +19,7 @@
 #include <utils/Log.h>
 
 #include "include/MPEG2TSExtractor.h"
-#include "include/LiveSource.h"
+#include "include/LiveSession.h"
 #include "include/NuCachedSource2.h"
 
 #include <media/stagefright/DataSource.h>
@@ -82,8 +82,8 @@
     sp<MetaData> meta = mImpl->getFormat();
 
     int64_t durationUs;
-    if (mExtractor->mLiveSource != NULL
-            && mExtractor->mLiveSource->getDuration(&durationUs)) {
+    if (mExtractor->mLiveSession != NULL
+            && mExtractor->mLiveSession->getDuration(&durationUs) == OK) {
         meta->setInt64(kKeyDuration, durationUs);
     }
 
@@ -214,7 +214,7 @@
 
     if (isDiscontinuity(packet, n)) {
         LOGI("XXX discontinuity detected");
-        mParser->signalDiscontinuity();
+        mParser->signalDiscontinuity(ATSParser::DISCONTINUITY_HTTPLIVE);
     } else if (n < (ssize_t)kTSPacketSize) {
         return (n < 0) ? (status_t)n : ERROR_END_OF_STREAM;
     } else {
@@ -226,32 +226,20 @@
     return OK;
 }
 
-void MPEG2TSExtractor::setLiveSource(const sp<LiveSource> &liveSource) {
+void MPEG2TSExtractor::setLiveSession(const sp<LiveSession> &liveSession) {
     Mutex::Autolock autoLock(mLock);
 
-    mLiveSource = liveSource;
+    mLiveSession = liveSession;
 }
 
 void MPEG2TSExtractor::seekTo(int64_t seekTimeUs) {
     Mutex::Autolock autoLock(mLock);
 
-    if (mLiveSource == NULL) {
+    if (mLiveSession == NULL) {
         return;
     }
 
-    if (mDataSource->flags() & DataSource::kIsCachingDataSource) {
-        static_cast<NuCachedSource2 *>(mDataSource.get())->suspend();
-    }
-
-    if (mLiveSource->seekTo(seekTimeUs)) {
-        mParser->signalDiscontinuity(true  /* isSeek */);
-        mOffset = 0;
-    }
-
-    if (mDataSource->flags() & DataSource::kIsCachingDataSource) {
-        static_cast<NuCachedSource2 *>(mDataSource.get())
-            ->clearCacheAndResume();
-    }
+    mLiveSession->seekTo(seekTimeUs);
 }
 
 uint32_t MPEG2TSExtractor::flags() const {
@@ -259,7 +247,7 @@
 
     uint32_t flags = CAN_PAUSE;
 
-    if (mLiveSource != NULL && mLiveSource->isSeekable()) {
+    if (mLiveSession != NULL && mLiveSession->isSeekable()) {
         flags |= CAN_SEEK_FORWARD | CAN_SEEK_BACKWARD | CAN_SEEK;
     }
 
diff --git a/media/libstagefright/omx/Android.mk b/media/libstagefright/omx/Android.mk
index ead1675..6e069c8 100644
--- a/media/libstagefright/omx/Android.mk
+++ b/media/libstagefright/omx/Android.mk
@@ -31,7 +31,6 @@
         libutils                \
         libui                   \
         libcutils               \
-        libstagefright_color_conversion
 
 ifneq ($(BUILD_WITHOUT_PV),true)
 LOCAL_SHARED_LIBRARIES += \
diff --git a/media/libstagefright/omx/OMX.cpp b/media/libstagefright/omx/OMX.cpp
index f19c16a..3638f41 100644
--- a/media/libstagefright/omx/OMX.cpp
+++ b/media/libstagefright/omx/OMX.cpp
@@ -24,14 +24,11 @@
 #include <sys/resource.h>
 
 #include "../include/OMX.h"
-#include "OMXRenderer.h"
 
 #include "../include/OMXNodeInstance.h"
-#include "../include/SoftwareRenderer.h"
 
 #include <binder/IMemory.h>
 #include <media/stagefright/MediaDebug.h>
-#include <media/stagefright/VideoRenderer.h>
 #include <utils/threads.h>
 
 #include "OMXMaster.h"
@@ -89,6 +86,9 @@
         mQueueChanged.signal();
     }
 
+    // Don't call join on myself
+    CHECK(mThread != pthread_self());
+
     void *dummy;
     pthread_join(mThread, &dummy);
 }
@@ -249,9 +249,12 @@
 
     status_t err = instance->freeNode(mMaster);
 
-    index = mDispatchers.indexOfKey(node);
-    CHECK(index >= 0);
-    mDispatchers.removeItemsAt(index);
+    {
+        Mutex::Autolock autoLock(mLock);
+        index = mDispatchers.indexOfKey(node);
+        CHECK(index >= 0);
+        mDispatchers.removeItemsAt(index);
+    }
 
     return err;
 }
@@ -289,6 +292,16 @@
             index, params, size);
 }
 
+status_t OMX::enableGraphicBuffers(
+        node_id node, OMX_U32 port_index, OMX_BOOL enable) {
+    return findInstance(node)->enableGraphicBuffers(port_index, enable);
+}
+
+status_t OMX::storeMetaDataInBuffers(
+        node_id node, OMX_U32 port_index, OMX_BOOL enable) {
+    return findInstance(node)->storeMetaDataInBuffers(port_index, enable);
+}
+
 status_t OMX::useBuffer(
         node_id node, OMX_U32 port_index, const sp<IMemory> &params,
         buffer_id *buffer) {
@@ -296,6 +309,13 @@
             port_index, params, buffer);
 }
 
+status_t OMX::useGraphicBuffer(
+        node_id node, OMX_U32 port_index,
+        const sp<GraphicBuffer> &graphicBuffer, buffer_id *buffer) {
+    return findInstance(node)->useGraphicBuffer(
+            port_index, graphicBuffer, buffer);
+}
+
 status_t OMX::allocateBuffer(
         node_id node, OMX_U32 port_index, size_t size,
         buffer_id *buffer, void **buffer_data) {
@@ -425,135 +445,4 @@
     mNodeIDToInstance.removeItem(node);
 }
 
-////////////////////////////////////////////////////////////////////////////////
-
-struct SharedVideoRenderer : public VideoRenderer {
-    SharedVideoRenderer(void *libHandle, VideoRenderer *obj)
-        : mLibHandle(libHandle),
-          mObj(obj) {
-    }
-
-    virtual ~SharedVideoRenderer() {
-        delete mObj;
-        mObj = NULL;
-
-        dlclose(mLibHandle);
-        mLibHandle = NULL;
-    }
-
-    virtual void render(
-            const void *data, size_t size, void *platformPrivate) {
-        return mObj->render(data, size, platformPrivate);
-    }
-
-private:
-    void *mLibHandle;
-    VideoRenderer *mObj;
-
-    SharedVideoRenderer(const SharedVideoRenderer &);
-    SharedVideoRenderer &operator=(const SharedVideoRenderer &);
-};
-
-sp<IOMXRenderer> OMX::createRenderer(
-        const sp<ISurface> &surface,
-        const char *componentName,
-        OMX_COLOR_FORMATTYPE colorFormat,
-        size_t encodedWidth, size_t encodedHeight,
-        size_t displayWidth, size_t displayHeight,
-        int32_t rotationDegrees) {
-    Mutex::Autolock autoLock(mLock);
-
-    VideoRenderer *impl = NULL;
-
-    void *libHandle = dlopen("libstagefrighthw.so", RTLD_NOW);
-
-    if (libHandle) {
-        typedef VideoRenderer *(*CreateRendererWithRotationFunc)(
-                const sp<ISurface> &surface,
-                const char *componentName,
-                OMX_COLOR_FORMATTYPE colorFormat,
-                size_t displayWidth, size_t displayHeight,
-                size_t decodedWidth, size_t decodedHeight,
-                int32_t rotationDegrees);
-
-        typedef VideoRenderer *(*CreateRendererFunc)(
-                const sp<ISurface> &surface,
-                const char *componentName,
-                OMX_COLOR_FORMATTYPE colorFormat,
-                size_t displayWidth, size_t displayHeight,
-                size_t decodedWidth, size_t decodedHeight);
-
-        CreateRendererWithRotationFunc funcWithRotation =
-            (CreateRendererWithRotationFunc)dlsym(
-                    libHandle,
-                    "_Z26createRendererWithRotationRKN7android2spINS_8"
-                    "ISurfaceEEEPKc20OMX_COLOR_FORMATTYPEjjjji");
-
-        if (funcWithRotation) {
-            impl = (*funcWithRotation)(
-                    surface, componentName, colorFormat,
-                    displayWidth, displayHeight, encodedWidth, encodedHeight,
-                    rotationDegrees);
-        } else {
-            CreateRendererFunc func =
-                (CreateRendererFunc)dlsym(
-                        libHandle,
-                        "_Z14createRendererRKN7android2spINS_8ISurfaceEEEPKc20"
-                        "OMX_COLOR_FORMATTYPEjjjj");
-
-            if (func) {
-                impl = (*func)(surface, componentName, colorFormat,
-                        displayWidth, displayHeight, encodedWidth, encodedHeight);
-            }
-        }
-
-        if (impl) {
-            impl = new SharedVideoRenderer(libHandle, impl);
-            libHandle = NULL;
-        }
-
-        if (libHandle) {
-            dlclose(libHandle);
-            libHandle = NULL;
-        }
-    }
-
-    if (!impl) {
-        LOGW("Using software renderer.");
-        impl = new SoftwareRenderer(
-                colorFormat,
-                surface,
-                displayWidth, displayHeight,
-                encodedWidth, encodedHeight);
-
-        if (((SoftwareRenderer *)impl)->initCheck() != OK) {
-            delete impl;
-            impl = NULL;
-
-            return NULL;
-        }
-    }
-
-    return new OMXRenderer(impl);
-}
-
-OMXRenderer::OMXRenderer(VideoRenderer *impl)
-    : mImpl(impl) {
-}
-
-OMXRenderer::~OMXRenderer() {
-    delete mImpl;
-    mImpl = NULL;
-}
-
-void OMXRenderer::render(IOMX::buffer_id buffer) {
-    OMX_BUFFERHEADERTYPE *header = (OMX_BUFFERHEADERTYPE *)buffer;
-
-    mImpl->render(
-            header->pBuffer + header->nOffset,
-            header->nFilledLen,
-            header->pPlatformPrivate);
-}
-
 }  // namespace android
-
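
With the dlopen()-based renderer path removed, decoded video is expected to reach the display through the new graphic-buffer entry points instead: enable the Android native-buffer extension on the output port, then register each gralloc-backed buffer with the component. A very rough ordering sketch against the methods added above; the node id, port index and buffer list are placeholders, and the real client-side wiring lives outside this file:

#include <media/IOMX.h>
#include <ui/GraphicBuffer.h>
#include <utils/Errors.h>
#include <utils/Vector.h>

using namespace android;

// Hypothetical helper showing the intended call order for native buffers:
// 1) switch the output port over to graphic buffers, then 2) hand each
// gralloc buffer to the component via useGraphicBuffer().
static status_t attachOutputBuffers(
        const sp<IOMX> &omx, IOMX::node_id node, OMX_U32 portIndex,
        const Vector<sp<GraphicBuffer> > &buffers,
        Vector<IOMX::buffer_id> *ids) {
    status_t err = omx->enableGraphicBuffers(node, portIndex, OMX_TRUE);
    if (err != OK) {
        return err;  // component lacks the native-buffer extension
    }

    for (size_t i = 0; i < buffers.size(); ++i) {
        IOMX::buffer_id id;
        err = omx->useGraphicBuffer(node, portIndex, buffers[i], &id);
        if (err != OK) {
            return err;
        }
        ids->push(id);
    }

    return OK;
}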
diff --git a/media/libstagefright/omx/OMXNodeInstance.cpp b/media/libstagefright/omx/OMXNodeInstance.cpp
index 5db516e..9b6d441 100644
--- a/media/libstagefright/omx/OMXNodeInstance.cpp
+++ b/media/libstagefright/omx/OMXNodeInstance.cpp
@@ -24,6 +24,7 @@
 #include <OMX_Component.h>
 
 #include <binder/IMemory.h>
+#include <media/stagefright/HardwareAPI.h>
 #include <media/stagefright/MediaDebug.h>
 #include <media/stagefright/MediaErrors.h>
 
@@ -40,6 +41,11 @@
           mIsBackup(false) {
     }
 
+    BufferMeta(const sp<GraphicBuffer> &graphicBuffer)
+        : mGraphicBuffer(graphicBuffer),
+          mIsBackup(false) {
+    }
+
     void CopyFromOMX(const OMX_BUFFERHEADERTYPE *header) {
         if (!mIsBackup) {
             return;
@@ -61,6 +67,7 @@
     }
 
 private:
+    sp<GraphicBuffer> mGraphicBuffer;
     sp<IMemory> mMem;
     size_t mSize;
     bool mIsBackup;
@@ -240,6 +247,74 @@
     return StatusFromOMXError(err);
 }
 
+status_t OMXNodeInstance::enableGraphicBuffers(
+        OMX_U32 portIndex, OMX_BOOL enable) {
+    Mutex::Autolock autoLock(mLock);
+
+    OMX_INDEXTYPE index;
+    OMX_ERRORTYPE err = OMX_GetExtensionIndex(
+            mHandle,
+            const_cast<OMX_STRING>("OMX.google.android.index.enableAndroidNativeBuffers"),
+            &index);
+
+    if (err != OMX_ErrorNone) {
+        LOGE("OMX_GetExtensionIndex failed");
+
+        return StatusFromOMXError(err);
+    }
+
+    OMX_VERSIONTYPE ver;
+    ver.s.nVersionMajor = 1;
+    ver.s.nVersionMinor = 0;
+    ver.s.nRevision = 0;
+    ver.s.nStep = 0;
+    EnableAndroidNativeBuffersParams params = {
+        sizeof(EnableAndroidNativeBuffersParams), ver, portIndex, enable,
+    };
+
+    err = OMX_SetParameter(mHandle, index, &params);
+
+    if (err != OMX_ErrorNone) {
+        LOGE("OMX_EnableAndroidNativeBuffers failed with error %d (0x%08x)",
+                err, err);
+
+        return UNKNOWN_ERROR;
+    }
+
+    return OK;
+}
+
+status_t OMXNodeInstance::storeMetaDataInBuffers(
+        OMX_U32 portIndex,
+        OMX_BOOL enable) {
+    Mutex::Autolock autolock(mLock);
+
+    OMX_INDEXTYPE index;
+    OMX_STRING name = const_cast<OMX_STRING>(
+            "OMX.google.android.index.storeMetaDataInBuffers");
+
+    OMX_ERRORTYPE err = OMX_GetExtensionIndex(mHandle, name, &index);
+    if (err != OMX_ErrorNone) {
+        LOGE("OMX_GetExtensionIndex %s failed", name);
+        return StatusFromOMXError(err);
+    }
+
+    StoreMetaDataInBuffersParams params;
+    memset(&params, 0, sizeof(params));
+    params.nSize = sizeof(params);
+
+    // Version: 1.0.0.0
+    params.nVersion.s.nVersionMajor = 1;
+
+    params.nPortIndex = portIndex;
+    params.bStoreMetaData = enable;
+    if ((err = OMX_SetParameter(mHandle, index, &params)) != OMX_ErrorNone) {
+        LOGE("OMX_SetParameter() failed for StoreMetaDataInBuffers: 0x%08x", err);
+        return UNKNOWN_ERROR;
+    }
+    return err;
+}
+
 status_t OMXNodeInstance::useBuffer(
         OMX_U32 portIndex, const sp<IMemory> &params,
         OMX::buffer_id *buffer) {
@@ -273,6 +348,60 @@
     return OK;
 }
 
+status_t OMXNodeInstance::useGraphicBuffer(
+        OMX_U32 portIndex, const sp<GraphicBuffer>& graphicBuffer,
+        OMX::buffer_id *buffer) {
+    Mutex::Autolock autoLock(mLock);
+
+    OMX_INDEXTYPE index;
+    OMX_ERRORTYPE err = OMX_GetExtensionIndex(
+            mHandle,
+            const_cast<OMX_STRING>("OMX.google.android.index.useAndroidNativeBuffer"),
+            &index);
+
+    if (err != OMX_ErrorNone) {
+        LOGE("OMX_GetExtensionIndex failed");
+
+        return StatusFromOMXError(err);
+    }
+
+    BufferMeta *bufferMeta = new BufferMeta(graphicBuffer);
+
+    OMX_BUFFERHEADERTYPE *header;
+
+    OMX_VERSIONTYPE ver;
+    ver.s.nVersionMajor = 1;
+    ver.s.nVersionMinor = 0;
+    ver.s.nRevision = 0;
+    ver.s.nStep = 0;
+    UseAndroidNativeBufferParams params = {
+        sizeof(UseAndroidNativeBufferParams), ver, portIndex, bufferMeta,
+        &header, graphicBuffer,
+    };
+
+    err = OMX_SetParameter(mHandle, index, &params);
+
+    if (err != OMX_ErrorNone) {
+        LOGE("OMX_UseAndroidNativeBuffer failed with error %d (0x%08x)", err,
+                err);
+
+        delete bufferMeta;
+        bufferMeta = NULL;
+
+        *buffer = 0;
+
+        return UNKNOWN_ERROR;
+    }
+
+    CHECK_EQ(header->pAppPrivate, bufferMeta);
+
+    *buffer = header;
+
+    addActiveBuffer(portIndex, *buffer);
+
+    return OK;
+}
+
 status_t OMXNodeInstance::allocateBuffer(
         OMX_U32 portIndex, size_t size, OMX::buffer_id *buffer,
         void **buffer_data) {
@@ -498,4 +627,3 @@
 }
 
 }  // namespace android
-
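
Both new OMXNodeInstance methods follow the same OpenMAX IL idiom: resolve a vendor extension index by name with OMX_GetExtensionIndex, stamp the size/version header of a parameter struct, and push it with OMX_SetParameter. A generic sketch of that idiom against the stock IL headers (the helper name is made up; only the pattern mirrors the code above):

#include <OMX_Component.h>
#include <OMX_Core.h>

// Generic "set an extension parameter by name" helper in the style of
// enableGraphicBuffers()/storeMetaDataInBuffers() above.  PARAMS is assumed
// to be an IL parameter struct starting with nSize and nVersion.
template <typename PARAMS>
static OMX_ERRORTYPE setExtensionParam(
        OMX_HANDLETYPE handle, const char *extensionName, PARAMS *params) {
    OMX_INDEXTYPE index;
    OMX_ERRORTYPE err = OMX_GetExtensionIndex(
            handle, const_cast<OMX_STRING>(extensionName), &index);

    if (err != OMX_ErrorNone) {
        return err;  // the component does not implement this extension
    }

    // Every OMX parameter struct is prefixed with its size and IL version.
    params->nSize = sizeof(PARAMS);
    params->nVersion.s.nVersionMajor = 1;
    params->nVersion.s.nVersionMinor = 0;
    params->nVersion.s.nRevision = 0;
    params->nVersion.s.nStep = 0;

    return OMX_SetParameter(handle, index, params);
}

A caller fills in the extension-specific payload fields (port index, enable flag, buffer handle, ...) on the struct before passing it in.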
diff --git a/media/libstagefright/omx/OMXRenderer.h b/media/libstagefright/omx/OMXRenderer.h
deleted file mode 100644
index 4d194ce..0000000
--- a/media/libstagefright/omx/OMXRenderer.h
+++ /dev/null
@@ -1,44 +0,0 @@
-/*
- * Copyright (C) 2009 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef OMX_RENDERER_H_
-
-#define OMX_RENDERER_H_
-
-#include <media/IOMX.h>
-
-namespace android {
-
-class VideoRenderer;
-
-class OMXRenderer : public BnOMXRenderer {
-public:
-    // Assumes ownership of "impl".
-    OMXRenderer(VideoRenderer *impl);
-    virtual ~OMXRenderer();
-
-    virtual void render(IOMX::buffer_id buffer);
-
-private:
-    VideoRenderer *mImpl;
-
-    OMXRenderer(const OMXRenderer &);
-    OMXRenderer &operator=(const OMXRenderer &);
-};
-
-}  // namespace android
-
-#endif  // OMX_RENDERER_H_
diff --git a/media/libstagefright/rtsp/AMPEG4AudioAssembler.cpp b/media/libstagefright/rtsp/AMPEG4AudioAssembler.cpp
index b0d2c64..bbde516 100644
--- a/media/libstagefright/rtsp/AMPEG4AudioAssembler.cpp
+++ b/media/libstagefright/rtsp/AMPEG4AudioAssembler.cpp
@@ -18,18 +18,381 @@
 
 #include "ARTPSource.h"
 
+#include <media/stagefright/foundation/hexdump.h>
+#include <media/stagefright/foundation/ABitReader.h>
 #include <media/stagefright/foundation/ABuffer.h>
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/MediaErrors.h>
+
+#include <ctype.h>
 
 namespace android {
 
-AMPEG4AudioAssembler::AMPEG4AudioAssembler(const sp<AMessage> &notify)
+static bool GetAttribute(const char *s, const char *key, AString *value) {
+    value->clear();
+
+    size_t keyLen = strlen(key);
+
+    for (;;) {
+        while (isspace(*s)) {
+            ++s;
+        }
+
+        const char *colonPos = strchr(s, ';');
+
+        size_t len =
+            (colonPos == NULL) ? strlen(s) : colonPos - s;
+
+        if (len >= keyLen + 1 && s[keyLen] == '=' && !strncmp(s, key, keyLen)) {
+            value->setTo(&s[keyLen + 1], len - keyLen - 1);
+            return true;
+        }
+
+        if (colonPos == NULL) {
+            return false;
+        }
+
+        s = colonPos + 1;
+    }
+}
+
+static sp<ABuffer> decodeHex(const AString &s) {
+    if ((s.size() % 2) != 0) {
+        return NULL;
+    }
+
+    size_t outLen = s.size() / 2;
+    sp<ABuffer> buffer = new ABuffer(outLen);
+    uint8_t *out = buffer->data();
+
+    uint8_t accum = 0;
+    for (size_t i = 0; i < s.size(); ++i) {
+        char c = s.c_str()[i];
+        unsigned value;
+        if (c >= '0' && c <= '9') {
+            value = c - '0';
+        } else if (c >= 'a' && c <= 'f') {
+            value = c - 'a' + 10;
+        } else if (c >= 'A' && c <= 'F') {
+            value = c - 'A' + 10;
+        } else {
+            return NULL;
+        }
+
+        accum = (accum << 4) | value;
+
+        if (i & 1) {
+            *out++ = accum;
+
+            accum = 0;
+        }
+    }
+
+    return buffer;
+}
+
+static status_t parseAudioObjectType(
+        ABitReader *bits, unsigned *audioObjectType) {
+    *audioObjectType = bits->getBits(5);
+    if ((*audioObjectType) == 31) {
+        *audioObjectType = 32 + bits->getBits(6);
+    }
+
+    return OK;
+}
+
+static status_t parseGASpecificConfig(
+        ABitReader *bits,
+        unsigned audioObjectType, unsigned channelConfiguration) {
+    unsigned frameLengthFlag = bits->getBits(1);
+    unsigned dependsOnCoreCoder = bits->getBits(1);
+    if (dependsOnCoreCoder) {
+        /* unsigned coreCoderDelay = */bits->getBits(14);  // 14 bits per ISO/IEC 14496-3
+    }
+    unsigned extensionFlag = bits->getBits(1);
+
+    if (!channelConfiguration) {
+        // program_config_element
+        return ERROR_UNSUPPORTED;  // XXX to be implemented
+    }
+
+    if (audioObjectType == 6 || audioObjectType == 20) {
+        /* unsigned layerNr = */bits->getBits(3);
+    }
+
+    if (extensionFlag) {
+        if (audioObjectType == 22) {
+            /* unsigned numOfSubFrame = */bits->getBits(5);
+            /* unsigned layerLength = */bits->getBits(11);
+        } else if (audioObjectType == 17 || audioObjectType == 19
+                || audioObjectType == 20 || audioObjectType == 23) {
+            /* unsigned aacSectionDataResilienceFlag = */bits->getBits(1);
+            /* unsigned aacScalefactorDataResilienceFlag = */bits->getBits(1);
+            /* unsigned aacSpectralDataResilienceFlag = */bits->getBits(1);
+        }
+
+        unsigned extensionFlag3 = bits->getBits(1);
+        CHECK_EQ(extensionFlag3, 0u);  // TBD in version 3
+    }
+
+    return OK;
+}
+
+static status_t parseAudioSpecificConfig(ABitReader *bits) {
+    unsigned audioObjectType;
+    CHECK_EQ(parseAudioObjectType(bits, &audioObjectType), (status_t)OK);
+
+    unsigned samplingFreqIndex = bits->getBits(4);
+    if (samplingFreqIndex == 0x0f) {
+        /* unsigned samplingFrequency = */bits->getBits(24);
+    }
+
+    unsigned channelConfiguration = bits->getBits(4);
+
+    unsigned extensionAudioObjectType = 0;
+    unsigned sbrPresent = 0;
+
+    if (audioObjectType == 5) {
+        extensionAudioObjectType = audioObjectType;
+        sbrPresent = 1;
+        unsigned extensionSamplingFreqIndex = bits->getBits(4);
+        if (extensionSamplingFreqIndex == 0x0f) {
+            /* unsigned extensionSamplingFrequency = */bits->getBits(24);
+        }
+        CHECK_EQ(parseAudioObjectType(bits, &audioObjectType), (status_t)OK);
+    }
+
+    CHECK((audioObjectType >= 1 && audioObjectType <= 4)
+        || (audioObjectType >= 6 && audioObjectType <= 7)
+        || audioObjectType == 17
+        || (audioObjectType >= 19 && audioObjectType <= 23));
+
+    CHECK_EQ(parseGASpecificConfig(
+                bits, audioObjectType, channelConfiguration), (status_t)OK);
+
+    if (audioObjectType == 17
+            || (audioObjectType >= 19 && audioObjectType <= 27)) {
+        unsigned epConfig = bits->getBits(2);
+        if (epConfig == 2 || epConfig == 3) {
+            // ErrorProtectionSpecificConfig
+            return ERROR_UNSUPPORTED;  // XXX to be implemented
+
+            if (epConfig == 3) {
+                unsigned directMapping = bits->getBits(1);
+                CHECK_EQ(directMapping, 1u);
+            }
+        }
+    }
+
+#if 0
+    // This is not supported here as the upper layers did not explicitly
+    // signal the length of AudioSpecificConfig.
+
+    if (extensionAudioObjectType != 5 && bits->numBitsLeft() >= 16) {
+        unsigned syncExtensionType = bits->getBits(11);
+        if (syncExtensionType == 0x2b7) {
+            CHECK_EQ(parseAudioObjectType(bits, &extensionAudioObjectType),
+                     (status_t)OK);
+
+            sbrPresent = bits->getBits(1);
+
+            if (sbrPresent == 1) {
+                unsigned extensionSamplingFreqIndex = bits->getBits(4);
+                if (extensionSamplingFreqIndex == 0x0f) {
+                    /* unsigned extensionSamplingFrequency = */bits->getBits(24);
+                }
+            }
+        }
+    }
+#endif
+
+    return OK;
+}
+
+static status_t parseStreamMuxConfig(
+        ABitReader *bits,
+        unsigned *numSubFrames,
+        unsigned *frameLengthType,
+        bool *otherDataPresent,
+        unsigned *otherDataLenBits) {
+    unsigned audioMuxVersion = bits->getBits(1);
+
+    unsigned audioMuxVersionA = 0;
+    if (audioMuxVersion == 1) {
+        audioMuxVersionA = bits->getBits(1);
+    }
+
+    CHECK_EQ(audioMuxVersionA, 0u);  // otherwise future spec
+
+    if (audioMuxVersion != 0) {
+        return ERROR_UNSUPPORTED;  // XXX to be implemented;
+    }
+    CHECK_EQ(audioMuxVersion, 0u);  // XXX to be implemented
+
+    unsigned allStreamsSameTimeFraming = bits->getBits(1);
+    CHECK_EQ(allStreamsSameTimeFraming, 1u);  // There's only one stream.
+
+    *numSubFrames = bits->getBits(6);
+    unsigned numProgram = bits->getBits(4);
+    CHECK_EQ(numProgram, 0u);  // disabled in RTP LATM
+
+    unsigned numLayer = bits->getBits(3);
+    CHECK_EQ(numLayer, 0u);  // disabled in RTP LATM
+
+    if (audioMuxVersion == 0) {
+        // AudioSpecificConfig
+        CHECK_EQ(parseAudioSpecificConfig(bits), (status_t)OK);
+    } else {
+        TRESPASS();  // XXX to be implemented
+    }
+
+    *frameLengthType = bits->getBits(3);
+    switch (*frameLengthType) {
+        case 0:
+        {
+            /* unsigned bufferFullness = */bits->getBits(8);
+
+            // The "coreFrameOffset" does not apply since there's only
+            // a single layer.
+            break;
+        }
+
+        case 1:
+        {
+            /* unsigned frameLength = */bits->getBits(9);
+            break;
+        }
+
+        case 3:
+        case 4:
+        case 5:
+        {
+            /* unsigned CELPframeLengthTableIndex = */bits->getBits(6);
+            break;
+        }
+
+        case 6:
+        case 7:
+        {
+            /* unsigned HVXCframeLengthTableIndex = */bits->getBits(1);
+            break;
+        }
+
+        default:
+            break;
+    }
+
+    *otherDataPresent = bits->getBits(1);
+    *otherDataLenBits = 0;
+    if (*otherDataPresent) {
+        if (audioMuxVersion == 1) {
+            TRESPASS();  // XXX to be implemented
+        } else {
+            *otherDataLenBits = 0;
+
+            unsigned otherDataLenEsc;
+            do {
+                (*otherDataLenBits) <<= 8;
+                otherDataLenEsc = bits->getBits(1);
+                unsigned otherDataLenTmp = bits->getBits(8);
+                (*otherDataLenBits) += otherDataLenTmp;
+            } while (otherDataLenEsc);
+        }
+    }
+
+    unsigned crcCheckPresent = bits->getBits(1);
+    if (crcCheckPresent) {
+        /* unsigned crcCheckSum = */bits->getBits(8);
+    }
+
+    return OK;
+}
+
+sp<ABuffer> AMPEG4AudioAssembler::removeLATMFraming(const sp<ABuffer> &buffer) {
+    CHECK(!mMuxConfigPresent);  // XXX to be implemented
+
+    sp<ABuffer> out = new ABuffer(buffer->size());
+    out->setRange(0, 0);
+
+    size_t offset = 0;
+    uint8_t *ptr = buffer->data();
+
+    for (size_t i = 0; i <= mNumSubFrames; ++i) {
+        // parse PayloadLengthInfo
+
+        unsigned payloadLength = 0;
+
+        switch (mFrameLengthType) {
+            case 0:
+            {
+                unsigned muxSlotLengthBytes = 0;
+                unsigned tmp;
+                do {
+                    CHECK_LT(offset, buffer->size());
+                    tmp = ptr[offset++];
+                    muxSlotLengthBytes += tmp;
+                } while (tmp == 0xff);
+
+                payloadLength = muxSlotLengthBytes;
+                break;
+            }
+
+            default:
+                TRESPASS();  // XXX to be implemented
+                break;
+        }
+
+        CHECK_LE(offset + payloadLength, buffer->size());
+
+        memcpy(out->data() + out->size(), &ptr[offset], payloadLength);
+        out->setRange(0, out->size() + payloadLength);
+
+        offset += payloadLength;
+
+        if (mOtherDataPresent) {
+            // We want to stay byte-aligned.
+
+            CHECK((mOtherDataLenBits % 8) == 0);
+            CHECK_LE(offset + (mOtherDataLenBits / 8), buffer->size());
+            offset += mOtherDataLenBits / 8;
+        }
+    }
+
+    CHECK_EQ(offset, buffer->size());
+
+    return out;
+}
+
+AMPEG4AudioAssembler::AMPEG4AudioAssembler(
+        const sp<AMessage> &notify, const AString &params)
     : mNotifyMsg(notify),
+      mMuxConfigPresent(false),
       mAccessUnitRTPTime(0),
       mNextExpectedSeqNoValid(false),
       mNextExpectedSeqNo(0),
       mAccessUnitDamaged(false) {
+    AString val;
+    if (!GetAttribute(params.c_str(), "cpresent", &val)) {
+        mMuxConfigPresent = true;
+    } else if (val == "0") {
+        mMuxConfigPresent = false;
+    } else {
+        CHECK(val == "1");
+        mMuxConfigPresent = true;
+    }
+
+    CHECK(GetAttribute(params.c_str(), "config", &val));
+
+    sp<ABuffer> config = decodeHex(val);
+    CHECK(config != NULL);
+
+    ABitReader bits(config->data(), config->size());
+    status_t err = parseStreamMuxConfig(
+            &bits, &mNumSubFrames, &mFrameLengthType,
+            &mOtherDataPresent, &mOtherDataLenBits);
+
+    CHECK_EQ(err, (status_t)NO_ERROR);
 }
 
 AMPEG4AudioAssembler::~AMPEG4AudioAssembler() {
@@ -108,13 +471,7 @@
     while (it != mPackets.end()) {
         const sp<ABuffer> &unit = *it;
 
-        size_t n = 0;
-        while (unit->data()[n] == 0xff) {
-            ++n;
-        }
-        ++n;
-
-        totalSize += unit->size() - n;
+        totalSize += unit->size();
         ++it;
     }
 
@@ -124,20 +481,13 @@
     while (it != mPackets.end()) {
         const sp<ABuffer> &unit = *it;
 
-        size_t n = 0;
-        while (unit->data()[n] == 0xff) {
-            ++n;
-        }
-        ++n;
-
         memcpy((uint8_t *)accessUnit->data() + offset,
-               unit->data() + n, unit->size() - n);
-
-        offset += unit->size() - n;
+               unit->data(), unit->size());
 
         ++it;
     }
 
+    accessUnit = removeLATMFraming(accessUnit);
     CopyTimes(accessUnit, *mPackets.begin());
 
 #if 0
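
removeLATMFraming() strips the LATM PayloadLengthInfo in front of each subframe when frameLengthType == 0: the payload length is the sum of 8-bit bytes up to and including the first byte that is not 0xff. A tiny self-contained decoder for just that length field, with a made-up packet:

#include <cstdint>
#include <cstdio>
#include <vector>

// Decode a LATM PayloadLengthInfo (frameLengthType == 0): each 0xff byte
// means "add 255 and keep reading"; the first non-0xff byte ends the sum.
static bool decodePayloadLength(
        const std::vector<uint8_t> &buf, size_t *offset, unsigned *length) {
    unsigned total = 0;
    uint8_t b;
    do {
        if (*offset >= buf.size()) {
            return false;  // truncated packet
        }
        b = buf[(*offset)++];
        total += b;
    } while (b == 0xff);

    *length = total;
    return true;
}

int main() {
    // 0xff 0xff 0x2a => payload length 255 + 255 + 42 = 552 bytes.
    std::vector<uint8_t> pkt = {0xff, 0xff, 0x2a};
    size_t offset = 0;
    unsigned len = 0;
    if (decodePayloadLength(pkt, &offset, &len)) {
        std::printf("payload length = %u, header bytes = %zu\n", len, offset);
    }
    return 0;
}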
diff --git a/media/libstagefright/rtsp/AMPEG4AudioAssembler.h b/media/libstagefright/rtsp/AMPEG4AudioAssembler.h
index bf9f204..9cef94c 100644
--- a/media/libstagefright/rtsp/AMPEG4AudioAssembler.h
+++ b/media/libstagefright/rtsp/AMPEG4AudioAssembler.h
@@ -27,9 +27,11 @@
 namespace android {
 
 struct AMessage;
+struct AString;
 
 struct AMPEG4AudioAssembler : public ARTPAssembler {
-    AMPEG4AudioAssembler(const sp<AMessage> &notify);
+    AMPEG4AudioAssembler(
+            const sp<AMessage> &notify, const AString &params);
 
 protected:
     virtual ~AMPEG4AudioAssembler();
@@ -40,6 +42,13 @@
 
 private:
     sp<AMessage> mNotifyMsg;
+
+    bool mMuxConfigPresent;
+    unsigned mNumSubFrames;
+    unsigned mFrameLengthType;
+    bool mOtherDataPresent;
+    unsigned mOtherDataLenBits;
+
     uint32_t mAccessUnitRTPTime;
     bool mNextExpectedSeqNoValid;
     uint32_t mNextExpectedSeqNo;
@@ -49,6 +58,8 @@
     AssemblyStatus addPacket(const sp<ARTPSource> &source);
     void submitAccessUnit();
 
+    sp<ABuffer> removeLATMFraming(const sp<ABuffer> &buffer);
+
     DISALLOW_EVIL_CONSTRUCTORS(AMPEG4AudioAssembler);
 };
 
diff --git a/media/libstagefright/rtsp/ARTPSource.cpp b/media/libstagefright/rtsp/ARTPSource.cpp
index 2518264..5aae4e7 100644
--- a/media/libstagefright/rtsp/ARTPSource.cpp
+++ b/media/libstagefright/rtsp/ARTPSource.cpp
@@ -57,7 +57,7 @@
         mAssembler = new AAVCAssembler(notify);
         mIssueFIRRequests = true;
     } else if (!strncmp(desc.c_str(), "MP4A-LATM/", 10)) {
-        mAssembler = new AMPEG4AudioAssembler(notify);
+        mAssembler = new AMPEG4AudioAssembler(notify, params);
     } else if (!strncmp(desc.c_str(), "H263-1998/", 10)
             || !strncmp(desc.c_str(), "H263-2000/", 10)) {
         mAssembler = new AH263Assembler(notify);
diff --git a/media/libstagefright/rtsp/ARTPWriter.cpp b/media/libstagefright/rtsp/ARTPWriter.cpp
index 155fd96..5a033e1 100644
--- a/media/libstagefright/rtsp/ARTPWriter.cpp
+++ b/media/libstagefright/rtsp/ARTPWriter.cpp
@@ -46,7 +46,7 @@
 
 ARTPWriter::ARTPWriter(int fd)
     : mFlags(0),
-      mFd(fd),
+      mFd(dup(fd)),
       mLooper(new ALooper),
       mReflector(new AHandlerReflector<ARTPWriter>(this)) {
     CHECK_GE(fd, 0);
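
Duplicating the descriptor in the ARTPWriter constructor gives the writer its own reference to the underlying file or socket, so the caller may close its fd at any time without invalidating the writer's copy (which the writer must now close itself). A small illustration of that ownership rule using a hypothetical sink class:

#include <fcntl.h>
#include <unistd.h>
#include <cstring>

// A sink that, like the patched ARTPWriter, dup()s the descriptor it is
// handed and therefore owns an independent reference to the open file.
class FdSink {
public:
    explicit FdSink(int fd) : mFd(dup(fd)) {}
    ~FdSink() { if (mFd >= 0) close(mFd); }   // must close our own copy
    bool append(const char *s) { return write(mFd, s, strlen(s)) >= 0; }
private:
    int mFd;
};

int main() {
    int fd = open("/tmp/rtp_dump.bin", O_WRONLY | O_CREAT | O_TRUNC, 0644);
    if (fd < 0) {
        return 1;
    }
    FdSink sink(fd);
    close(fd);                    // the caller may close its fd right away...
    sink.append("still ok\n");    // ...the sink's dup()ed copy stays valid
    return 0;
}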
diff --git a/media/libstagefright/rtsp/ARTSPConnection.cpp b/media/libstagefright/rtsp/ARTSPConnection.cpp
index f928c06..e936923 100644
--- a/media/libstagefright/rtsp/ARTSPConnection.cpp
+++ b/media/libstagefright/rtsp/ARTSPConnection.cpp
@@ -23,11 +23,13 @@
 #include <media/stagefright/foundation/ABuffer.h>
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/foundation/base64.h>
 #include <media/stagefright/MediaErrors.h>
 
 #include <arpa/inet.h>
 #include <fcntl.h>
 #include <netdb.h>
+#include <openssl/md5.h>
 #include <sys/socket.h>
 
 namespace android {
@@ -37,6 +39,7 @@
 
 ARTSPConnection::ARTSPConnection()
     : mState(DISCONNECTED),
+      mAuthType(NONE),
       mSocket(-1),
       mConnectionID(0),
       mNextCSeq(0),
@@ -114,10 +117,13 @@
 
 // static
 bool ARTSPConnection::ParseURL(
-        const char *url, AString *host, unsigned *port, AString *path) {
+        const char *url, AString *host, unsigned *port, AString *path,
+        AString *user, AString *pass) {
     host->clear();
     *port = 0;
     path->clear();
+    user->clear();
+    pass->clear();
 
     if (strncasecmp("rtsp://", url, 7)) {
         return false;
@@ -133,6 +139,24 @@
         path->setTo(slashPos);
     }
 
+    ssize_t atPos = host->find("@");
+
+    if (atPos >= 0) {
+        // Split off the user:pass@ portion from the hostname.
+
+        AString userPass(*host, 0, atPos);
+        host->erase(0, atPos + 1);
+
+        ssize_t colonPos = userPass.find(":");
+
+        if (colonPos < 0) {
+            *user = userPass;
+        } else {
+            user->setTo(userPass, 0, colonPos);
+            pass->setTo(userPass, colonPos + 1, userPass.size() - colonPos - 1);
+        }
+    }
+
     const char *colonPos = strchr(host->c_str(), ':');
 
     if (colonPos != NULL) {
@@ -187,7 +211,12 @@
 
     AString host, path;
     unsigned port;
-    if (!ParseURL(url.c_str(), &host, &port, &path)) {
+    if (!ParseURL(url.c_str(), &host, &port, &path, &mUser, &mPass)
+            || (mUser.size() > 0 && mPass.size() == 0)) {
+        // If we have a user name but no password we have to give up
+        // right here, since we currently have no way of asking the user
+        // for this information.
+
         LOGE("Malformed rtsp url %s", url.c_str());
 
         reply->setInt32("result", ERROR_MALFORMED);
@@ -197,6 +226,10 @@
         return;
     }
 
+    if (mUser.size() > 0) {
+        LOGV("user = '%s', pass = '%s'", mUser.c_str(), mPass.c_str());
+    }
+
     struct hostent *ent = gethostbyname(host.c_str());
     if (ent == NULL) {
         LOGE("Unknown host %s", host.c_str());
@@ -262,6 +295,11 @@
     reply->setInt32("result", OK);
     mState = DISCONNECTED;
 
+    mUser.clear();
+    mPass.clear();
+    mAuthType = NONE;
+    mNonce.clear();
+
     reply->post();
 }
 
@@ -335,6 +373,12 @@
     AString request;
     CHECK(msg->findString("request", &request));
 
+    // Just in case we need to re-issue the request with proper authentication
+    // later, stash it away.
+    reply->setString("original-request", request.c_str(), request.size());
+
+    addAuthentication(&request);
+
     // Find the boundary between headers and the body.
     ssize_t i = request.find("\r\n\r\n");
     CHECK_GE(i, 0);
@@ -347,7 +391,7 @@
 
     request.insert(cseqHeader, i + 2);
 
-    LOGV("%s", request.c_str());
+    LOGV("request: '%s'", request.c_str());
 
     size_t numBytesSent = 0;
     while (numBytesSent < request.size()) {
@@ -612,6 +656,30 @@
         }
     }
 
+    if (response->mStatusCode == 401) {
+        if (mAuthType == NONE && mUser.size() > 0
+                && parseAuthMethod(response)) {
+            ssize_t i;
+            CHECK_EQ((status_t)OK, findPendingRequest(response, &i));
+            CHECK_GE(i, 0);
+
+            sp<AMessage> reply = mPendingRequests.valueAt(i);
+            mPendingRequests.removeItemsAt(i);
+
+            AString request;
+            CHECK(reply->findString("original-request", &request));
+
+            sp<AMessage> msg = new AMessage(kWhatSendRequest, id());
+            msg->setMessage("reply", reply);
+            msg->setString("request", request.c_str(), request.size());
+
+            LOGI("re-sending request with authentication headers...");
+            onSendRequest(msg);
+
+            return true;
+        }
+    }
+
     return notifyResponseListener(response);
 }
 
@@ -628,26 +696,47 @@
     return true;
 }
 
-bool ARTSPConnection::notifyResponseListener(
-        const sp<ARTSPResponse> &response) {
+status_t ARTSPConnection::findPendingRequest(
+        const sp<ARTSPResponse> &response, ssize_t *index) const {
+    *index = -1;  // stays -1 for unsolicited responses (no CSeq header)
+
     ssize_t i = response->mHeaders.indexOfKey("cseq");
 
     if (i < 0) {
-        return true;
+        // This is an unsolicited server->client message.
+        return OK;
     }
 
     AString value = response->mHeaders.valueAt(i);
 
     unsigned long cseq;
     if (!ParseSingleUnsignedLong(value.c_str(), &cseq)) {
-        return false;
+        return ERROR_MALFORMED;
     }
 
     i = mPendingRequests.indexOfKey(cseq);
 
     if (i < 0) {
-        // Unsolicited response?
-        TRESPASS();
+        return -ENOENT;
+    }
+
+    *index = i;
+
+    return OK;
+}
+
+bool ARTSPConnection::notifyResponseListener(
+        const sp<ARTSPResponse> &response) {
+    ssize_t i;
+    status_t err = findPendingRequest(response, &i);
+
+    if (err == OK && i < 0) {
+        // An unsolicited server response is not a problem.
+        return true;
+    }
+
+    if (err != OK) {
+        return false;
     }
 
     sp<AMessage> reply = mPendingRequests.valueAt(i);
@@ -660,4 +749,160 @@
     return true;
 }
 
+bool ARTSPConnection::parseAuthMethod(const sp<ARTSPResponse> &response) {
+    ssize_t i = response->mHeaders.indexOfKey("www-authenticate");
+
+    if (i < 0) {
+        return false;
+    }
+
+    AString value = response->mHeaders.valueAt(i);
+
+    if (!strncmp(value.c_str(), "Basic", 5)) {
+        mAuthType = BASIC;
+    } else {
+#if !defined(HAVE_ANDROID_OS)
+        // We don't have access to the MD5 implementation on the simulator,
+        // so we won't support digest authentication.
+        return false;
+#endif
+
+        CHECK(!strncmp(value.c_str(), "Digest", 6));
+        mAuthType = DIGEST;
+
+        i = value.find("nonce=");
+        CHECK_GE(i, 0);
+        CHECK_EQ(value.c_str()[i + 6], '\"');
+        ssize_t j = value.find("\"", i + 7);
+        CHECK_GE(j, 0);
+
+        mNonce.setTo(value, i + 7, j - i - 7);
+    }
+
+    return true;
+}
+
+#if defined(HAVE_ANDROID_OS)
+static void H(const AString &s, AString *out) {
+    out->clear();
+
+    MD5_CTX m;
+    MD5_Init(&m);
+    MD5_Update(&m, s.c_str(), s.size());
+
+    uint8_t key[16];
+    MD5_Final(key, &m);
+
+    for (size_t i = 0; i < 16; ++i) {
+        char nibble = key[i] >> 4;
+        if (nibble <= 9) {
+            nibble += '0';
+        } else {
+            nibble += 'a' - 10;
+        }
+        out->append(&nibble, 1);
+
+        nibble = key[i] & 0x0f;
+        if (nibble <= 9) {
+            nibble += '0';
+        } else {
+            nibble += 'a' - 10;
+        }
+        out->append(&nibble, 1);
+    }
+}
+#endif
+
+static void GetMethodAndURL(
+        const AString &request, AString *method, AString *url) {
+    ssize_t space1 = request.find(" ");
+    CHECK_GE(space1, 0);
+
+    ssize_t space2 = request.find(" ", space1 + 1);
+    CHECK_GE(space2, 0);
+
+    method->setTo(request, 0, space1);
+    url->setTo(request, space1 + 1, space2 - space1);
+}
+
+void ARTSPConnection::addAuthentication(AString *request) {
+    if (mAuthType == NONE) {
+        return;
+    }
+
+    // Find the boundary between headers and the body.
+    ssize_t i = request->find("\r\n\r\n");
+    CHECK_GE(i, 0);
+
+    if (mAuthType == BASIC) {
+        AString tmp;
+        tmp.append(mUser);
+        tmp.append(":");
+        tmp.append(mPass);
+
+        AString out;
+        encodeBase64(tmp.c_str(), tmp.size(), &out);
+
+        AString fragment;
+        fragment.append("Authorization: Basic ");
+        fragment.append(out);
+        fragment.append("\r\n");
+
+        request->insert(fragment, i + 2);
+
+        return;
+    }
+
+#if defined(HAVE_ANDROID_OS)
+    CHECK_EQ((int)mAuthType, (int)DIGEST);
+
+    AString method, url;
+    GetMethodAndURL(*request, &method, &url);
+
+    AString A1;
+    A1.append(mUser);
+    A1.append(":");
+    A1.append("Streaming Server");
+    A1.append(":");
+    A1.append(mPass);
+
+    AString A2;
+    A2.append(method);
+    A2.append(":");
+    A2.append(url);
+
+    AString HA1, HA2;
+    H(A1, &HA1);
+    H(A2, &HA2);
+
+    AString tmp;
+    tmp.append(HA1);
+    tmp.append(":");
+    tmp.append(mNonce);
+    tmp.append(":");
+    tmp.append(HA2);
+
+    AString digest;
+    H(tmp, &digest);
+
+    AString fragment;
+    fragment.append("Authorization: Digest ");
+    fragment.append("nonce=\"");
+    fragment.append(mNonce);
+    fragment.append("\", ");
+    fragment.append("username=\"");
+    fragment.append(mUser);
+    fragment.append("\", ");
+    fragment.append("uri=\"");
+    fragment.append(url);
+    fragment.append("\", ");
+    fragment.append("response=\"");
+    fragment.append(digest);
+    fragment.append("\"");
+    fragment.append("\r\n");
+
+    request->insert(fragment, i + 2);
+#endif
+}
+
 }  // namespace android
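
The Digest branch of addAuthentication() implements the RFC 2617 computation response = MD5(MD5(user:realm:pass) ':' nonce ':' MD5(method:uri)), with the realm hard-coded to "Streaming Server" rather than parsed from the WWW-Authenticate challenge. A standalone sketch of the same computation using the OpenSSL MD5 one-shot call the patch already links against; all input values below are made up:

#include <openssl/md5.h>
#include <cstdio>
#include <string>

// Hex-encoded MD5, equivalent to the static H() helper above.
static std::string md5hex(const std::string &s) {
    unsigned char digest[MD5_DIGEST_LENGTH];
    MD5(reinterpret_cast<const unsigned char *>(s.data()), s.size(), digest);

    char out[2 * MD5_DIGEST_LENGTH + 1];
    for (int i = 0; i < MD5_DIGEST_LENGTH; ++i) {
        std::snprintf(out + 2 * i, 3, "%02x", digest[i]);
    }
    return std::string(out, 2 * MD5_DIGEST_LENGTH);
}

int main() {
    // Hypothetical credentials and challenge; a more complete client would
    // take the realm from the server's WWW-Authenticate header.
    std::string user = "alice", pass = "secret", realm = "Streaming Server";
    std::string nonce = "0123456789abcdef";
    std::string method = "DESCRIBE", uri = "rtsp://example.com/stream.sdp";

    std::string ha1 = md5hex(user + ":" + realm + ":" + pass);
    std::string ha2 = md5hex(method + ":" + uri);
    std::string response = md5hex(ha1 + ":" + nonce + ":" + ha2);

    std::printf("Authorization: Digest nonce=\"%s\", username=\"%s\", "
                "uri=\"%s\", response=\"%s\"\r\n",
                nonce.c_str(), user.c_str(), uri.c_str(), response.c_str());
    return 0;
}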
diff --git a/media/libstagefright/rtsp/ARTSPConnection.h b/media/libstagefright/rtsp/ARTSPConnection.h
index 96e0d5b..19be2a6 100644
--- a/media/libstagefright/rtsp/ARTSPConnection.h
+++ b/media/libstagefright/rtsp/ARTSPConnection.h
@@ -42,6 +42,10 @@
 
     void observeBinaryData(const sp<AMessage> &reply);
 
+    static bool ParseURL(
+            const char *url, AString *host, unsigned *port, AString *path,
+            AString *user, AString *pass);
+
 protected:
     virtual ~ARTSPConnection();
     virtual void onMessageReceived(const sp<AMessage> &msg);
@@ -62,9 +66,18 @@
         kWhatObserveBinaryData  = 'obin',
     };
 
+    enum AuthType {
+        NONE,
+        BASIC,
+        DIGEST
+    };
+
     static const int64_t kSelectTimeoutUs;
 
     State mState;
+    AString mUser, mPass;
+    AuthType mAuthType;
+    AString mNonce;
     int mSocket;
     int32_t mConnectionID;
     int32_t mNextCSeq;
@@ -90,8 +103,11 @@
     sp<ABuffer> receiveBinaryData();
     bool notifyResponseListener(const sp<ARTSPResponse> &response);
 
-    static bool ParseURL(
-            const char *url, AString *host, unsigned *port, AString *path);
+    bool parseAuthMethod(const sp<ARTSPResponse> &response);
+    void addAuthentication(AString *request);
+
+    status_t findPendingRequest(
+            const sp<ARTSPResponse> &response, ssize_t *index) const;
 
     static bool ParseSingleUnsignedLong(
             const char *from, unsigned long *x);
diff --git a/media/libstagefright/rtsp/ASessionDescription.cpp b/media/libstagefright/rtsp/ASessionDescription.cpp
index 612caff..547fbab 100644
--- a/media/libstagefright/rtsp/ASessionDescription.cpp
+++ b/media/libstagefright/rtsp/ASessionDescription.cpp
@@ -53,21 +53,30 @@
     mFormats.push(AString("[root]"));
 
     AString desc((const char *)data, size);
-    LOGI("%s", desc.c_str());
 
     size_t i = 0;
     for (;;) {
-        ssize_t eolPos = desc.find("\r\n", i);
+        ssize_t eolPos = desc.find("\n", i);
+
         if (eolPos < 0) {
             break;
         }
 
-        AString line(desc, i, eolPos - i);
+        AString line;
+        if ((size_t)eolPos > i && desc.c_str()[eolPos - 1] == '\r') {
+            // We accept both '\n' and '\r\n' line endings, if it's
+            // the latter, strip the '\r' as well.
+            line.setTo(desc, i, eolPos - i - 1);
+        } else {
+            line.setTo(desc, i, eolPos - i);
+        }
 
         if (line.size() < 2 || line.c_str()[1] != '=') {
             return false;
         }
 
+        LOGI("%s", line.c_str());
+
         switch (line.c_str()[0]) {
             case 'v':
             {
@@ -141,7 +150,7 @@
             }
         }
 
-        i = eolPos + 2;
+        i = eolPos + 1;
     }
 
     return true;
@@ -245,7 +254,7 @@
         return false;
     }
 
-    if (value == "npt=now-") {
+    if (value == "npt=now-" || value == "npt=0-") {
         return false;
     }
 
diff --git a/media/libstagefright/rtsp/Android.mk b/media/libstagefright/rtsp/Android.mk
index 081ae32..0bbadc1 100644
--- a/media/libstagefright/rtsp/Android.mk
+++ b/media/libstagefright/rtsp/Android.mk
@@ -23,6 +23,7 @@
 	$(JNI_H_INCLUDE) \
 	$(TOP)/frameworks/base/include/media/stagefright/openmax \
         $(TOP)/frameworks/base/media/libstagefright/include \
+        $(TOP)/external/openssl/include
 
 LOCAL_MODULE:= libstagefright_rtsp
 
diff --git a/media/libstagefright/rtsp/MyHandler.h b/media/libstagefright/rtsp/MyHandler.h
index 6943608..9bb8c46 100644
--- a/media/libstagefright/rtsp/MyHandler.h
+++ b/media/libstagefright/rtsp/MyHandler.h
@@ -96,6 +96,7 @@
           mNetLooper(new ALooper),
           mConn(new ARTSPConnection),
           mRTPConn(new ARTPConnection),
+          mOriginalSessionURL(url),
           mSessionURL(url),
           mSetupTracksSuccessful(false),
           mSeekPending(false),
@@ -113,6 +114,23 @@
         mNetLooper->start(false /* runOnCallingThread */,
                           false /* canCallJava */,
                           PRIORITY_HIGHEST);
+
+        // Strip any authentication info from the session URL; we don't
+        // want to transmit the user/pass in cleartext.
+        AString host, path, user, pass;
+        unsigned port;
+        if (ARTSPConnection::ParseURL(
+                    mSessionURL.c_str(), &host, &port, &path, &user, &pass)
+                && user.size() > 0) {
+            mSessionURL.clear();
+            mSessionURL.append("rtsp://");
+            mSessionURL.append(host);
+            mSessionURL.append(":");
+            mSessionURL.append(StringPrintf("%u", port));
+            mSessionURL.append(path);
+
+            LOGI("rewritten session url: '%s'", mSessionURL.c_str());
+        }
     }
 
     void connect(const sp<AMessage> &doneMsg) {
@@ -126,7 +144,7 @@
         mConn->observeBinaryData(notify);
 
         sp<AMessage> reply = new AMessage('conn', id());
-        mConn->connect(mSessionURL.c_str(), reply);
+        mConn->connect(mOriginalSessionURL.c_str(), reply);
     }
 
     void disconnect(const sp<AMessage> &doneMsg) {
@@ -312,7 +330,7 @@
                 int32_t reconnect;
                 if (msg->findInt32("reconnect", &reconnect) && reconnect) {
                     sp<AMessage> reply = new AMessage('conn', id());
-                    mConn->connect(mSessionURL.c_str(), reply);
+                    mConn->connect(mOriginalSessionURL.c_str(), reply);
                 } else {
                     (new AMessage('quit', id()))->post();
                 }
@@ -922,7 +940,7 @@
         CHECK(GetAttribute(range.c_str(), "npt", &val));
         float npt1, npt2;
 
-        if (val == "now-") {
+        if (val == "now-" || val == "0-") {
             // This is a live stream and therefore not seekable.
             return;
         } else {
@@ -992,6 +1010,7 @@
     sp<ARTSPConnection> mConn;
     sp<ARTPConnection> mRTPConn;
     sp<ASessionDescription> mSessionDesc;
+    AString mOriginalSessionURL;  // This one still has user:pass@
     AString mSessionURL;
     AString mBaseURL;
     AString mSessionID;
diff --git a/media/libstagefright/string.cpp b/media/libstagefright/string.cpp
deleted file mode 100644
index 8b2c36c..0000000
--- a/media/libstagefright/string.cpp
+++ /dev/null
@@ -1,92 +0,0 @@
-/*
- * Copyright (C) 2009 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "include/stagefright_string.h"
-
-#include <media/stagefright/MediaDebug.h>
-
-namespace android {
-
-// static
-string::size_type string::npos = (string::size_type)-1;
-
-string::string() {
-}
-
-string::string(const char *s, size_t length)
-    : mString(s, length) {
-}
-
-string::string(const string &from, size_type start, size_type length) {
-    CHECK(start <= from.size());
-    if (length == npos) {
-        length = from.size() - start;
-    } else {
-        CHECK(start + length <= from.size());
-    }
-
-    mString.setTo(from.c_str() + start, length);
-}
-
-string::string(const char *s)
-    : mString(s) {
-}
-
-const char *string::c_str() const {
-    return mString.string();
-}
-
-string::size_type string::size() const {
-    return mString.length();
-}
-
-void string::clear() {
-    mString = String8();
-}
-
-string::size_type string::find(char c) const {
-    char s[2];
-    s[0] = c;
-    s[1] = '\0';
-
-    ssize_t index = mString.find(s);
-
-    return index < 0 ? npos : (size_type)index;
-}
-
-bool string::operator<(const string &other) const {
-    return mString < other.mString;
-}
-
-bool string::operator==(const string &other) const {
-    return mString == other.mString;
-}
-
-string &string::operator+=(char c) {
-    mString.append(&c, 1);
-
-    return *this;
-}
-
-void string::erase(size_t from, size_t length) {
-    String8 s(mString.string(), from);
-    s.append(mString.string() + from + length);
-    
-    mString = s;
-}
-
-}  // namespace android
-
diff --git a/media/libstagefright/yuv/Android.mk b/media/libstagefright/yuv/Android.mk
new file mode 100644
index 0000000..7697e3c
--- /dev/null
+++ b/media/libstagefright/yuv/Android.mk
@@ -0,0 +1,15 @@
+LOCAL_PATH:= $(call my-dir)
+include $(CLEAR_VARS)
+
+LOCAL_SRC_FILES:=               \
+        YUVImage.cpp            \
+        YUVCanvas.cpp
+
+LOCAL_SHARED_LIBRARIES :=       \
+        libcutils
+
+LOCAL_MODULE:= libstagefright_yuv
+
+LOCAL_PRELINK_MODULE := false
+
+include $(BUILD_SHARED_LIBRARY)
diff --git a/media/libstagefright/yuv/YUVCanvas.cpp b/media/libstagefright/yuv/YUVCanvas.cpp
new file mode 100644
index 0000000..38aa779
--- /dev/null
+++ b/media/libstagefright/yuv/YUVCanvas.cpp
@@ -0,0 +1,111 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_NDEBUG 0
+#define LOG_TAG "YUVCanvas"
+
+#include <media/stagefright/MediaDebug.h>
+#include <media/stagefright/YUVCanvas.h>
+#include <media/stagefright/YUVImage.h>
+#include <ui/Rect.h>
+
+namespace android {
+
+YUVCanvas::YUVCanvas(YUVImage &yuvImage)
+    : mYUVImage(yuvImage) {
+}
+
+YUVCanvas::~YUVCanvas() {
+}
+
+void YUVCanvas::FillYUV(uint8_t yValue, uint8_t uValue, uint8_t vValue) {
+    for (int32_t y = 0; y < mYUVImage.height(); ++y) {
+        for (int32_t x = 0; x < mYUVImage.width(); ++x) {
+            mYUVImage.setPixelValue(x, y, yValue, uValue, vValue);
+        }
+    }
+}
+
+void YUVCanvas::FillYUVRectangle(const Rect& rect,
+        uint8_t yValue, uint8_t uValue, uint8_t vValue) {
+    for (int32_t y = rect.top; y < rect.bottom; ++y) {
+        for (int32_t x = rect.left; x < rect.right; ++x) {
+            mYUVImage.setPixelValue(x, y, yValue, uValue, vValue);
+        }
+    }
+}
+
+void YUVCanvas::CopyImageRect(
+        const Rect& srcRect,
+        int32_t destStartX, int32_t destStartY,
+        const YUVImage &srcImage) {
+
+    // Try fast copy first
+    if (YUVImage::fastCopyRectangle(
+                srcRect,
+                destStartX, destStartY,
+                srcImage, mYUVImage)) {
+        return;
+    }
+
+    int32_t srcStartX = srcRect.left;
+    int32_t srcStartY = srcRect.top;
+    for (int32_t offsetY = 0; offsetY < srcRect.height(); ++offsetY) {
+        for (int32_t offsetX = 0; offsetX < srcRect.width(); ++offsetX) {
+            int32_t srcX = srcStartX + offsetX;
+            int32_t srcY = srcStartY + offsetY;
+
+            int32_t destX = destStartX + offsetX;
+            int32_t destY = destStartY + offsetY;
+
+            uint8_t yValue;
+            uint8_t uValue;
+            uint8_t vValue;
+
+            srcImage.getPixelValue(srcX, srcY, &yValue, &uValue, &vValue);
+            mYUVImage.setPixelValue(destX, destY, yValue, uValue, vValue);
+        }
+    }
+}
+
+void YUVCanvas::downsample(
+        int32_t srcOffsetX, int32_t srcOffsetY,
+        int32_t skipX, int32_t skipY,
+        const YUVImage &srcImage) {
+    // TODO: Add a low pass filter for downsampling.
+
+    // Check that srcImage is big enough to fill mYUVImage.
+    CHECK((srcOffsetX + (mYUVImage.width() - 1) * skipX) < srcImage.width());
+    CHECK((srcOffsetY + (mYUVImage.height() - 1) * skipY) < srcImage.height());
+
+    uint8_t yValue;
+    uint8_t uValue;
+    uint8_t vValue;
+
+    int32_t srcY = srcOffsetY;
+    for (int32_t y = 0; y < mYUVImage.height(); ++y) {
+        int32_t srcX = srcOffsetX;
+        for (int32_t x = 0; x < mYUVImage.width(); ++x) {
+            srcImage.getPixelValue(srcX, srcY, &yValue, &uValue, &vValue);
+            mYUVImage.setPixelValue(x, y, yValue, uValue, vValue);
+
+            srcX += skipX;
+        }
+        srcY += skipY;
+    }
+}
+
+}  // namespace android
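
YUVCanvas is a thin drawing/copying layer over a YUVImage target; downsample() does nearest-neighbour subsampling from a source image and CopyImageRect() blits a sub-rectangle, using the fast path when the formats match. A usage sketch built only from the constructors and methods introduced in this change; the frame sizes are made up:

#include <media/stagefright/YUVCanvas.h>
#include <media/stagefright/YUVImage.h>
#include <ui/Rect.h>

using namespace android;

// Scale a 640x480 YUV420-planar frame down to 320x240 by sampling every
// other pixel in each direction.
static void halveFrame(const YUVImage &src /* assumed 640x480 */) {
    YUVImage dst(YUVImage::YUV420Planar, 320, 240);  // allocates its own buffer
    YUVCanvas canvas(dst);

    // Nearest-neighbour 2:1 subsampling; there is no low-pass filter yet
    // (see the TODO in YUVCanvas::downsample).
    canvas.downsample(0 /* srcOffsetX */, 0 /* srcOffsetY */,
                      2 /* skipX */, 2 /* skipY */, src);

    // Alternatively, a 320x240 sub-rectangle of the source can be blitted
    // 1:1 (this overwrites the downsampled pixels; shown only to exercise
    // the API, which takes the fast memcpy path for matching formats).
    canvas.CopyImageRect(Rect(0, 0, 320, 240), 0, 0, src);
}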
diff --git a/media/libstagefright/yuv/YUVImage.cpp b/media/libstagefright/yuv/YUVImage.cpp
new file mode 100644
index 0000000..b712062
--- /dev/null
+++ b/media/libstagefright/yuv/YUVImage.cpp
@@ -0,0 +1,413 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_NDEBUG 0
+#define LOG_TAG "YUVImage"
+
+#include <media/stagefright/YUVImage.h>
+#include <ui/Rect.h>
+#include <media/stagefright/MediaDebug.h>
+
+namespace android {
+
+YUVImage::YUVImage(YUVFormat yuvFormat, int32_t width, int32_t height) {
+    mYUVFormat = yuvFormat;
+    mWidth = width;
+    mHeight = height;
+
+    size_t numberOfBytes = bufferSize(yuvFormat, width, height);
+    uint8_t *buffer = new uint8_t[numberOfBytes];
+    mBuffer = buffer;
+    mOwnBuffer = true;
+
+    initializeYUVPointers();
+}
+
+YUVImage::YUVImage(YUVFormat yuvFormat, int32_t width, int32_t height, uint8_t *buffer) {
+    mYUVFormat = yuvFormat;
+    mWidth = width;
+    mHeight = height;
+    mBuffer = buffer;
+    mOwnBuffer = false;
+
+    initializeYUVPointers();
+}
+
+//static
+size_t YUVImage::bufferSize(YUVFormat yuvFormat, int32_t width, int32_t height) {
+    int32_t numberOfPixels = width*height;
+    size_t numberOfBytes = 0;
+    if (yuvFormat == YUV420Planar || yuvFormat == YUV420SemiPlanar) {
+        // Y takes numberOfPixels bytes and U/V take numberOfPixels/4 bytes each.
+        numberOfBytes = (size_t)(numberOfPixels + (numberOfPixels >> 1));
+    } else {
+        LOGE("Format not supported");
+    }
+    return numberOfBytes;
+}
+
+bool YUVImage::initializeYUVPointers() {
+    int32_t numberOfPixels = mWidth * mHeight;
+
+    if (mYUVFormat == YUV420Planar) {
+        mYdata = (uint8_t *)mBuffer;
+        mUdata = mYdata + numberOfPixels;
+        mVdata = mUdata + (numberOfPixels >> 2);
+    } else if (mYUVFormat == YUV420SemiPlanar) {
+        // U and V channels are interleaved as VUVUVU.
+        // So V data starts at the end of Y channel and
+        // U data starts right after V's start.
+        mYdata = (uint8_t *)mBuffer;
+        mVdata = mYdata + numberOfPixels;
+        mUdata = mVdata + 1;
+    } else {
+        LOGE("Format not supported");
+        return false;
+    }
+    return true;
+}
+
+YUVImage::~YUVImage() {
+    if (mOwnBuffer) delete[] mBuffer;
+}
+
+bool YUVImage::getOffsets(int32_t x, int32_t y,
+        int32_t *yOffset, int32_t *uOffset, int32_t *vOffset) const {
+    *yOffset = y*mWidth + x;
+
+    int32_t uvOffset = (y >> 1) * (mWidth >> 1) + (x >> 1);
+    if (mYUVFormat == YUV420Planar) {
+        *uOffset = uvOffset;
+        *vOffset = uvOffset;
+    } else if (mYUVFormat == YUV420SemiPlanar) {
+        // Since U and V channels are interleaved, offsets need
+        // to be doubled.
+        *uOffset = 2*uvOffset;
+        *vOffset = 2*uvOffset;
+    } else {
+        LOGE("Format not supported");
+        return false;
+    }
+
+    return true;
+}
+
+bool YUVImage::getOffsetIncrementsPerDataRow(
+        int32_t *yDataOffsetIncrement,
+        int32_t *uDataOffsetIncrement,
+        int32_t *vDataOffsetIncrement) const {
+    *yDataOffsetIncrement = mWidth;
+
+    int32_t uvDataOffsetIncrement = mWidth >> 1;
+
+    if (mYUVFormat == YUV420Planar) {
+        *uDataOffsetIncrement = uvDataOffsetIncrement;
+        *vDataOffsetIncrement = uvDataOffsetIncrement;
+    } else if (mYUVFormat == YUV420SemiPlanar) {
+        // Since U and V channels are interleaved, offsets need
+        // to be doubled.
+        *uDataOffsetIncrement = 2*uvDataOffsetIncrement;
+        *vDataOffsetIncrement = 2*uvDataOffsetIncrement;
+    } else {
+        LOGE("Format not supported");
+        return false;
+    }
+
+    return true;
+}
+
+uint8_t* YUVImage::getYAddress(int32_t offset) const {
+    return mYdata + offset;
+}
+
+uint8_t* YUVImage::getUAddress(int32_t offset) const {
+    return mUdata + offset;
+}
+
+uint8_t* YUVImage::getVAddress(int32_t offset) const {
+    return mVdata + offset;
+}
+
+bool YUVImage::getYUVAddresses(int32_t x, int32_t y,
+        uint8_t **yAddr, uint8_t **uAddr, uint8_t **vAddr) const {
+    int32_t yOffset;
+    int32_t uOffset;
+    int32_t vOffset;
+    if (!getOffsets(x, y, &yOffset, &uOffset, &vOffset)) return false;
+
+    *yAddr = getYAddress(yOffset);
+    *uAddr = getUAddress(uOffset);
+    *vAddr = getVAddress(vOffset);
+
+    return true;
+}
+
+bool YUVImage::validPixel(int32_t x, int32_t y) const {
+    return (x >= 0 && x < mWidth &&
+            y >= 0 && y < mHeight);
+}
+
+bool YUVImage::getPixelValue(int32_t x, int32_t y,
+        uint8_t *yPtr, uint8_t *uPtr, uint8_t *vPtr) const {
+    CHECK(validPixel(x, y));
+
+    uint8_t *yAddr;
+    uint8_t *uAddr;
+    uint8_t *vAddr;
+    if (!getYUVAddresses(x, y, &yAddr, &uAddr, &vAddr)) return false;
+
+    *yPtr = *yAddr;
+    *uPtr = *uAddr;
+    *vPtr = *vAddr;
+
+    return true;
+}
+
+bool YUVImage::setPixelValue(int32_t x, int32_t y,
+        uint8_t yValue, uint8_t uValue, uint8_t vValue) {
+    CHECK(validPixel(x, y));
+
+    uint8_t *yAddr;
+    uint8_t *uAddr;
+    uint8_t *vAddr;
+    if (!getYUVAddresses(x, y, &yAddr, &uAddr, &vAddr)) return false;
+
+    *yAddr = yValue;
+    *uAddr = uValue;
+    *vAddr = vValue;
+
+    return true;
+}
+
+void YUVImage::fastCopyRectangle420Planar(
+        const Rect& srcRect,
+        int32_t destStartX, int32_t destStartY,
+        const YUVImage &srcImage, YUVImage &destImage) {
+    CHECK(srcImage.mYUVFormat == YUV420Planar);
+    CHECK(destImage.mYUVFormat == YUV420Planar);
+
+    int32_t srcStartX = srcRect.left;
+    int32_t srcStartY = srcRect.top;
+    int32_t width = srcRect.width();
+    int32_t height = srcRect.height();
+
+    // Get source and destination start addresses
+    uint8_t *ySrcAddrBase;
+    uint8_t *uSrcAddrBase;
+    uint8_t *vSrcAddrBase;
+    srcImage.getYUVAddresses(srcStartX, srcStartY,
+            &ySrcAddrBase, &uSrcAddrBase, &vSrcAddrBase);
+
+    uint8_t *yDestAddrBase;
+    uint8_t *uDestAddrBase;
+    uint8_t *vDestAddrBase;
+    destImage.getYUVAddresses(destStartX, destStartY,
+            &yDestAddrBase, &uDestAddrBase, &vDestAddrBase);
+
+    // Get source and destination offset increments incurred in going
+    // from one data row to next.
+    int32_t ySrcOffsetIncrement;
+    int32_t uSrcOffsetIncrement;
+    int32_t vSrcOffsetIncrement;
+    srcImage.getOffsetIncrementsPerDataRow(
+            &ySrcOffsetIncrement, &uSrcOffsetIncrement, &vSrcOffsetIncrement);
+
+    int32_t yDestOffsetIncrement;
+    int32_t uDestOffsetIncrement;
+    int32_t vDestOffsetIncrement;
+    destImage.getOffsetIncrementsPerDataRow(
+            &yDestOffsetIncrement, &uDestOffsetIncrement, &vDestOffsetIncrement);
+
+    // Copy Y
+    {
+        size_t numberOfYBytesPerRow = (size_t) width;
+        uint8_t *ySrcAddr = ySrcAddrBase;
+        uint8_t *yDestAddr = yDestAddrBase;
+        for (int32_t offsetY = 0; offsetY < height; ++offsetY) {
+            memcpy(yDestAddr, ySrcAddr, numberOfYBytesPerRow);
+
+            ySrcAddr += ySrcOffsetIncrement;
+            yDestAddr += yDestOffsetIncrement;
+        }
+    }
+
+    // Copy U
+    {
+        size_t numberOfUBytesPerRow = (size_t) (width >> 1);
+        uint8_t *uSrcAddr = uSrcAddrBase;
+        uint8_t *uDestAddr = uDestAddrBase;
+        // Every other row has an entry for U/V channel values. Hence only
+        // go half the height.
+        for (int32_t offsetY = 0; offsetY < (height >> 1); ++offsetY) {
+            memcpy(uDestAddr, uSrcAddr, numberOfUBytesPerRow);
+
+            uSrcAddr += uSrcOffsetIncrement;
+            uDestAddr += uDestOffsetIncrement;
+        }
+    }
+
+    // Copy V
+    {
+        size_t numberOfVBytesPerRow = (size_t) (width >> 1);
+        uint8_t *vSrcAddr = vSrcAddrBase;
+        uint8_t *vDestAddr = vDestAddrBase;
+        // Every other pixel row has a U/V data row. Hence only go half the height.
+        for (int32_t offsetY = 0; offsetY < (height >> 1); ++offsetY) {
+            memcpy(vDestAddr, vSrcAddr, numberOfVBytesPerRow);
+
+            vSrcAddr += vSrcOffsetIncrement;
+            vDestAddr += vDestOffsetIncrement;
+        }
+    }
+}
+
+void YUVImage::fastCopyRectangle420SemiPlanar(
+        const Rect& srcRect,
+        int32_t destStartX, int32_t destStartY,
+        const YUVImage &srcImage, YUVImage &destImage) {
+    CHECK(srcImage.mYUVFormat == YUV420SemiPlanar);
+    CHECK(destImage.mYUVFormat == YUV420SemiPlanar);
+
+    int32_t srcStartX = srcRect.left;
+    int32_t srcStartY = srcRect.top;
+    int32_t width = srcRect.width();
+    int32_t height = srcRect.height();
+
+    // Get source and destination start addresses
+    uint8_t *ySrcAddrBase;
+    uint8_t *uSrcAddrBase;
+    uint8_t *vSrcAddrBase;
+    srcImage.getYUVAddresses(srcStartX, srcStartY,
+            &ySrcAddrBase, &uSrcAddrBase, &vSrcAddrBase);
+
+    uint8_t *yDestAddrBase;
+    uint8_t *uDestAddrBase;
+    uint8_t *vDestAddrBase;
+    destImage.getYUVAddresses(destStartX, destStartY,
+            &yDestAddrBase, &uDestAddrBase, &vDestAddrBase);
+
+    // Get source and destination offset increments incurred in going
+    // from one data row to next.
+    int32_t ySrcOffsetIncrement;
+    int32_t uSrcOffsetIncrement;
+    int32_t vSrcOffsetIncrement;
+    srcImage.getOffsetIncrementsPerDataRow(
+            &ySrcOffsetIncrement, &uSrcOffsetIncrement, &vSrcOffsetIncrement);
+
+    int32_t yDestOffsetIncrement;
+    int32_t uDestOffsetIncrement;
+    int32_t vDestOffsetIncrement;
+    destImage.getOffsetIncrementsPerDataRow(
+            &yDestOffsetIncrement, &uDestOffsetIncrement, &vDestOffsetIncrement);
+
+    // Copy Y
+    {
+        size_t numberOfYBytesPerRow = (size_t) width;
+        uint8_t *ySrcAddr = ySrcAddrBase;
+        uint8_t *yDestAddr = yDestAddrBase;
+        for (int32_t offsetY = 0; offsetY < height; ++offsetY) {
+            memcpy(yDestAddr, ySrcAddr, numberOfYBytesPerRow);
+
+            ySrcAddr = ySrcAddr + ySrcOffsetIncrement;
+            yDestAddr = yDestAddr + yDestOffsetIncrement;
+        }
+    }
+
+    // Copy UV
+    {
+        // UV are interleaved. So number of UV bytes per row is 2*(width/2).
+        size_t numberOfUVBytesPerRow = (size_t) width;
+        uint8_t *vSrcAddr = vSrcAddrBase;
+        uint8_t *vDestAddr = vDestAddrBase;
+        // Every other pixel row has a U/V data row. Hence only go half the height.
+        for (int32_t offsetY = 0; offsetY < (height >> 1); ++offsetY) {
+            memcpy(vDestAddr, vSrcAddr, numberOfUVBytesPerRow);
+
+            vSrcAddr += vSrcOffsetIncrement;
+            vDestAddr += vDestOffsetIncrement;
+        }
+    }
+}
+
+// static
+bool YUVImage::fastCopyRectangle(
+        const Rect& srcRect,
+        int32_t destStartX, int32_t destStartY,
+        const YUVImage &srcImage, YUVImage &destImage) {
+    if (srcImage.mYUVFormat == destImage.mYUVFormat) {
+        if (srcImage.mYUVFormat == YUV420Planar) {
+            fastCopyRectangle420Planar(
+                    srcRect,
+                    destStartX, destStartY,
+                    srcImage, destImage);
+        } else if (srcImage.mYUVFormat == YUV420SemiPlanar) {
+            fastCopyRectangle420SemiPlanar(
+                    srcRect,
+                    destStartX, destStartY,
+                    srcImage, destImage);
+        }
+        return true;
+    }
+    return false;
+}
+
+uint8_t clamp(uint8_t v, uint8_t minValue, uint8_t maxValue) {
+    CHECK(maxValue >= minValue);
+
+    if (v < minValue) return minValue;
+    else if (v > maxValue) return maxValue;
+    else return v;
+}
+
+void YUVImage::yuv2rgb(uint8_t yValue, uint8_t uValue, uint8_t vValue,
+        uint8_t *r, uint8_t *g, uint8_t *b) const {
+    // Convert in signed integer space and clamp to [0, 255] *before*
+    // narrowing to uint8_t; clamping after the narrowing conversion would be
+    // too late, since out-of-range results would already have wrapped.
+    int32_t rTmp = (int32_t)(yValue + (1.370705 * (vValue - 128)));
+    int32_t gTmp = (int32_t)(yValue - (0.698001 * (vValue - 128))
+            - (0.337633 * (uValue - 128)));
+    int32_t bTmp = (int32_t)(yValue + (1.732446 * (uValue - 128)));
+
+    *r = (uint8_t)(rTmp < 0 ? 0 : (rTmp > 255 ? 255 : rTmp));
+    *g = (uint8_t)(gTmp < 0 ? 0 : (gTmp > 255 ? 255 : gTmp));
+    *b = (uint8_t)(bTmp < 0 ? 0 : (bTmp > 255 ? 255 : bTmp));
+}
+
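+// Writes the image as a plain-text (P3) PPM file: a short header followed by
+// one "R G B" triplet per pixel, converted from YUV via yuv2rgb().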
+bool YUVImage::writeToPPM(const char *filename) const {
+    FILE *fp = fopen(filename, "w");
+    if (fp == NULL) {
+        return false;
+    }
+    fprintf(fp, "P3\n");
+    fprintf(fp, "%d %d\n", mWidth, mHeight);
+    fprintf(fp, "255\n");
+    for (int32_t y = 0; y < mHeight; ++y) {
+        for (int32_t x = 0; x < mWidth; ++x) {
+            uint8_t yValue;
+            uint8_t uValue;
+            uint8_t vValue;
+            getPixelValue(x, y, &yValue, &uValue, &vValue);
+
+            uint8_t rValue;
+            uint8_t gValue;
+            uint8_t bValue;
+            yuv2rgb(yValue, uValue, vValue, &rValue, &gValue, &bValue);
+
+            fprintf(fp, "%d %d %d\n", (int32_t)rValue, (int32_t)gValue, (int32_t)bValue);
+        }
+    }
+    fclose(fp);
+    return true;
+}
+
+}  // namespace android
diff --git a/media/mtp/Android.mk b/media/mtp/Android.mk
new file mode 100644
index 0000000..70dc340
--- /dev/null
+++ b/media/mtp/Android.mk
@@ -0,0 +1,78 @@
+#
+# Copyright (C) 2010 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+LOCAL_PATH:= $(call my-dir)
+
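+# Device build: a shared libmtp with both the MTP responder (device) and
+# initiator (host) sides compiled in.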
+ifneq ($(TARGET_SIMULATOR),true)
+
+include $(CLEAR_VARS)
+
+LOCAL_SRC_FILES:=                                       \
+                  MtpClient.cpp                         \
+                  MtpDataPacket.cpp                     \
+                  MtpDebug.cpp                          \
+                  MtpDevice.cpp                         \
+                  MtpEventPacket.cpp                    \
+                  MtpDeviceInfo.cpp                     \
+                  MtpObjectInfo.cpp                     \
+                  MtpPacket.cpp                         \
+                  MtpProperty.cpp                       \
+                  MtpRequestPacket.cpp                  \
+                  MtpResponsePacket.cpp                 \
+                  MtpServer.cpp                         \
+                  MtpStorageInfo.cpp                    \
+                  MtpStringBuffer.cpp                   \
+                  MtpStorage.cpp                        \
+                  MtpUtils.cpp                          \
+
+LOCAL_MODULE:= libmtp
+
+LOCAL_CFLAGS := -DMTP_DEVICE -DMTP_HOST
+
+LOCAL_SHARED_LIBRARIES := libutils libcutils libusbhost libbinder
+
+include $(BUILD_SHARED_LIBRARY)
+
+endif
+
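+# Host build: a static libmtp for Linux hosts with only the initiator (host)
+# side compiled in (no -DMTP_DEVICE).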
+ifeq ($(HOST_OS),linux)
+
+include $(CLEAR_VARS)
+
+LOCAL_SRC_FILES:=                                       \
+                  MtpClient.cpp                         \
+                  MtpDataPacket.cpp                     \
+                  MtpDebug.cpp                          \
+                  MtpDevice.cpp                         \
+                  MtpEventPacket.cpp                    \
+                  MtpDeviceInfo.cpp                     \
+                  MtpObjectInfo.cpp                     \
+                  MtpPacket.cpp                         \
+                  MtpProperty.cpp                       \
+                  MtpRequestPacket.cpp                  \
+                  MtpResponsePacket.cpp                 \
+                  MtpStorageInfo.cpp                    \
+                  MtpStringBuffer.cpp                   \
+                  MtpStorage.cpp                        \
+                  MtpUtils.cpp                          \
+
+LOCAL_MODULE:= libmtp
+
+LOCAL_CFLAGS := -DMTP_HOST
+
+include $(BUILD_HOST_STATIC_LIBRARY)
+
+endif
diff --git a/media/mtp/MtpClient.cpp b/media/mtp/MtpClient.cpp
new file mode 100644
index 0000000..c830540
--- /dev/null
+++ b/media/mtp/MtpClient.cpp
@@ -0,0 +1,251 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "MtpClient"
+
+#include "MtpDebug.h"
+#include "MtpClient.h"
+#include "MtpDevice.h"
+
+#include <stdio.h>
+#include <stdlib.h>
+#include <sys/types.h>
+#include <sys/ioctl.h>
+#include <sys/stat.h>
+#include <fcntl.h>
+#include <errno.h>
+
+#include <usbhost/usbhost.h>
+
+struct usb_device;
+
+namespace android {
+
+static bool isMtpDevice(uint16_t vendor, uint16_t product) {
+    // Sandisk Sansa Fuze
+    if (vendor == 0x0781 && product == 0x74c2)
+        return true;
+    // Samsung YP-Z5
+    if (vendor == 0x04e8 && product == 0x503c)
+        return true;
+    return false;
+}
+
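+// Worker thread that runs the USB host monitoring loop; it simply delegates to
+// MtpClient::threadLoop(), which blocks inside usb_host_run().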
+class MtpClientThread : public Thread {
+private:
+    MtpClient*   mClient;
+
+public:
+    MtpClientThread(MtpClient* client)
+        : mClient(client)
+    {
+    }
+
+    virtual bool threadLoop() {
+        return mClient->threadLoop();
+    }
+};
+
+
+MtpClient::MtpClient()
+    :   mThread(NULL),
+        mUsbHostContext(NULL),
+        mDone(false)
+{
+}
+
+MtpClient::~MtpClient() {
+    usb_host_cleanup(mUsbHostContext);
+}
+
+bool MtpClient::start() {
+    Mutex::Autolock autoLock(mMutex);
+
+    if (mThread)
+        return true;
+
+    mUsbHostContext = usb_host_init();
+    if (!mUsbHostContext)
+        return false;
+
+    mThread = new MtpClientThread(this);
+    mThread->run("MtpClientThread");
+    // wait for the thread to do initial device discovery before returning
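+    // (mMutex is held from before the thread starts, so the signal from
+    // usbDiscoveryDone() cannot fire before we begin waiting)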
+    mThreadStartCondition.wait(mMutex);
+
+    return true;
+}
+
+void MtpClient::stop() {
+    mDone = true;
+}
+
+MtpDevice* MtpClient::getDevice(int id) {
+    for (int i = 0; i < mDeviceList.size(); i++) {
+        MtpDevice* device = mDeviceList[i];
+        if (device->getID() == id)
+            return device;
+    }
+    return NULL;
+}
+
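+// Called for each USB device discovered. Walks the device's descriptors
+// looking for either a standard PTP interface (still-image class, subclass 1,
+// protocol 1), a vendor-specific interface whose string descriptor is "MTP",
+// or one of a few whitelisted vendor/product IDs; claims the interface and
+// wraps the device in an MtpDevice. Returns mDone, which tells the monitoring
+// loop to stop once stop() has been requested.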
+bool MtpClient::usbDeviceAdded(const char *devname) {
+    struct usb_descriptor_header* desc;
+    struct usb_descriptor_iter iter;
+
+    struct usb_device *device = usb_device_open(devname);
+    if (!device) {
+        LOGE("usb_device_open failed\n");
+        return mDone;
+    }
+
+    usb_descriptor_iter_init(device, &iter);
+
+    while ((desc = usb_descriptor_iter_next(&iter)) != NULL) {
+        if (desc->bDescriptorType == USB_DT_INTERFACE) {
+            struct usb_interface_descriptor *interface = (struct usb_interface_descriptor *)desc;
+
+            if (interface->bInterfaceClass == USB_CLASS_STILL_IMAGE &&
+                interface->bInterfaceSubClass == 1 && // Still Image Capture
+                interface->bInterfaceProtocol == 1)     // Picture Transfer Protocol (PIMA 15470)
+            {
+                LOGD("Found camera: \"%s\" \"%s\"\n", usb_device_get_manufacturer_name(device),
+                        usb_device_get_product_name(device));
+            } else if (interface->bInterfaceClass == 0xFF &&
+                    interface->bInterfaceSubClass == 0xFF &&
+                    interface->bInterfaceProtocol == 0) {
+                char* interfaceName = usb_device_get_string(device, interface->iInterface);
+                if (!interfaceName || strcmp(interfaceName, "MTP"))
+                    continue;
+                // Looks like an Android-style MTP device
+                LOGD("Found MTP device: \"%s\" \"%s\"\n", usb_device_get_manufacturer_name(device),
+                        usb_device_get_product_name(device));
+            } else {
+                // look for special cased devices based on vendor/product ID
+                // we are doing this mainly for testing purposes
+                uint16_t vendor = usb_device_get_vendor_id(device);
+                uint16_t product = usb_device_get_product_id(device);
+                if (!isMtpDevice(vendor, product)) {
+                    // not an MTP or PTP device
+                    continue;
+                }
+                // request MTP OS string and descriptor
+                // some music players need to see this before entering MTP mode.
+                char buffer[256];
+                memset(buffer, 0, sizeof(buffer));
+                int ret = usb_device_send_control(device,
+                        USB_DIR_IN|USB_RECIP_DEVICE|USB_TYPE_STANDARD,
+                        USB_REQ_GET_DESCRIPTOR, (USB_DT_STRING << 8) | 0xEE,
+                        0, sizeof(buffer), buffer);
+                printf("usb_device_send_control returned %d errno: %d\n", ret, errno);
+                if (ret > 0) {
+                    printf("got MTP string %s\n", buffer);
+                    ret = usb_device_send_control(device,
+                            USB_DIR_IN|USB_RECIP_DEVICE|USB_TYPE_VENDOR, 1,
+                            0, 4, sizeof(buffer), buffer);
+                    printf("OS descriptor got %d\n", ret);
+                } else {
+                    printf("no MTP string\n");
+                }
+            }
+
+            // if we got here, we likely have an MTP or PTP device
+
+            // interface should be followed by three endpoints
+            struct usb_endpoint_descriptor *ep;
+            struct usb_endpoint_descriptor *ep_in_desc = NULL;
+            struct usb_endpoint_descriptor *ep_out_desc = NULL;
+            struct usb_endpoint_descriptor *ep_intr_desc = NULL;
+            for (int i = 0; i < 3; i++) {
+                ep = (struct usb_endpoint_descriptor *)usb_descriptor_iter_next(&iter);
+                if (!ep || ep->bDescriptorType != USB_DT_ENDPOINT) {
+                    LOGE("endpoints not found\n");
+                    return mDone;
+                }
+                if (ep->bmAttributes == USB_ENDPOINT_XFER_BULK) {
+                    if (ep->bEndpointAddress & USB_ENDPOINT_DIR_MASK)
+                        ep_in_desc = ep;
+                    else
+                        ep_out_desc = ep;
+                } else if (ep->bmAttributes == USB_ENDPOINT_XFER_INT &&
+                    ep->bEndpointAddress & USB_ENDPOINT_DIR_MASK) {
+                    ep_intr_desc = ep;
+                }
+            }
+            if (!ep_in_desc || !ep_out_desc || !ep_intr_desc) {
+                LOGE("endpoints not found\n");
+                return mDone;
+            }
+
+            if (usb_device_claim_interface(device, interface->bInterfaceNumber)) {
+                LOGE("usb_device_claim_interface failed errno: %d\n", errno);
+                return mDone;
+            }
+
+            MtpDevice* mtpDevice = new MtpDevice(device, interface->bInterfaceNumber,
+                        ep_in_desc, ep_out_desc, ep_intr_desc);
+            mDeviceList.add(mtpDevice);
+            mtpDevice->initialize();
+            deviceAdded(mtpDevice);
+            return mDone;
+        }
+    }
+
+    usb_device_close(device);
+    return mDone;
+}
+
+bool MtpClient::usbDeviceRemoved(const char *devname) {
+    for (int i = 0; i < mDeviceList.size(); i++) {
+        MtpDevice* device = mDeviceList[i];
+        if (!strcmp(devname, device->getDeviceName())) {
+            deviceRemoved(device);
+            mDeviceList.removeAt(i);
+            delete device;
+            LOGD("Camera removed!\n");
+            break;
+        }
+    }
+    return mDone;
+}
+
+bool MtpClient::usbDiscoveryDone() {
+    Mutex::Autolock autoLock(mMutex);
+    mThreadStartCondition.signal();
+    return mDone;
+}
+
+bool MtpClient::threadLoop() {
+    usb_host_run(mUsbHostContext, usb_device_added, usb_device_removed, usb_discovery_done, this);
+    return false;
+}
+
+int MtpClient::usb_device_added(const char *devname, void* client_data) {
+    LOGD("usb_device_added %s\n", devname);
+    return ((MtpClient *)client_data)->usbDeviceAdded(devname);
+}
+
+int MtpClient::usb_device_removed(const char *devname, void* client_data) {
+    LOGD("usb_device_removed %s\n", devname);
+    return ((MtpClient *)client_data)->usbDeviceRemoved(devname);
+}
+
+int MtpClient::usb_discovery_done(void* client_data) {
+    LOGD("usb_discovery_done\n");
+    return ((MtpClient *)client_data)->usbDiscoveryDone();
+}
+
+}  // namespace android
diff --git a/media/mtp/MtpClient.h b/media/mtp/MtpClient.h
new file mode 100644
index 0000000..fa5c527
--- /dev/null
+++ b/media/mtp/MtpClient.h
@@ -0,0 +1,68 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _MTP_CLIENT_H
+#define _MTP_CLIENT_H
+
+#include "MtpTypes.h"
+
+#include <utils/threads.h>
+
+struct usb_host_context;
+
+namespace android {
+
+class MtpClientThread;
+
+class MtpClient {
+private:
+    MtpDeviceList               mDeviceList;
+    MtpClientThread*            mThread;
+    Condition                   mThreadStartCondition;
+    Mutex                       mMutex;
+    struct usb_host_context*    mUsbHostContext;
+    bool                        mDone;
+
+public:
+                            MtpClient();
+    virtual                 ~MtpClient();
+
+    bool                    start();
+    void                    stop();
+
+    inline MtpDeviceList&   getDeviceList() { return mDeviceList; }
+    MtpDevice*              getDevice(int id);
+
+
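+    // notifications, invoked from the client thread as devices come and go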
+    virtual void            deviceAdded(MtpDevice *device) = 0;
+    virtual void            deviceRemoved(MtpDevice *device) = 0;
+
+private:
+    // these return true if we should stop monitoring USB and clean up
+    bool                    usbDeviceAdded(const char *devname);
+    bool                    usbDeviceRemoved(const char *devname);
+    bool                    usbDiscoveryDone();
+
+    friend class MtpClientThread;
+    bool                    threadLoop();
+    static int              usb_device_added(const char *devname, void* client_data);
+    static int              usb_device_removed(const char *devname, void* client_data);
+    static int              usb_discovery_done(void* client_data);
+};
+
+}; // namespace android
+
+#endif // _MTP_CLIENT_H
diff --git a/media/mtp/MtpDataPacket.cpp b/media/mtp/MtpDataPacket.cpp
new file mode 100644
index 0000000..801edb0
--- /dev/null
+++ b/media/mtp/MtpDataPacket.cpp
@@ -0,0 +1,506 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "MtpDataPacket"
+
+#include <stdio.h>
+#include <sys/types.h>
+#include <fcntl.h>
+
+#include <usbhost/usbhost.h>
+
+#include "MtpDataPacket.h"
+#include "MtpStringBuffer.h"
+
+namespace android {
+
+MtpDataPacket::MtpDataPacket()
+    :   MtpPacket(512),
+        mOffset(MTP_CONTAINER_HEADER_SIZE)
+{
+}
+
+MtpDataPacket::~MtpDataPacket() {
+}
+
+void MtpDataPacket::reset() {
+    MtpPacket::reset();
+    mOffset = MTP_CONTAINER_HEADER_SIZE;
+}
+
+void MtpDataPacket::setOperationCode(MtpOperationCode code) {
+    MtpPacket::putUInt16(MTP_CONTAINER_CODE_OFFSET, code);
+}
+
+void MtpDataPacket::setTransactionID(MtpTransactionID id) {
+    MtpPacket::putUInt32(MTP_CONTAINER_TRANSACTION_ID_OFFSET, id);
+}
+
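+// MTP/PTP encodes all multi-byte values little-endian; the get/put helpers
+// below read or write at mOffset and advance it past the consumed bytes.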
+uint16_t MtpDataPacket::getUInt16() {
+    int offset = mOffset;
+    uint16_t result = (uint16_t)mBuffer[offset] | ((uint16_t)mBuffer[offset + 1] << 8);
+    mOffset += 2;
+    return result;
+}
+
+uint32_t MtpDataPacket::getUInt32() {
+    int offset = mOffset;
+    uint32_t result = (uint32_t)mBuffer[offset] | ((uint32_t)mBuffer[offset + 1] << 8) |
+           ((uint32_t)mBuffer[offset + 2] << 16)  | ((uint32_t)mBuffer[offset + 3] << 24);
+    mOffset += 4;
+    return result;
+}
+
+uint64_t MtpDataPacket::getUInt64() {
+    int offset = mOffset;
+    uint64_t result = (uint64_t)mBuffer[offset] | ((uint64_t)mBuffer[offset + 1] << 8) |
+           ((uint64_t)mBuffer[offset + 2] << 16) | ((uint64_t)mBuffer[offset + 3] << 24) |
+           ((uint64_t)mBuffer[offset + 4] << 32) | ((uint64_t)mBuffer[offset + 5] << 40) |
+           ((uint64_t)mBuffer[offset + 6] << 48)  | ((uint64_t)mBuffer[offset + 7] << 56);
+    mOffset += 8;
+    return result;
+}
+
+void MtpDataPacket::getUInt128(uint128_t& value) {
+    value[0] = getUInt32();
+    value[1] = getUInt32();
+    value[2] = getUInt32();
+    value[3] = getUInt32();
+}
+
+void MtpDataPacket::getString(MtpStringBuffer& string)
+{
+    string.readFromPacket(this);
+}
+
+Int8List* MtpDataPacket::getAInt8() {
+    Int8List* result = new Int8List;
+    int count = getUInt32();
+    for (int i = 0; i < count; i++)
+        result->push(getInt8());
+    return result;
+}
+
+UInt8List* MtpDataPacket::getAUInt8() {
+    UInt8List* result = new UInt8List;
+    int count = getUInt32();
+    for (int i = 0; i < count; i++)
+        result->push(getUInt8());
+    return result;
+}
+
+Int16List* MtpDataPacket::getAInt16() {
+    Int16List* result = new Int16List;
+    int count = getUInt32();
+    for (int i = 0; i < count; i++)
+        result->push(getInt16());
+    return result;
+}
+
+UInt16List* MtpDataPacket::getAUInt16() {
+    UInt16List* result = new UInt16List;
+    int count = getUInt32();
+    for (int i = 0; i < count; i++)
+        result->push(getUInt16());
+    return result;
+}
+
+Int32List* MtpDataPacket::getAInt32() {
+    Int32List* result = new Int32List;
+    int count = getUInt32();
+    for (int i = 0; i < count; i++)
+        result->push(getInt32());
+    return result;
+}
+
+UInt32List* MtpDataPacket::getAUInt32() {
+    UInt32List* result = new UInt32List;
+    int count = getUInt32();
+    for (int i = 0; i < count; i++)
+        result->push(getUInt32());
+    return result;
+}
+
+Int64List* MtpDataPacket::getAInt64() {
+    Int64List* result = new Int64List;
+    int count = getUInt32();
+    for (int i = 0; i < count; i++)
+        result->push(getInt64());
+    return result;
+}
+
+UInt64List* MtpDataPacket::getAUInt64() {
+    UInt64List* result = new UInt64List;
+    int count = getUInt32();
+    for (int i = 0; i < count; i++)
+        result->push(getUInt64());
+    return result;
+}
+
+void MtpDataPacket::putInt8(int8_t value) {
+    allocate(mOffset + 1);
+    mBuffer[mOffset++] = (uint8_t)value;
+    if (mPacketSize < mOffset)
+        mPacketSize = mOffset;
+}
+
+void MtpDataPacket::putUInt8(uint8_t value) {
+    allocate(mOffset + 1);
+    mBuffer[mOffset++] = (uint8_t)value;
+    if (mPacketSize < mOffset)
+        mPacketSize = mOffset;
+}
+
+void MtpDataPacket::putInt16(int16_t value) {
+    allocate(mOffset + 2);
+    mBuffer[mOffset++] = (uint8_t)(value & 0xFF);
+    mBuffer[mOffset++] = (uint8_t)((value >> 8) & 0xFF);
+    if (mPacketSize < mOffset)
+        mPacketSize = mOffset;
+}
+
+void MtpDataPacket::putUInt16(uint16_t value) {
+    allocate(mOffset + 2);
+    mBuffer[mOffset++] = (uint8_t)(value & 0xFF);
+    mBuffer[mOffset++] = (uint8_t)((value >> 8) & 0xFF);
+    if (mPacketSize < mOffset)
+        mPacketSize = mOffset;
+}
+
+void MtpDataPacket::putInt32(int32_t value) {
+    allocate(mOffset + 4);
+    mBuffer[mOffset++] = (uint8_t)(value & 0xFF);
+    mBuffer[mOffset++] = (uint8_t)((value >> 8) & 0xFF);
+    mBuffer[mOffset++] = (uint8_t)((value >> 16) & 0xFF);
+    mBuffer[mOffset++] = (uint8_t)((value >> 24) & 0xFF);
+    if (mPacketSize < mOffset)
+        mPacketSize = mOffset;
+}
+
+void MtpDataPacket::putUInt32(uint32_t value) {
+    allocate(mOffset + 4);
+    mBuffer[mOffset++] = (uint8_t)(value & 0xFF);
+    mBuffer[mOffset++] = (uint8_t)((value >> 8) & 0xFF);
+    mBuffer[mOffset++] = (uint8_t)((value >> 16) & 0xFF);
+    mBuffer[mOffset++] = (uint8_t)((value >> 24) & 0xFF);
+    if (mPacketSize < mOffset)
+        mPacketSize = mOffset;
+}
+
+void MtpDataPacket::putInt64(int64_t value) {
+    allocate(mOffset + 8);
+    mBuffer[mOffset++] = (uint8_t)(value & 0xFF);
+    mBuffer[mOffset++] = (uint8_t)((value >> 8) & 0xFF);
+    mBuffer[mOffset++] = (uint8_t)((value >> 16) & 0xFF);
+    mBuffer[mOffset++] = (uint8_t)((value >> 24) & 0xFF);
+    mBuffer[mOffset++] = (uint8_t)((value >> 32) & 0xFF);
+    mBuffer[mOffset++] = (uint8_t)((value >> 40) & 0xFF);
+    mBuffer[mOffset++] = (uint8_t)((value >> 48) & 0xFF);
+    mBuffer[mOffset++] = (uint8_t)((value >> 56) & 0xFF);
+    if (mPacketSize < mOffset)
+        mPacketSize = mOffset;
+}
+
+void MtpDataPacket::putUInt64(uint64_t value) {
+    allocate(mOffset + 8);
+    mBuffer[mOffset++] = (uint8_t)(value & 0xFF);
+    mBuffer[mOffset++] = (uint8_t)((value >> 8) & 0xFF);
+    mBuffer[mOffset++] = (uint8_t)((value >> 16) & 0xFF);
+    mBuffer[mOffset++] = (uint8_t)((value >> 24) & 0xFF);
+    mBuffer[mOffset++] = (uint8_t)((value >> 32) & 0xFF);
+    mBuffer[mOffset++] = (uint8_t)((value >> 40) & 0xFF);
+    mBuffer[mOffset++] = (uint8_t)((value >> 48) & 0xFF);
+    mBuffer[mOffset++] = (uint8_t)((value >> 56) & 0xFF);
+    if (mPacketSize < mOffset)
+        mPacketSize = mOffset;
+}
+
+void MtpDataPacket::putInt128(const int128_t& value) {
+    putInt32(value[0]);
+    putInt32(value[1]);
+    putInt32(value[2]);
+    putInt32(value[3]);
+}
+
+void MtpDataPacket::putUInt128(const uint128_t& value) {
+    putUInt32(value[0]);
+    putUInt32(value[1]);
+    putUInt32(value[2]);
+    putUInt32(value[3]);
+}
+
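+// 64-bit convenience overloads: widen the value to 128 bits, sign-extending
+// for the signed variant and zero-extending for the unsigned one.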
+void MtpDataPacket::putInt128(int64_t value) {
+    putInt64(value);
+    putInt64(value < 0 ? -1 : 0);
+}
+
+void MtpDataPacket::putUInt128(uint64_t value) {
+    putUInt64(value);
+    putUInt64(0);
+}
+
+void MtpDataPacket::putAInt8(const int8_t* values, int count) {
+    putUInt32(count);
+    for (int i = 0; i < count; i++)
+        putInt8(*values++);
+}
+
+void MtpDataPacket::putAUInt8(const uint8_t* values, int count) {
+    putUInt32(count);
+    for (int i = 0; i < count; i++)
+        putUInt8(*values++);
+}
+
+void MtpDataPacket::putAInt16(const int16_t* values, int count) {
+    putUInt32(count);
+    for (int i = 0; i < count; i++)
+        putInt16(*values++);
+}
+
+void MtpDataPacket::putAUInt16(const uint16_t* values, int count) {
+    putUInt32(count);
+    for (int i = 0; i < count; i++)
+        putUInt16(*values++);
+}
+
+void MtpDataPacket::putAUInt16(const UInt16List* values) {
+    size_t count = (values ? values->size() : 0);
+    putUInt32(count);
+    for (size_t i = 0; i < count; i++)
+        putUInt16((*values)[i]);
+}
+
+void MtpDataPacket::putAInt32(const int32_t* values, int count) {
+    putUInt32(count);
+    for (int i = 0; i < count; i++)
+        putInt32(*values++);
+}
+
+void MtpDataPacket::putAUInt32(const uint32_t* values, int count) {
+    putUInt32(count);
+    for (int i = 0; i < count; i++)
+        putUInt32(*values++);
+}
+
+void MtpDataPacket::putAUInt32(const UInt32List* list) {
+    if (!list) {
+        putEmptyArray();
+    } else {
+        size_t size = list->size();
+        putUInt32(size);
+        for (size_t i = 0; i < size; i++)
+            putUInt32((*list)[i]);
+    }
+}
+
+void MtpDataPacket::putAInt64(const int64_t* values, int count) {
+    putUInt32(count);
+    for (int i = 0; i < count; i++)
+        putInt64(*values++);
+}
+
+void MtpDataPacket::putAUInt64(const uint64_t* values, int count) {
+    putUInt32(count);
+    for (int i = 0; i < count; i++)
+        putUInt64(*values++);
+}
+
+void MtpDataPacket::putString(const MtpStringBuffer& string) {
+    string.writeToPacket(this);
+}
+
+void MtpDataPacket::putString(const char* s) {
+    MtpStringBuffer string(s);
+    string.writeToPacket(this);
+}
+
+void MtpDataPacket::putString(const uint16_t* string) {
+    int count = 0;
+    for (int i = 0; i < 256; i++) {
+        if (string[i])
+            count++;
+        else
+            break;
+    }
+    putUInt8(count > 0 ? count + 1 : 0);
+    for (int i = 0; i < count; i++)
+        putUInt16(string[i]);
+    // only terminate with zero if string is not empty
+    if (count > 0)
+        putUInt16(0);
+}
+
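+// Every MTP container begins with a 12-byte header: 32-bit length, 16-bit
+// container type, 16-bit operation/response code and 32-bit transaction ID.
+// The read/write helpers below transfer that header plus the data payload
+// that immediately follows it.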
+#ifdef MTP_DEVICE
+int MtpDataPacket::read(int fd) {
+    // first read the header
+    int ret = ::read(fd, mBuffer, MTP_CONTAINER_HEADER_SIZE);
+    if (ret != MTP_CONTAINER_HEADER_SIZE)
+        return -1;
+    // then the following data
+    int total = MtpPacket::getUInt32(MTP_CONTAINER_LENGTH_OFFSET);
+    allocate(total);
+    int remaining = total - MTP_CONTAINER_HEADER_SIZE;
+    ret = ::read(fd, &mBuffer[0] + MTP_CONTAINER_HEADER_SIZE, remaining);
+    if (ret != remaining)
+        return -1;
+
+    mPacketSize = total;
+    mOffset = MTP_CONTAINER_HEADER_SIZE;
+    return total;
+}
+
+int MtpDataPacket::readDataHeader(int fd) {
+    int ret = ::read(fd, mBuffer, MTP_CONTAINER_HEADER_SIZE);
+    if (ret > 0)
+        mPacketSize = ret;
+    else
+        mPacketSize = 0;
+    return ret;
+}
+
+int MtpDataPacket::write(int fd) {
+    MtpPacket::putUInt32(MTP_CONTAINER_LENGTH_OFFSET, mPacketSize);
+    MtpPacket::putUInt16(MTP_CONTAINER_TYPE_OFFSET, MTP_CONTAINER_TYPE_DATA);
+    // send header separately from data
+    int ret = ::write(fd, mBuffer, MTP_CONTAINER_HEADER_SIZE);
+    if (ret == MTP_CONTAINER_HEADER_SIZE)
+        ret = ::write(fd, mBuffer + MTP_CONTAINER_HEADER_SIZE,
+                        mPacketSize - MTP_CONTAINER_HEADER_SIZE);
+    return (ret < 0 ? ret : 0);
+}
+
+int MtpDataPacket::writeDataHeader(int fd, uint32_t length) {
+    MtpPacket::putUInt32(MTP_CONTAINER_LENGTH_OFFSET, length);
+    MtpPacket::putUInt16(MTP_CONTAINER_TYPE_OFFSET, MTP_CONTAINER_TYPE_DATA);
+    int ret = ::write(fd, mBuffer, MTP_CONTAINER_HEADER_SIZE);
+    return (ret < 0 ? ret : 0);
+}
+#endif // MTP_DEVICE
+
+#ifdef MTP_HOST
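+// Host-side read: a data phase may span multiple USB transfers, so keep
+// requesting more data (growing the buffer by mAllocationIncrement) until the
+// length recorded in the container header has been received.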
+int MtpDataPacket::read(struct usb_request *request) {
+    // first read the header
+    request->buffer = mBuffer;
+    request->buffer_length = mBufferSize;
+    int length = transfer(request);
+    if (length >= MTP_CONTAINER_HEADER_SIZE) {
+        // look at the length field to see if the data spans multiple packets
+        uint32_t totalLength = MtpPacket::getUInt32(MTP_CONTAINER_LENGTH_OFFSET);
+        while (totalLength > length) {
+            allocate(length + mAllocationIncrement);
+            request->buffer = mBuffer + length;
+            request->buffer_length = mAllocationIncrement;
+            int ret = transfer(request);
+            if (ret >= 0)
+                length += ret;
+            else {
+                length = ret;
+                break;
+            }
+        }
+    }
+    if (length >= 0)
+        mPacketSize = length;
+    return length;
+}
+
+int MtpDataPacket::readData(struct usb_request *request, void* buffer, int length) {
+    int read = 0;
+    while (read < length) {
+        request->buffer = (char *)buffer + read;
+        request->buffer_length = length - read;
+        int ret = transfer(request);
+        if (ret < 0) {
+            return ret;
+        }
+        read += ret;
+    }
+    return read;
+}
+
+// Queue a read request. Call readDataWait() to wait for the result.
+int MtpDataPacket::readDataAsync(struct usb_request *req) {
+    if (usb_request_queue(req)) {
+        LOGE("usb_endpoint_queue failed, errno: %d", errno);
+        return -1;
+    }
+    return 0;
+}
+
+// Wait for result of readDataAsync
+int MtpDataPacket::readDataWait(struct usb_device *device) {
+    struct usb_request *req = usb_request_wait(device);
+    return (req ? req->actual_length : -1);
+}
+
+int MtpDataPacket::readDataHeader(struct usb_request *request) {
+    request->buffer = mBuffer;
+    request->buffer_length = request->max_packet_size;
+    int length = transfer(request);
+    if (length >= 0)
+        mPacketSize = length;
+    return length;
+}
+
+int MtpDataPacket::writeDataHeader(struct usb_request *request, uint32_t length) {
+    MtpPacket::putUInt32(MTP_CONTAINER_LENGTH_OFFSET, length);
+    MtpPacket::putUInt16(MTP_CONTAINER_TYPE_OFFSET, MTP_CONTAINER_TYPE_DATA);
+    request->buffer = mBuffer;
+    request->buffer_length = MTP_CONTAINER_HEADER_SIZE;
+    int ret = transfer(request);
+    return (ret < 0 ? ret : 0);
+}
+
+int MtpDataPacket::write(struct usb_request *request) {
+    MtpPacket::putUInt32(MTP_CONTAINER_LENGTH_OFFSET, mPacketSize);
+    MtpPacket::putUInt16(MTP_CONTAINER_TYPE_OFFSET, MTP_CONTAINER_TYPE_DATA);
+
+    // send header separately from data
+    request->buffer = mBuffer;
+    request->buffer_length = MTP_CONTAINER_HEADER_SIZE;
+    int ret = transfer(request);
+    if (ret == MTP_CONTAINER_HEADER_SIZE) {
+        request->buffer = mBuffer + MTP_CONTAINER_HEADER_SIZE;
+        request->buffer_length = mPacketSize - MTP_CONTAINER_HEADER_SIZE;
+        ret = transfer(request);
+    }
+    return (ret < 0 ? ret : 0);
+}
+
+int MtpDataPacket::write(struct usb_request *request, void* buffer, uint32_t length) {
+    request->buffer = buffer;
+    request->buffer_length = length;
+    int ret = transfer(request);
+    return (ret < 0 ? ret : 0);
+}
+
+#endif // MTP_HOST
+
+void* MtpDataPacket::getData(int& outLength) const {
+    int length = mPacketSize - MTP_CONTAINER_HEADER_SIZE;
+    if (length > 0) {
+        void* result = malloc(length);
+        if (result) {
+            memcpy(result, mBuffer + MTP_CONTAINER_HEADER_SIZE, length);
+            outLength = length;
+            return result;
+        }
+    }
+    outLength = 0;
+    return NULL;
+}
+
+}  // namespace android
diff --git a/media/mtp/MtpDataPacket.h b/media/mtp/MtpDataPacket.h
new file mode 100644
index 0000000..577cea1
--- /dev/null
+++ b/media/mtp/MtpDataPacket.h
@@ -0,0 +1,124 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _MTP_DATA_PACKET_H
+#define _MTP_DATA_PACKET_H
+
+#include "MtpPacket.h"
+#include "mtp.h"
+
+struct usb_device;
+struct usb_request;
+
+namespace android {
+
+class MtpStringBuffer;
+
+class MtpDataPacket : public MtpPacket {
+private:
+    // current offset for get/put methods
+    int                 mOffset;
+
+public:
+                        MtpDataPacket();
+    virtual             ~MtpDataPacket();
+
+    virtual void        reset();
+
+    void                setOperationCode(MtpOperationCode code);
+    void                setTransactionID(MtpTransactionID id);
+
+    inline uint8_t      getUInt8() { return (uint8_t)mBuffer[mOffset++]; }
+    inline int8_t       getInt8() { return (int8_t)mBuffer[mOffset++]; }
+    uint16_t            getUInt16();
+    inline int16_t      getInt16() { return (int16_t)getUInt16(); }
+    uint32_t            getUInt32();
+    inline int32_t      getInt32() { return (int32_t)getUInt32(); }
+    uint64_t            getUInt64();
+    inline int64_t      getInt64() { return (int64_t)getUInt64(); }
+    void                getUInt128(uint128_t& value);
+    inline void         getInt128(int128_t& value) { getUInt128((uint128_t&)value); }
+    void                getString(MtpStringBuffer& string);
+
+    Int8List*           getAInt8();
+    UInt8List*          getAUInt8();
+    Int16List*          getAInt16();
+    UInt16List*         getAUInt16();
+    Int32List*          getAInt32();
+    UInt32List*         getAUInt32();
+    Int64List*          getAInt64();
+    UInt64List*         getAUInt64();
+
+    void                putInt8(int8_t value);
+    void                putUInt8(uint8_t value);
+    void                putInt16(int16_t value);
+    void                putUInt16(uint16_t value);
+    void                putInt32(int32_t value);
+    void                putUInt32(uint32_t value);
+    void                putInt64(int64_t value);
+    void                putUInt64(uint64_t value);
+    void                putInt128(const int128_t& value);
+    void                putUInt128(const uint128_t& value);
+    void                putInt128(int64_t value);
+    void                putUInt128(uint64_t value);
+
+    void                putAInt8(const int8_t* values, int count);
+    void                putAUInt8(const uint8_t* values, int count);
+    void                putAInt16(const int16_t* values, int count);
+    void                putAUInt16(const uint16_t* values, int count);
+    void                putAUInt16(const UInt16List* values);
+    void                putAInt32(const int32_t* values, int count);
+    void                putAUInt32(const uint32_t* values, int count);
+    void                putAUInt32(const UInt32List* list);
+    void                putAInt64(const int64_t* values, int count);
+    void                putAUInt64(const uint64_t* values, int count);
+    void                putString(const MtpStringBuffer& string);
+    void                putString(const char* string);
+    void                putString(const uint16_t* string);
+    inline void         putEmptyString() { putUInt8(0); }
+    inline void         putEmptyArray() { putUInt32(0); }
+
+
+#ifdef MTP_DEVICE
+    // fill our buffer with data from the given file descriptor
+    int                 read(int fd);
+    int                 readDataHeader(int fd);
+
+    // write our data to the given file descriptor
+    int                 write(int fd);
+    int                 writeDataHeader(int fd, uint32_t length);
+#endif
+
+#ifdef MTP_HOST
+    int                 read(struct usb_request *request);
+    int                 readData(struct usb_request *request, void* buffer, int length);
+    int                 readDataAsync(struct usb_request *req);
+    int                 readDataWait(struct usb_device *device);
+    int                 readDataHeader(struct usb_request *ep);
+
+    int                 writeDataHeader(struct usb_request *ep, uint32_t length);
+    int                 write(struct usb_request *ep);
+    int                 write(struct usb_request *ep, void* buffer, uint32_t length);
+#endif
+
+    inline bool         hasData() const { return mPacketSize > MTP_CONTAINER_HEADER_SIZE; }
+    inline uint32_t     getContainerLength() const { return MtpPacket::getUInt32(MTP_CONTAINER_LENGTH_OFFSET); }
+    void*               getData(int& outLength) const;
+};
+
+}; // namespace android
+
+#endif // _MTP_DATA_PACKET_H
diff --git a/media/mtp/MtpDatabase.h b/media/mtp/MtpDatabase.h
new file mode 100644
index 0000000..6dcb931
--- /dev/null
+++ b/media/mtp/MtpDatabase.h
@@ -0,0 +1,111 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _MTP_DATABASE_H
+#define _MTP_DATABASE_H
+
+#include "MtpTypes.h"
+
+namespace android {
+
+class MtpDataPacket;
+class MtpProperty;
+
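+// Abstract interface the MTP server uses to enumerate objects, read and write
+// object and device properties, and map object handles to files on storage.
+// A concrete implementation supplies the actual metadata store.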
+class MtpDatabase {
+public:
+    virtual ~MtpDatabase() {}
+
+    // called from SendObjectInfo to reserve a database entry for the incoming file
+    virtual MtpObjectHandle         beginSendObject(const char* path,
+                                            MtpObjectFormat format,
+                                            MtpObjectHandle parent,
+                                            MtpStorageID storage,
+                                            uint64_t size,
+                                            time_t modified) = 0;
+
+    // called to report success or failure of the SendObject file transfer
+    // success should signal a notification of the new object's creation,
+    // failure should remove the database entry created in beginSendObject
+    virtual void                    endSendObject(const char* path,
+                                            MtpObjectHandle handle,
+                                            MtpObjectFormat format,
+                                            int64_t size,
+                                            bool succeeded) = 0;
+
+    virtual MtpObjectHandleList*    getObjectList(MtpStorageID storageID,
+                                            MtpObjectFormat format,
+                                            MtpObjectHandle parent) = 0;
+
+    virtual int                     getNumObjects(MtpStorageID storageID,
+                                            MtpObjectFormat format,
+                                            MtpObjectHandle parent) = 0;
+
+    // the caller is responsible for deleting the lists returned by these;
+    // results can be NULL
+    virtual MtpObjectFormatList*    getSupportedPlaybackFormats() = 0;
+    virtual MtpObjectFormatList*    getSupportedCaptureFormats() = 0;
+    virtual MtpObjectPropertyList*  getSupportedObjectProperties(MtpObjectFormat format) = 0;
+    virtual MtpDevicePropertyList*  getSupportedDeviceProperties() = 0;
+
+    virtual MtpResponseCode         getObjectPropertyValue(MtpObjectHandle handle,
+                                            MtpObjectProperty property,
+                                            MtpDataPacket& packet) = 0;
+
+    virtual MtpResponseCode         setObjectPropertyValue(MtpObjectHandle handle,
+                                            MtpObjectProperty property,
+                                            MtpDataPacket& packet) = 0;
+
+    virtual MtpResponseCode         getDevicePropertyValue(MtpDeviceProperty property,
+                                            MtpDataPacket& packet) = 0;
+
+    virtual MtpResponseCode         setDevicePropertyValue(MtpDeviceProperty property,
+                                            MtpDataPacket& packet) = 0;
+
+    virtual MtpResponseCode         resetDeviceProperty(MtpDeviceProperty property) = 0;
+
+    virtual MtpResponseCode         getObjectPropertyList(MtpObjectHandle handle,
+                                            uint32_t format, uint32_t property,
+                                            int groupCode, int depth,
+                                            MtpDataPacket& packet) = 0;
+
+    virtual MtpResponseCode         getObjectInfo(MtpObjectHandle handle,
+                                            MtpDataPacket& packet) = 0;
+
+    virtual MtpResponseCode         getObjectFilePath(MtpObjectHandle handle,
+                                            MtpString& outFilePath,
+                                            int64_t& outFileLength,
+                                            MtpObjectFormat& outFormat) = 0;
+
+    virtual MtpResponseCode         deleteFile(MtpObjectHandle handle) = 0;
+
+    virtual MtpObjectHandleList*    getObjectReferences(MtpObjectHandle handle) = 0;
+
+    virtual MtpResponseCode         setObjectReferences(MtpObjectHandle handle,
+                                            MtpObjectHandleList* references) = 0;
+
+    virtual MtpProperty*            getObjectPropertyDesc(MtpObjectProperty property,
+                                            MtpObjectFormat format) = 0;
+
+    virtual MtpProperty*            getDevicePropertyDesc(MtpDeviceProperty property) = 0;
+
+    virtual void                    sessionStarted() = 0;
+
+    virtual void                    sessionEnded() = 0;
+};
+
+}; // namespace android
+
+#endif // _MTP_DATABASE_H
diff --git a/media/mtp/MtpDebug.cpp b/media/mtp/MtpDebug.cpp
new file mode 100644
index 0000000..1668ecf
--- /dev/null
+++ b/media/mtp/MtpDebug.cpp
@@ -0,0 +1,396 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "MtpDebug.h"
+
+namespace android {
+
+struct CodeEntry {
+    const char* name;
+    uint16_t code;
+};
+
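+// Lookup tables mapping MTP numeric codes to their symbolic names, used to
+// produce human-readable debug output.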
+static const CodeEntry sOperationCodes[] = {
+    { "MTP_OPERATION_GET_DEVICE_INFO",              0x1001 },
+    { "MTP_OPERATION_OPEN_SESSION",                 0x1002 },
+    { "MTP_OPERATION_CLOSE_SESSION",                0x1003 },
+    { "MTP_OPERATION_GET_STORAGE_IDS",              0x1004 },
+    { "MTP_OPERATION_GET_STORAGE_INFO",             0x1005 },
+    { "MTP_OPERATION_GET_NUM_OBJECTS",              0x1006 },
+    { "MTP_OPERATION_GET_OBJECT_HANDLES",           0x1007 },
+    { "MTP_OPERATION_GET_OBJECT_INFO",              0x1008 },
+    { "MTP_OPERATION_GET_OBJECT",                   0x1009 },
+    { "MTP_OPERATION_GET_THUMB",                    0x100A },
+    { "MTP_OPERATION_DELETE_OBJECT",                0x100B },
+    { "MTP_OPERATION_SEND_OBJECT_INFO",             0x100C },
+    { "MTP_OPERATION_SEND_OBJECT",                  0x100D },
+    { "MTP_OPERATION_INITIATE_CAPTURE",             0x100E },
+    { "MTP_OPERATION_FORMAT_STORE",                 0x100F },
+    { "MTP_OPERATION_RESET_DEVICE",                 0x1010 },
+    { "MTP_OPERATION_SELF_TEST",                    0x1011 },
+    { "MTP_OPERATION_SET_OBJECT_PROTECTION",        0x1012 },
+    { "MTP_OPERATION_POWER_DOWN",                   0x1013 },
+    { "MTP_OPERATION_GET_DEVICE_PROP_DESC",         0x1014 },
+    { "MTP_OPERATION_GET_DEVICE_PROP_VALUE",        0x1015 },
+    { "MTP_OPERATION_SET_DEVICE_PROP_VALUE",        0x1016 },
+    { "MTP_OPERATION_RESET_DEVICE_PROP_VALUE",      0x1017 },
+    { "MTP_OPERATION_TERMINATE_OPEN_CAPTURE",       0x1018 },
+    { "MTP_OPERATION_MOVE_OBJECT",                  0x1019 },
+    { "MTP_OPERATION_COPY_OBJECT",                  0x101A },
+    { "MTP_OPERATION_GET_PARTIAL_OBJECT",           0x101B },
+    { "MTP_OPERATION_INITIATE_OPEN_CAPTURE",        0x101C },
+    { "MTP_OPERATION_GET_OBJECT_PROPS_SUPPORTED",   0x9801 },
+    { "MTP_OPERATION_GET_OBJECT_PROP_DESC",         0x9802 },
+    { "MTP_OPERATION_GET_OBJECT_PROP_VALUE",        0x9803 },
+    { "MTP_OPERATION_SET_OBJECT_PROP_VALUE",        0x9804 },
+    { "MTP_OPERATION_GET_OBJECT_PROP_LIST",         0x9805 },
+    { "MTP_OPERATION_SET_OBJECT_PROP_LIST",         0x9806 },
+    { "MTP_OPERATION_GET_INTERDEPENDENT_PROP_DESC", 0x9807 },
+    { "MTP_OPERATION_SEND_OBJECT_PROP_LIST",        0x9808 },
+    { "MTP_OPERATION_GET_OBJECT_REFERENCES",        0x9810 },
+    { "MTP_OPERATION_SET_OBJECT_REFERENCES",        0x9811 },
+    { "MTP_OPERATION_SKIP",                         0x9820 },
+    { 0,                                            0      },
+};
+
+static const CodeEntry sFormatCodes[] = {
+    { "MTP_FORMAT_UNDEFINED",                       0x3000 },
+    { "MTP_FORMAT_ASSOCIATION",                     0x3001 },
+    { "MTP_FORMAT_SCRIPT",                          0x3002 },
+    { "MTP_FORMAT_EXECUTABLE",                      0x3003 },
+    { "MTP_FORMAT_TEXT",                            0x3004 },
+    { "MTP_FORMAT_HTML",                            0x3005 },
+    { "MTP_FORMAT_DPOF",                            0x3006 },
+    { "MTP_FORMAT_AIFF",                            0x3007 },
+    { "MTP_FORMAT_WAV",                             0x3008 },
+    { "MTP_FORMAT_MP3",                             0x3009 },
+    { "MTP_FORMAT_AVI",                             0x300A },
+    { "MTP_FORMAT_MPEG",                            0x300B },
+    { "MTP_FORMAT_ASF",                             0x300C },
+    { "MTP_FORMAT_DEFINED",                         0x3800 },
+    { "MTP_FORMAT_EXIF_JPEG",                       0x3801 },
+    { "MTP_FORMAT_TIFF_EP",                         0x3802 },
+    { "MTP_FORMAT_FLASHPIX",                        0x3803 },
+    { "MTP_FORMAT_BMP",                             0x3804 },
+    { "MTP_FORMAT_CIFF",                            0x3805 },
+    { "MTP_FORMAT_GIF",                             0x3807 },
+    { "MTP_FORMAT_JFIF",                            0x3808 },
+    { "MTP_FORMAT_CD",                              0x3809 },
+    { "MTP_FORMAT_PICT",                            0x380A },
+    { "MTP_FORMAT_PNG",                             0x380B },
+    { "MTP_FORMAT_TIFF",                            0x380D },
+    { "MTP_FORMAT_TIFF_IT",                         0x380E },
+    { "MTP_FORMAT_JP2",                             0x380F },
+    { "MTP_FORMAT_JPX",                             0x3810 },
+    { "MTP_FORMAT_UNDEFINED_FIRMWARE",              0xB802 },
+    { "MTP_FORMAT_WINDOWS_IMAGE_FORMAT",            0xB881 },
+    { "MTP_FORMAT_UNDEFINED_AUDIO",                 0xB900 },
+    { "MTP_FORMAT_WMA",                             0xB901 },
+    { "MTP_FORMAT_OGG",                             0xB902 },
+    { "MTP_FORMAT_AAC",                             0xB903 },
+    { "MTP_FORMAT_AUDIBLE",                         0xB904 },
+    { "MTP_FORMAT_FLAC",                            0xB906 },
+    { "MTP_FORMAT_UNDEFINED_VIDEO",                 0xB980 },
+    { "MTP_FORMAT_WMV",                             0xB981 },
+    { "MTP_FORMAT_MP4_CONTAINER",                   0xB982 },
+    { "MTP_FORMAT_MP2",                             0xB983 },
+    { "MTP_FORMAT_3GP_CONTAINER",                   0xB984 },
+    { "MTP_FORMAT_UNDEFINED_COLLECTION",            0xBA00 },
+    { "MTP_FORMAT_ABSTRACT_MULTIMEDIA_ALBUM",       0xBA01 },
+    { "MTP_FORMAT_ABSTRACT_IMAGE_ALBUM",            0xBA02 },
+    { "MTP_FORMAT_ABSTRACT_AUDIO_ALBUM",            0xBA03 },
+    { "MTP_FORMAT_ABSTRACT_VIDEO_ALBUM",            0xBA04 },
+    { "MTP_FORMAT_ABSTRACT_AV_PLAYLIST",            0xBA05 },
+    { "MTP_FORMAT_ABSTRACT_CONTACT_GROUP",          0xBA06 },
+    { "MTP_FORMAT_ABSTRACT_MESSAGE_FOLDER",         0xBA07 },
+    { "MTP_FORMAT_ABSTRACT_CHAPTERED_PRODUCTION",   0xBA08 },
+    { "MTP_FORMAT_ABSTRACT_AUDIO_PLAYLIST",         0xBA09 },
+    { "MTP_FORMAT_ABSTRACT_VIDEO_PLAYLIST",         0xBA0A },
+    { "MTP_FORMAT_ABSTRACT_MEDIACAST",              0xBA0B },
+    { "MTP_FORMAT_WPL_PLAYLIST",                    0xBA10 },
+    { "MTP_FORMAT_M3U_PLAYLIST",                    0xBA11 },
+    { "MTP_FORMAT_MPL_PLAYLIST",                    0xBA12 },
+    { "MTP_FORMAT_ASX_PLAYLIST",                    0xBA13 },
+    { "MTP_FORMAT_PLS_PLAYLIST",                    0xBA14 },
+    { "MTP_FORMAT_UNDEFINED_DOCUMENT",              0xBA80 },
+    { "MTP_FORMAT_ABSTRACT_DOCUMENT",               0xBA81 },
+    { "MTP_FORMAT_XML_DOCUMENT",                    0xBA82 },
+    { "MTP_FORMAT_MS_WORD_DOCUMENT",                0xBA83 },
+    { "MTP_FORMAT_MHT_COMPILED_HTML_DOCUMENT",      0xBA84 },
+    { "MTP_FORMAT_MS_EXCEL_SPREADSHEET",            0xBA85 },
+    { "MTP_FORMAT_MS_POWERPOINT_PRESENTATION",      0xBA86 },
+    { "MTP_FORMAT_UNDEFINED_MESSAGE",               0xBB00 },
+    { "MTP_FORMAT_ABSTRACT_MESSSAGE",               0xBB01 },
+    { "MTP_FORMAT_UNDEFINED_CONTACT",               0xBB80 },
+    { "MTP_FORMAT_ABSTRACT_CONTACT",                0xBB81 },
+    { "MTP_FORMAT_VCARD_2",                         0xBB82 },
+    { 0,                                            0      },
+};
+
+static const CodeEntry sObjectPropCodes[] = {
+    { "MTP_PROPERTY_STORAGE_ID",                             0xDC01 },
+    { "MTP_PROPERTY_OBJECT_FORMAT",                          0xDC02 },
+    { "MTP_PROPERTY_PROTECTION_STATUS",                      0xDC03 },
+    { "MTP_PROPERTY_OBJECT_SIZE",                            0xDC04 },
+    { "MTP_PROPERTY_ASSOCIATION_TYPE",                       0xDC05 },
+    { "MTP_PROPERTY_ASSOCIATION_DESC",                       0xDC06 },
+    { "MTP_PROPERTY_OBJECT_FILE_NAME",                       0xDC07 },
+    { "MTP_PROPERTY_DATE_CREATED",                           0xDC08 },
+    { "MTP_PROPERTY_DATE_MODIFIED",                          0xDC09 },
+    { "MTP_PROPERTY_KEYWORDS",                               0xDC0A },
+    { "MTP_PROPERTY_PARENT_OBJECT",                          0xDC0B },
+    { "MTP_PROPERTY_ALLOWED_FOLDER_CONTENTS",                0xDC0C },
+    { "MTP_PROPERTY_HIDDEN",                                 0xDC0D },
+    { "MTP_PROPERTY_SYSTEM_OBJECT",                          0xDC0E },
+    { "MTP_PROPERTY_PERSISTENT_UID",                         0xDC41 },
+    { "MTP_PROPERTY_SYNC_ID",                                0xDC42 },
+    { "MTP_PROPERTY_PROPERTY_BAG",                           0xDC43 },
+    { "MTP_PROPERTY_NAME",                                   0xDC44 },
+    { "MTP_PROPERTY_CREATED_BY",                             0xDC45 },
+    { "MTP_PROPERTY_ARTIST",                                 0xDC46 },
+    { "MTP_PROPERTY_DATE_AUTHORED",                          0xDC47 },
+    { "MTP_PROPERTY_DESCRIPTION",                            0xDC48 },
+    { "MTP_PROPERTY_URL_REFERENCE",                          0xDC49 },
+    { "MTP_PROPERTY_LANGUAGE_LOCALE",                        0xDC4A },
+    { "MTP_PROPERTY_COPYRIGHT_INFORMATION",                  0xDC4B },
+    { "MTP_PROPERTY_SOURCE",                                 0xDC4C },
+    { "MTP_PROPERTY_ORIGIN_LOCATION",                        0xDC4D },
+    { "MTP_PROPERTY_DATE_ADDED",                             0xDC4E },
+    { "MTP_PROPERTY_NON_CONSUMABLE",                         0xDC4F },
+    { "MTP_PROPERTY_CORRUPT_UNPLAYABLE",                     0xDC50 },
+    { "MTP_PROPERTY_PRODUCER_SERIAL_NUMBER",                 0xDC51 },
+    { "MTP_PROPERTY_REPRESENTATIVE_SAMPLE_FORMAT",           0xDC81 },
+    { "MTP_PROPERTY_REPRESENTATIVE_SAMPLE_SIZE",             0xDC82 },
+    { "MTP_PROPERTY_REPRESENTATIVE_SAMPLE_HEIGHT",           0xDC83 },
+    { "MTP_PROPERTY_REPRESENTATIVE_SAMPLE_WIDTH",            0xDC84 },
+    { "MTP_PROPERTY_REPRESENTATIVE_SAMPLE_DURATION",         0xDC85 },
+    { "MTP_PROPERTY_REPRESENTATIVE_SAMPLE_DATA",             0xDC86 },
+    { "MTP_PROPERTY_WIDTH",                                  0xDC87 },
+    { "MTP_PROPERTY_HEIGHT",                                 0xDC88 },
+    { "MTP_PROPERTY_DURATION",                               0xDC89 },
+    { "MTP_PROPERTY_RATING",                                 0xDC8A },
+    { "MTP_PROPERTY_TRACK",                                  0xDC8B },
+    { "MTP_PROPERTY_GENRE",                                  0xDC8C },
+    { "MTP_PROPERTY_CREDITS",                                0xDC8D },
+    { "MTP_PROPERTY_LYRICS",                                 0xDC8E },
+    { "MTP_PROPERTY_SUBSCRIPTION_CONTENT_ID",                0xDC8F },
+    { "MTP_PROPERTY_PRODUCED_BY",                            0xDC90 },
+    { "MTP_PROPERTY_USE_COUNT",                              0xDC91 },
+    { "MTP_PROPERTY_SKIP_COUNT",                             0xDC92 },
+    { "MTP_PROPERTY_LAST_ACCESSED",                          0xDC93 },
+    { "MTP_PROPERTY_PARENTAL_RATING",                        0xDC94 },
+    { "MTP_PROPERTY_META_GENRE",                             0xDC95 },
+    { "MTP_PROPERTY_COMPOSER",                               0xDC96 },
+    { "MTP_PROPERTY_EFFECTIVE_RATING",                       0xDC97 },
+    { "MTP_PROPERTY_SUBTITLE",                               0xDC98 },
+    { "MTP_PROPERTY_ORIGINAL_RELEASE_DATE",                  0xDC99 },
+    { "MTP_PROPERTY_ALBUM_NAME",                             0xDC9A },
+    { "MTP_PROPERTY_ALBUM_ARTIST",                           0xDC9B },
+    { "MTP_PROPERTY_MOOD",                                   0xDC9C },
+    { "MTP_PROPERTY_DRM_STATUS",                             0xDC9D },
+    { "MTP_PROPERTY_SUB_DESCRIPTION",                        0xDC9E },
+    { "MTP_PROPERTY_IS_CROPPED",                             0xDCD1 },
+    { "MTP_PROPERTY_IS_COLOUR_CORRECTED",                    0xDCD2 },
+    { "MTP_PROPERTY_IMAGE_BIT_DEPTH",                        0xDCD3 },
+    { "MTP_PROPERTY_F_NUMBER",                               0xDCD4 },
+    { "MTP_PROPERTY_EXPOSURE_TIME",                          0xDCD5 },
+    { "MTP_PROPERTY_EXPOSURE_INDEX",                         0xDCD6 },
+    { "MTP_PROPERTY_TOTAL_BITRATE",                          0xDE91 },
+    { "MTP_PROPERTY_BITRATE_TYPE",                           0xDE92 },
+    { "MTP_PROPERTY_SAMPLE_RATE",                            0xDE93 },
+    { "MTP_PROPERTY_NUMBER_OF_CHANNELS",                     0xDE94 },
+    { "MTP_PROPERTY_AUDIO_BIT_DEPTH",                        0xDE95 },
+    { "MTP_PROPERTY_SCAN_TYPE",                              0xDE97 },
+    { "MTP_PROPERTY_AUDIO_WAVE_CODEC",                       0xDE99 },
+    { "MTP_PROPERTY_AUDIO_BITRATE",                          0xDE9A },
+    { "MTP_PROPERTY_VIDEO_FOURCC_CODEC",                     0xDE9B },
+    { "MTP_PROPERTY_VIDEO_BITRATE",                          0xDE9C },
+    { "MTP_PROPERTY_FRAMES_PER_THOUSAND_SECONDS",            0xDE9D },
+    { "MTP_PROPERTY_KEYFRAME_DISTANCE",                      0xDE9E },
+    { "MTP_PROPERTY_BUFFER_SIZE",                            0xDE9F },
+    { "MTP_PROPERTY_ENCODING_QUALITY",                       0xDEA0 },
+    { "MTP_PROPERTY_ENCODING_PROFILE",                       0xDEA1 },
+    { "MTP_PROPERTY_DISPLAY_NAME",                           0xDCE0 },
+    { "MTP_PROPERTY_BODY_TEXT",                              0xDCE1 },
+    { "MTP_PROPERTY_SUBJECT",                                0xDCE2 },
+    { "MTP_PROPERTY_PRIORITY",                               0xDCE3 },
+    { "MTP_PROPERTY_GIVEN_NAME",                             0xDD00 },
+    { "MTP_PROPERTY_MIDDLE_NAMES",                           0xDD01 },
+    { "MTP_PROPERTY_FAMILY_NAME",                            0xDD02 },
+    { "MTP_PROPERTY_PREFIX",                                 0xDD03 },
+    { "MTP_PROPERTY_SUFFIX",                                 0xDD04 },
+    { "MTP_PROPERTY_PHONETIC_GIVEN_NAME",                    0xDD05 },
+    { "MTP_PROPERTY_PHONETIC_FAMILY_NAME",                   0xDD06 },
+    { "MTP_PROPERTY_EMAIL_PRIMARY",                          0xDD07 },
+    { "MTP_PROPERTY_EMAIL_PERSONAL_1",                       0xDD08 },
+    { "MTP_PROPERTY_EMAIL_PERSONAL_2",                       0xDD09 },
+    { "MTP_PROPERTY_EMAIL_BUSINESS_1",                       0xDD0A },
+    { "MTP_PROPERTY_EMAIL_BUSINESS_2",                       0xDD0B },
+    { "MTP_PROPERTY_EMAIL_OTHERS",                           0xDD0C },
+    { "MTP_PROPERTY_PHONE_NUMBER_PRIMARY",                   0xDD0D },
+    { "MTP_PROPERTY_PHONE_NUMBER_PERSONAL",                  0xDD0E },
+    { "MTP_PROPERTY_PHONE_NUMBER_PERSONAL_2",                0xDD0F },
+    { "MTP_PROPERTY_PHONE_NUMBER_BUSINESS",                  0xDD10 },
+    { "MTP_PROPERTY_PHONE_NUMBER_BUSINESS_2",                0xDD11 },
+    { "MTP_PROPERTY_PHONE_NUMBER_MOBILE",                    0xDD12 },
+    { "MTP_PROPERTY_PHONE_NUMBER_MOBILE_2",                  0xDD13 },
+    { "MTP_PROPERTY_FAX_NUMBER_PRIMARY",                     0xDD14 },
+    { "MTP_PROPERTY_FAX_NUMBER_PERSONAL",                    0xDD15 },
+    { "MTP_PROPERTY_FAX_NUMBER_BUSINESS",                    0xDD16 },
+    { "MTP_PROPERTY_PAGER_NUMBER",                           0xDD17 },
+    { "MTP_PROPERTY_PHONE_NUMBER_OTHERS",                    0xDD18 },
+    { "MTP_PROPERTY_PRIMARY_WEB_ADDRESS",                    0xDD19 },
+    { "MTP_PROPERTY_PERSONAL_WEB_ADDRESS",                   0xDD1A },
+    { "MTP_PROPERTY_BUSINESS_WEB_ADDRESS",                   0xDD1B },
+    { "MTP_PROPERTY_INSTANT_MESSANGER_ADDRESS",              0xDD1C },
+    { "MTP_PROPERTY_INSTANT_MESSANGER_ADDRESS_2",            0xDD1D },
+    { "MTP_PROPERTY_INSTANT_MESSANGER_ADDRESS_3",            0xDD1E },
+    { "MTP_PROPERTY_POSTAL_ADDRESS_PERSONAL_FULL",           0xDD1F },
+    { "MTP_PROPERTY_POSTAL_ADDRESS_PERSONAL_LINE_1",         0xDD20 },
+    { "MTP_PROPERTY_POSTAL_ADDRESS_PERSONAL_LINE_2",         0xDD21 },
+    { "MTP_PROPERTY_POSTAL_ADDRESS_PERSONAL_CITY",           0xDD22 },
+    { "MTP_PROPERTY_POSTAL_ADDRESS_PERSONAL_REGION",         0xDD23 },
+    { "MTP_PROPERTY_POSTAL_ADDRESS_PERSONAL_POSTAL_CODE",    0xDD24 },
+    { "MTP_PROPERTY_POSTAL_ADDRESS_PERSONAL_COUNTRY",        0xDD25 },
+    { "MTP_PROPERTY_POSTAL_ADDRESS_BUSINESS_FULL",           0xDD26 },
+    { "MTP_PROPERTY_POSTAL_ADDRESS_BUSINESS_LINE_1",         0xDD27 },
+    { "MTP_PROPERTY_POSTAL_ADDRESS_BUSINESS_LINE_2",         0xDD28 },
+    { "MTP_PROPERTY_POSTAL_ADDRESS_BUSINESS_CITY",           0xDD29 },
+    { "MTP_PROPERTY_POSTAL_ADDRESS_BUSINESS_REGION",         0xDD2A },
+    { "MTP_PROPERTY_POSTAL_ADDRESS_BUSINESS_POSTAL_CODE",    0xDD2B },
+    { "MTP_PROPERTY_POSTAL_ADDRESS_BUSINESS_COUNTRY",        0xDD2C },
+    { "MTP_PROPERTY_POSTAL_ADDRESS_OTHER_FULL",              0xDD2D },
+    { "MTP_PROPERTY_POSTAL_ADDRESS_OTHER_LINE_1",            0xDD2E },
+    { "MTP_PROPERTY_POSTAL_ADDRESS_OTHER_LINE_2",            0xDD2F },
+    { "MTP_PROPERTY_POSTAL_ADDRESS_OTHER_CITY",              0xDD30 },
+    { "MTP_PROPERTY_POSTAL_ADDRESS_OTHER_REGION",            0xDD31 },
+    { "MTP_PROPERTY_POSTAL_ADDRESS_OTHER_POSTAL_CODE",       0xDD32 },
+    { "MTP_PROPERTY_POSTAL_ADDRESS_OTHER_COUNTRY",           0xDD33 },
+    { "MTP_PROPERTY_ORGANIZATION_NAME",                      0xDD34 },
+    { "MTP_PROPERTY_PHONETIC_ORGANIZATION_NAME",             0xDD35 },
+    { "MTP_PROPERTY_ROLE",                                   0xDD36 },
+    { "MTP_PROPERTY_BIRTHDATE",                              0xDD37 },
+    { "MTP_PROPERTY_MESSAGE_TO",                             0xDD40 },
+    { "MTP_PROPERTY_MESSAGE_CC",                             0xDD41 },
+    { "MTP_PROPERTY_MESSAGE_BCC",                            0xDD42 },
+    { "MTP_PROPERTY_MESSAGE_READ",                           0xDD43 },
+    { "MTP_PROPERTY_MESSAGE_RECEIVED_TIME",                  0xDD44 },
+    { "MTP_PROPERTY_MESSAGE_SENDER",                         0xDD45 },
+    { "MTP_PROPERTY_ACTIVITY_BEGIN_TIME",                    0xDD50 },
+    { "MTP_PROPERTY_ACTIVITY_END_TIME",                      0xDD51 },
+    { "MTP_PROPERTY_ACTIVITY_LOCATION",                      0xDD52 },
+    { "MTP_PROPERTY_ACTIVITY_REQUIRED_ATTENDEES",            0xDD54 },
+    { "MTP_PROPERTY_ACTIVITY_OPTIONAL_ATTENDEES",            0xDD55 },
+    { "MTP_PROPERTY_ACTIVITY_RESOURCES",                     0xDD56 },
+    { "MTP_PROPERTY_ACTIVITY_ACCEPTED",                      0xDD57 },
+    { "MTP_PROPERTY_ACTIVITY_TENTATIVE",                     0xDD58 },
+    { "MTP_PROPERTY_ACTIVITY_DECLINED",                      0xDD59 },
+    { "MTP_PROPERTY_ACTIVITY_REMAINDER_TIME",                0xDD5A },
+    { "MTP_PROPERTY_ACTIVITY_OWNER",                         0xDD5B },
+    { "MTP_PROPERTY_ACTIVITY_STATUS",                        0xDD5C },
+    { "MTP_PROPERTY_OWNER",                                  0xDD5D },
+    { "MTP_PROPERTY_EDITOR",                                 0xDD5E },
+    { "MTP_PROPERTY_WEBMASTER",                              0xDD5F },
+    { "MTP_PROPERTY_URL_SOURCE",                             0xDD60 },
+    { "MTP_PROPERTY_URL_DESTINATION",                        0xDD61 },
+    { "MTP_PROPERTY_TIME_BOOKMARK",                          0xDD62 },
+    { "MTP_PROPERTY_OBJECT_BOOKMARK",                        0xDD63 },
+    { "MTP_PROPERTY_BYTE_BOOKMARK",                          0xDD64 },
+    { "MTP_PROPERTY_LAST_BUILD_DATE",                        0xDD70 },
+    { "MTP_PROPERTY_TIME_TO_LIVE",                           0xDD71 },
+    { "MTP_PROPERTY_MEDIA_GUID",                             0xDD72 },
+    { 0,                                                     0      },
+};
+
+static const CodeEntry sDevicePropCodes[] = {
+    { "MTP_DEVICE_PROPERTY_UNDEFINED",                       0x5000 },
+    { "MTP_DEVICE_PROPERTY_BATTERY_LEVEL",                   0x5001 },
+    { "MTP_DEVICE_PROPERTY_FUNCTIONAL_MODE",                 0x5002 },
+    { "MTP_DEVICE_PROPERTY_IMAGE_SIZE",                      0x5003 },
+    { "MTP_DEVICE_PROPERTY_COMPRESSION_SETTING",             0x5004 },
+    { "MTP_DEVICE_PROPERTY_WHITE_BALANCE",                   0x5005 },
+    { "MTP_DEVICE_PROPERTY_RGB_GAIN",                        0x5006 },
+    { "MTP_DEVICE_PROPERTY_F_NUMBER",                        0x5007 },
+    { "MTP_DEVICE_PROPERTY_FOCAL_LENGTH",                    0x5008 },
+    { "MTP_DEVICE_PROPERTY_FOCUS_DISTANCE",                  0x5009 },
+    { "MTP_DEVICE_PROPERTY_FOCUS_MODE",                      0x500A },
+    { "MTP_DEVICE_PROPERTY_EXPOSURE_METERING_MODE",          0x500B },
+    { "MTP_DEVICE_PROPERTY_FLASH_MODE",                      0x500C },
+    { "MTP_DEVICE_PROPERTY_EXPOSURE_TIME",                   0x500D },
+    { "MTP_DEVICE_PROPERTY_EXPOSURE_PROGRAM_MODE",           0x500E },
+    { "MTP_DEVICE_PROPERTY_EXPOSURE_INDEX",                  0x500F },
+    { "MTP_DEVICE_PROPERTY_EXPOSURE_BIAS_COMPENSATION",      0x5010 },
+    { "MTP_DEVICE_PROPERTY_DATETIME",                        0x5011 },
+    { "MTP_DEVICE_PROPERTY_CAPTURE_DELAY",                   0x5012 },
+    { "MTP_DEVICE_PROPERTY_STILL_CAPTURE_MODE",              0x5013 },
+    { "MTP_DEVICE_PROPERTY_CONTRAST",                        0x5014 },
+    { "MTP_DEVICE_PROPERTY_SHARPNESS",                       0x5015 },
+    { "MTP_DEVICE_PROPERTY_DIGITAL_ZOOM",                    0x5016 },
+    { "MTP_DEVICE_PROPERTY_EFFECT_MODE",                     0x5017 },
+    { "MTP_DEVICE_PROPERTY_BURST_NUMBER",                    0x5018 },
+    { "MTP_DEVICE_PROPERTY_BURST_INTERVAL",                  0x5019 },
+    { "MTP_DEVICE_PROPERTY_TIMELAPSE_NUMBER",                0x501A },
+    { "MTP_DEVICE_PROPERTY_TIMELAPSE_INTERVAL",              0x501B },
+    { "MTP_DEVICE_PROPERTY_FOCUS_METERING_MODE",             0x501C },
+    { "MTP_DEVICE_PROPERTY_UPLOAD_URL",                      0x501D },
+    { "MTP_DEVICE_PROPERTY_ARTIST",                          0x501E },
+    { "MTP_DEVICE_PROPERTY_COPYRIGHT_INFO",                  0x501F },
+    { "MTP_DEVICE_PROPERTY_SYNCHRONIZATION_PARTNER",         0xD401 },
+    { "MTP_DEVICE_PROPERTY_DEVICE_FRIENDLY_NAME",            0xD402 },
+    { "MTP_DEVICE_PROPERTY_VOLUME",                          0xD403 },
+    { "MTP_DEVICE_PROPERTY_SUPPORTED_FORMATS_ORDERED",       0xD404 },
+    { "MTP_DEVICE_PROPERTY_DEVICE_ICON",                     0xD405 },
+    { "MTP_DEVICE_PROPERTY_PLAYBACK_RATE",                   0xD410 },
+    { "MTP_DEVICE_PROPERTY_PLAYBACK_OBJECT",                 0xD411 },
+    { "MTP_DEVICE_PROPERTY_PLAYBACK_CONTAINER_INDEX",        0xD412 },
+    { "MTP_DEVICE_PROPERTY_SESSION_INITIATOR_VERSION_INFO",  0xD406 },
+    { "MTP_DEVICE_PROPERTY_PERCEIVED_DEVICE_TYPE",           0xD407 },
+    { 0,                                                     0      },
+};
+
+static const char* getCodeName(uint16_t code, const CodeEntry* table) {
+    const CodeEntry* entry = table;
+    while (entry->name) {
+        if (entry->code == code)
+            return entry->name;
+        entry++;
+    }
+    return "UNKNOWN";
+}
+
+const char* MtpDebug::getOperationCodeName(MtpOperationCode code) {
+    return getCodeName(code, sOperationCodes);
+}
+
+const char* MtpDebug::getFormatCodeName(MtpObjectFormat code) {
+    if (code == 0)
+        return "NONE";
+    return getCodeName(code, sFormatCodes);
+}
+
+const char* MtpDebug::getObjectPropCodeName(MtpPropertyCode code) {
+    if (code == 0)
+        return "NONE";
+    return getCodeName(code, sObjectPropCodes);
+}
+
+const char* MtpDebug::getDevicePropCodeName(MtpPropertyCode code) {
+    if (code == 0)
+        return "NONE";
+    return getCodeName(code, sDevicePropCodes);
+}
+
+}  // namespace android
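
For reference, a minimal sketch of how these lookup tables are meant to be consumed when tracing a transaction. LOGV comes from utils/Log.h (already pulled in by MtpDebug.h); the helper itself is illustrative and not part of this change:

    #include "MtpDebug.h"

    // Hypothetical tracing helper: codes missing from the tables resolve to "UNKNOWN".
    static void traceOperation(MtpOperationCode code) {
        LOGV("handling operation %s (0x%04X)",
                MtpDebug::getOperationCodeName(code), code);
    }
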
diff --git a/media/mtp/MtpDebug.h b/media/mtp/MtpDebug.h
new file mode 100644
index 0000000..5b53e31
--- /dev/null
+++ b/media/mtp/MtpDebug.h
@@ -0,0 +1,37 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _MTP_DEBUG_H
+#define _MTP_DEBUG_H
+
+// #define LOG_NDEBUG 0
+#include <utils/Log.h>
+
+#include "MtpTypes.h"
+
+namespace android {
+
+class MtpDebug {
+public:
+    static const char* getOperationCodeName(MtpOperationCode code);
+    static const char* getFormatCodeName(MtpObjectFormat code);
+    static const char* getObjectPropCodeName(MtpPropertyCode code);
+    static const char* getDevicePropCodeName(MtpPropertyCode code);
+};
+
+}; // namespace android
+
+#endif // _MTP_DEBUG_H
diff --git a/media/mtp/MtpDevice.cpp b/media/mtp/MtpDevice.cpp
new file mode 100644
index 0000000..d22c72f
--- /dev/null
+++ b/media/mtp/MtpDevice.cpp
@@ -0,0 +1,591 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "MtpDevice"
+
+#include "MtpDebug.h"
+#include "MtpDevice.h"
+#include "MtpDeviceInfo.h"
+#include "MtpObjectInfo.h"
+#include "MtpProperty.h"
+#include "MtpStorageInfo.h"
+#include "MtpStringBuffer.h"
+#include "MtpUtils.h"
+
+#include <stdio.h>
+#include <stdlib.h>
+#include <sys/types.h>
+#include <sys/ioctl.h>
+#include <sys/stat.h>
+#include <fcntl.h>
+#include <errno.h>
+#include <endian.h>
+
+#include <usbhost/usbhost.h>
+
+namespace android {
+
+MtpDevice::MtpDevice(struct usb_device* device, int interface,
+            const struct usb_endpoint_descriptor *ep_in,
+            const struct usb_endpoint_descriptor *ep_out,
+            const struct usb_endpoint_descriptor *ep_intr)
+    :   mDevice(device),
+        mInterface(interface),
+        mRequestIn1(NULL),
+        mRequestIn2(NULL),
+        mRequestOut(NULL),
+        mRequestIntr(NULL),
+        mDeviceInfo(NULL),
+        mID(usb_device_get_unique_id(device)),
+        mSessionID(0),
+        mTransactionID(0),
+        mReceivedResponse(false)
+{
+    mRequestIn1 = usb_request_new(device, ep_in);
+    mRequestIn2 = usb_request_new(device, ep_in);
+    mRequestOut = usb_request_new(device, ep_out);
+    mRequestIntr = usb_request_new(device, ep_intr);
+}
+
+MtpDevice::~MtpDevice() {
+    close();
+    for (int i = 0; i < mDeviceProperties.size(); i++)
+        delete mDeviceProperties[i];
+    usb_request_free(mRequestIn1);
+    usb_request_free(mRequestIn2);
+    usb_request_free(mRequestOut);
+    usb_request_free(mRequestIntr);
+}
+
+void MtpDevice::initialize() {
+    openSession();
+    mDeviceInfo = getDeviceInfo();
+    if (mDeviceInfo) {
+        if (mDeviceInfo->mDeviceProperties) {
+            int count = mDeviceInfo->mDeviceProperties->size();
+            for (int i = 0; i < count; i++) {
+                MtpDeviceProperty propCode = (*mDeviceInfo->mDeviceProperties)[i];
+                MtpProperty* property = getDevicePropDesc(propCode);
+                if (property)
+                    mDeviceProperties.push(property);
+            }
+        }
+    }
+}
+
+void MtpDevice::close() {
+    if (mDevice) {
+        usb_device_release_interface(mDevice, mInterface);
+        usb_device_close(mDevice);
+        mDevice = NULL;
+    }
+}
+
+void MtpDevice::print() {
+    if (mDeviceInfo) {
+        mDeviceInfo->print();
+
+        if (mDeviceInfo->mDeviceProperties) {
+            LOGI("***** DEVICE PROPERTIES *****\n");
+            int count = mDeviceInfo->mDeviceProperties->size();
+            for (int i = 0; i < count; i++) {
+                MtpDeviceProperty propCode = (*mDeviceInfo->mDeviceProperties)[i];
+                MtpProperty* property = getDevicePropDesc(propCode);
+                if (property) {
+                    property->print();
+                }
+            }
+        }
+    }
+
+    if (mDeviceInfo && mDeviceInfo->mPlaybackFormats) {
+        LOGI("***** OBJECT PROPERTIES *****\n");
+        int count = mDeviceInfo->mPlaybackFormats->size();
+        for (int i = 0; i < count; i++) {
+            MtpObjectFormat format = (*mDeviceInfo->mPlaybackFormats)[i];
+            LOGI("*** FORMAT: %s\n", MtpDebug::getFormatCodeName(format));
+            MtpObjectPropertyList* props = getObjectPropsSupported(format);
+            if (props) {
+                for (int j = 0; j < props->size(); j++) {
+                    MtpObjectProperty prop = (*props)[j];
+                    MtpProperty* property = getObjectPropDesc(prop, format);
+                    if (property)
+                        property->print();
+                    else
+                        LOGE("could not fetch property: %s",
+                                MtpDebug::getObjectPropCodeName(prop));
+                }
+            }
+        }
+    }
+}
+
+const char* MtpDevice::getDeviceName() {
+    if (mDevice)
+        return usb_device_get_name(mDevice);
+    else
+        return "???";
+}
+
+bool MtpDevice::openSession() {
+    Mutex::Autolock autoLock(mMutex);
+
+    mSessionID = 0;
+    mTransactionID = 0;
+    MtpSessionID newSession = 1;
+    mRequest.reset();
+    mRequest.setParameter(1, newSession);
+    if (!sendRequest(MTP_OPERATION_OPEN_SESSION))
+        return false;
+    MtpResponseCode ret = readResponse();
+    if (ret == MTP_RESPONSE_SESSION_ALREADY_OPEN)
+        newSession = mResponse.getParameter(1);
+    else if (ret != MTP_RESPONSE_OK)
+        return false;
+
+    mSessionID = newSession;
+    mTransactionID = 1;
+    return true;
+}
+
+bool MtpDevice::closeSession() {
+    // FIXME
+    return true;
+}
+
+MtpDeviceInfo* MtpDevice::getDeviceInfo() {
+    Mutex::Autolock autoLock(mMutex);
+
+    mRequest.reset();
+    if (!sendRequest(MTP_OPERATION_GET_DEVICE_INFO))
+        return NULL;
+    if (!readData())
+        return NULL;
+    MtpResponseCode ret = readResponse();
+    if (ret == MTP_RESPONSE_OK) {
+        MtpDeviceInfo* info = new MtpDeviceInfo;
+        info->read(mData);
+        return info;
+    }
+    return NULL;
+}
+
+MtpStorageIDList* MtpDevice::getStorageIDs() {
+    Mutex::Autolock autoLock(mMutex);
+
+    mRequest.reset();
+    if (!sendRequest(MTP_OPERATION_GET_STORAGE_IDS))
+        return NULL;
+    if (!readData())
+        return NULL;
+    MtpResponseCode ret = readResponse();
+    if (ret == MTP_RESPONSE_OK) {
+        return mData.getAUInt32();
+    }
+    return NULL;
+}
+
+MtpStorageInfo* MtpDevice::getStorageInfo(MtpStorageID storageID) {
+    Mutex::Autolock autoLock(mMutex);
+
+    mRequest.reset();
+    mRequest.setParameter(1, storageID);
+    if (!sendRequest(MTP_OPERATION_GET_STORAGE_INFO))
+        return NULL;
+    if (!readData())
+        return NULL;
+    MtpResponseCode ret = readResponse();
+    if (ret == MTP_RESPONSE_OK) {
+        MtpStorageInfo* info = new MtpStorageInfo(storageID);
+        info->read(mData);
+        return info;
+    }
+    return NULL;
+}
+
+MtpObjectHandleList* MtpDevice::getObjectHandles(MtpStorageID storageID,
+            MtpObjectFormat format, MtpObjectHandle parent) {
+    Mutex::Autolock autoLock(mMutex);
+
+    mRequest.reset();
+    mRequest.setParameter(1, storageID);
+    mRequest.setParameter(2, format);
+    mRequest.setParameter(3, parent);
+    if (!sendRequest(MTP_OPERATION_GET_OBJECT_HANDLES))
+        return NULL;
+    if (!readData())
+        return NULL;
+    MtpResponseCode ret = readResponse();
+    if (ret == MTP_RESPONSE_OK) {
+        return mData.getAUInt32();
+    }
+    return NULL;
+}
+
+MtpObjectInfo* MtpDevice::getObjectInfo(MtpObjectHandle handle) {
+    Mutex::Autolock autoLock(mMutex);
+
+    // FIXME - we might want to add some caching here
+
+    mRequest.reset();
+    mRequest.setParameter(1, handle);
+    if (!sendRequest(MTP_OPERATION_GET_OBJECT_INFO))
+        return NULL;
+    if (!readData())
+        return NULL;
+    MtpResponseCode ret = readResponse();
+    if (ret == MTP_RESPONSE_OK) {
+        MtpObjectInfo* info = new MtpObjectInfo(handle);
+        info->read(mData);
+        return info;
+    }
+    return NULL;
+}
+
+void* MtpDevice::getThumbnail(MtpObjectHandle handle, int& outLength) {
+    Mutex::Autolock autoLock(mMutex);
+
+    mRequest.reset();
+    mRequest.setParameter(1, handle);
+    if (sendRequest(MTP_OPERATION_GET_THUMB) && readData()) {
+        MtpResponseCode ret = readResponse();
+        if (ret == MTP_RESPONSE_OK) {
+            return mData.getData(outLength);
+        }
+    }
+    outLength = 0;
+    return NULL;
+}
+
+MtpObjectHandle MtpDevice::sendObjectInfo(MtpObjectInfo* info) {
+    Mutex::Autolock autoLock(mMutex);
+
+    mRequest.reset();
+    MtpObjectHandle parent = info->mParent;
+    if (parent == 0)
+        parent = MTP_PARENT_ROOT;
+
+    mRequest.setParameter(1, info->mStorageID);
+    mRequest.setParameter(2, info->mParent);
+
+    mData.putUInt32(info->mStorageID);
+    mData.putUInt16(info->mFormat);
+    mData.putUInt16(info->mProtectionStatus);
+    mData.putUInt32(info->mCompressedSize);
+    mData.putUInt16(info->mThumbFormat);
+    mData.putUInt32(info->mThumbCompressedSize);
+    mData.putUInt32(info->mThumbPixWidth);
+    mData.putUInt32(info->mThumbPixHeight);
+    mData.putUInt32(info->mImagePixWidth);
+    mData.putUInt32(info->mImagePixHeight);
+    mData.putUInt32(info->mImagePixDepth);
+    mData.putUInt32(info->mParent);
+    mData.putUInt16(info->mAssociationType);
+    mData.putUInt32(info->mAssociationDesc);
+    mData.putUInt32(info->mSequenceNumber);
+    mData.putString(info->mName);
+
+    char created[100], modified[100];
+    formatDateTime(info->mDateCreated, created, sizeof(created));
+    formatDateTime(info->mDateModified, modified, sizeof(modified));
+
+    mData.putString(created);
+    mData.putString(modified);
+    if (info->mKeywords)
+        mData.putString(info->mKeywords);
+    else
+        mData.putEmptyString();
+
+    if (sendRequest(MTP_OPERATION_SEND_OBJECT_INFO) && sendData()) {
+        MtpResponseCode ret = readResponse();
+        if (ret == MTP_RESPONSE_OK) {
+            info->mStorageID = mResponse.getParameter(1);
+            info->mParent = mResponse.getParameter(2);
+            info->mHandle = mResponse.getParameter(3);
+            return info->mHandle;
+        }
+    }
+    return (MtpObjectHandle)-1;
+}
+
+bool MtpDevice::sendObject(MtpObjectInfo* info, int srcFD) {
+    Mutex::Autolock autoLock(mMutex);
+
+    int remaining = info->mCompressedSize;
+    mRequest.reset();
+    mRequest.setParameter(1, info->mHandle);
+    if (sendRequest(MTP_OPERATION_SEND_OBJECT)) {
+        // send data header
+        writeDataHeader(MTP_OPERATION_SEND_OBJECT, remaining);
+
+        char buffer[65536];
+        while (remaining > 0) {
+            int count = read(srcFD, buffer, sizeof(buffer));
+            if (count > 0) {
+                int written = mData.write(mRequestOut, buffer, count);
+                if (written < 0) break;    // abort the transfer on a USB write error
+                remaining -= count;
+            } else {
+                break;
+            }
+        }
+    }
+    MtpResponseCode ret = readResponse();
+    return (remaining == 0 && ret == MTP_RESPONSE_OK);
+}
+
+bool MtpDevice::deleteObject(MtpObjectHandle handle) {
+    Mutex::Autolock autoLock(mMutex);
+
+    mRequest.reset();
+    mRequest.setParameter(1, handle);
+    if (sendRequest(MTP_OPERATION_DELETE_OBJECT)) {
+        MtpResponseCode ret = readResponse();
+        if (ret == MTP_RESPONSE_OK)
+            return true;
+    }
+    return false;
+}
+
+MtpObjectHandle MtpDevice::getParent(MtpObjectHandle handle) {
+    MtpObjectInfo* info = getObjectInfo(handle);
+    // getObjectInfo() allocates a new MtpObjectInfo; free it to avoid a leak
+    MtpObjectHandle parent = (info ? info->mParent : (MtpObjectHandle)-1);
+    delete info;
+    return parent;
+}
+
+MtpObjectHandle MtpDevice::getStorageID(MtpObjectHandle handle) {
+    MtpObjectInfo* info = getObjectInfo(handle);
+    // as above, avoid leaking the MtpObjectInfo returned by getObjectInfo()
+    MtpObjectHandle storageID = (info ? info->mStorageID : (MtpObjectHandle)-1);
+    delete info;
+    return storageID;
+}
+
+MtpObjectPropertyList* MtpDevice::getObjectPropsSupported(MtpObjectFormat format) {
+    Mutex::Autolock autoLock(mMutex);
+
+    mRequest.reset();
+    mRequest.setParameter(1, format);
+    if (!sendRequest(MTP_OPERATION_GET_OBJECT_PROPS_SUPPORTED))
+        return NULL;
+    if (!readData())
+        return NULL;
+    MtpResponseCode ret = readResponse();
+    if (ret == MTP_RESPONSE_OK) {
+        return mData.getAUInt16();
+    }
+    return NULL;
+
+}
+
+MtpProperty* MtpDevice::getDevicePropDesc(MtpDeviceProperty code) {
+    Mutex::Autolock autoLock(mMutex);
+
+    mRequest.reset();
+    mRequest.setParameter(1, code);
+    if (!sendRequest(MTP_OPERATION_GET_DEVICE_PROP_DESC))
+        return NULL;
+    if (!readData())
+        return NULL;
+    MtpResponseCode ret = readResponse();
+    if (ret == MTP_RESPONSE_OK) {
+        MtpProperty* property = new MtpProperty;
+        property->read(mData);
+        return property;
+    }
+    return NULL;
+}
+
+MtpProperty* MtpDevice::getObjectPropDesc(MtpObjectProperty code, MtpObjectFormat format) {
+    Mutex::Autolock autoLock(mMutex);
+
+    mRequest.reset();
+    mRequest.setParameter(1, code);
+    mRequest.setParameter(2, format);
+    if (!sendRequest(MTP_OPERATION_GET_OBJECT_PROP_DESC))
+        return NULL;
+    if (!readData())
+        return NULL;
+    MtpResponseCode ret = readResponse();
+    if (ret == MTP_RESPONSE_OK) {
+        MtpProperty* property = new MtpProperty;
+        property->read(mData);
+        return property;
+    }
+    return NULL;
+}
+
+// reads the object's data and writes it to the specified file path
+bool MtpDevice::readObject(MtpObjectHandle handle, const char* destPath, int group, int perm) {
+    LOGD("readObject: %s", destPath);
+    int fd = ::open(destPath, O_RDWR | O_CREAT | O_TRUNC, 0);  // mode is set via fchmod() below
+    if (fd < 0) {
+        LOGE("open failed for %s", destPath);
+        return false;
+    }
+
+    fchown(fd, getuid(), group);
+    // set permissions
+    int mask = umask(0);
+    fchmod(fd, perm);
+    umask(mask);
+
+    Mutex::Autolock autoLock(mMutex);
+    bool result = false;
+
+    mRequest.reset();
+    mRequest.setParameter(1, handle);
+    if (sendRequest(MTP_OPERATION_GET_OBJECT)
+            && mData.readDataHeader(mRequestIn1)) {
+        uint32_t length = mData.getContainerLength();
+        if (length < MTP_CONTAINER_HEADER_SIZE)
+            goto fail;
+        length -= MTP_CONTAINER_HEADER_SIZE;
+        uint32_t remaining = length;
+
+        int initialDataLength = 0;
+        void* initialData = mData.getData(initialDataLength);
+        if (initialData) {
+            if (initialDataLength > 0) {
+                if (write(fd, initialData, initialDataLength) != initialDataLength)
+                    goto fail;
+                remaining -= initialDataLength;
+            }
+            free(initialData);
+        }
+
+        // USB reads greater than 16K don't work
+        char buffer1[16384], buffer2[16384];
+        mRequestIn1->buffer = buffer1;
+        mRequestIn2->buffer = buffer2;
+        struct usb_request* req = mRequestIn1;
+        void* writeBuffer = NULL;
+        int writeLength = 0;
+
+        while (remaining > 0 || writeBuffer) {
+            if (remaining > 0) {
+                // queue up a read request
+                req->buffer_length = (remaining > sizeof(buffer1) ? sizeof(buffer1) : remaining);
+                if (mData.readDataAsync(req)) {
+                    LOGE("readDataAsync failed");
+                    goto fail;
+                }
+            } else {
+                req = NULL;
+            }
+
+            if (writeBuffer) {
+                // write previous buffer
+                if (write(fd, writeBuffer, writeLength) != writeLength) {
+                    LOGE("write failed");
+                    // wait for pending read before failing
+                    if (req)
+                        mData.readDataWait(mDevice);
+                    goto fail;
+                }
+                writeBuffer = NULL;
+            }
+
+            // wait for read to complete
+            if (req) {
+                int read = mData.readDataWait(mDevice);
+                if (read < 0)
+                    goto fail;
+
+                writeBuffer = req->buffer;
+                writeLength = read;
+                remaining -= read;
+                req = (req == mRequestIn1 ? mRequestIn2 : mRequestIn1);
+            }
+        }
+
+        MtpResponseCode response = readResponse();
+        if (response == MTP_RESPONSE_OK)
+            result = true;
+    }
+
+fail:
+    ::close(fd);
+    return result;
+}
+
+bool MtpDevice::sendRequest(MtpOperationCode operation) {
+    LOGV("sendRequest: %s\n", MtpDebug::getOperationCodeName(operation));
+    mReceivedResponse = false;
+    mRequest.setOperationCode(operation);
+    if (mTransactionID > 0)
+        mRequest.setTransactionID(mTransactionID++);
+    int ret = mRequest.write(mRequestOut);
+    mRequest.dump();
+    return (ret > 0);
+}
+
+bool MtpDevice::sendData() {
+    LOGV("sendData\n");
+    mData.setOperationCode(mRequest.getOperationCode());
+    mData.setTransactionID(mRequest.getTransactionID());
+    int ret = mData.write(mRequestOut);
+    mData.dump();
+    return (ret > 0);
+}
+
+bool MtpDevice::readData() {
+    mData.reset();
+    int ret = mData.read(mRequestIn1);
+    LOGV("readData returned %d\n", ret);
+    if (ret >= MTP_CONTAINER_HEADER_SIZE) {
+        if (mData.getContainerType() == MTP_CONTAINER_TYPE_RESPONSE) {
+            LOGD("got response packet instead of data packet");
+            // we got a response packet rather than data
+            // copy it to mResponse
+            mResponse.copyFrom(mData);
+            mReceivedResponse = true;
+            return false;
+        }
+        mData.dump();
+        return true;
+    }
+    else {
+        LOGV("readResponse failed\n");
+        return false;
+    }
+}
+
+bool MtpDevice::writeDataHeader(MtpOperationCode operation, int dataLength) {
+    mData.setOperationCode(operation);
+    mData.setTransactionID(mRequest.getTransactionID());
+    return (!mData.writeDataHeader(mRequestOut, dataLength));
+}
+
+MtpResponseCode MtpDevice::readResponse() {
+    LOGV("readResponse\n");
+    if (mReceivedResponse) {
+        mReceivedResponse = false;
+        return mResponse.getResponseCode();
+    }
+    int ret = mResponse.read(mRequestIn1);
+    if (ret >= MTP_CONTAINER_HEADER_SIZE) {
+        mResponse.dump();
+        return mResponse.getResponseCode();
+    } else {
+        LOGD("readResponse failed\n");
+        return -1;
+    }
+}
+
+}  // namespace android
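
readObject() above hides a detail worth calling out: because bulk reads larger than 16K don't work, it ping-pongs two 16K buffers so that writing one chunk to disk overlaps the next queued USB read. A self-contained sketch of that overlap pattern, with finishRead() standing in for the readDataAsync()/readDataWait() pair and stubbed so the snippet compiles on its own:

    #include <cstdio>
    #include <cstring>

    static const int kChunk = 16384;             // USB reads above 16K don't work

    // Stub: pretend the queued USB read completed and filled 'dst' with 'want' bytes.
    static int finishRead(char* dst, int want) {
        memset(dst, 'x', want);
        return want;
    }

    static bool writeChunk(FILE* out, const char* src, int len) {
        return fwrite(src, 1, len, out) == (size_t)len;
    }

    // Illustrative only: alternate two buffers so disk I/O and USB I/O overlap.
    static bool copyObject(FILE* out, int remaining) {
        char bufA[kChunk], bufB[kChunk];
        char* reading = bufA;                    // buffer the in-flight read fills
        char* writing = NULL;                    // buffer whose data still needs writing
        int writeLen = 0;

        while (remaining > 0 || writing) {
            int want = remaining > kChunk ? kChunk : remaining;
            // flush the previously completed buffer while the next read is in flight
            if (writing && !writeChunk(out, writing, writeLen))
                return false;
            writing = NULL;
            if (remaining > 0) {
                writeLen = finishRead(reading, want);   // wait for the read to complete
                writing = reading;
                remaining -= writeLen;
                reading = (reading == bufA ? bufB : bufA);
            }
        }
        return true;
    }
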
diff --git a/media/mtp/MtpDevice.h b/media/mtp/MtpDevice.h
new file mode 100644
index 0000000..d0a0fb3
--- /dev/null
+++ b/media/mtp/MtpDevice.h
@@ -0,0 +1,114 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _MTP_DEVICE_H
+#define _MTP_DEVICE_H
+
+#include "MtpRequestPacket.h"
+#include "MtpDataPacket.h"
+#include "MtpResponsePacket.h"
+#include "MtpTypes.h"
+
+#include <utils/threads.h>
+
+struct usb_device;
+struct usb_request;
+struct usb_endpoint_descriptor;
+
+namespace android {
+
+class MtpDeviceInfo;
+class MtpObjectInfo;
+class MtpStorageInfo;
+
+class MtpDevice {
+private:
+    struct usb_device*      mDevice;
+    int                     mInterface;
+    struct usb_request*     mRequestIn1;
+    struct usb_request*     mRequestIn2;
+    struct usb_request*     mRequestOut;
+    struct usb_request*     mRequestIntr;
+    MtpDeviceInfo*          mDeviceInfo;
+    MtpPropertyList         mDeviceProperties;
+
+    // a unique ID for the device
+    int                     mID;
+
+    // current session ID
+    MtpSessionID            mSessionID;
+    // current transaction ID
+    MtpTransactionID        mTransactionID;
+
+    MtpRequestPacket        mRequest;
+    MtpDataPacket           mData;
+    MtpResponsePacket       mResponse;
+    // set to true if we received a response packet instead of a data packet
+    bool                    mReceivedResponse;
+
+    // to ensure only one MTP transaction at a time
+    Mutex                   mMutex;
+
+public:
+                            MtpDevice(struct usb_device* device, int interface,
+                                    const struct usb_endpoint_descriptor *ep_in,
+                                    const struct usb_endpoint_descriptor *ep_out,
+                                    const struct usb_endpoint_descriptor *ep_intr);
+    virtual                 ~MtpDevice();
+
+    inline int              getID() const { return mID; }
+
+    void                    initialize();
+    void                    close();
+    void                    print();
+    const char*             getDeviceName();
+
+    bool                    openSession();
+    bool                    closeSession();
+
+    MtpDeviceInfo*          getDeviceInfo();
+    MtpStorageIDList*       getStorageIDs();
+    MtpStorageInfo*         getStorageInfo(MtpStorageID storageID);
+    MtpObjectHandleList*    getObjectHandles(MtpStorageID storageID, MtpObjectFormat format,
+                                    MtpObjectHandle parent);
+    MtpObjectInfo*          getObjectInfo(MtpObjectHandle handle);
+    void*                   getThumbnail(MtpObjectHandle handle, int& outLength);
+    MtpObjectHandle         sendObjectInfo(MtpObjectInfo* info);
+    bool                    sendObject(MtpObjectInfo* info, int srcFD);
+    bool                    deleteObject(MtpObjectHandle handle);
+    MtpObjectHandle         getParent(MtpObjectHandle handle);
+    MtpObjectHandle         getStorageID(MtpObjectHandle handle);
+
+    MtpObjectPropertyList*  getObjectPropsSupported(MtpObjectFormat format);
+
+    MtpProperty*            getDevicePropDesc(MtpDeviceProperty code);
+    MtpProperty*            getObjectPropDesc(MtpObjectProperty code, MtpObjectFormat format);
+
+    bool                   readObject(MtpObjectHandle handle, const char* destPath, int group,
+                                    int perm);
+
+private:
+    bool                    sendRequest(MtpOperationCode operation);
+    bool                    sendData();
+    bool                    readData();
+    bool                    writeDataHeader(MtpOperationCode operation, int dataLength);
+    MtpResponseCode         readResponse();
+
+};
+
+}; // namespace android
+
+#endif // _MTP_DEVICE_H
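
A hedged sketch of how a host-side caller might drive this class, assuming the usb_device, interface number and the three endpoint descriptors were already obtained from libusbhost enumeration (that discovery code is not part of this file):

    #include "MtpDevice.h"
    #include "MtpObjectInfo.h"

    // Illustrative only; 'device', 'interface' and the endpoint descriptors are assumed
    // to come from the caller's libusbhost enumeration code.
    static void dumpObjects(struct usb_device* device, int interface,
                            const struct usb_endpoint_descriptor* epIn,
                            const struct usb_endpoint_descriptor* epOut,
                            const struct usb_endpoint_descriptor* epIntr) {
        MtpDevice mtp(device, interface, epIn, epOut, epIntr);
        mtp.initialize();                        // opens the session, caches device info

        MtpStorageIDList* storageIDs = mtp.getStorageIDs();
        if (storageIDs) {
            for (size_t i = 0; i < storageIDs->size(); i++) {
                // format == 0, parent == 0: no filtering, per the MTP spec
                MtpObjectHandleList* handles = mtp.getObjectHandles((*storageIDs)[i], 0, 0);
                if (!handles)
                    continue;
                for (size_t j = 0; j < handles->size(); j++) {
                    MtpObjectInfo* info = mtp.getObjectInfo((*handles)[j]);
                    if (info) {
                        info->print();
                        delete info;
                    }
                }
                delete handles;
            }
            delete storageIDs;
        }
        // ~MtpDevice() releases the interface and closes the usb_device
    }
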
diff --git a/media/mtp/MtpDeviceInfo.cpp b/media/mtp/MtpDeviceInfo.cpp
new file mode 100644
index 0000000..5a9322e
--- /dev/null
+++ b/media/mtp/MtpDeviceInfo.cpp
@@ -0,0 +1,97 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "MtpDeviceInfo"
+
+#include "MtpDebug.h"
+#include "MtpDataPacket.h"
+#include "MtpDeviceInfo.h"
+#include "MtpStringBuffer.h"
+
+namespace android {
+
+MtpDeviceInfo::MtpDeviceInfo()
+    :   mStandardVersion(0),
+        mVendorExtensionID(0),
+        mVendorExtensionVersion(0),
+        mVendorExtensionDesc(NULL),
+        mFunctionalCode(0),
+        mOperations(NULL),
+        mEvents(NULL),
+        mDeviceProperties(NULL),
+        mCaptureFormats(NULL),
+        mPlaybackFormats(NULL),
+        mManufacturer(NULL),
+        mModel(NULL),
+        mVersion(NULL),
+        mSerial(NULL)
+{
+}
+
+MtpDeviceInfo::~MtpDeviceInfo() {
+    if (mVendorExtensionDesc)
+        free(mVendorExtensionDesc);
+    delete mOperations;
+    delete mEvents;
+    delete mDeviceProperties;
+    delete mCaptureFormats;
+    delete mPlaybackFormats;
+    if (mManufacturer)
+        free(mManufacturer);
+    if (mModel)
+        free(mModel);
+    if (mVersion)
+        free(mVersion);
+    if (mSerial)
+        free(mSerial);
+}
+
+void MtpDeviceInfo::read(MtpDataPacket& packet) {
+    MtpStringBuffer string;
+
+    // read the device info
+    mStandardVersion = packet.getUInt16();
+    mVendorExtensionID = packet.getUInt32();
+    mVendorExtensionVersion = packet.getUInt16();
+
+    packet.getString(string);
+    mVendorExtensionDesc = strdup((const char *)string);
+
+    mFunctionalCode = packet.getUInt16();
+    mOperations = packet.getAUInt16();
+    mEvents = packet.getAUInt16();
+    mDeviceProperties = packet.getAUInt16();
+    mCaptureFormats = packet.getAUInt16();
+    mPlaybackFormats = packet.getAUInt16();
+
+    packet.getString(string);
+    mManufacturer = strdup((const char *)string);
+    packet.getString(string);
+    mModel = strdup((const char *)string);
+    packet.getString(string);
+    mVersion = strdup((const char *)string);
+    packet.getString(string);
+    mSerial = strdup((const char *)string);
+}
+
+void MtpDeviceInfo::print() {
+    LOGV("Device Info:\n\tmStandardVersion: %d\n\tmVendorExtensionID: %d\n\tmVendorExtensionVersiony: %d\n",
+            mStandardVersion, mVendorExtensionID, mVendorExtensionVersion);
+    LOGV("\tmVendorExtensionDesc: %s\n\tmFunctionalCode: %d\n\tmManufacturer: %s\n\tmModel: %s\n\tmVersion: %s\n\tmSerial: %s\n",
+            mVendorExtensionDesc, mFunctionalCode, mManufacturer, mModel, mVersion, mSerial);
+}
+
+}  // namespace android
diff --git a/media/mtp/MtpDeviceInfo.h b/media/mtp/MtpDeviceInfo.h
new file mode 100644
index 0000000..2abaa10
--- /dev/null
+++ b/media/mtp/MtpDeviceInfo.h
@@ -0,0 +1,54 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _MTP_DEVICE_INFO_H
+#define _MTP_DEVICE_INFO_H
+
+struct stat;
+
+namespace android {
+
+class MtpDataPacket;
+
+class MtpDeviceInfo {
+public:
+    uint16_t                mStandardVersion;
+    uint32_t                mVendorExtensionID;
+    uint16_t                mVendorExtensionVersion;
+    char*                   mVendorExtensionDesc;
+    uint16_t                mFunctionalCode;
+    UInt16List*             mOperations;
+    UInt16List*             mEvents;
+    MtpDevicePropertyList*  mDeviceProperties;
+    MtpObjectFormatList*    mCaptureFormats;
+    MtpObjectFormatList*    mPlaybackFormats;
+    char*                   mManufacturer;
+    char*                   mModel;
+    char*                   mVersion;
+    char*                   mSerial;
+
+public:
+                            MtpDeviceInfo();
+    virtual                 ~MtpDeviceInfo();
+
+    void                    read(MtpDataPacket& packet);
+
+    void                    print();
+};
+
+}; // namespace android
+
+#endif // _MTP_DEVICE_INFO_H
diff --git a/media/mtp/MtpEventPacket.cpp b/media/mtp/MtpEventPacket.cpp
new file mode 100644
index 0000000..d2fca42
--- /dev/null
+++ b/media/mtp/MtpEventPacket.cpp
@@ -0,0 +1,70 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "MtpEventPacket"
+
+#include <stdio.h>
+#include <sys/types.h>
+#include <fcntl.h>
+#include <sys/ioctl.h>
+
+#ifdef MTP_DEVICE
+#include <linux/usb/f_mtp.h>
+#endif
+
+#include "MtpEventPacket.h"
+
+#include <usbhost/usbhost.h>
+
+namespace android {
+
+MtpEventPacket::MtpEventPacket()
+    :   MtpPacket(512)
+{
+}
+
+MtpEventPacket::~MtpEventPacket() {
+}
+
+#ifdef MTP_DEVICE
+int MtpEventPacket::write(int fd) {
+    struct mtp_event    event;
+
+    putUInt32(MTP_CONTAINER_LENGTH_OFFSET, mPacketSize);
+    putUInt16(MTP_CONTAINER_TYPE_OFFSET, MTP_CONTAINER_TYPE_EVENT);
+
+    event.data = mBuffer;
+    event.length = mPacketSize;
+    int ret = ::ioctl(fd, MTP_SEND_EVENT, (unsigned long)&event);
+    return (ret < 0 ? ret : 0);
+}
+#endif
+
+#ifdef MTP_HOST
+int MtpEventPacket::read(struct usb_request *request) {
+    request->buffer = mBuffer;
+    request->buffer_length = mBufferSize;
+    int ret = transfer(request);
+    if (ret >= 0)
+        mPacketSize = ret;
+    else
+        mPacketSize = 0;
+    return ret;
+}
+#endif
+
+}  // namespace android
+
diff --git a/media/mtp/MtpEventPacket.h b/media/mtp/MtpEventPacket.h
new file mode 100644
index 0000000..660baad
--- /dev/null
+++ b/media/mtp/MtpEventPacket.h
@@ -0,0 +1,48 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _MTP_EVENT_PACKET_H
+#define _MTP_EVENT_PACKET_H
+
+#include "MtpPacket.h"
+#include "mtp.h"
+
+namespace android {
+
+class MtpEventPacket : public MtpPacket {
+
+public:
+                        MtpEventPacket();
+    virtual             ~MtpEventPacket();
+
+#ifdef MTP_DEVICE
+    // write our data to the given file descriptor
+    int                 write(int fd);
+#endif
+
+#ifdef MTP_HOST
+    // read our buffer with the given request
+    int                 read(struct usb_request *request);
+#endif
+
+    inline MtpEventCode     getEventCode() const { return getContainerCode(); }
+    inline void             setEventCode(MtpEventCode code)
+                                                     { setContainerCode(code); }
+};
+
+}; // namespace android
+
+#endif // _MTP_EVENT_PACKET_H
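
On the device side (MTP_DEVICE builds), this packet type is how the gadget notifies the host of asynchronous changes. A minimal sketch, assuming MTP_EVENT_OBJECT_ADDED is among the event codes defined in mtp.h and that mtpFd is the open f_mtp gadget descriptor:

    #include "MtpEventPacket.h"

    // Illustrative only: parameter 1 of an ObjectAdded event carries the new handle.
    static int notifyObjectAdded(int mtpFd, MtpObjectHandle handle) {
        MtpEventPacket event;
        event.reset();                              // header-only packet, fields zeroed
        event.setEventCode(MTP_EVENT_OBJECT_ADDED); // assumed constant from mtp.h
        event.setParameter(1, handle);
        return event.write(mtpFd);                  // 0 on success, negative on failure
    }
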
diff --git a/media/mtp/MtpObjectInfo.cpp b/media/mtp/MtpObjectInfo.cpp
new file mode 100644
index 0000000..ea68c3b
--- /dev/null
+++ b/media/mtp/MtpObjectInfo.cpp
@@ -0,0 +1,108 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "MtpObjectInfo"
+
+#include "MtpDebug.h"
+#include "MtpDataPacket.h"
+#include "MtpObjectInfo.h"
+#include "MtpStringBuffer.h"
+#include "MtpUtils.h"
+
+namespace android {
+
+MtpObjectInfo::MtpObjectInfo(MtpObjectHandle handle)
+    :   mHandle(handle),
+        mStorageID(0),
+        mFormat(0),
+        mProtectionStatus(0),
+        mCompressedSize(0),
+        mThumbFormat(0),
+        mThumbCompressedSize(0),
+        mThumbPixWidth(0),
+        mThumbPixHeight(0),
+        mImagePixWidth(0),
+        mImagePixHeight(0),
+        mImagePixDepth(0),
+        mParent(0),
+        mAssociationType(0),
+        mAssociationDesc(0),
+        mSequenceNumber(0),
+        mName(NULL),
+        mDateCreated(0),
+        mDateModified(0),
+        mKeywords(NULL)
+{
+}
+
+MtpObjectInfo::~MtpObjectInfo() {
+    if (mName)
+        free(mName);
+    if (mKeywords)
+        free(mKeywords);
+}
+
+void MtpObjectInfo::read(MtpDataPacket& packet) {
+    MtpStringBuffer string;
+    time_t time;
+
+    mStorageID = packet.getUInt32();
+    mFormat = packet.getUInt16();
+    mProtectionStatus = packet.getUInt16();
+    mCompressedSize = packet.getUInt32();
+    mThumbFormat = packet.getUInt16();
+    mThumbCompressedSize = packet.getUInt32();
+    mThumbPixWidth = packet.getUInt32();
+    mThumbPixHeight = packet.getUInt32();
+    mImagePixWidth = packet.getUInt32();
+    mImagePixHeight = packet.getUInt32();
+    mImagePixDepth = packet.getUInt32();
+    mParent = packet.getUInt32();
+    mAssociationType = packet.getUInt16();
+    mAssociationDesc = packet.getUInt32();
+    mSequenceNumber = packet.getUInt32();
+
+    packet.getString(string);
+    mName = strdup((const char *)string);
+
+    packet.getString(string);
+    if (parseDateTime((const char*)string, time))
+        mDateCreated = time;
+
+    packet.getString(string);
+    if (parseDateTime((const char*)string, time))
+        mDateModified = time;
+
+    packet.getString(string);
+    mKeywords = strdup((const char *)string);
+}
+
+void MtpObjectInfo::print() {
+    LOGD("MtpObject Info %08X: %s\n", mHandle, mName);
+    LOGD("  mStorageID: %08X mFormat: %04X mProtectionStatus: %d\n",
+            mStorageID, mFormat, mProtectionStatus);
+    LOGD("  mCompressedSize: %d mThumbFormat: %04X mThumbCompressedSize: %d\n",
+            mCompressedSize, mThumbFormat, mThumbCompressedSize);
+    LOGD("  mThumbPixWidth: %d mThumbPixHeight: %d\n", mThumbPixWidth, mThumbPixHeight);
+    LOGD("  mImagePixWidth: %d mImagePixHeight: %d mImagePixDepth: %d\n",
+            mImagePixWidth, mImagePixHeight, mImagePixDepth);
+    LOGD("  mParent: %08X mAssociationType: %04X mAssociationDesc: %04X\n",
+            mParent, mAssociationType, mAssociationDesc);
+    LOGD("  mSequenceNumber: %d mDateCreated: %ld mDateModified: %ld mKeywords: %s\n",
+            mSequenceNumber, mDateCreated, mDateModified, mKeywords);
+}
+
+}  // namespace android
diff --git a/media/mtp/MtpObjectInfo.h b/media/mtp/MtpObjectInfo.h
new file mode 100644
index 0000000..c7a449c
--- /dev/null
+++ b/media/mtp/MtpObjectInfo.h
@@ -0,0 +1,60 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _MTP_OBJECT_INFO_H
+#define _MTP_OBJECT_INFO_H
+
+#include "MtpTypes.h"
+
+namespace android {
+
+class MtpDataPacket;
+
+class MtpObjectInfo {
+public:
+    MtpObjectHandle     mHandle;
+    MtpStorageID        mStorageID;
+    MtpObjectFormat     mFormat;
+    uint16_t            mProtectionStatus;
+    uint32_t            mCompressedSize;
+    MtpObjectFormat     mThumbFormat;
+    uint32_t            mThumbCompressedSize;
+    uint32_t            mThumbPixWidth;
+    uint32_t            mThumbPixHeight;
+    uint32_t            mImagePixWidth;
+    uint32_t            mImagePixHeight;
+    uint32_t            mImagePixDepth;
+    MtpObjectHandle     mParent;
+    uint16_t            mAssociationType;
+    uint32_t            mAssociationDesc;
+    uint32_t            mSequenceNumber;
+    char*               mName;
+    time_t              mDateCreated;
+    time_t              mDateModified;
+    char*               mKeywords;
+
+public:
+                        MtpObjectInfo(MtpObjectHandle handle);
+    virtual             ~MtpObjectInfo();
+
+    void                read(MtpDataPacket& packet);
+
+    void                print();
+};
+
+}; // namespace android
+
+#endif // _MTP_OBJECT_INFO_H
diff --git a/media/mtp/MtpPacket.cpp b/media/mtp/MtpPacket.cpp
new file mode 100644
index 0000000..d3f2cb4
--- /dev/null
+++ b/media/mtp/MtpPacket.cpp
@@ -0,0 +1,165 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "MtpPacket"
+
+#include "MtpDebug.h"
+#include "MtpPacket.h"
+#include "mtp.h"
+
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>    // memset(), memcpy(), strlen()
+#include <errno.h>     // errno, reported in transfer()
+
+#include <usbhost/usbhost.h>
+
+namespace android {
+
+MtpPacket::MtpPacket(int bufferSize)
+    :   mBuffer(NULL),
+        mBufferSize(bufferSize),
+        mAllocationIncrement(bufferSize),
+        mPacketSize(0)
+{
+    mBuffer = (uint8_t *)malloc(bufferSize);
+    if (!mBuffer) {
+        LOGE("out of memory!");
+        abort();
+    }
+}
+
+MtpPacket::~MtpPacket() {
+    if (mBuffer)
+        free(mBuffer);
+}
+
+void MtpPacket::reset() {
+    allocate(MTP_CONTAINER_HEADER_SIZE);
+    mPacketSize = MTP_CONTAINER_HEADER_SIZE;
+    memset(mBuffer, 0, mBufferSize);
+}
+
+void MtpPacket::allocate(int length) {
+    if (length > mBufferSize) {
+        int newLength = length + mAllocationIncrement;
+        mBuffer = (uint8_t *)realloc(mBuffer, newLength);
+        if (!mBuffer) {
+            LOGE("out of memory!");
+            abort();
+        }
+        mBufferSize = newLength;
+    }
+}
+
+void MtpPacket::dump() {
+#define DUMP_BYTES_PER_ROW  16
+    char buffer[500];
+    char* bufptr = buffer;
+
+    for (int i = 0; i < mPacketSize; i++) {
+        sprintf(bufptr, "%02X ", mBuffer[i]);
+        bufptr += strlen(bufptr);
+        if (i % DUMP_BYTES_PER_ROW == (DUMP_BYTES_PER_ROW - 1)) {
+            LOGV("%s", buffer);
+            bufptr = buffer;
+        }
+    }
+    if (bufptr != buffer) {
+        // print last line
+        LOGV("%s", buffer);
+    }
+    LOGV("\n");
+}
+
+void MtpPacket::copyFrom(const MtpPacket& src) {
+    int length = src.mPacketSize;
+    allocate(length);
+    mPacketSize = length;
+    memcpy(mBuffer, src.mBuffer, length);
+}
+
+uint16_t MtpPacket::getUInt16(int offset) const {
+    return ((uint16_t)mBuffer[offset + 1] << 8) | (uint16_t)mBuffer[offset];
+}
+
+uint32_t MtpPacket::getUInt32(int offset) const {
+    return ((uint32_t)mBuffer[offset + 3] << 24) | ((uint32_t)mBuffer[offset + 2] << 16) |
+           ((uint32_t)mBuffer[offset + 1] << 8)  | (uint32_t)mBuffer[offset];
+}
+
+void MtpPacket::putUInt16(int offset, uint16_t value) {
+    mBuffer[offset++] = (uint8_t)(value & 0xFF);
+    mBuffer[offset++] = (uint8_t)((value >> 8) & 0xFF);
+}
+
+void MtpPacket::putUInt32(int offset, uint32_t value) {
+    mBuffer[offset++] = (uint8_t)(value & 0xFF);
+    mBuffer[offset++] = (uint8_t)((value >> 8) & 0xFF);
+    mBuffer[offset++] = (uint8_t)((value >> 16) & 0xFF);
+    mBuffer[offset++] = (uint8_t)((value >> 24) & 0xFF);
+}
+
+uint16_t MtpPacket::getContainerCode() const {
+    return getUInt16(MTP_CONTAINER_CODE_OFFSET);
+}
+
+void MtpPacket::setContainerCode(uint16_t code) {
+    putUInt16(MTP_CONTAINER_CODE_OFFSET, code);
+}
+
+uint16_t MtpPacket::getContainerType() const {
+    return getUInt16(MTP_CONTAINER_TYPE_OFFSET);
+}
+
+MtpTransactionID MtpPacket::getTransactionID() const {
+    return getUInt32(MTP_CONTAINER_TRANSACTION_ID_OFFSET);
+}
+
+void MtpPacket::setTransactionID(MtpTransactionID id) {
+    putUInt32(MTP_CONTAINER_TRANSACTION_ID_OFFSET, id);
+}
+
+uint32_t MtpPacket::getParameter(int index) const {
+    if (index < 1 || index > 5) {
+        LOGE("index %d out of range in MtpPacket::getParameter", index);
+        return 0;
+    }
+    return getUInt32(MTP_CONTAINER_PARAMETER_OFFSET + (index - 1) * sizeof(uint32_t));
+}
+
+void MtpPacket::setParameter(int index, uint32_t value) {
+    if (index < 1 || index > 5) {
+        LOGE("index %d out of range in MtpPacket::setParameter", index);
+        return;
+    }
+    int offset = MTP_CONTAINER_PARAMETER_OFFSET + (index - 1) * sizeof(uint32_t);
+    if (mPacketSize < offset + sizeof(uint32_t))
+        mPacketSize = offset + sizeof(uint32_t);
+    putUInt32(offset, value);
+}
+
+#ifdef MTP_HOST
+int MtpPacket::transfer(struct usb_request* request) {
+    if (usb_request_queue(request)) {
+        LOGE("usb_endpoint_queue failed, errno: %d", errno);
+        return -1;
+    }
+    request = usb_request_wait(request->dev);
+    return (request ? request->actual_length : -1);
+}
+#endif
+
+}  // namespace android
diff --git a/media/mtp/MtpPacket.h b/media/mtp/MtpPacket.h
new file mode 100644
index 0000000..0ffb1d3
--- /dev/null
+++ b/media/mtp/MtpPacket.h
@@ -0,0 +1,72 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _MTP_PACKET_H
+#define _MTP_PACKET_H
+
+#include "MtpTypes.h"
+
+struct usb_request;
+
+namespace android {
+
+class MtpPacket {
+
+protected:
+    uint8_t*            mBuffer;
+    // current size of the buffer
+    int                 mBufferSize;
+    // number of bytes to add when resizing the buffer
+    int                 mAllocationIncrement;
+    // size of the data in the packet
+    int                 mPacketSize;
+
+public:
+                        MtpPacket(int bufferSize);
+    virtual             ~MtpPacket();
+
+    // sets packet size to the default container size and sets buffer to zero
+    virtual void        reset();
+
+    void                allocate(int length);
+    void                dump();
+    void                copyFrom(const MtpPacket& src);
+
+    uint16_t            getContainerCode() const;
+    void                setContainerCode(uint16_t code);
+
+    uint16_t            getContainerType() const;
+
+    MtpTransactionID    getTransactionID() const;
+    void                setTransactionID(MtpTransactionID id);
+
+    uint32_t            getParameter(int index) const;
+    void                setParameter(int index, uint32_t value);
+
+#ifdef MTP_HOST
+    int                 transfer(struct usb_request* request);
+#endif
+
+protected:
+    uint16_t            getUInt16(int offset) const;
+    uint32_t            getUInt32(int offset) const;
+    void                putUInt16(int offset, uint16_t value);
+    void                putUInt32(int offset, uint32_t value);
+};
+
+}; // namespace android
+
+#endif // _MTP_PACKET_H
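
The accessors above commit MtpPacket to MTP's little-endian wire format regardless of host byte order. A worked example of the byte layout that putUInt32()/getUInt32() implement:

    #include <cstdint>
    #include <cassert>

    int main() {
        uint8_t buf[4];
        uint32_t value = 0x12345678;
        buf[0] = (uint8_t)(value & 0xFF);           // 0x78, least significant byte first
        buf[1] = (uint8_t)((value >> 8) & 0xFF);    // 0x56
        buf[2] = (uint8_t)((value >> 16) & 0xFF);   // 0x34
        buf[3] = (uint8_t)((value >> 24) & 0xFF);   // 0x12
        uint32_t back = ((uint32_t)buf[3] << 24) | ((uint32_t)buf[2] << 16) |
                        ((uint32_t)buf[1] << 8)  |  (uint32_t)buf[0];
        assert(back == 0x12345678);                 // round-trips on any host endianness
        return 0;
    }
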
diff --git a/media/mtp/MtpProperty.cpp b/media/mtp/MtpProperty.cpp
new file mode 100644
index 0000000..8016c35
--- /dev/null
+++ b/media/mtp/MtpProperty.cpp
@@ -0,0 +1,534 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "MtpProperty"
+
+#include "MtpDataPacket.h"
+#include "MtpDebug.h"
+#include "MtpProperty.h"
+#include "MtpStringBuffer.h"
+#include "MtpUtils.h"
+
+namespace android {
+
+MtpProperty::MtpProperty()
+    :   mCode(0),
+        mType(0),
+        mWriteable(false),
+        mDefaultArrayLength(0),
+        mDefaultArrayValues(NULL),
+        mCurrentArrayLength(0),
+        mCurrentArrayValues(NULL),
+        mGroupCode(0),
+        mFormFlag(kFormNone),
+        mEnumLength(0),
+        mEnumValues(NULL)
+{
+    memset(&mDefaultValue, 0, sizeof(mDefaultValue));
+    memset(&mCurrentValue, 0, sizeof(mCurrentValue));
+    memset(&mMinimumValue, 0, sizeof(mMinimumValue));
+    memset(&mMaximumValue, 0, sizeof(mMaximumValue));
+}
+
+MtpProperty::MtpProperty(MtpPropertyCode propCode,
+                         MtpDataType type,
+                         bool writeable,
+                         int defaultValue)
+    :   mCode(propCode),
+        mType(type),
+        mWriteable(writeable),
+        mDefaultArrayLength(0),
+        mDefaultArrayValues(NULL),
+        mCurrentArrayLength(0),
+        mCurrentArrayValues(NULL),
+        mGroupCode(0),
+        mFormFlag(kFormNone),
+        mEnumLength(0),
+        mEnumValues(NULL)
+{
+    memset(&mDefaultValue, 0, sizeof(mDefaultValue));
+    memset(&mCurrentValue, 0, sizeof(mCurrentValue));
+    memset(&mMinimumValue, 0, sizeof(mMinimumValue));
+    memset(&mMaximumValue, 0, sizeof(mMaximumValue));
+
+    if (defaultValue) {
+        switch (type) {
+            case MTP_TYPE_INT8:
+                mDefaultValue.u.i8 = defaultValue;
+                break;
+            case MTP_TYPE_UINT8:
+                mDefaultValue.u.u8 = defaultValue;
+                break;
+            case MTP_TYPE_INT16:
+                mDefaultValue.u.i16 = defaultValue;
+                break;
+            case MTP_TYPE_UINT16:
+                mDefaultValue.u.u16 = defaultValue;
+                break;
+            case MTP_TYPE_INT32:
+                mDefaultValue.u.i32 = defaultValue;
+                break;
+            case MTP_TYPE_UINT32:
+                mDefaultValue.u.u32 = defaultValue;
+                break;
+            case MTP_TYPE_INT64:
+                mDefaultValue.u.i64 = defaultValue;
+                break;
+            case MTP_TYPE_UINT64:
+                mDefaultValue.u.u64 = defaultValue;
+                break;
+            default:
+                LOGE("unknown type %04X in MtpProperty::MtpProperty", type);
+        }
+    }
+}
+
+MtpProperty::~MtpProperty() {
+    if (mType == MTP_TYPE_STR) {
+        // free all strings
+        free(mDefaultValue.str);
+        free(mCurrentValue.str);
+        free(mMinimumValue.str);
+        free(mMaximumValue.str);
+        if (mDefaultArrayValues) {
+            for (int i = 0; i < mDefaultArrayLength; i++)
+                free(mDefaultArrayValues[i].str);
+        }
+        if (mCurrentArrayValues) {
+            for (int i = 0; i < mCurrentArrayLength; i++)
+                free(mCurrentArrayValues[i].str);
+        }
+        if (mEnumValues) {
+            for (int i = 0; i < mEnumLength; i++)
+                free(mEnumValues[i].str);
+        }
+    }
+    delete[] mDefaultArrayValues;
+    delete[] mCurrentArrayValues;
+    delete[] mEnumValues;
+}
+
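+// Parses an ObjectPropDesc or DevicePropDesc dataset from the packet: property
+// code, data type, writable flag, default value (plus current value for device
+// properties), group code (object properties only), and finally the form flag
+// with any range or enumeration data.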
+void MtpProperty::read(MtpDataPacket& packet) {
+    mCode = packet.getUInt16();
+    bool deviceProp = isDeviceProperty();
+    mType = packet.getUInt16();
+    mWriteable = (packet.getUInt8() == 1);
+    switch (mType) {
+        case MTP_TYPE_AINT8:
+        case MTP_TYPE_AUINT8:
+        case MTP_TYPE_AINT16:
+        case MTP_TYPE_AUINT16:
+        case MTP_TYPE_AINT32:
+        case MTP_TYPE_AUINT32:
+        case MTP_TYPE_AINT64:
+        case MTP_TYPE_AUINT64:
+        case MTP_TYPE_AINT128:
+        case MTP_TYPE_AUINT128:
+            mDefaultArrayValues = readArrayValues(packet, mDefaultArrayLength);
+            if (deviceProp)
+                mCurrentArrayValues = readArrayValues(packet, mCurrentArrayLength);
+            break;
+        default:
+            readValue(packet, mDefaultValue);
+            if (deviceProp)
+                readValue(packet, mCurrentValue);
+    }
+    if (!deviceProp)
+        mGroupCode = packet.getUInt32();
+    mFormFlag = packet.getUInt8();
+
+    if (mFormFlag == kFormRange) {
+        readValue(packet, mMinimumValue);
+        readValue(packet, mMaximumValue);
+        readValue(packet, mStepSize);
+    } else if (mFormFlag == kFormEnum) {
+        mEnumLength = packet.getUInt16();
+        mEnumValues = new MtpPropertyValue[mEnumLength];
+        for (int i = 0; i < mEnumLength; i++)
+            readValue(packet, mEnumValues[i]);
+    }
+}
+
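+// Serializes the property in the same dataset layout that read() parses:
+// the group code is written only for object properties, the form flag for both.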
+void MtpProperty::write(MtpDataPacket& packet) {
+    bool deviceProp = isDeviceProperty();
+
+    packet.putUInt16(mCode);
+    packet.putUInt16(mType);
+    packet.putUInt8(mWriteable ? 1 : 0);
+
+    switch (mType) {
+        case MTP_TYPE_AINT8:
+        case MTP_TYPE_AUINT8:
+        case MTP_TYPE_AINT16:
+        case MTP_TYPE_AUINT16:
+        case MTP_TYPE_AINT32:
+        case MTP_TYPE_AUINT32:
+        case MTP_TYPE_AINT64:
+        case MTP_TYPE_AUINT64:
+        case MTP_TYPE_AINT128:
+        case MTP_TYPE_AUINT128:
+            writeArrayValues(packet, mDefaultArrayValues, mDefaultArrayLength);
+            if (deviceProp)
+                writeArrayValues(packet, mCurrentArrayValues, mCurrentArrayLength);
+            break;
+        default:
+            writeValue(packet, mDefaultValue);
+            if (deviceProp)
+                writeValue(packet, mCurrentValue);
+    }
+    if (!deviceProp)
+        packet.putUInt32(mGroupCode);
+    packet.putUInt8(mFormFlag);
+    if (mFormFlag == kFormRange) {
+        writeValue(packet, mMinimumValue);
+        writeValue(packet, mMaximumValue);
+        writeValue(packet, mStepSize);
+    } else if (mFormFlag == kFormEnum) {
+        packet.putUInt16(mEnumLength);
+        for (int i = 0; i < mEnumLength; i++)
+            writeValue(packet, mEnumValues[i]);
+    }
+}
+
+void MtpProperty::setDefaultValue(const uint16_t* string) {
+    free(mDefaultValue.str);
+    if (string) {
+        MtpStringBuffer buffer(string);
+        mDefaultValue.str = strdup(buffer);
+    }
+    else
+        mDefaultValue.str = NULL;
+}
+
+void MtpProperty::setCurrentValue(const uint16_t* string) {
+    free(mCurrentValue.str);
+    if (string) {
+        MtpStringBuffer buffer(string);
+        mCurrentValue.str = strdup(buffer);
+    }
+    else
+        mCurrentValue.str = NULL;
+}
+
+void MtpProperty::setFormRange(int min, int max, int step) {
+    mFormFlag = kFormRange;
+    switch (mType) {
+        case MTP_TYPE_INT8:
+            mMinimumValue.u.i8 = min;
+            mMaximumValue.u.i8 = max;
+            mStepSize.u.i8 = step;
+            break;
+        case MTP_TYPE_UINT8:
+            mMinimumValue.u.u8 = min;
+            mMaximumValue.u.u8 = max;
+            mStepSize.u.u8 = step;
+            break;
+        case MTP_TYPE_INT16:
+            mMinimumValue.u.i16 = min;
+            mMaximumValue.u.i16 = max;
+            mStepSize.u.i16 = step;
+            break;
+        case MTP_TYPE_UINT16:
+            mMinimumValue.u.u16 = min;
+            mMaximumValue.u.u16 = max;
+            mStepSize.u.u16 = step;
+            break;
+        case MTP_TYPE_INT32:
+            mMinimumValue.u.i32 = min;
+            mMaximumValue.u.i32 = max;
+            mStepSize.u.i32 = step;
+            break;
+        case MTP_TYPE_UINT32:
+            mMinimumValue.u.u32 = min;
+            mMaximumValue.u.u32 = max;
+            mStepSize.u.u32 = step;
+            break;
+        case MTP_TYPE_INT64:
+            mMinimumValue.u.i64 = min;
+            mMaximumValue.u.i64 = max;
+            mStepSize.u.i64 = step;
+            break;
+        case MTP_TYPE_UINT64:
+            mMinimumValue.u.u64 = min;
+            mMaximumValue.u.u64 = max;
+            mStepSize.u.u64 = step;
+            break;
+        default:
+            LOGE("unsupported type for MtpProperty::setRange");
+            break;
+    }
+}
+
+void MtpProperty::setFormEnum(const int* values, int count) {
+    mFormFlag = kFormEnum;
+    delete[] mEnumValues;
+    mEnumValues = new MtpPropertyValue[count];
+    mEnumLength = count;
+
+    for (int i = 0; i < count; i++) {
+        int value = *values++;
+        switch (mType) {
+            case MTP_TYPE_INT8:
+                mEnumValues[i].u.i8 = value;
+                break;
+            case MTP_TYPE_UINT8:
+                mEnumValues[i].u.u8 = value;
+                break;
+            case MTP_TYPE_INT16:
+                mEnumValues[i].u.i16 = value;
+                break;
+            case MTP_TYPE_UINT16:
+                mEnumValues[i].u.u16 = value;
+                break;
+            case MTP_TYPE_INT32:
+                mEnumValues[i].u.i32 = value;
+                break;
+            case MTP_TYPE_UINT32:
+                mEnumValues[i].u.u32 = value;
+                break;
+            case MTP_TYPE_INT64:
+                mEnumValues[i].u.i64 = value;
+                break;
+            case MTP_TYPE_UINT64:
+                mEnumValues[i].u.u64 = value;
+                break;
+            default:
+                LOGE("unsupported type for MtpProperty::setEnum");
+                break;
+        }
+    }
+}
+
+void MtpProperty::setFormDateTime() {
+     mFormFlag = kFormDateTime;
+}
+
+void MtpProperty::print() {
+    MtpString buffer;
+    bool deviceProp = isDeviceProperty();
+    if (deviceProp)
+        LOGI("    %s (%04X)", MtpDebug::getDevicePropCodeName(mCode), mCode);
+    else
+        LOGI("    %s (%04X)", MtpDebug::getObjectPropCodeName(mCode), mCode);
+    LOGI("    type %04X", mType);
+    LOGI("    writeable %s", (mWriteable ? "true" : "false"));
+    buffer = "    default value: ";
+    print(mDefaultValue, buffer);
+    LOGI("%s", (const char *)buffer);
+    if (deviceProp) {
+        buffer = "    current value: ";
+        print(mCurrentValue, buffer);
+        LOGI("%s", (const char *)buffer);
+    }
+    switch (mFormFlag) {
+        case kFormNone:
+            break;
+        case kFormRange:
+            buffer = "    Range (";
+            print(mMinimumValue, buffer);
+            buffer += ", ";
+            print(mMaximumValue, buffer);
+            buffer += ", ";
+            print(mStepSize, buffer);
+            buffer += ")";
+            LOGI("%s", (const char *)buffer);
+            break;
+        case kFormEnum:
+            buffer = "    Enum { ";
+            for (int i = 0; i < mEnumLength; i++) {
+                print(mEnumValues[i], buffer);
+                buffer += " ";
+            }
+            buffer += "}";
+            LOGI("%s", (const char *)buffer);
+            break;
+        case kFormDateTime:
+            LOGI("    DateTime\n");
+            break;
+        default:
+            LOGI("    form %d\n", mFormFlag);
+            break;
+    }
+}
+
+void MtpProperty::print(MtpPropertyValue& value, MtpString& buffer) {
+    switch (mType) {
+        case MTP_TYPE_INT8:
+            buffer.appendFormat("%d", value.u.i8);
+            break;
+        case MTP_TYPE_UINT8:
+            buffer.appendFormat("%d", value.u.u8);
+            break;
+        case MTP_TYPE_INT16:
+            buffer.appendFormat("%d", value.u.i16);
+            break;
+        case MTP_TYPE_UINT16:
+            buffer.appendFormat("%d", value.u.u16);
+            break;
+        case MTP_TYPE_INT32:
+            buffer.appendFormat("%d", value.u.i32);
+            break;
+        case MTP_TYPE_UINT32:
+            buffer.appendFormat("%d", value.u.u32);
+            break;
+        case MTP_TYPE_INT64:
+            buffer.appendFormat("%lld", value.u.i64);
+            break;
+        case MTP_TYPE_UINT64:
+            buffer.appendFormat("%lld", value.u.u64);
+            break;
+        case MTP_TYPE_INT128:
+            buffer.appendFormat("%08X%08X%08X%08X", value.u.i128[0], value.u.i128[1],
+                    value.u.i128[2], value.u.i128[3]);
+            break;
+        case MTP_TYPE_UINT128:
+            buffer.appendFormat("%08X%08X%08X%08X", value.u.u128[0], value.u.u128[1],
+                    value.u.u128[2], value.u.u128[3]);
+            break;
+        case MTP_TYPE_STR:
+            buffer.appendFormat("%s", value.str);
+            break;
+        default:
+            LOGE("unsupported type for MtpProperty::print\n");
+            break;
+    }
+}
+
+void MtpProperty::readValue(MtpDataPacket& packet, MtpPropertyValue& value) {
+    MtpStringBuffer stringBuffer;
+
+    switch (mType) {
+        case MTP_TYPE_INT8:
+        case MTP_TYPE_AINT8:
+            value.u.i8 = packet.getInt8();
+            break;
+        case MTP_TYPE_UINT8:
+        case MTP_TYPE_AUINT8:
+            value.u.u8 = packet.getUInt8();
+            break;
+        case MTP_TYPE_INT16:
+        case MTP_TYPE_AINT16:
+            value.u.i16 = packet.getInt16();
+            break;
+        case MTP_TYPE_UINT16:
+        case MTP_TYPE_AUINT16:
+            value.u.u16 = packet.getUInt16();
+            break;
+        case MTP_TYPE_INT32:
+        case MTP_TYPE_AINT32:
+            value.u.i32 = packet.getInt32();
+            break;
+        case MTP_TYPE_UINT32:
+        case MTP_TYPE_AUINT32:
+            value.u.u32 = packet.getUInt32();
+            break;
+        case MTP_TYPE_INT64:
+        case MTP_TYPE_AINT64:
+            value.u.i64 = packet.getInt64();
+            break;
+        case MTP_TYPE_UINT64:
+        case MTP_TYPE_AUINT64:
+            value.u.u64 = packet.getUInt64();
+            break;
+        case MTP_TYPE_INT128:
+        case MTP_TYPE_AINT128:
+            packet.getInt128(value.u.i128);
+            break;
+        case MTP_TYPE_UINT128:
+        case MTP_TYPE_AUINT128:
+            packet.getUInt128(value.u.u128);
+            break;
+        case MTP_TYPE_STR:
+            packet.getString(stringBuffer);
+            value.str = strdup(stringBuffer);
+            break;
+        default:
+            LOGE("unknown type %04X in MtpProperty::readValue", mType);
+    }
+}
+
+void MtpProperty::writeValue(MtpDataPacket& packet, MtpPropertyValue& value) {
+    MtpStringBuffer stringBuffer;
+
+    switch (mType) {
+        case MTP_TYPE_INT8:
+        case MTP_TYPE_AINT8:
+            packet.putInt8(value.u.i8);
+            break;
+        case MTP_TYPE_UINT8:
+        case MTP_TYPE_AUINT8:
+            packet.putUInt8(value.u.u8);
+            break;
+        case MTP_TYPE_INT16:
+        case MTP_TYPE_AINT16:
+            packet.putInt16(value.u.i16);
+            break;
+        case MTP_TYPE_UINT16:
+        case MTP_TYPE_AUINT16:
+            packet.putUInt16(value.u.u16);
+            break;
+        case MTP_TYPE_INT32:
+        case MTP_TYPE_AINT32:
+            packet.putInt32(value.u.i32);
+            break;
+        case MTP_TYPE_UINT32:
+        case MTP_TYPE_AUINT32:
+            packet.putUInt32(value.u.u32);
+            break;
+        case MTP_TYPE_INT64:
+        case MTP_TYPE_AINT64:
+            packet.putInt64(value.u.i64);
+            break;
+        case MTP_TYPE_UINT64:
+        case MTP_TYPE_AUINT64:
+            packet.putUInt64(value.u.u64);
+            break;
+        case MTP_TYPE_INT128:
+        case MTP_TYPE_AINT128:
+            packet.putInt128(value.u.i128);
+            break;
+        case MTP_TYPE_UINT128:
+        case MTP_TYPE_AUINT128:
+            packet.putUInt128(value.u.u128);
+            break;
+        case MTP_TYPE_STR:
+            if (value.str)
+                packet.putString(value.str);
+            else
+                packet.putEmptyString();
+            break;
+        default:
+            LOGE("unknown type %04X in MtpProperty::writeValue", mType);
+    }
+}
+
+MtpPropertyValue* MtpProperty::readArrayValues(MtpDataPacket& packet, int& length) {
+    length = packet.getUInt32();
+    if (length == 0)
+        return NULL;
+    MtpPropertyValue* result = new MtpPropertyValue[length];
+    for (int i = 0; i < length; i++)
+        readValue(packet, result[i]);
+    return result;
+}
+
+void MtpProperty::writeArrayValues(MtpDataPacket& packet, MtpPropertyValue* values, int length) {
+    packet.putUInt32(length);
+    for (int i = 0; i < length; i++)
+        writeValue(packet, values[i]);
+}
+
+}  // namespace android
diff --git a/media/mtp/MtpProperty.h b/media/mtp/MtpProperty.h
new file mode 100644
index 0000000..06ca56e
--- /dev/null
+++ b/media/mtp/MtpProperty.h
@@ -0,0 +1,114 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _MTP_PROPERTY_H
+#define _MTP_PROPERTY_H
+
+#include "MtpTypes.h"
+
+namespace android {
+
+class MtpDataPacket;
+
+struct MtpPropertyValue {
+    union {
+        int8_t          i8;
+        uint8_t         u8;
+        int16_t         i16;
+        uint16_t        u16;
+        int32_t         i32;
+        uint32_t        u32;
+        int64_t         i64;
+        uint64_t        u64;
+        int128_t        i128;
+        uint128_t       u128;
+    } u;
+    // string in UTF8 format
+    char*               str;
+};
+
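+// Describes a single MTP object or device property: its code, data type,
+// default/current values and optional range or enumeration form.  A sketch of
+// typical construction by a database implementation (assuming the standard
+// property codes from mtp.h):
+//
+//     MtpProperty* prop = new MtpProperty(MTP_DEVICE_PROPERTY_BATTERY_LEVEL,
+//                                         MTP_TYPE_UINT8);
+//     prop->setFormRange(0, 100, 1);
+//     prop->write(packet);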
+class MtpProperty {
+public:
+    MtpPropertyCode     mCode;
+    MtpDataType         mType;
+    bool                mWriteable;
+    MtpPropertyValue    mDefaultValue;
+    MtpPropertyValue    mCurrentValue;
+
+    // for array types
+    int                 mDefaultArrayLength;
+    MtpPropertyValue*   mDefaultArrayValues;
+    int                 mCurrentArrayLength;
+    MtpPropertyValue*   mCurrentArrayValues;
+
+    enum {
+        kFormNone = 0,
+        kFormRange = 1,
+        kFormEnum = 2,
+        kFormDateTime = 3,
+    };
+
+    uint32_t            mGroupCode;
+    uint8_t             mFormFlag;
+
+    // for range form
+    MtpPropertyValue    mMinimumValue;
+    MtpPropertyValue    mMaximumValue;
+    MtpPropertyValue    mStepSize;
+
+    // for enum form
+    int                 mEnumLength;
+    MtpPropertyValue*   mEnumValues;
+
+public:
+                        MtpProperty();
+                        MtpProperty(MtpPropertyCode propCode,
+                                     MtpDataType type,
+                                     bool writeable = false,
+                                     int defaultValue = 0);
+    virtual             ~MtpProperty();
+
+    inline MtpPropertyCode getPropertyCode() const { return mCode; }
+
+    void                read(MtpDataPacket& packet);
+    void                write(MtpDataPacket& packet);
+
+    void                setDefaultValue(const uint16_t* string);
+    void                setCurrentValue(const uint16_t* string);
+
+    void                setFormRange(int min, int max, int step);
+    void                setFormEnum(const int* values, int count);
+    void                setFormDateTime();
+
+    void                print();
+    void                print(MtpPropertyValue& value, MtpString& buffer);
+
+    inline bool         isDeviceProperty() const {
+                            return (   ((mCode & 0xF000) == 0x5000)
+                                    || ((mCode & 0xF800) == 0xD000));
+                        }
+
+private:
+    void                readValue(MtpDataPacket& packet, MtpPropertyValue& value);
+    void                writeValue(MtpDataPacket& packet, MtpPropertyValue& value);
+    MtpPropertyValue*   readArrayValues(MtpDataPacket& packet, int& length);
+    void                writeArrayValues(MtpDataPacket& packet,
+                                            MtpPropertyValue* values, int length);
+};
+
+}; // namespace android
+
+#endif // _MTP_PROPERTY_H
diff --git a/media/mtp/MtpRequestPacket.cpp b/media/mtp/MtpRequestPacket.cpp
new file mode 100644
index 0000000..0e58e01
--- /dev/null
+++ b/media/mtp/MtpRequestPacket.cpp
@@ -0,0 +1,60 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "MtpRequestPacket"
+
+#include <stdio.h>
+#include <sys/types.h>
+#include <fcntl.h>
+
+#include "MtpRequestPacket.h"
+
+#include <usbhost/usbhost.h>
+
+namespace android {
+
+MtpRequestPacket::MtpRequestPacket()
+    :   MtpPacket(512)
+{
+}
+
+MtpRequestPacket::~MtpRequestPacket() {
+}
+
+#ifdef MTP_DEVICE
+int MtpRequestPacket::read(int fd) {
+    int ret = ::read(fd, mBuffer, mBufferSize);
+    if (ret >= 0)
+        mPacketSize = ret;
+    else
+        mPacketSize = 0;
+    return ret;
+}
+#endif
+
+#ifdef MTP_HOST
+// write our buffer to the given endpoint (host mode)
+int MtpRequestPacket::write(struct usb_request *request)
+{
+    putUInt32(MTP_CONTAINER_LENGTH_OFFSET, mPacketSize);
+    putUInt16(MTP_CONTAINER_TYPE_OFFSET, MTP_CONTAINER_TYPE_COMMAND);
+    request->buffer = mBuffer;
+    request->buffer_length = mPacketSize;
+    return transfer(request);
+}
+#endif
+
+}  // namespace android
diff --git a/media/mtp/MtpRequestPacket.h b/media/mtp/MtpRequestPacket.h
new file mode 100644
index 0000000..1201f11
--- /dev/null
+++ b/media/mtp/MtpRequestPacket.h
@@ -0,0 +1,50 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _MTP_REQUEST_PACKET_H
+#define _MTP_REQUEST_PACKET_H
+
+#include "MtpPacket.h"
+#include "mtp.h"
+
+struct usb_request;
+
+namespace android {
+
+class MtpRequestPacket : public MtpPacket {
+
+public:
+                        MtpRequestPacket();
+    virtual             ~MtpRequestPacket();
+
+#ifdef MTP_DEVICE
+    // fill our buffer with data from the given file descriptor
+    int                 read(int fd);
+#endif
+
+#ifdef MTP_HOST
+    // write our buffer to the given endpoint
+    int                 write(struct usb_request *request);
+#endif
+
+    inline MtpOperationCode    getOperationCode() const { return getContainerCode(); }
+    inline void                setOperationCode(MtpOperationCode code)
+                                                    { return setContainerCode(code); }
+};
+
+}; // namespace android
+
+#endif // _MTP_REQUEST_PACKET_H
diff --git a/media/mtp/MtpResponsePacket.cpp b/media/mtp/MtpResponsePacket.cpp
new file mode 100644
index 0000000..c2b41e4
--- /dev/null
+++ b/media/mtp/MtpResponsePacket.cpp
@@ -0,0 +1,60 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "MtpResponsePacket"
+
+#include <stdio.h>
+#include <sys/types.h>
+#include <fcntl.h>
+
+#include "MtpResponsePacket.h"
+
+#include <usbhost/usbhost.h>
+
+namespace android {
+
+MtpResponsePacket::MtpResponsePacket()
+    :   MtpPacket(512)
+{
+}
+
+MtpResponsePacket::~MtpResponsePacket() {
+}
+
+#ifdef MTP_DEVICE
+int MtpResponsePacket::write(int fd) {
+    putUInt32(MTP_CONTAINER_LENGTH_OFFSET, mPacketSize);
+    putUInt16(MTP_CONTAINER_TYPE_OFFSET, MTP_CONTAINER_TYPE_RESPONSE);
+    int ret = ::write(fd, mBuffer, mPacketSize);
+    return (ret < 0 ? ret : 0);
+}
+#endif
+
+#ifdef MTP_HOST
+int MtpResponsePacket::read(struct usb_request *request) {
+    request->buffer = mBuffer;
+    request->buffer_length = mBufferSize;
+    int ret = transfer(request);
+    if (ret >= 0)
+        mPacketSize = ret;
+    else
+        mPacketSize = 0;
+    return ret;
+}
+#endif
+
+}  // namespace android
+
diff --git a/media/mtp/MtpResponsePacket.h b/media/mtp/MtpResponsePacket.h
new file mode 100644
index 0000000..592ad4a
--- /dev/null
+++ b/media/mtp/MtpResponsePacket.h
@@ -0,0 +1,48 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _MTP_RESPONSE_PACKET_H
+#define _MTP_RESPONSE_PACKET_H
+
+#include "MtpPacket.h"
+#include "mtp.h"
+
+namespace android {
+
+class MtpResponsePacket : public MtpPacket {
+
+public:
+                        MtpResponsePacket();
+    virtual             ~MtpResponsePacket();
+
+#ifdef MTP_DEVICE
+    // write our data to the given file descriptor
+    int                 write(int fd);
+#endif
+
+#ifdef MTP_HOST
+    // read into our buffer using the given request
+    int                 read(struct usb_request *request);
+#endif
+
+    inline MtpResponseCode      getResponseCode() const { return getContainerCode(); }
+    inline void                 setResponseCode(MtpResponseCode code)
+                                                     { return setContainerCode(code); }
+};
+
+}; // namespace android
+
+#endif // _MTP_RESPONSE_PACKET_H
diff --git a/media/mtp/MtpServer.cpp b/media/mtp/MtpServer.cpp
new file mode 100644
index 0000000..236cd0a
--- /dev/null
+++ b/media/mtp/MtpServer.cpp
@@ -0,0 +1,883 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <stdio.h>
+#include <stdlib.h>
+#include <sys/types.h>
+#include <sys/ioctl.h>
+#include <sys/stat.h>
+#include <fcntl.h>
+#include <errno.h>
+#include <dirent.h>
+
+#include <cutils/properties.h>
+
+#define LOG_TAG "MtpServer"
+
+#include "MtpDebug.h"
+#include "MtpDatabase.h"
+#include "MtpProperty.h"
+#include "MtpServer.h"
+#include "MtpStorage.h"
+#include "MtpStringBuffer.h"
+
+#include <linux/usb/f_mtp.h>
+
+namespace android {
+
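+// Operations we advertise in GetDeviceInfo; the commented-out entries below are
+// defined by MTP but not implemented by this server.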
+static const MtpOperationCode kSupportedOperationCodes[] = {
+    MTP_OPERATION_GET_DEVICE_INFO,
+    MTP_OPERATION_OPEN_SESSION,
+    MTP_OPERATION_CLOSE_SESSION,
+    MTP_OPERATION_GET_STORAGE_IDS,
+    MTP_OPERATION_GET_STORAGE_INFO,
+    MTP_OPERATION_GET_NUM_OBJECTS,
+    MTP_OPERATION_GET_OBJECT_HANDLES,
+    MTP_OPERATION_GET_OBJECT_INFO,
+    MTP_OPERATION_GET_OBJECT,
+//    MTP_OPERATION_GET_THUMB,
+    MTP_OPERATION_DELETE_OBJECT,
+    MTP_OPERATION_SEND_OBJECT_INFO,
+    MTP_OPERATION_SEND_OBJECT,
+//    MTP_OPERATION_INITIATE_CAPTURE,
+//    MTP_OPERATION_FORMAT_STORE,
+//    MTP_OPERATION_RESET_DEVICE,
+//    MTP_OPERATION_SELF_TEST,
+//    MTP_OPERATION_SET_OBJECT_PROTECTION,
+//    MTP_OPERATION_POWER_DOWN,
+    MTP_OPERATION_GET_DEVICE_PROP_DESC,
+    MTP_OPERATION_GET_DEVICE_PROP_VALUE,
+    MTP_OPERATION_SET_DEVICE_PROP_VALUE,
+    MTP_OPERATION_RESET_DEVICE_PROP_VALUE,
+//    MTP_OPERATION_TERMINATE_OPEN_CAPTURE,
+//    MTP_OPERATION_MOVE_OBJECT,
+//    MTP_OPERATION_COPY_OBJECT,
+    MTP_OPERATION_GET_PARTIAL_OBJECT,
+//    MTP_OPERATION_INITIATE_OPEN_CAPTURE,
+    MTP_OPERATION_GET_OBJECT_PROPS_SUPPORTED,
+    MTP_OPERATION_GET_OBJECT_PROP_DESC,
+    MTP_OPERATION_GET_OBJECT_PROP_VALUE,
+    MTP_OPERATION_SET_OBJECT_PROP_VALUE,
+    MTP_OPERATION_GET_OBJECT_PROP_LIST,
+//    MTP_OPERATION_SET_OBJECT_PROP_LIST,
+//    MTP_OPERATION_GET_INTERDEPENDENT_PROP_DESC,
+//    MTP_OPERATION_SEND_OBJECT_PROP_LIST,
+    MTP_OPERATION_GET_OBJECT_REFERENCES,
+    MTP_OPERATION_SET_OBJECT_REFERENCES,
+//    MTP_OPERATION_SKIP,
+};
+
+static const MtpEventCode kSupportedEventCodes[] = {
+    MTP_EVENT_OBJECT_ADDED,
+    MTP_EVENT_OBJECT_REMOVED,
+};
+
+MtpServer::MtpServer(int fd, MtpDatabase* database,
+                    int fileGroup, int filePerm, int directoryPerm)
+    :   mFD(fd),
+        mDatabase(database),
+        mFileGroup(fileGroup),
+        mFilePermission(filePerm),
+        mDirectoryPermission(directoryPerm),
+        mSessionID(0),
+        mSessionOpen(false),
+        mSendObjectHandle(kInvalidObjectHandle),
+        mSendObjectFormat(0),
+        mSendObjectFileSize(0)
+{
+}
+
+MtpServer::~MtpServer() {
+}
+
+void MtpServer::addStorage(const char* filePath, uint64_t reserveSpace) {
+    int index = mStorages.size() + 1;
+    index |= index << 16;   // set high and low part to our index
+    MtpStorage* storage = new MtpStorage(index, filePath, reserveSpace);
+    addStorage(storage);
+}
+
+MtpStorage* MtpServer::getStorage(MtpStorageID id) {
+    if (id == 0)
+        return mStorages[0];
+    for (int i = 0; i < mStorages.size(); i++) {
+        MtpStorage* storage = mStorages[i];
+        if (storage->getStorageID() == id)
+            return storage;
+    }
+    return NULL;
+}
+
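+// Main server loop: read a request from the driver, read the optional data
+// phase for operations that carry host-to-device data, dispatch it through
+// handleRequest(), then write back any data phase we produced followed by the
+// response.  ECANCELED from the driver means the host cancelled the current
+// transaction, so we just go back and wait for the next command.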
+void MtpServer::run() {
+    int fd = mFD;
+
+    LOGV("MtpServer::run fd: %d\n", fd);
+
+    while (1) {
+        int ret = mRequest.read(fd);
+        if (ret < 0) {
+            LOGE("request read returned %d, errno: %d", ret, errno);
+            if (errno == ECANCELED) {
+                // return to top of loop and wait for next command
+                continue;
+            }
+            break;
+        }
+        MtpOperationCode operation = mRequest.getOperationCode();
+        MtpTransactionID transaction = mRequest.getTransactionID();
+
+        LOGV("operation: %s", MtpDebug::getOperationCodeName(operation));
+        mRequest.dump();
+
+        // FIXME need to generalize this
+        bool dataIn = (operation == MTP_OPERATION_SEND_OBJECT_INFO
+                    || operation == MTP_OPERATION_SET_OBJECT_REFERENCES
+                    || operation == MTP_OPERATION_SET_OBJECT_PROP_VALUE
+                    || operation == MTP_OPERATION_SET_DEVICE_PROP_VALUE);
+        if (dataIn) {
+            int ret = mData.read(fd);
+            if (ret < 0) {
+                LOGE("data read returned %d, errno: %d", ret, errno);
+                if (errno == ECANCELED) {
+                    // return to top of loop and wait for next command
+                    continue;
+                }
+                break;
+            }
+            LOGV("received data:");
+            mData.dump();
+        } else {
+            mData.reset();
+        }
+
+        if (handleRequest()) {
+            if (!dataIn && mData.hasData()) {
+                mData.setOperationCode(operation);
+                mData.setTransactionID(transaction);
+                LOGV("sending data:");
+                mData.dump();
+                ret = mData.write(fd);
+                if (ret < 0) {
+                    LOGE("request write returned %d, errno: %d", ret, errno);
+                    if (errno == ECANCELED) {
+                        // return to top of loop and wait for next command
+                        continue;
+                    }
+                    break;
+                }
+            }
+
+            mResponse.setTransactionID(transaction);
+            LOGV("sending response %04X", mResponse.getResponseCode());
+            ret = mResponse.write(fd);
+            mResponse.dump();
+            if (ret < 0) {
+                LOGE("request write returned %d, errno: %d", ret, errno);
+                if (errno == ECANCELED) {
+                    // return to top of loop and wait for next command
+                    continue;
+                }
+                break;
+            }
+        } else {
+            LOGV("skipping response\n");
+        }
+    }
+
+    if (mSessionOpen)
+        mDatabase->sessionEnded();
+}
+
+void MtpServer::sendObjectAdded(MtpObjectHandle handle) {
+    if (mSessionOpen) {
+        LOGD("sendObjectAdded %d\n", handle);
+        mEvent.setEventCode(MTP_EVENT_OBJECT_ADDED);
+        mEvent.setTransactionID(mRequest.getTransactionID());
+        mEvent.setParameter(1, handle);
+        int ret = mEvent.write(mFD);
+        LOGD("mEvent.write returned %d\n", ret);
+    }
+}
+
+void MtpServer::sendObjectRemoved(MtpObjectHandle handle) {
+    if (mSessionOpen) {
+        LOGD("sendObjectRemoved %d\n", handle);
+        mEvent.setEventCode(MTP_EVENT_OBJECT_REMOVED);
+        mEvent.setTransactionID(mRequest.getTransactionID());
+        mEvent.setParameter(1, handle);
+        int ret = mEvent.write(mFD);
+        LOGD("mEvent.write returned %d\n", ret);
+    }
+}
+
+bool MtpServer::handleRequest() {
+    MtpOperationCode operation = mRequest.getOperationCode();
+    MtpResponseCode response;
+
+    mResponse.reset();
+
+    if (mSendObjectHandle != kInvalidObjectHandle && operation != MTP_OPERATION_SEND_OBJECT) {
+        // FIXME - need to delete mSendObjectHandle from the database
+        LOGE("expected SendObject after SendObjectInfo");
+        mSendObjectHandle = kInvalidObjectHandle;
+    }
+
+    switch (operation) {
+        case MTP_OPERATION_GET_DEVICE_INFO:
+            response = doGetDeviceInfo();
+            break;
+        case MTP_OPERATION_OPEN_SESSION:
+            response = doOpenSession();
+            break;
+        case MTP_OPERATION_CLOSE_SESSION:
+            response = doCloseSession();
+            break;
+        case MTP_OPERATION_GET_STORAGE_IDS:
+            response = doGetStorageIDs();
+            break;
+         case MTP_OPERATION_GET_STORAGE_INFO:
+            response = doGetStorageInfo();
+            break;
+        case MTP_OPERATION_GET_OBJECT_PROPS_SUPPORTED:
+            response = doGetObjectPropsSupported();
+            break;
+        case MTP_OPERATION_GET_OBJECT_HANDLES:
+            response = doGetObjectHandles();
+            break;
+        case MTP_OPERATION_GET_NUM_OBJECTS:
+            response = doGetNumObjects();
+            break;
+        case MTP_OPERATION_GET_OBJECT_REFERENCES:
+            response = doGetObjectReferences();
+            break;
+        case MTP_OPERATION_SET_OBJECT_REFERENCES:
+            response = doSetObjectReferences();
+            break;
+        case MTP_OPERATION_GET_OBJECT_PROP_VALUE:
+            response = doGetObjectPropValue();
+            break;
+        case MTP_OPERATION_SET_OBJECT_PROP_VALUE:
+            response = doSetObjectPropValue();
+            break;
+        case MTP_OPERATION_GET_DEVICE_PROP_VALUE:
+            response = doGetDevicePropValue();
+            break;
+        case MTP_OPERATION_SET_DEVICE_PROP_VALUE:
+            response = doSetDevicePropValue();
+            break;
+        case MTP_OPERATION_RESET_DEVICE_PROP_VALUE:
+            response = doResetDevicePropValue();
+            break;
+        case MTP_OPERATION_GET_OBJECT_PROP_LIST:
+            response = doGetObjectPropList();
+            break;
+        case MTP_OPERATION_GET_OBJECT_INFO:
+            response = doGetObjectInfo();
+            break;
+        case MTP_OPERATION_GET_OBJECT:
+            response = doGetObject();
+            break;
+        case MTP_OPERATION_GET_PARTIAL_OBJECT:
+            response = doGetPartialObject();
+            break;
+        case MTP_OPERATION_SEND_OBJECT_INFO:
+            response = doSendObjectInfo();
+            break;
+        case MTP_OPERATION_SEND_OBJECT:
+            response = doSendObject();
+            break;
+        case MTP_OPERATION_DELETE_OBJECT:
+            response = doDeleteObject();
+            break;
+        case MTP_OPERATION_GET_OBJECT_PROP_DESC:
+            response = doGetObjectPropDesc();
+            break;
+        case MTP_OPERATION_GET_DEVICE_PROP_DESC:
+            response = doGetDevicePropDesc();
+            break;
+        default:
+            LOGE("got unsupported command %s", MtpDebug::getOperationCodeName(operation));
+            response = MTP_RESPONSE_OPERATION_NOT_SUPPORTED;
+            break;
+    }
+
+    if (response == MTP_RESPONSE_TRANSACTION_CANCELLED)
+        return false;
+    mResponse.setResponseCode(response);
+    return true;
+}
+
+MtpResponseCode MtpServer::doGetDeviceInfo() {
+    MtpStringBuffer   string;
+    char prop_value[PROPERTY_VALUE_MAX];
+
+    MtpObjectFormatList* playbackFormats = mDatabase->getSupportedPlaybackFormats();
+    MtpObjectFormatList* captureFormats = mDatabase->getSupportedCaptureFormats();
+    MtpDevicePropertyList* deviceProperties = mDatabase->getSupportedDeviceProperties();
+
+    // fill in device info
+    mData.putUInt16(MTP_STANDARD_VERSION);
+    mData.putUInt32(6); // MTP Vendor Extension ID
+    mData.putUInt16(MTP_STANDARD_VERSION);
+    string.set("microsoft.com: 1.0;");
+    mData.putString(string); // MTP Extensions
+    mData.putUInt16(0); //Functional Mode
+    mData.putAUInt16(kSupportedOperationCodes,
+            sizeof(kSupportedOperationCodes) / sizeof(uint16_t)); // Operations Supported
+    mData.putAUInt16(kSupportedEventCodes,
+            sizeof(kSupportedEventCodes) / sizeof(uint16_t)); // Events Supported
+    mData.putAUInt16(deviceProperties); // Device Properties Supported
+    mData.putAUInt16(captureFormats); // Capture Formats
+    mData.putAUInt16(playbackFormats);  // Playback Formats
+    // FIXME
+    string.set("Google, Inc.");
+    mData.putString(string);   // Manufacturer
+
+    property_get("ro.product.model", prop_value, "MTP Device");
+    string.set(prop_value);
+    mData.putString(string);   // Model
+    string.set("1.0");
+    mData.putString(string);   // Device Version
+
+    property_get("ro.serialno", prop_value, "????????");
+    string.set(prop_value);
+    mData.putString(string);   // Serial Number
+
+    delete playbackFormats;
+    delete captureFormats;
+    delete deviceProperties;
+
+    return MTP_RESPONSE_OK;
+}
+
+MtpResponseCode MtpServer::doOpenSession() {
+    if (mSessionOpen) {
+        mResponse.setParameter(1, mSessionID);
+        return MTP_RESPONSE_SESSION_ALREADY_OPEN;
+    }
+    mSessionID = mRequest.getParameter(1);
+    mSessionOpen = true;
+
+    mDatabase->sessionStarted();
+
+    return MTP_RESPONSE_OK;
+}
+
+MtpResponseCode MtpServer::doCloseSession() {
+    if (!mSessionOpen)
+        return MTP_RESPONSE_SESSION_NOT_OPEN;
+    mSessionID = 0;
+    mSessionOpen = false;
+    mDatabase->sessionEnded();
+    return MTP_RESPONSE_OK;
+}
+
+MtpResponseCode MtpServer::doGetStorageIDs() {
+    if (!mSessionOpen)
+        return MTP_RESPONSE_SESSION_NOT_OPEN;
+
+    int count = mStorages.size();
+    mData.putUInt32(count);
+    for (int i = 0; i < count; i++)
+        mData.putUInt32(mStorages[i]->getStorageID());
+
+    return MTP_RESPONSE_OK;
+}
+
+MtpResponseCode MtpServer::doGetStorageInfo() {
+    MtpStringBuffer   string;
+
+    if (!mSessionOpen)
+        return MTP_RESPONSE_SESSION_NOT_OPEN;
+    MtpStorageID id = mRequest.getParameter(1);
+    MtpStorage* storage = getStorage(id);
+    if (!storage)
+        return MTP_RESPONSE_INVALID_STORAGE_ID;
+
+    mData.putUInt16(storage->getType());
+    mData.putUInt16(storage->getFileSystemType());
+    mData.putUInt16(storage->getAccessCapability());
+    mData.putUInt64(storage->getMaxCapacity());
+    mData.putUInt64(storage->getFreeSpace());
+    mData.putUInt32(1024*1024*1024); // Free Space in Objects
+    string.set(storage->getDescription());
+    mData.putString(string);
+    mData.putEmptyString();   // Volume Identifier
+
+    return MTP_RESPONSE_OK;
+}
+
+MtpResponseCode MtpServer::doGetObjectPropsSupported() {
+    if (!mSessionOpen)
+        return MTP_RESPONSE_SESSION_NOT_OPEN;
+    MtpObjectFormat format = mRequest.getParameter(1);
+    MtpObjectPropertyList* properties = mDatabase->getSupportedObjectProperties(format);
+    mData.putAUInt16(properties);
+    delete properties;
+    return MTP_RESPONSE_OK;
+}
+
+MtpResponseCode MtpServer::doGetObjectHandles() {
+    if (!mSessionOpen)
+        return MTP_RESPONSE_SESSION_NOT_OPEN;
+    MtpStorageID storageID = mRequest.getParameter(1);      // 0xFFFFFFFF for all storage
+    MtpObjectFormat format = mRequest.getParameter(2);      // 0 for all formats
+    MtpObjectHandle parent = mRequest.getParameter(3);      // 0xFFFFFFFF for objects with no parent
+                                                            // 0x00000000 for all objects?
+    if (parent == 0xFFFFFFFF)
+        parent = 0;
+
+    MtpObjectHandleList* handles = mDatabase->getObjectList(storageID, format, parent);
+    mData.putAUInt32(handles);
+    delete handles;
+    return MTP_RESPONSE_OK;
+}
+
+MtpResponseCode MtpServer::doGetNumObjects() {
+    if (!mSessionOpen)
+        return MTP_RESPONSE_SESSION_NOT_OPEN;
+    MtpStorageID storageID = mRequest.getParameter(1);      // 0xFFFFFFFF for all storage
+    MtpObjectFormat format = mRequest.getParameter(2);      // 0 for all formats
+    MtpObjectHandle parent = mRequest.getParameter(3);      // 0xFFFFFFFF for objects with no parent
+                                                            // 0x00000000 for all objects?
+    if (parent == 0xFFFFFFFF)
+        parent = 0;
+
+    int count = mDatabase->getNumObjects(storageID, format, parent);
+    if (count >= 0) {
+        mResponse.setParameter(1, count);
+        return MTP_RESPONSE_OK;
+    } else {
+        mResponse.setParameter(1, 0);
+        return MTP_RESPONSE_INVALID_OBJECT_HANDLE;
+    }
+}
+
+MtpResponseCode MtpServer::doGetObjectReferences() {
+    if (!mSessionOpen)
+        return MTP_RESPONSE_SESSION_NOT_OPEN;
+    MtpObjectHandle handle = mRequest.getParameter(1);
+
+    // FIXME - check for invalid object handle
+    MtpObjectHandleList* handles = mDatabase->getObjectReferences(handle);
+    if (handles) {
+        mData.putAUInt32(handles);
+        delete handles;
+    } else {
+        mData.putEmptyArray();
+    }
+    return MTP_RESPONSE_OK;
+}
+
+MtpResponseCode MtpServer::doSetObjectReferences() {
+    if (!mSessionOpen)
+        return MTP_RESPONSE_SESSION_NOT_OPEN;
+    MtpObjectHandle handle = mRequest.getParameter(1);
+    MtpObjectHandleList* references = mData.getAUInt32();
+    MtpResponseCode result = mDatabase->setObjectReferences(handle, references);
+    delete references;
+    return result;
+}
+
+MtpResponseCode MtpServer::doGetObjectPropValue() {
+    MtpObjectHandle handle = mRequest.getParameter(1);
+    MtpObjectProperty property = mRequest.getParameter(2);
+    LOGD("GetObjectPropValue %d %s\n", handle,
+            MtpDebug::getObjectPropCodeName(property));
+
+    return mDatabase->getObjectPropertyValue(handle, property, mData);
+}
+
+MtpResponseCode MtpServer::doSetObjectPropValue() {
+    MtpObjectHandle handle = mRequest.getParameter(1);
+    MtpObjectProperty property = mRequest.getParameter(2);
+    LOGD("SetObjectPropValue %d %s\n", handle,
+            MtpDebug::getObjectPropCodeName(property));
+
+    return mDatabase->setObjectPropertyValue(handle, property, mData);
+}
+
+MtpResponseCode MtpServer::doGetDevicePropValue() {
+    MtpDeviceProperty property = mRequest.getParameter(1);
+    LOGD("GetDevicePropValue %s\n",
+            MtpDebug::getDevicePropCodeName(property));
+
+    return mDatabase->getDevicePropertyValue(property, mData);
+}
+
+MtpResponseCode MtpServer::doSetDevicePropValue() {
+    MtpDeviceProperty property = mRequest.getParameter(1);
+    LOGD("SetDevicePropValue %s\n",
+            MtpDebug::getDevicePropCodeName(property));
+
+    return mDatabase->setDevicePropertyValue(property, mData);
+}
+
+MtpResponseCode MtpServer::doResetDevicePropValue() {
+    MtpDeviceProperty property = mRequest.getParameter(1);
+    LOGD("ResetDevicePropValue %s\n",
+            MtpDebug::getDevicePropCodeName(property));
+
+    return mDatabase->resetDeviceProperty(property);
+}
+
+MtpResponseCode MtpServer::doGetObjectPropList() {
+
+    MtpObjectHandle handle = mRequest.getParameter(1);
+    // use uint32_t so we can support 0xFFFFFFFF
+    uint32_t format = mRequest.getParameter(2);
+    uint32_t property = mRequest.getParameter(3);
+    int groupCode = mRequest.getParameter(4);
+    int depth = mRequest.getParameter(5);
+   LOGD("GetObjectPropList %d format: %s property: %s group: %d depth: %d\n",
+            handle, MtpDebug::getFormatCodeName(format),
+            MtpDebug::getObjectPropCodeName(property), groupCode, depth);
+
+    return mDatabase->getObjectPropertyList(handle, format, property, groupCode, depth, mData);
+}
+
+MtpResponseCode MtpServer::doGetObjectInfo() {
+    MtpObjectHandle handle = mRequest.getParameter(1);
+    return mDatabase->getObjectInfo(handle, mData);
+}
+
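+// GetObject: look up the object's path in the database, send the data header
+// ourselves, then hand the open file to the kernel driver via the
+// MTP_SEND_FILE ioctl so it streams the file contents for the data phase.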
+MtpResponseCode MtpServer::doGetObject() {
+    MtpObjectHandle handle = mRequest.getParameter(1);
+    MtpString pathBuf;
+    int64_t fileLength;
+    MtpObjectFormat format;
+    int result = mDatabase->getObjectFilePath(handle, pathBuf, fileLength, format);
+    if (result != MTP_RESPONSE_OK)
+        return result;
+
+    const char* filePath = (const char *)pathBuf;
+    mtp_file_range  mfr;
+    mfr.fd = open(filePath, O_RDONLY);
+    if (mfr.fd < 0) {
+        return MTP_RESPONSE_GENERAL_ERROR;
+    }
+    mfr.offset = 0;
+    mfr.length = fileLength;
+
+    // send data header
+    mData.setOperationCode(mRequest.getOperationCode());
+    mData.setTransactionID(mRequest.getTransactionID());
+    mData.writeDataHeader(mFD, fileLength + MTP_CONTAINER_HEADER_SIZE);
+
+    // then transfer the file
+    int ret = ioctl(mFD, MTP_SEND_FILE, (unsigned long)&mfr);
+    close(mfr.fd);
+    if (ret < 0) {
+        if (errno == ECANCELED)
+            return MTP_RESPONSE_TRANSACTION_CANCELLED;
+        else
+            return MTP_RESPONSE_GENERAL_ERROR;
+    }
+    return MTP_RESPONSE_OK;
+}
+
+MtpResponseCode MtpServer::doGetPartialObject() {
+    MtpObjectHandle handle = mRequest.getParameter(1);
+    uint32_t offset = mRequest.getParameter(2);
+    uint32_t length = mRequest.getParameter(3);
+    MtpString pathBuf;
+    int64_t fileLength;
+    MtpObjectFormat format;
+    int result = mDatabase->getObjectFilePath(handle, pathBuf, fileLength, format);
+    if (result != MTP_RESPONSE_OK)
+        return result;
+    if (offset + length > fileLength)
+        length = fileLength - offset;
+
+    const char* filePath = (const char *)pathBuf;
+    mtp_file_range  mfr;
+    mfr.fd = open(filePath, O_RDONLY);
+    if (mfr.fd < 0) {
+        return MTP_RESPONSE_GENERAL_ERROR;
+    }
+    mfr.offset = offset;
+    mfr.length = length;
+    mResponse.setParameter(1, length);
+
+    // send data header
+    mData.setOperationCode(mRequest.getOperationCode());
+    mData.setTransactionID(mRequest.getTransactionID());
+    mData.writeDataHeader(mFD, length + MTP_CONTAINER_HEADER_SIZE);
+
+    // then transfer the file
+    int ret = ioctl(mFD, MTP_SEND_FILE, (unsigned long)&mfr);
+    close(mfr.fd);
+    if (ret < 0) {
+        if (errno == ECANCELED)
+            return MTP_RESPONSE_TRANSACTION_CANCELLED;
+        else
+            return MTP_RESPONSE_GENERAL_ERROR;
+    }
+    return MTP_RESPONSE_OK;
+}
+
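+// SendObjectInfo is the first half of a two-phase transfer: we create the
+// database entry (or the directory, for associations) and remember the path,
+// handle and size so the SendObject operation that should follow can receive
+// the file data into the right place.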
+MtpResponseCode MtpServer::doSendObjectInfo() {
+    MtpString path;
+    MtpStorageID storageID = mRequest.getParameter(1);
+    MtpStorage* storage = getStorage(storageID);
+    MtpObjectHandle parent = mRequest.getParameter(2);
+    if (!storage)
+        return MTP_RESPONSE_INVALID_STORAGE_ID;
+
+    // special case the root
+    if (parent == MTP_PARENT_ROOT) {
+        path = storage->getPath();
+        parent = 0;
+    } else {
+        int64_t length;
+        MtpObjectFormat format;
+        int result = mDatabase->getObjectFilePath(parent, path, length, format);
+        if (result != MTP_RESPONSE_OK)
+            return result;
+        if (format != MTP_FORMAT_ASSOCIATION)
+            return MTP_RESPONSE_INVALID_PARENT_OBJECT;
+    }
+
+    // read only the fields we need
+    mData.getUInt32();  // storage ID
+    MtpObjectFormat format = mData.getUInt16();
+    mData.getUInt16();  // protection status
+    mSendObjectFileSize = mData.getUInt32();
+    mData.getUInt16();  // thumb format
+    mData.getUInt32();  // thumb compressed size
+    mData.getUInt32();  // thumb pix width
+    mData.getUInt32();  // thumb pix height
+    mData.getUInt32();  // image pix width
+    mData.getUInt32();  // image pix height
+    mData.getUInt32();  // image bit depth
+    mData.getUInt32();  // parent
+    uint16_t associationType = mData.getUInt16();
+    uint32_t associationDesc = mData.getUInt32();   // association desc
+    mData.getUInt32();  // sequence number
+    MtpStringBuffer name, created, modified;
+    mData.getString(name);    // file name
+    mData.getString(created);      // date created
+    mData.getString(modified);     // date modified
+    // keywords follow
+
+    LOGD("name: %s format: %04X\n", (const char *)name, format);
+    time_t modifiedTime;
+    if (!parseDateTime(modified, modifiedTime))
+        modifiedTime = 0;
+
+    if (path[path.size() - 1] != '/')
+        path += "/";
+    path += (const char *)name;
+
+    // check space first
+    if (mSendObjectFileSize > storage->getFreeSpace())
+        return MTP_RESPONSE_STORAGE_FULL;
+
+    MtpObjectHandle handle = mDatabase->beginSendObject((const char*)path,
+            format, parent, storageID, mSendObjectFileSize, modifiedTime);
+    if (handle == kInvalidObjectHandle) {
+        return MTP_RESPONSE_GENERAL_ERROR;
+    }
+
+    if (format == MTP_FORMAT_ASSOCIATION) {
+        mode_t mask = umask(0);
+        int ret = mkdir((const char *)path, mDirectoryPermission);
+        umask(mask);
+        // mkdir returns -1 and sets errno on failure, so check errno for EEXIST
+        if (ret && errno != EEXIST)
+            return MTP_RESPONSE_GENERAL_ERROR;
+        chown((const char *)path, getuid(), mFileGroup);
+    } else {
+        mSendObjectFilePath = path;
+        // save the handle for the SendObject call, which should follow
+        mSendObjectHandle = handle;
+        mSendObjectFormat = format;
+    }
+
+    mResponse.setParameter(1, storageID);
+    mResponse.setParameter(2, parent);
+    mResponse.setParameter(3, handle);
+
+    return MTP_RESPONSE_OK;
+}
+
+MtpResponseCode MtpServer::doSendObject() {
+    MtpResponseCode result = MTP_RESPONSE_OK;
+    mode_t mask;
+    int ret;
+    uint64_t actualSize = -1;
+
+    if (mSendObjectHandle == kInvalidObjectHandle) {
+        LOGE("Expected SendObjectInfo before SendObject");
+        result = MTP_RESPONSE_NO_VALID_OBJECT_INFO;
+        goto done;
+    }
+
+    // read the header
+    ret = mData.readDataHeader(mFD);
+    // FIXME - check for errors here.
+
+    // reset so we don't attempt to send this back
+    mData.reset();
+
+    mtp_file_range  mfr;
+    mfr.fd = open(mSendObjectFilePath, O_RDWR | O_CREAT | O_TRUNC, mFilePermission);
+    if (mfr.fd < 0) {
+        result = MTP_RESPONSE_GENERAL_ERROR;
+        goto done;
+    }
+    fchown(mfr.fd, getuid(), mFileGroup);
+    // set permissions
+    mask = umask(0);
+    fchmod(mfr.fd, mFilePermission);
+    umask(mask);
+
+    mfr.offset = 0;
+    mfr.length = mSendObjectFileSize;
+
+    LOGD("receiving %s\n", (const char *)mSendObjectFilePath);
+    // transfer the file
+    ret = ioctl(mFD, MTP_RECEIVE_FILE, (unsigned long)&mfr);
+    close(mfr.fd);
+
+    LOGV("MTP_RECEIVE_FILE returned %d", ret);
+
+    if (ret < 0) {
+        unlink(mSendObjectFilePath);
+        if (errno == ECANCELED)
+            result = MTP_RESPONSE_TRANSACTION_CANCELLED;
+        else
+            result = MTP_RESPONSE_GENERAL_ERROR;
+    } else if (mSendObjectFileSize == 0xFFFFFFFF) {
+        // actual size is likely > 4 gig so stat the file to compute actual length
+        struct stat s;
+        if (lstat(mSendObjectFilePath, &s) == 0) {
+            actualSize = s.st_size;
+            LOGD("actualSize: %lld\n", actualSize);
+        }
+    }
+
+done:
+    mDatabase->endSendObject(mSendObjectFilePath, mSendObjectHandle, mSendObjectFormat,
+            actualSize, result == MTP_RESPONSE_OK);
+    mSendObjectHandle = kInvalidObjectHandle;
+    mSendObjectFormat = 0;
+    return result;
+}
+
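+// Recursively deletes the contents of a directory, depth first; deletePath()
+// below removes the top-level directory itself afterwards.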
+static void deleteRecursive(const char* path) {
+    char pathbuf[PATH_MAX];
+    int pathLength = strlen(path);
+    if (pathLength >= sizeof(pathbuf) - 1) {
+        LOGE("path too long: %s\n", path);
+        return;
+    }
+    strcpy(pathbuf, path);
+    if (pathbuf[pathLength - 1] != '/') {
+        pathbuf[pathLength++] = '/';
+    }
+    char* fileSpot = pathbuf + pathLength;
+    int pathRemaining = sizeof(pathbuf) - pathLength - 1;
+
+    DIR* dir = opendir(path);
+    if (!dir) {
+        LOGE("opendir %s failed: %s", path, strerror(errno));
+        return;
+    }
+
+    struct dirent* entry;
+    while ((entry = readdir(dir))) {
+        const char* name = entry->d_name;
+
+        // ignore "." and ".."
+        if (name[0] == '.' && (name[1] == 0 || (name[1] == '.' && name[2] == 0))) {
+            continue;
+        }
+
+        int nameLength = strlen(name);
+        if (nameLength > pathRemaining) {
+            LOGE("path %s/%s too long\n", path, name);
+            continue;
+        }
+        strcpy(fileSpot, name);
+
+        if (entry->d_type == DT_DIR) {
+            deleteRecursive(pathbuf);
+            rmdir(pathbuf);
+        } else {
+            unlink(pathbuf);
+        }
+    }
+    closedir(dir);
+}
+
+static void deletePath(const char* path) {
+    struct stat statbuf;
+    if (stat(path, &statbuf) == 0) {
+        if (S_ISDIR(statbuf.st_mode)) {
+            deleteRecursive(path);
+            rmdir(path);
+        } else {
+            unlink(path);
+        }
+    } else {
+        LOGE("deletePath stat failed for %s: %s", path, strerror(errno));
+    }
+}
+
+MtpResponseCode MtpServer::doDeleteObject() {
+    MtpObjectHandle handle = mRequest.getParameter(1);
+    MtpObjectFormat format = mRequest.getParameter(2);
+    // FIXME - support deleting all objects if handle is 0xFFFFFFFF
+    // FIXME - implement deleting objects by format
+
+    MtpString filePath;
+    int64_t fileLength;
+    int result = mDatabase->getObjectFilePath(handle, filePath, fileLength, format);
+    if (result == MTP_RESPONSE_OK) {
+        LOGV("deleting %s", (const char *)filePath);
+        deletePath((const char *)filePath);
+        return mDatabase->deleteFile(handle);
+    } else {
+        return result;
+    }
+}
+
+MtpResponseCode MtpServer::doGetObjectPropDesc() {
+    MtpObjectProperty propCode = mRequest.getParameter(1);
+    MtpObjectFormat format = mRequest.getParameter(2);
+    LOGD("GetObjectPropDesc %s %s\n", MtpDebug::getObjectPropCodeName(propCode),
+                                        MtpDebug::getFormatCodeName(format));
+    MtpProperty* property = mDatabase->getObjectPropertyDesc(propCode, format);
+    if (!property)
+        return MTP_RESPONSE_OBJECT_PROP_NOT_SUPPORTED;
+    property->write(mData);
+    delete property;
+    return MTP_RESPONSE_OK;
+}
+
+MtpResponseCode MtpServer::doGetDevicePropDesc() {
+    MtpDeviceProperty propCode = mRequest.getParameter(1);
+    LOGD("GetDevicePropDesc %s\n", MtpDebug::getDevicePropCodeName(propCode));
+    MtpProperty* property = mDatabase->getDevicePropertyDesc(propCode);
+    if (!property)
+        return MTP_RESPONSE_DEVICE_PROP_NOT_SUPPORTED;
+    property->write(mData);
+    delete property;
+    return MTP_RESPONSE_OK;
+}
+
+}  // namespace android
diff --git a/media/mtp/MtpServer.h b/media/mtp/MtpServer.h
new file mode 100644
index 0000000..605d5a2
--- /dev/null
+++ b/media/mtp/MtpServer.h
@@ -0,0 +1,109 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _MTP_SERVER_H
+#define _MTP_SERVER_H
+
+#include "MtpRequestPacket.h"
+#include "MtpDataPacket.h"
+#include "MtpResponsePacket.h"
+#include "MtpEventPacket.h"
+#include "mtp.h"
+
+#include "MtpUtils.h"
+
+namespace android {
+
+class MtpDatabase;
+class MtpStorage;
+
+class MtpServer {
+
+private:
+    // file descriptor for MTP kernel driver
+    int                 mFD;
+
+    MtpDatabase*        mDatabase;
+
+    // group to own new files and folders
+    int                 mFileGroup;
+    // permissions for new files and directories
+    int                 mFilePermission;
+    int                 mDirectoryPermission;
+
+    // current session ID
+    MtpSessionID        mSessionID;
+    // true if we have an open session and mSessionID is valid
+    bool                mSessionOpen;
+
+    MtpRequestPacket    mRequest;
+    MtpDataPacket       mData;
+    MtpResponsePacket   mResponse;
+    MtpEventPacket      mEvent;
+
+    MtpStorageList      mStorages;
+
+    // handle for new object, set by SendObjectInfo and used by SendObject
+    MtpObjectHandle     mSendObjectHandle;
+    MtpObjectFormat     mSendObjectFormat;
+    MtpString           mSendObjectFilePath;
+    size_t              mSendObjectFileSize;
+
+public:
+                        MtpServer(int fd, MtpDatabase* database,
+                                    int fileGroup, int filePerm, int directoryPerm);
+    virtual             ~MtpServer();
+
+    void                addStorage(const char* filePath, uint64_t reserveSpace);
+    inline void         addStorage(MtpStorage* storage) { mStorages.push(storage); }
+    MtpStorage*         getStorage(MtpStorageID id);
+    void                run();
+
+    void                sendObjectAdded(MtpObjectHandle handle);
+    void                sendObjectRemoved(MtpObjectHandle handle);
+
+private:
+    bool                handleRequest();
+
+    MtpResponseCode     doGetDeviceInfo();
+    MtpResponseCode     doOpenSession();
+    MtpResponseCode     doCloseSession();
+    MtpResponseCode     doGetStorageIDs();
+    MtpResponseCode     doGetStorageInfo();
+    MtpResponseCode     doGetObjectPropsSupported();
+    MtpResponseCode     doGetObjectHandles();
+    MtpResponseCode     doGetNumObjects();
+    MtpResponseCode     doGetObjectReferences();
+    MtpResponseCode     doSetObjectReferences();
+    MtpResponseCode     doGetObjectPropValue();
+    MtpResponseCode     doSetObjectPropValue();
+    MtpResponseCode     doGetDevicePropValue();
+    MtpResponseCode     doSetDevicePropValue();
+    MtpResponseCode     doResetDevicePropValue();
+    MtpResponseCode     doGetObjectPropList();
+    MtpResponseCode     doGetObjectInfo();
+    MtpResponseCode     doGetObject();
+    MtpResponseCode     doGetPartialObject();
+    MtpResponseCode     doSendObjectInfo();
+    MtpResponseCode     doSendObject();
+    MtpResponseCode     doDeleteObject();
+    MtpResponseCode     doGetObjectPropDesc();
+    MtpResponseCode     doGetDevicePropDesc();
+};
+
+}; // namespace android
+
+#endif // _MTP_SERVER_H
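
A minimal usage sketch for the class above (hypothetical glue code, not part of this change). It assumes the caller already holds an open file descriptor for the MTP kernel driver and a concrete MtpDatabase implementation; the function name, group and permission values are illustrative only.

    #include "MtpServer.h"
    #include "MtpDatabase.h"

    using namespace android;

    void runMtpSession(int fd, MtpDatabase* database, int fileGroup) {
        // file/directory permissions applied to objects created by SendObjectInfo
        MtpServer server(fd, database, fileGroup, 0664, 0775);
        // expose one storage volume, keeping roughly 100 MB unallocated
        server.addStorage("/sdcard", 100 * 1024 * 1024);
        server.run();  // blocks, dispatching MTP requests until the session ends
    }
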
diff --git a/media/mtp/MtpStorage.cpp b/media/mtp/MtpStorage.cpp
new file mode 100644
index 0000000..abc23de
--- /dev/null
+++ b/media/mtp/MtpStorage.cpp
@@ -0,0 +1,81 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "MtpStorage"
+
+#include "MtpDebug.h"
+#include "MtpDatabase.h"
+#include "MtpStorage.h"
+
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <sys/statfs.h>
+#include <unistd.h>
+#include <dirent.h>
+#include <errno.h>
+#include <string.h>
+#include <stdio.h>
+#include <limits.h>
+
+namespace android {
+
+MtpStorage::MtpStorage(MtpStorageID id, const char* filePath, uint64_t reserveSpace)
+    :   mStorageID(id),
+        mFilePath(filePath),
+        mMaxCapacity(0),
+        mReserveSpace(reserveSpace)
+{
+    LOGD("MtpStorage id: %d path: %s\n", id, filePath);
+}
+
+MtpStorage::~MtpStorage() {
+}
+
+int MtpStorage::getType() const {
+    return MTP_STORAGE_FIXED_RAM;
+}
+
+int MtpStorage::getFileSystemType() const {
+    return MTP_STORAGE_FILESYSTEM_HIERARCHICAL;
+}
+
+int MtpStorage::getAccessCapability() const {
+    return MTP_STORAGE_READ_WRITE;
+}
+
+uint64_t MtpStorage::getMaxCapacity() {
+    if (mMaxCapacity == 0) {
+        struct statfs   stat;
+        if (statfs(mFilePath, &stat))
+            return -1;
+        mMaxCapacity = (uint64_t)stat.f_blocks * (uint64_t)stat.f_bsize;
+    }
+    return mMaxCapacity;
+}
+
+uint64_t MtpStorage::getFreeSpace() {
+    struct statfs   stat;
+    if (statfs(mFilePath, &stat))
+        return -1;
+    uint64_t freeSpace = (uint64_t)stat.f_bavail * (uint64_t)stat.f_bsize;
+    return (freeSpace > mReserveSpace ? freeSpace - mReserveSpace : 0);
+}
+
+const char* MtpStorage::getDescription() const {
+    return "Device Storage";
+}
+
+}  // namespace android
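
The reserve-space handling above can be shown in isolation. The sketch below (hypothetical function name and values) mirrors the statfs() arithmetic used by getMaxCapacity() and getFreeSpace():

    #include <stdint.h>
    #include <stdio.h>
    #include <sys/statfs.h>

    // Report capacity and free space for a mount point, holding back `reserve`
    // bytes the same way MtpStorage does.
    int reportSpace(const char* path, uint64_t reserve) {
        struct statfs st;
        if (statfs(path, &st) != 0)
            return -1;
        uint64_t capacity = (uint64_t)st.f_blocks * (uint64_t)st.f_bsize;
        uint64_t avail = (uint64_t)st.f_bavail * (uint64_t)st.f_bsize;
        uint64_t reported = (avail > reserve ? avail - reserve : 0);
        printf("capacity=%llu avail=%llu reported=%llu\n",
                (unsigned long long)capacity, (unsigned long long)avail,
                (unsigned long long)reported);
        return 0;
    }
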
diff --git a/media/mtp/MtpStorage.h b/media/mtp/MtpStorage.h
new file mode 100644
index 0000000..ace720b
--- /dev/null
+++ b/media/mtp/MtpStorage.h
@@ -0,0 +1,52 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _MTP_STORAGE_H
+#define _MTP_STORAGE_H
+
+#include "mtp.h"
+
+namespace android {
+
+class MtpDatabase;
+
+class MtpStorage {
+
+private:
+    MtpStorageID            mStorageID;
+    const char*             mFilePath;
+    uint64_t                mMaxCapacity;
+    // amount of free space to leave unallocated
+    uint64_t                mReserveSpace;
+
+public:
+                            MtpStorage(MtpStorageID id, const char* filePath,
+                                    uint64_t reserveSpace);
+    virtual                 ~MtpStorage();
+
+    inline MtpStorageID     getStorageID() const { return mStorageID; }
+    int                     getType() const;
+    int                     getFileSystemType() const;
+    int                     getAccessCapability() const;
+    uint64_t                getMaxCapacity();
+    uint64_t                getFreeSpace();
+    const char*             getDescription() const;
+    inline const char*      getPath() const { return mFilePath; }
+};
+
+}; // namespace android
+
+#endif // _MTP_STORAGE_H
diff --git a/media/mtp/MtpStorageInfo.cpp b/media/mtp/MtpStorageInfo.cpp
new file mode 100644
index 0000000..ca64ac0
--- /dev/null
+++ b/media/mtp/MtpStorageInfo.cpp
@@ -0,0 +1,72 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "MtpStorageInfo"
+
+#include "MtpDebug.h"
+#include "MtpDataPacket.h"
+#include "MtpStorageInfo.h"
+#include "MtpStringBuffer.h"
+
+namespace android {
+
+MtpStorageInfo::MtpStorageInfo(MtpStorageID id)
+    :   mStorageID(id),
+        mStorageType(0),
+        mFileSystemType(0),
+        mAccessCapability(0),
+        mMaxCapacity(0),
+        mFreeSpaceBytes(0),
+        mFreeSpaceObjects(0),
+        mStorageDescription(NULL),
+        mVolumeIdentifier(NULL)
+{
+}
+
+MtpStorageInfo::~MtpStorageInfo() {
+    if (mStorageDescription)
+        free(mStorageDescription);
+    if (mVolumeIdentifier)
+        free(mVolumeIdentifier);
+}
+
+void MtpStorageInfo::read(MtpDataPacket& packet) {
+    MtpStringBuffer string;
+
+    // read the device info
+    mStorageType = packet.getUInt16();
+    mFileSystemType = packet.getUInt16();
+    mAccessCapability = packet.getUInt16();
+    mMaxCapacity = packet.getUInt64();
+    mFreeSpaceBytes = packet.getUInt64();
+    mFreeSpaceObjects = packet.getUInt32();
+
+    packet.getString(string);
+    mStorageDescription = strdup((const char *)string);
+    packet.getString(string);
+    mVolumeIdentifier = strdup((const char *)string);
+}
+
+void MtpStorageInfo::print() {
+    LOGD("Storage Info %08X:\n\tmStorageType: %d\n\tmFileSystemType: %d\n\tmAccessCapability: %d\n",
+            mStorageID, mStorageType, mFileSystemType, mAccessCapability);
+    LOGD("\tmMaxCapacity: %lld\n\tmFreeSpaceBytes: %lld\n\tmFreeSpaceObjects: %d\n",
+            mMaxCapacity, mFreeSpaceBytes, mFreeSpaceObjects);
+    LOGD("\tmStorageDescription: %s\n\tmVolumeIdentifier: %s\n",
+            mStorageDescription, mVolumeIdentifier);
+}
+
+}  // namespace android
diff --git a/media/mtp/MtpStorageInfo.h b/media/mtp/MtpStorageInfo.h
new file mode 100644
index 0000000..2cb626e
--- /dev/null
+++ b/media/mtp/MtpStorageInfo.h
@@ -0,0 +1,49 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _MTP_STORAGE_INFO_H
+#define _MTP_STORAGE_INFO_H
+
+#include "MtpTypes.h"
+
+namespace android {
+
+class MtpDataPacket;
+
+class MtpStorageInfo {
+public:
+    MtpStorageID        mStorageID;
+    uint16_t            mStorageType;
+    uint16_t            mFileSystemType;
+    uint16_t            mAccessCapability;
+    uint64_t            mMaxCapacity;
+    uint64_t            mFreeSpaceBytes;
+    uint32_t            mFreeSpaceObjects;
+    char*               mStorageDescription;
+    char*               mVolumeIdentifier;
+
+public:
+                        MtpStorageInfo(MtpStorageID id);
+    virtual             ~MtpStorageInfo();
+
+    void                read(MtpDataPacket& packet);
+
+    void                print();
+};
+
+}; // namespace android
+
+#endif // _MTP_STORAGE_INFO_H
diff --git a/media/mtp/MtpStringBuffer.cpp b/media/mtp/MtpStringBuffer.cpp
new file mode 100644
index 0000000..fe8cf04
--- /dev/null
+++ b/media/mtp/MtpStringBuffer.cpp
@@ -0,0 +1,171 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "MtpStringBuffer"
+
+#include <string.h>
+
+#include "MtpDataPacket.h"
+#include "MtpStringBuffer.h"
+
+namespace android {
+
+MtpStringBuffer::MtpStringBuffer()
+    :   mCharCount(0),
+        mByteCount(1)
+{
+    mBuffer[0] = 0;
+}
+
+MtpStringBuffer::MtpStringBuffer(const char* src)
+    :   mCharCount(0),
+        mByteCount(1)
+{
+    set(src);
+}
+
+MtpStringBuffer::MtpStringBuffer(const uint16_t* src)
+    :   mCharCount(0),
+        mByteCount(1)
+{
+    set(src);
+}
+
+MtpStringBuffer::MtpStringBuffer(const MtpStringBuffer& src)
+    :   mCharCount(src.mCharCount),
+        mByteCount(src.mByteCount)
+{
+    memcpy(mBuffer, src.mBuffer, mByteCount);
+}
+
+
+MtpStringBuffer::~MtpStringBuffer() {
+}
+
+void MtpStringBuffer::set(const char* src) {
+    int length = strlen(src);
+    if (length >= sizeof(mBuffer))
+        length = sizeof(mBuffer) - 1;
+    memcpy(mBuffer, src, length);
+
+    // count the characters
+    int count = 0;
+    char ch;
+    while ((ch = *src++) != 0) {
+        if ((ch & 0x80) == 0) {
+            // single byte character
+        } else if ((ch & 0xE0) == 0xC0) {
+            // two byte character
+            if (! *src++) {
+                // last character was truncated, so ignore last byte
+                length--;
+                break;
+            }
+        } else if ((ch & 0xF0) == 0xE0) {
+            // 3 byte char
+            if (! *src++) {
+                // last character was truncated, so ignore last byte
+                length--;
+                break;
+            }
+            if (! *src++) {
+                // last character was truncated, so ignore last two bytes
+                length -= 2;
+                break;
+            }
+        }
+        count++;
+    }
+
+    mByteCount = length + 1;
+    mBuffer[length] = 0;
+    mCharCount = count;
+}
+
+void MtpStringBuffer::set(const uint16_t* src) {
+    int count = 0;
+    uint16_t ch;
+    uint8_t* dest = mBuffer;
+
+    while ((ch = *src++) != 0 && count < 255) {
+        if (ch >= 0x0800) {
+            *dest++ = (uint8_t)(0xE0 | (ch >> 12));
+            *dest++ = (uint8_t)(0x80 | ((ch >> 6) & 0x3F));
+            *dest++ = (uint8_t)(0x80 | (ch & 0x3F));
+        } else if (ch >= 0x80) {
+            *dest++ = (uint8_t)(0xC0 | (ch >> 6));
+            *dest++ = (uint8_t)(0x80 | (ch & 0x3F));
+        } else {
+            *dest++ = ch;
+        }
+        count++;
+    }
+    *dest++ = 0;
+    mCharCount = count;
+    mByteCount = dest - mBuffer;
+}
+
+void MtpStringBuffer::readFromPacket(MtpDataPacket* packet) {
+    int count = packet->getUInt8();
+    uint8_t* dest = mBuffer;
+    for (int i = 0; i < count; i++) {
+        uint16_t ch = packet->getUInt16();
+        if (ch >= 0x0800) {
+            *dest++ = (uint8_t)(0xE0 | (ch >> 12));
+            *dest++ = (uint8_t)(0x80 | ((ch >> 6) & 0x3F));
+            *dest++ = (uint8_t)(0x80 | (ch & 0x3F));
+        } else if (ch >= 0x80) {
+            *dest++ = (uint8_t)(0xC0 | (ch >> 6));
+            *dest++ = (uint8_t)(0x80 | (ch & 0x3F));
+        } else {
+            *dest++ = ch;
+        }
+    }
+    *dest++ = 0;
+    mCharCount = count;
+    mByteCount = dest - mBuffer;
+}
+
+void MtpStringBuffer::writeToPacket(MtpDataPacket* packet) const {
+    int count = mCharCount;
+    const uint8_t* src = mBuffer;
+    packet->putUInt8(count > 0 ? count + 1 : 0);
+
+    // expand utf8 to 16 bit chars
+    for (int i = 0; i < count; i++) {
+        uint16_t ch;
+        uint16_t ch1 = *src++;
+        if ((ch1 & 0x80) == 0) {
+            // single byte character
+            ch = ch1;
+        } else if ((ch1 & 0xE0) == 0xC0) {
+            // two byte character
+            uint16_t ch2 = *src++;
+            ch = ((ch1 & 0x1F) << 6) | (ch2 & 0x3F);
+        } else {
+            // three byte character
+            uint16_t ch2 = *src++;
+            uint16_t ch3 = *src++;
+            ch = ((ch1 & 0x0F) << 12) | ((ch2 & 0x3F) << 6) | (ch3 & 0x3F);
+        }
+        packet->putUInt16(ch);
+    }
+    // only terminate with zero if string is not empty
+    if (count > 0)
+        packet->putUInt16(0);
+}
+
+}  // namespace android
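
The UTF-16 to UTF-8 bit arithmetic used by set(const uint16_t*) and readFromPacket() above, pulled out as a standalone helper for illustration. Like the class itself it only handles code points up to U+FFFF, i.e. no surrogate pairs:

    #include <stdint.h>

    // Encode one 16-bit code unit as 1-3 UTF-8 bytes; returns the byte count.
    static int utf16ToUtf8(uint16_t ch, uint8_t* out) {
        if (ch >= 0x0800) {             // 1110xxxx 10xxxxxx 10xxxxxx
            out[0] = (uint8_t)(0xE0 | (ch >> 12));
            out[1] = (uint8_t)(0x80 | ((ch >> 6) & 0x3F));
            out[2] = (uint8_t)(0x80 | (ch & 0x3F));
            return 3;
        } else if (ch >= 0x80) {        // 110xxxxx 10xxxxxx
            out[0] = (uint8_t)(0xC0 | (ch >> 6));
            out[1] = (uint8_t)(0x80 | (ch & 0x3F));
            return 2;
        }
        out[0] = (uint8_t)ch;           // plain ASCII
        return 1;
    }
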
diff --git a/media/mtp/MtpStringBuffer.h b/media/mtp/MtpStringBuffer.h
new file mode 100644
index 0000000..cbc8307
--- /dev/null
+++ b/media/mtp/MtpStringBuffer.h
@@ -0,0 +1,57 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _MTP_STRING_BUFFER_H
+#define _MTP_STRING_BUFFER_H
+
+#include <stdint.h>
+
+namespace android {
+
+class MtpDataPacket;
+
+// Represents a utf8 string, with a maximum of 255 characters
+class MtpStringBuffer {
+
+private:
+    // mBuffer contains string in UTF8 format
+    // maximum 3 bytes/character, with 1 extra for zero termination
+    uint8_t         mBuffer[255 * 3 + 1];
+    int             mCharCount;
+    int             mByteCount;
+
+public:
+                    MtpStringBuffer();
+                    MtpStringBuffer(const char* src);
+                    MtpStringBuffer(const uint16_t* src);
+                    MtpStringBuffer(const MtpStringBuffer& src);
+    virtual         ~MtpStringBuffer();
+
+    void            set(const char* src);
+    void            set(const uint16_t* src);
+
+    void            readFromPacket(MtpDataPacket* packet);
+    void            writeToPacket(MtpDataPacket* packet) const;
+
+    inline int      getCharCount() const { return mCharCount; }
+    inline int      getByteCount() const { return mByteCount; }
+
+    inline operator const char*() const { return (const char *)mBuffer; }
+};
+
+}; // namespace android
+
+#endif // _MTP_STRING_BUFFER_H
diff --git a/media/mtp/MtpTypes.h b/media/mtp/MtpTypes.h
new file mode 100644
index 0000000..720c854
--- /dev/null
+++ b/media/mtp/MtpTypes.h
@@ -0,0 +1,78 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _MTP_TYPES_H
+#define _MTP_TYPES_H
+
+#include <stdint.h>
+#include "utils/String8.h"
+#include "utils/Vector.h"
+
+namespace android {
+
+typedef int32_t int128_t[4];
+typedef uint32_t uint128_t[4];
+
+typedef uint16_t MtpOperationCode;
+typedef uint16_t MtpResponseCode;
+typedef uint16_t MtpEventCode;
+typedef uint32_t MtpSessionID;
+typedef uint32_t MtpStorageID;
+typedef uint32_t MtpTransactionID;
+typedef uint16_t MtpPropertyCode;
+typedef uint16_t MtpDataType;
+typedef uint16_t MtpObjectFormat;
+typedef MtpPropertyCode MtpDeviceProperty;
+typedef MtpPropertyCode MtpObjectProperty;
+
+// object handles are unique across all storage but only within a single session.
+// object handles cannot be reused after an object is deleted.
+// values 0x00000000 and 0xFFFFFFFF are reserved for special purposes.
+typedef uint32_t MtpObjectHandle;
+
+// Special values
+#define MTP_PARENT_ROOT         0xFFFFFFFF       // parent is root of the storage
+#define kInvalidObjectHandle    0xFFFFFFFF
+
+class MtpStorage;
+class MtpDevice;
+class MtpProperty;
+
+typedef Vector<MtpStorage *> MtpStorageList;
+typedef Vector<MtpDevice*> MtpDeviceList;
+typedef Vector<MtpProperty*> MtpPropertyList;
+
+typedef Vector<uint8_t> UInt8List;
+typedef Vector<uint16_t> UInt16List;
+typedef Vector<uint32_t> UInt32List;
+typedef Vector<uint64_t> UInt64List;
+typedef Vector<int8_t> Int8List;
+typedef Vector<int16_t> Int16List;
+typedef Vector<int32_t> Int32List;
+typedef Vector<int64_t> Int64List;
+
+typedef UInt16List MtpObjectPropertyList;
+typedef UInt16List MtpDevicePropertyList;
+typedef UInt16List MtpObjectFormatList;
+typedef UInt32List MtpObjectHandleList;
+typedef UInt32List MtpStorageIDList;
+
+typedef String8    MtpString;
+
+}; // namespace android
+
+#endif // _MTP_TYPES_H
diff --git a/media/mtp/MtpUtils.cpp b/media/mtp/MtpUtils.cpp
new file mode 100644
index 0000000..ab01ef5
--- /dev/null
+++ b/media/mtp/MtpUtils.cpp
@@ -0,0 +1,78 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "MtpUtils"
+
+#include <stdio.h>
+#include <time.h>
+
+#include <cutils/tztime.h>
+#include "MtpUtils.h"
+
+namespace android {
+
+/*
+DateTime strings follow a compatible subset of the definition found in ISO 8601, and
+take the form of a Unicode string formatted as: "YYYYMMDDThhmmss.s". In this
+representation, YYYY shall be replaced by the year, MM replaced by the month (01-12),
+DD replaced by the day (01-31), T is a constant character 'T' delimiting time from date,
+hh is replaced by the hour (00-23), mm is replaced by the minute (00-59), and ss by the
+second (00-59). The ".s" is optional, and represents tenths of a second.
+*/
+
+bool parseDateTime(const char* dateTime, time_t& outSeconds) {
+    int year, month, day, hour, minute, second;
+    struct tm tm;
+
+    if (sscanf(dateTime, "%04d%02d%02dT%02d%02d%02d",
+            &year, &month, &day, &hour, &minute, &second) != 6)
+        return false;
+    const char* tail = dateTime + 15;
+    // skip optional tenth of second
+    if (tail[0] == '.' && tail[1])
+        tail += 2;
+    //FIXME - support +/-hhmm
+    bool useUTC = (tail[0] == 'Z');
+
+    // determine the current timezone (tm_zone) via localtime_r
+    time_t now = time(NULL);
+    localtime_r(&now, &tm);
+
+    tm.tm_sec = second;
+    tm.tm_min = minute;
+    tm.tm_hour = hour;
+    tm.tm_mday = day;
+    tm.tm_mon = month - 1;  // struct tm months are 0-11, MTP months are 01-12
+    tm.tm_year = year - 1900;
+    tm.tm_wday = 0;
+    tm.tm_isdst = -1;
+    if (useUTC)
+        outSeconds = mktime_tz(&tm, "UTC");
+    else
+        outSeconds = mktime_tz(&tm, tm.tm_zone);
+
+    return true;
+}
+
+void formatDateTime(time_t seconds, char* buffer, int bufferLength) {
+    struct tm tm;
+
+    localtime_r(&seconds, &tm);
+    snprintf(buffer, bufferLength, "%04d%02d%02dT%02d%02d%02d",
+        tm.tm_year + 1900, tm.tm_mon + 1, tm.tm_mday, tm.tm_hour, tm.tm_min, tm.tm_sec);
+}
+
+}  // namespace android
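
A round trip through the helpers above, using a hypothetical timestamp, illustrates the "YYYYMMDDThhmmss" format described in the comment block:

    #include <stdio.h>
    #include <time.h>

    #include "MtpUtils.h"

    int main() {
        time_t seconds;
        if (android::parseDateTime("20100823T153045", seconds)) {
            char buffer[32];
            android::formatDateTime(seconds, buffer, sizeof(buffer));
            printf("round trip: %s\n", buffer);  // expect "20100823T153045" again
        }
        return 0;
    }
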
diff --git a/media/mtp/MtpUtils.h b/media/mtp/MtpUtils.h
new file mode 100644
index 0000000..61f9055
--- /dev/null
+++ b/media/mtp/MtpUtils.h
@@ -0,0 +1,29 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _MTP_UTILS_H
+#define _MTP_UTILS_H
+
+#include <stdint.h>
+#include <time.h>      // for time_t
+
+namespace android {
+
+bool parseDateTime(const char* dateTime, time_t& outSeconds);
+void formatDateTime(time_t seconds, char* buffer, int bufferLength);
+
+}; // namespace android
+
+#endif // _MTP_UTILS_H
diff --git a/media/mtp/mtp.h b/media/mtp/mtp.h
new file mode 100644
index 0000000..8bc2e22
--- /dev/null
+++ b/media/mtp/mtp.h
@@ -0,0 +1,479 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _MTP_H
+#define _MTP_H
+
+#include <stdint.h>
+#include <stdlib.h>
+
+#define MTP_STANDARD_VERSION            100
+
+// Container Types
+#define MTP_CONTAINER_TYPE_UNDEFINED    0
+#define MTP_CONTAINER_TYPE_COMMAND      1
+#define MTP_CONTAINER_TYPE_DATA         2
+#define MTP_CONTAINER_TYPE_RESPONSE     3
+#define MTP_CONTAINER_TYPE_EVENT        4
+
+// Container Offsets
+#define MTP_CONTAINER_LENGTH_OFFSET             0
+#define MTP_CONTAINER_TYPE_OFFSET               4
+#define MTP_CONTAINER_CODE_OFFSET               6
+#define MTP_CONTAINER_TRANSACTION_ID_OFFSET     8
+#define MTP_CONTAINER_PARAMETER_OFFSET          12
+#define MTP_CONTAINER_HEADER_SIZE               12
+
+// MTP Data Types
+#define MTP_TYPE_UNDEFINED      0x0000          // Undefined
+#define MTP_TYPE_INT8           0x0001          // Signed 8-bit integer
+#define MTP_TYPE_UINT8          0x0002          // Unsigned 8-bit integer
+#define MTP_TYPE_INT16          0x0003          // Signed 16-bit integer
+#define MTP_TYPE_UINT16         0x0004          // Unsigned 16-bit integer
+#define MTP_TYPE_INT32          0x0005          // Signed 32-bit integer
+#define MTP_TYPE_UINT32         0x0006          // Unsigned 32-bit integer
+#define MTP_TYPE_INT64          0x0007          // Signed 64-bit integer
+#define MTP_TYPE_UINT64         0x0008          // Unsigned 64-bit integer
+#define MTP_TYPE_INT128         0x0009          // Signed 128-bit integer
+#define MTP_TYPE_UINT128        0x000A          // Unsigned 128-bit integer
+#define MTP_TYPE_AINT8          0x4001          // Array of signed 8-bit integers
+#define MTP_TYPE_AUINT8         0x4002          // Array of unsigned 8-bit integers
+#define MTP_TYPE_AINT16         0x4003          // Array of signed 16-bit integers
+#define MTP_TYPE_AUINT16        0x4004          // Array of unsigned 16-bit integers
+#define MTP_TYPE_AINT32         0x4005          // Array of signed 32-bit integers
+#define MTP_TYPE_AUINT32        0x4006          // Array of unsigned 32-bit integers
+#define MTP_TYPE_AINT64         0x4007          // Array of signed 64-bit integers
+#define MTP_TYPE_AUINT64        0x4008          // Array of unsigned 64-bit integers
+#define MTP_TYPE_AINT128        0x4009          // Array of signed 128-bit integers
+#define MTP_TYPE_AUINT128       0x400A          // Array of unsigned 128-bit integers
+#define MTP_TYPE_STR            0xFFFF          // Variable-length Unicode string
+
+// MTP Format Codes
+#define MTP_FORMAT_UNDEFINED                            0x3000   // Undefined object
+#define MTP_FORMAT_ASSOCIATION                          0x3001   // Association (for example, a folder)
+#define MTP_FORMAT_SCRIPT                               0x3002   // Device model-specific script
+#define MTP_FORMAT_EXECUTABLE                           0x3003   // Device model-specific binary executable
+#define MTP_FORMAT_TEXT                                 0x3004   // Text file
+#define MTP_FORMAT_HTML                                 0x3005   // Hypertext Markup Language file (text)
+#define MTP_FORMAT_DPOF                                 0x3006   // Digital Print Order Format file (text)
+#define MTP_FORMAT_AIFF                                 0x3007   // Audio clip
+#define MTP_FORMAT_WAV                                  0x3008   // Audio clip
+#define MTP_FORMAT_MP3                                  0x3009   // Audio clip
+#define MTP_FORMAT_AVI                                  0x300A   // Video clip
+#define MTP_FORMAT_MPEG                                 0x300B   // Video clip
+#define MTP_FORMAT_ASF                                  0x300C   // Microsoft Advanced Streaming Format (video)
+#define MTP_FORMAT_DEFINED                              0x3800   // Unknown image object
+#define MTP_FORMAT_EXIF_JPEG                            0x3801   // Exchangeable File Format, JEIDA standard
+#define MTP_FORMAT_TIFF_EP                              0x3802   // Tag Image File Format for Electronic Photography
+#define MTP_FORMAT_FLASHPIX                             0x3803   // Structured Storage Image Format
+#define MTP_FORMAT_BMP                                  0x3804   // Microsoft Windows Bitmap file
+#define MTP_FORMAT_CIFF                                 0x3805   // Canon Camera Image File Format
+#define MTP_FORMAT_GIF                                  0x3807   // Graphics Interchange Format
+#define MTP_FORMAT_JFIF                                 0x3808   // JPEG File Interchange Format
+#define MTP_FORMAT_CD                                   0x3809   // PhotoCD Image Pac
+#define MTP_FORMAT_PICT                                 0x380A   // Quickdraw Image Format
+#define MTP_FORMAT_PNG                                  0x380B   // Portable Network Graphics
+#define MTP_FORMAT_TIFF                                 0x380D   // Tag Image File Format
+#define MTP_FORMAT_TIFF_IT                              0x380E   // Tag Image File Format for Information Technology (graphic arts)
+#define MTP_FORMAT_JP2                                  0x380F   // JPEG2000 Baseline File Format
+#define MTP_FORMAT_JPX                                  0x3810   // JPEG2000 Extended File Format
+#define MTP_FORMAT_UNDEFINED_FIRMWARE                   0xB802
+#define MTP_FORMAT_WINDOWS_IMAGE_FORMAT                 0xB881
+#define MTP_FORMAT_UNDEFINED_AUDIO                      0xB900
+#define MTP_FORMAT_WMA                                  0xB901
+#define MTP_FORMAT_OGG                                  0xB902
+#define MTP_FORMAT_AAC                                  0xB903
+#define MTP_FORMAT_AUDIBLE                              0xB904
+#define MTP_FORMAT_FLAC                                 0xB906
+#define MTP_FORMAT_UNDEFINED_VIDEO                      0xB980
+#define MTP_FORMAT_WMV                                  0xB981
+#define MTP_FORMAT_MP4_CONTAINER                        0xB982  // ISO 14496-1
+#define MTP_FORMAT_MP2                                  0xB983
+#define MTP_FORMAT_3GP_CONTAINER                        0xB984  // 3GPP file format. Details: http://www.3gpp.org/ftp/Specs/html-info/26244.htm (page title - "Transparent end-to-end packet switched streaming service, 3GPP file format").
+#define MTP_FORMAT_UNDEFINED_COLLECTION                 0xBA00
+#define MTP_FORMAT_ABSTRACT_MULTIMEDIA_ALBUM            0xBA01
+#define MTP_FORMAT_ABSTRACT_IMAGE_ALBUM                 0xBA02
+#define MTP_FORMAT_ABSTRACT_AUDIO_ALBUM                 0xBA03
+#define MTP_FORMAT_ABSTRACT_VIDEO_ALBUM                 0xBA04
+#define MTP_FORMAT_ABSTRACT_AV_PLAYLIST                 0xBA05
+#define MTP_FORMAT_ABSTRACT_CONTACT_GROUP               0xBA06
+#define MTP_FORMAT_ABSTRACT_MESSAGE_FOLDER              0xBA07
+#define MTP_FORMAT_ABSTRACT_CHAPTERED_PRODUCTION        0xBA08
+#define MTP_FORMAT_ABSTRACT_AUDIO_PLAYLIST              0xBA09
+#define MTP_FORMAT_ABSTRACT_VIDEO_PLAYLIST              0xBA0A
+#define MTP_FORMAT_ABSTRACT_MEDIACAST                   0xBA0B // For use with mediacasts; references multimedia enclosures of RSS feeds or episodic content
+#define MTP_FORMAT_WPL_PLAYLIST                         0xBA10
+#define MTP_FORMAT_M3U_PLAYLIST                         0xBA11
+#define MTP_FORMAT_MPL_PLAYLIST                         0xBA12
+#define MTP_FORMAT_ASX_PLAYLIST                         0xBA13
+#define MTP_FORMAT_PLS_PLAYLIST                         0xBA14
+#define MTP_FORMAT_UNDEFINED_DOCUMENT                   0xBA80
+#define MTP_FORMAT_ABSTRACT_DOCUMENT                    0xBA81
+#define MTP_FORMAT_XML_DOCUMENT                         0xBA82
+#define MTP_FORMAT_MS_WORD_DOCUMENT                     0xBA83
+#define MTP_FORMAT_MHT_COMPILED_HTML_DOCUMENT           0xBA84
+#define MTP_FORMAT_MS_EXCEL_SPREADSHEET                 0xBA85
+#define MTP_FORMAT_MS_POWERPOINT_PRESENTATION           0xBA86
+#define MTP_FORMAT_UNDEFINED_MESSAGE                    0xBB00
+#define MTP_FORMAT_ABSTRACT_MESSSAGE                    0xBB01
+#define MTP_FORMAT_UNDEFINED_CONTACT                    0xBB80
+#define MTP_FORMAT_ABSTRACT_CONTACT                     0xBB81
+#define MTP_FORMAT_VCARD_2                              0xBB82
+
+// MTP Object Property Codes
+#define MTP_PROPERTY_STORAGE_ID                             0xDC01
+#define MTP_PROPERTY_OBJECT_FORMAT                          0xDC02
+#define MTP_PROPERTY_PROTECTION_STATUS                      0xDC03
+#define MTP_PROPERTY_OBJECT_SIZE                            0xDC04
+#define MTP_PROPERTY_ASSOCIATION_TYPE                       0xDC05
+#define MTP_PROPERTY_ASSOCIATION_DESC                       0xDC06
+#define MTP_PROPERTY_OBJECT_FILE_NAME                       0xDC07
+#define MTP_PROPERTY_DATE_CREATED                           0xDC08
+#define MTP_PROPERTY_DATE_MODIFIED                          0xDC09
+#define MTP_PROPERTY_KEYWORDS                               0xDC0A
+#define MTP_PROPERTY_PARENT_OBJECT                          0xDC0B
+#define MTP_PROPERTY_ALLOWED_FOLDER_CONTENTS                0xDC0C
+#define MTP_PROPERTY_HIDDEN                                 0xDC0D
+#define MTP_PROPERTY_SYSTEM_OBJECT                          0xDC0E
+#define MTP_PROPERTY_PERSISTENT_UID                         0xDC41
+#define MTP_PROPERTY_SYNC_ID                                0xDC42
+#define MTP_PROPERTY_PROPERTY_BAG                           0xDC43
+#define MTP_PROPERTY_NAME                                   0xDC44
+#define MTP_PROPERTY_CREATED_BY                             0xDC45
+#define MTP_PROPERTY_ARTIST                                 0xDC46
+#define MTP_PROPERTY_DATE_AUTHORED                          0xDC47
+#define MTP_PROPERTY_DESCRIPTION                            0xDC48
+#define MTP_PROPERTY_URL_REFERENCE                          0xDC49
+#define MTP_PROPERTY_LANGUAGE_LOCALE                        0xDC4A
+#define MTP_PROPERTY_COPYRIGHT_INFORMATION                  0xDC4B
+#define MTP_PROPERTY_SOURCE                                 0xDC4C
+#define MTP_PROPERTY_ORIGIN_LOCATION                        0xDC4D
+#define MTP_PROPERTY_DATE_ADDED                             0xDC4E
+#define MTP_PROPERTY_NON_CONSUMABLE                         0xDC4F
+#define MTP_PROPERTY_CORRUPT_UNPLAYABLE                     0xDC50
+#define MTP_PROPERTY_PRODUCER_SERIAL_NUMBER                 0xDC51
+#define MTP_PROPERTY_REPRESENTATIVE_SAMPLE_FORMAT           0xDC81
+#define MTP_PROPERTY_REPRESENTATIVE_SAMPLE_SIZE             0xDC82
+#define MTP_PROPERTY_REPRESENTATIVE_SAMPLE_HEIGHT           0xDC83
+#define MTP_PROPERTY_REPRESENTATIVE_SAMPLE_WIDTH            0xDC84
+#define MTP_PROPERTY_REPRESENTATIVE_SAMPLE_DURATION         0xDC85
+#define MTP_PROPERTY_REPRESENTATIVE_SAMPLE_DATA             0xDC86
+#define MTP_PROPERTY_WIDTH                                  0xDC87
+#define MTP_PROPERTY_HEIGHT                                 0xDC88
+#define MTP_PROPERTY_DURATION                               0xDC89
+#define MTP_PROPERTY_RATING                                 0xDC8A
+#define MTP_PROPERTY_TRACK                                  0xDC8B
+#define MTP_PROPERTY_GENRE                                  0xDC8C
+#define MTP_PROPERTY_CREDITS                                0xDC8D
+#define MTP_PROPERTY_LYRICS                                 0xDC8E
+#define MTP_PROPERTY_SUBSCRIPTION_CONTENT_ID                0xDC8F
+#define MTP_PROPERTY_PRODUCED_BY                            0xDC90
+#define MTP_PROPERTY_USE_COUNT                              0xDC91
+#define MTP_PROPERTY_SKIP_COUNT                             0xDC92
+#define MTP_PROPERTY_LAST_ACCESSED                          0xDC93
+#define MTP_PROPERTY_PARENTAL_RATING                        0xDC94
+#define MTP_PROPERTY_META_GENRE                             0xDC95
+#define MTP_PROPERTY_COMPOSER                               0xDC96
+#define MTP_PROPERTY_EFFECTIVE_RATING                       0xDC97
+#define MTP_PROPERTY_SUBTITLE                               0xDC98
+#define MTP_PROPERTY_ORIGINAL_RELEASE_DATE                  0xDC99
+#define MTP_PROPERTY_ALBUM_NAME                             0xDC9A
+#define MTP_PROPERTY_ALBUM_ARTIST                           0xDC9B
+#define MTP_PROPERTY_MOOD                                   0xDC9C
+#define MTP_PROPERTY_DRM_STATUS                             0xDC9D
+#define MTP_PROPERTY_SUB_DESCRIPTION                        0xDC9E
+#define MTP_PROPERTY_IS_CROPPED                             0xDCD1
+#define MTP_PROPERTY_IS_COLOUR_CORRECTED                    0xDCD2
+#define MTP_PROPERTY_IMAGE_BIT_DEPTH                        0xDCD3
+#define MTP_PROPERTY_F_NUMBER                               0xDCD4
+#define MTP_PROPERTY_EXPOSURE_TIME                          0xDCD5
+#define MTP_PROPERTY_EXPOSURE_INDEX                         0xDCD6
+#define MTP_PROPERTY_TOTAL_BITRATE                          0xDE91
+#define MTP_PROPERTY_BITRATE_TYPE                           0xDE92
+#define MTP_PROPERTY_SAMPLE_RATE                            0xDE93
+#define MTP_PROPERTY_NUMBER_OF_CHANNELS                     0xDE94
+#define MTP_PROPERTY_AUDIO_BIT_DEPTH                        0xDE95
+#define MTP_PROPERTY_SCAN_TYPE                              0xDE97
+#define MTP_PROPERTY_AUDIO_WAVE_CODEC                       0xDE99
+#define MTP_PROPERTY_AUDIO_BITRATE                          0xDE9A
+#define MTP_PROPERTY_VIDEO_FOURCC_CODEC                     0xDE9B
+#define MTP_PROPERTY_VIDEO_BITRATE                          0xDE9C
+#define MTP_PROPERTY_FRAMES_PER_THOUSAND_SECONDS            0xDE9D
+#define MTP_PROPERTY_KEYFRAME_DISTANCE                      0xDE9E
+#define MTP_PROPERTY_BUFFER_SIZE                            0xDE9F
+#define MTP_PROPERTY_ENCODING_QUALITY                       0xDEA0
+#define MTP_PROPERTY_ENCODING_PROFILE                       0xDEA1
+#define MTP_PROPERTY_DISPLAY_NAME                           0xDCE0
+#define MTP_PROPERTY_BODY_TEXT                              0xDCE1
+#define MTP_PROPERTY_SUBJECT                                0xDCE2
+#define MTP_PROPERTY_PRIORITY                               0xDCE3
+#define MTP_PROPERTY_GIVEN_NAME                             0xDD00
+#define MTP_PROPERTY_MIDDLE_NAMES                           0xDD01
+#define MTP_PROPERTY_FAMILY_NAME                            0xDD02
+#define MTP_PROPERTY_PREFIX                                 0xDD03
+#define MTP_PROPERTY_SUFFIX                                 0xDD04
+#define MTP_PROPERTY_PHONETIC_GIVEN_NAME                    0xDD05
+#define MTP_PROPERTY_PHONETIC_FAMILY_NAME                   0xDD06
+#define MTP_PROPERTY_EMAIL_PRIMARY                          0xDD07
+#define MTP_PROPERTY_EMAIL_PERSONAL_1                       0xDD08
+#define MTP_PROPERTY_EMAIL_PERSONAL_2                       0xDD09
+#define MTP_PROPERTY_EMAIL_BUSINESS_1                       0xDD0A
+#define MTP_PROPERTY_EMAIL_BUSINESS_2                       0xDD0B
+#define MTP_PROPERTY_EMAIL_OTHERS                           0xDD0C
+#define MTP_PROPERTY_PHONE_NUMBER_PRIMARY                   0xDD0D
+#define MTP_PROPERTY_PHONE_NUMBER_PERSONAL                  0xDD0E
+#define MTP_PROPERTY_PHONE_NUMBER_PERSONAL_2                0xDD0F
+#define MTP_PROPERTY_PHONE_NUMBER_BUSINESS                  0xDD10
+#define MTP_PROPERTY_PHONE_NUMBER_BUSINESS_2                0xDD11
+#define MTP_PROPERTY_PHONE_NUMBER_MOBILE                    0xDD12
+#define MTP_PROPERTY_PHONE_NUMBER_MOBILE_2                  0xDD13
+#define MTP_PROPERTY_FAX_NUMBER_PRIMARY                     0xDD14
+#define MTP_PROPERTY_FAX_NUMBER_PERSONAL                    0xDD15
+#define MTP_PROPERTY_FAX_NUMBER_BUSINESS                    0xDD16
+#define MTP_PROPERTY_PAGER_NUMBER                           0xDD17
+#define MTP_PROPERTY_PHONE_NUMBER_OTHERS                    0xDD18
+#define MTP_PROPERTY_PRIMARY_WEB_ADDRESS                    0xDD19
+#define MTP_PROPERTY_PERSONAL_WEB_ADDRESS                   0xDD1A
+#define MTP_PROPERTY_BUSINESS_WEB_ADDRESS                   0xDD1B
+#define MTP_PROPERTY_INSTANT_MESSANGER_ADDRESS              0xDD1C
+#define MTP_PROPERTY_INSTANT_MESSANGER_ADDRESS_2            0xDD1D
+#define MTP_PROPERTY_INSTANT_MESSANGER_ADDRESS_3            0xDD1E
+#define MTP_PROPERTY_POSTAL_ADDRESS_PERSONAL_FULL           0xDD1F
+#define MTP_PROPERTY_POSTAL_ADDRESS_PERSONAL_LINE_1         0xDD20
+#define MTP_PROPERTY_POSTAL_ADDRESS_PERSONAL_LINE_2         0xDD21
+#define MTP_PROPERTY_POSTAL_ADDRESS_PERSONAL_CITY           0xDD22
+#define MTP_PROPERTY_POSTAL_ADDRESS_PERSONAL_REGION         0xDD23
+#define MTP_PROPERTY_POSTAL_ADDRESS_PERSONAL_POSTAL_CODE    0xDD24
+#define MTP_PROPERTY_POSTAL_ADDRESS_PERSONAL_COUNTRY        0xDD25
+#define MTP_PROPERTY_POSTAL_ADDRESS_BUSINESS_FULL           0xDD26
+#define MTP_PROPERTY_POSTAL_ADDRESS_BUSINESS_LINE_1         0xDD27
+#define MTP_PROPERTY_POSTAL_ADDRESS_BUSINESS_LINE_2         0xDD28
+#define MTP_PROPERTY_POSTAL_ADDRESS_BUSINESS_CITY           0xDD29
+#define MTP_PROPERTY_POSTAL_ADDRESS_BUSINESS_REGION         0xDD2A
+#define MTP_PROPERTY_POSTAL_ADDRESS_BUSINESS_POSTAL_CODE    0xDD2B
+#define MTP_PROPERTY_POSTAL_ADDRESS_BUSINESS_COUNTRY        0xDD2C
+#define MTP_PROPERTY_POSTAL_ADDRESS_OTHER_FULL              0xDD2D
+#define MTP_PROPERTY_POSTAL_ADDRESS_OTHER_LINE_1            0xDD2E
+#define MTP_PROPERTY_POSTAL_ADDRESS_OTHER_LINE_2            0xDD2F
+#define MTP_PROPERTY_POSTAL_ADDRESS_OTHER_CITY              0xDD30
+#define MTP_PROPERTY_POSTAL_ADDRESS_OTHER_REGION            0xDD31
+#define MTP_PROPERTY_POSTAL_ADDRESS_OTHER_POSTAL_CODE       0xDD32
+#define MTP_PROPERTY_POSTAL_ADDRESS_OTHER_COUNTRY           0xDD33
+#define MTP_PROPERTY_ORGANIZATION_NAME                      0xDD34
+#define MTP_PROPERTY_PHONETIC_ORGANIZATION_NAME             0xDD35
+#define MTP_PROPERTY_ROLE                                   0xDD36
+#define MTP_PROPERTY_BIRTHDATE                              0xDD37
+#define MTP_PROPERTY_MESSAGE_TO                             0xDD40
+#define MTP_PROPERTY_MESSAGE_CC                             0xDD41
+#define MTP_PROPERTY_MESSAGE_BCC                            0xDD42
+#define MTP_PROPERTY_MESSAGE_READ                           0xDD43
+#define MTP_PROPERTY_MESSAGE_RECEIVED_TIME                  0xDD44
+#define MTP_PROPERTY_MESSAGE_SENDER                         0xDD45
+#define MTP_PROPERTY_ACTIVITY_BEGIN_TIME                    0xDD50
+#define MTP_PROPERTY_ACTIVITY_END_TIME                      0xDD51
+#define MTP_PROPERTY_ACTIVITY_LOCATION                      0xDD52
+#define MTP_PROPERTY_ACTIVITY_REQUIRED_ATTENDEES            0xDD54
+#define MTP_PROPERTY_ACTIVITY_OPTIONAL_ATTENDEES            0xDD55
+#define MTP_PROPERTY_ACTIVITY_RESOURCES                     0xDD56
+#define MTP_PROPERTY_ACTIVITY_ACCEPTED                      0xDD57
+#define MTP_PROPERTY_ACTIVITY_TENTATIVE                     0xDD58
+#define MTP_PROPERTY_ACTIVITY_DECLINED                      0xDD59
+#define MTP_PROPERTY_ACTIVITY_REMAINDER_TIME                0xDD5A
+#define MTP_PROPERTY_ACTIVITY_OWNER                         0xDD5B
+#define MTP_PROPERTY_ACTIVITY_STATUS                        0xDD5C
+#define MTP_PROPERTY_OWNER                                  0xDD5D
+#define MTP_PROPERTY_EDITOR                                 0xDD5E
+#define MTP_PROPERTY_WEBMASTER                              0xDD5F
+#define MTP_PROPERTY_URL_SOURCE                             0xDD60
+#define MTP_PROPERTY_URL_DESTINATION                        0xDD61
+#define MTP_PROPERTY_TIME_BOOKMARK                          0xDD62
+#define MTP_PROPERTY_OBJECT_BOOKMARK                        0xDD63
+#define MTP_PROPERTY_BYTE_BOOKMARK                          0xDD64
+#define MTP_PROPERTY_LAST_BUILD_DATE                        0xDD70
+#define MTP_PROPERTY_TIME_TO_LIVE                           0xDD71
+#define MTP_PROPERTY_MEDIA_GUID                             0xDD72
+
+// MTP Device Property Codes
+#define MTP_DEVICE_PROPERTY_UNDEFINED                       0x5000
+#define MTP_DEVICE_PROPERTY_BATTERY_LEVEL                   0x5001
+#define MTP_DEVICE_PROPERTY_FUNCTIONAL_MODE                 0x5002
+#define MTP_DEVICE_PROPERTY_IMAGE_SIZE                      0x5003
+#define MTP_DEVICE_PROPERTY_COMPRESSION_SETTING             0x5004
+#define MTP_DEVICE_PROPERTY_WHITE_BALANCE                   0x5005
+#define MTP_DEVICE_PROPERTY_RGB_GAIN                        0x5006
+#define MTP_DEVICE_PROPERTY_F_NUMBER                        0x5007
+#define MTP_DEVICE_PROPERTY_FOCAL_LENGTH                    0x5008
+#define MTP_DEVICE_PROPERTY_FOCUS_DISTANCE                  0x5009
+#define MTP_DEVICE_PROPERTY_FOCUS_MODE                      0x500A
+#define MTP_DEVICE_PROPERTY_EXPOSURE_METERING_MODE          0x500B
+#define MTP_DEVICE_PROPERTY_FLASH_MODE                      0x500C
+#define MTP_DEVICE_PROPERTY_EXPOSURE_TIME                   0x500D
+#define MTP_DEVICE_PROPERTY_EXPOSURE_PROGRAM_MODE           0x500E
+#define MTP_DEVICE_PROPERTY_EXPOSURE_INDEX                  0x500F
+#define MTP_DEVICE_PROPERTY_EXPOSURE_BIAS_COMPENSATION      0x5010
+#define MTP_DEVICE_PROPERTY_DATETIME                        0x5011
+#define MTP_DEVICE_PROPERTY_CAPTURE_DELAY                   0x5012
+#define MTP_DEVICE_PROPERTY_STILL_CAPTURE_MODE              0x5013
+#define MTP_DEVICE_PROPERTY_CONTRAST                        0x5014
+#define MTP_DEVICE_PROPERTY_SHARPNESS                       0x5015
+#define MTP_DEVICE_PROPERTY_DIGITAL_ZOOM                    0x5016
+#define MTP_DEVICE_PROPERTY_EFFECT_MODE                     0x5017
+#define MTP_DEVICE_PROPERTY_BURST_NUMBER                    0x5018
+#define MTP_DEVICE_PROPERTY_BURST_INTERVAL                  0x5019
+#define MTP_DEVICE_PROPERTY_TIMELAPSE_NUMBER                0x501A
+#define MTP_DEVICE_PROPERTY_TIMELAPSE_INTERVAL              0x501B
+#define MTP_DEVICE_PROPERTY_FOCUS_METERING_MODE             0x501C
+#define MTP_DEVICE_PROPERTY_UPLOAD_URL                      0x501D
+#define MTP_DEVICE_PROPERTY_ARTIST                          0x501E
+#define MTP_DEVICE_PROPERTY_COPYRIGHT_INFO                  0x501F
+#define MTP_DEVICE_PROPERTY_SYNCHRONIZATION_PARTNER         0xD401
+#define MTP_DEVICE_PROPERTY_DEVICE_FRIENDLY_NAME            0xD402
+#define MTP_DEVICE_PROPERTY_VOLUME                          0xD403
+#define MTP_DEVICE_PROPERTY_SUPPORTED_FORMATS_ORDERED       0xD404
+#define MTP_DEVICE_PROPERTY_DEVICE_ICON                     0xD405
+#define MTP_DEVICE_PROPERTY_PLAYBACK_RATE                   0xD410
+#define MTP_DEVICE_PROPERTY_PLAYBACK_OBJECT                 0xD411
+#define MTP_DEVICE_PROPERTY_PLAYBACK_CONTAINER_INDEX        0xD412
+#define MTP_DEVICE_PROPERTY_SESSION_INITIATOR_VERSION_INFO  0xD406
+#define MTP_DEVICE_PROPERTY_PERCEIVED_DEVICE_TYPE           0xD407
+
+// MTP Operation Codes
+#define MTP_OPERATION_GET_DEVICE_INFO                       0x1001
+#define MTP_OPERATION_OPEN_SESSION                          0x1002
+#define MTP_OPERATION_CLOSE_SESSION                         0x1003
+#define MTP_OPERATION_GET_STORAGE_IDS                       0x1004
+#define MTP_OPERATION_GET_STORAGE_INFO                      0x1005
+#define MTP_OPERATION_GET_NUM_OBJECTS                       0x1006
+#define MTP_OPERATION_GET_OBJECT_HANDLES                    0x1007
+#define MTP_OPERATION_GET_OBJECT_INFO                       0x1008
+#define MTP_OPERATION_GET_OBJECT                            0x1009
+#define MTP_OPERATION_GET_THUMB                             0x100A
+#define MTP_OPERATION_DELETE_OBJECT                         0x100B
+#define MTP_OPERATION_SEND_OBJECT_INFO                      0x100C
+#define MTP_OPERATION_SEND_OBJECT                           0x100D
+#define MTP_OPERATION_INITIATE_CAPTURE                      0x100E
+#define MTP_OPERATION_FORMAT_STORE                          0x100F
+#define MTP_OPERATION_RESET_DEVICE                          0x1010
+#define MTP_OPERATION_SELF_TEST                             0x1011
+#define MTP_OPERATION_SET_OBJECT_PROTECTION                 0x1012
+#define MTP_OPERATION_POWER_DOWN                            0x1013
+#define MTP_OPERATION_GET_DEVICE_PROP_DESC                  0x1014
+#define MTP_OPERATION_GET_DEVICE_PROP_VALUE                 0x1015
+#define MTP_OPERATION_SET_DEVICE_PROP_VALUE                 0x1016
+#define MTP_OPERATION_RESET_DEVICE_PROP_VALUE               0x1017
+#define MTP_OPERATION_TERMINATE_OPEN_CAPTURE                0x1018
+#define MTP_OPERATION_MOVE_OBJECT                           0x1019
+#define MTP_OPERATION_COPY_OBJECT                           0x101A
+#define MTP_OPERATION_GET_PARTIAL_OBJECT                    0x101B
+#define MTP_OPERATION_INITIATE_OPEN_CAPTURE                 0x101C
+#define MTP_OPERATION_GET_OBJECT_PROPS_SUPPORTED            0x9801
+#define MTP_OPERATION_GET_OBJECT_PROP_DESC                  0x9802
+#define MTP_OPERATION_GET_OBJECT_PROP_VALUE                 0x9803
+#define MTP_OPERATION_SET_OBJECT_PROP_VALUE                 0x9804
+#define MTP_OPERATION_GET_OBJECT_PROP_LIST                  0x9805
+#define MTP_OPERATION_SET_OBJECT_PROP_LIST                  0x9806
+#define MTP_OPERATION_GET_INTERDEPENDENT_PROP_DESC          0x9807
+#define MTP_OPERATION_SEND_OBJECT_PROP_LIST                 0x9808
+#define MTP_OPERATION_GET_OBJECT_REFERENCES                 0x9810
+#define MTP_OPERATION_SET_OBJECT_REFERENCES                 0x9811
+#define MTP_OPERATION_SKIP                                  0x9820
+
+// MTP Response Codes
+#define MTP_RESPONSE_UNDEFINED                                  0x2000
+#define MTP_RESPONSE_OK                                         0x2001
+#define MTP_RESPONSE_GENERAL_ERROR                              0x2002
+#define MTP_RESPONSE_SESSION_NOT_OPEN                           0x2003
+#define MTP_RESPONSE_INVALID_TRANSACTION_ID                     0x2004
+#define MTP_RESPONSE_OPERATION_NOT_SUPPORTED                    0x2005
+#define MTP_RESPONSE_PARAMETER_NOT_SUPPORTED                    0x2006
+#define MTP_RESPONSE_INCOMPLETE_TRANSFER                        0x2007
+#define MTP_RESPONSE_INVALID_STORAGE_ID                         0x2008
+#define MTP_RESPONSE_INVALID_OBJECT_HANDLE                      0x2009
+#define MTP_RESPONSE_DEVICE_PROP_NOT_SUPPORTED                  0x200A
+#define MTP_RESPONSE_INVALID_OBJECT_FORMAT_CODE                 0x200B
+#define MTP_RESPONSE_STORAGE_FULL                               0x200C
+#define MTP_RESPONSE_OBJECT_WRITE_PROTECTED                     0x200D
+#define MTP_RESPONSE_STORE_READ_ONLY                            0x200E
+#define MTP_RESPONSE_ACCESS_DENIED                              0x200F
+#define MTP_RESPONSE_NO_THUMBNAIL_PRESENT                       0x2010
+#define MTP_RESPONSE_SELF_TEST_FAILED                           0x2011
+#define MTP_RESPONSE_PARTIAL_DELETION                           0x2012
+#define MTP_RESPONSE_STORE_NOT_AVAILABLE                        0x2013
+#define MTP_RESPONSE_SPECIFICATION_BY_FORMAT_UNSUPPORTED        0x2014
+#define MTP_RESPONSE_NO_VALID_OBJECT_INFO                       0x2015
+#define MTP_RESPONSE_INVALID_CODE_FORMAT                        0x2016
+#define MTP_RESPONSE_UNKNOWN_VENDOR_CODE                        0x2017
+#define MTP_RESPONSE_CAPTURE_ALREADY_TERMINATED                 0x2018
+#define MTP_RESPONSE_DEVICE_BUSY                                0x2019
+#define MTP_RESPONSE_INVALID_PARENT_OBJECT                      0x201A
+#define MTP_RESPONSE_INVALID_DEVICE_PROP_FORMAT                 0x201B
+#define MTP_RESPONSE_INVALID_DEVICE_PROP_VALUE                  0x201C
+#define MTP_RESPONSE_INVALID_PARAMETER                          0x201D
+#define MTP_RESPONSE_SESSION_ALREADY_OPEN                       0x201E
+#define MTP_RESPONSE_TRANSACTION_CANCELLED                      0x201F
+#define MTP_RESPONSE_SPECIFICATION_OF_DESTINATION_UNSUPPORTED   0x2020
+#define MTP_RESPONSE_INVALID_OBJECT_PROP_CODE                   0xA801
+#define MTP_RESPONSE_INVALID_OBJECT_PROP_FORMAT                 0xA802
+#define MTP_RESPONSE_INVALID_OBJECT_PROP_VALUE                  0xA803
+#define MTP_RESPONSE_INVALID_OBJECT_REFERENCE                   0xA804
+#define MTP_RESPONSE_GROUP_NOT_SUPPORTED                        0xA805
+#define MTP_RESPONSE_INVALID_DATASET                            0xA806
+#define MTP_RESPONSE_SPECIFICATION_BY_GROUP_UNSUPPORTED         0xA807
+#define MTP_RESPONSE_SPECIFICATION_BY_DEPTH_UNSUPPORTED         0xA808
+#define MTP_RESPONSE_OBJECT_TOO_LARGE                           0xA809
+#define MTP_RESPONSE_OBJECT_PROP_NOT_SUPPORTED                  0xA80A
+
+// MTP Event Codes
+#define MTP_EVENT_UNDEFINED                         0x4000
+#define MTP_EVENT_CANCEL_TRANSACTION                0x4001
+#define MTP_EVENT_OBJECT_ADDED                      0x4002
+#define MTP_EVENT_OBJECT_REMOVED                    0x4003
+#define MTP_EVENT_STORE_ADDED                       0x4004
+#define MTP_EVENT_STORE_REMOVED                     0x4005
+#define MTP_EVENT_DEVICE_PROP_CHANGED               0x4006
+#define MTP_EVENT_OBJECT_INFO_CHANGED               0x4007
+#define MTP_EVENT_DEVICE_INFO_CHANGED               0x4008
+#define MTP_EVENT_REQUEST_OBJECT_TRANSFER           0x4009
+#define MTP_EVENT_STORE_FULL                        0x400A
+#define MTP_EVENT_DEVICE_RESET                      0x400B
+#define MTP_EVENT_STORAGE_INFO_CHANGED              0x400C
+#define MTP_EVENT_CAPTURE_COMPLETE                  0x400D
+#define MTP_EVENT_UNREPORTED_STATUS                 0x400E
+#define MTP_EVENT_OBJECT_PROP_CHANGED               0xC801
+#define MTP_EVENT_OBJECT_PROP_DESC_CHANGED          0xC802
+#define MTP_EVENT_OBJECT_REFERENCES_CHANGED         0xC803
+
+// Storage Type
+#define MTP_STORAGE_FIXED_ROM                       0x0001
+#define MTP_STORAGE_REMOVABLE_ROM                   0x0002
+#define MTP_STORAGE_FIXED_RAM                       0x0003
+#define MTP_STORAGE_REMOVABLE_RAM                   0x0004
+
+// Storage File System
+#define MTP_STORAGE_FILESYSTEM_FLAT                 0x0001
+#define MTP_STORAGE_FILESYSTEM_HIERARCHICAL         0x0002
+#define MTP_STORAGE_FILESYSTEM_DCF                  0x0003
+
+// Storage Access Capability
+#define MTP_STORAGE_READ_WRITE                      0x0000
+#define MTP_STORAGE_READ_ONLY_WITHOUT_DELETE        0x0001
+#define MTP_STORAGE_READ_ONLY_WITH_DELETE           0x0002
+
+// Association Type
+#define MTP_ASSOCIATION_TYPE_UNDEFINED              0x0000
+#define MTP_ASSOCIATION_TYPE_GENERIC_FOLDER         0x0001
+
+#endif // _MTP_H
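
The response codes above are raw numeric constants; a decoder or logger typically maps them back to readable names. A minimal standalone C++ sketch of that mapping follows (the mtpResponseName helper and the subset of codes chosen are illustrative, not part of the header above).

// Standalone illustration only -- not part of the MTP header patch above.
#include <cstdint>
#include <cstdio>
#include <unordered_map>

// Hypothetical helper: map a few of the MTP response codes defined above to
// readable names for log output.
static const char* mtpResponseName(uint16_t code) {
    static const std::unordered_map<uint16_t, const char*> kNames = {
        {0x200C, "STORAGE_FULL"},
        {0x200F, "ACCESS_DENIED"},
        {0x2019, "DEVICE_BUSY"},
        {0xA809, "OBJECT_TOO_LARGE"},
    };
    auto it = kNames.find(code);
    return it != kNames.end() ? it->second : "UNKNOWN_RESPONSE";
}

int main() {
    printf("0x200C -> %s\n", mtpResponseName(0x200C));  // prints STORAGE_FULL
    return 0;
}
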
diff --git a/services/audioflinger/A2dpAudioInterface.cpp b/services/audioflinger/A2dpAudioInterface.cpp
index 995e31c..d926cb1 100644
--- a/services/audioflinger/A2dpAudioInterface.cpp
+++ b/services/audioflinger/A2dpAudioInterface.cpp
@@ -23,10 +23,13 @@
 
 #include "A2dpAudioInterface.h"
 #include "audio/liba2dp.h"
-
+#include <hardware_legacy/power.h>
 
 namespace android {
 
+static const char *sA2dpWakeLock = "A2dpOutputStream";
+#define MAX_WRITE_RETRIES  5
+
 // ----------------------------------------------------------------------------
 
 //AudioHardwareInterface* A2dpAudioInterface::createA2dpInterface()
@@ -257,52 +260,74 @@
     if (pRate) *pRate = lRate;
 
     mDevice = device;
+    mBufferDurationUs = ((bufferSize() * 1000) / frameSize() / sampleRate()) * 1000;
     return NO_ERROR;
 }
 
 A2dpAudioInterface::A2dpAudioStreamOut::~A2dpAudioStreamOut()
 {
     LOGV("A2dpAudioStreamOut destructor");
-    standby();
     close();
     LOGV("A2dpAudioStreamOut destructor returning from close()");
 }
 
 ssize_t A2dpAudioInterface::A2dpAudioStreamOut::write(const void* buffer, size_t bytes)
 {
-    Mutex::Autolock lock(mLock);
-
-    size_t remaining = bytes;
     status_t status = -1;
+    {
+        Mutex::Autolock lock(mLock);
 
-    if (!mBluetoothEnabled || mClosing || mSuspended) {
-        LOGV("A2dpAudioStreamOut::write(), but bluetooth disabled \
-               mBluetoothEnabled %d, mClosing %d, mSuspended %d",
-                mBluetoothEnabled, mClosing, mSuspended);
-        goto Error;
-    }
+        size_t remaining = bytes;
 
-    status = init();
-    if (status < 0)
-        goto Error;
-
-    while (remaining > 0) {
-        status = a2dp_write(mData, buffer, remaining);
-        if (status <= 0) {
-            LOGE("a2dp_write failed err: %d\n", status);
+        if (!mBluetoothEnabled || mClosing || mSuspended) {
+            LOGV("A2dpAudioStreamOut::write(), but bluetooth disabled \
+                   mBluetoothEnabled %d, mClosing %d, mSuspended %d",
+                    mBluetoothEnabled, mClosing, mSuspended);
             goto Error;
         }
-        remaining -= status;
-        buffer = ((char *)buffer) + status;
+
+        if (mStandby) {
+            acquire_wake_lock (PARTIAL_WAKE_LOCK, sA2dpWakeLock);
+            mStandby = false;
+            mLastWriteTime = systemTime();
+        }
+
+        status = init();
+        if (status < 0)
+            goto Error;
+
+        int retries = MAX_WRITE_RETRIES;
+        while (remaining > 0 && retries) {
+            status = a2dp_write(mData, buffer, remaining);
+            if (status < 0) {
+                LOGE("a2dp_write failed err: %d\n", status);
+                goto Error;
+            }
+            if (status == 0) {
+                retries--;
+            }
+            remaining -= status;
+            buffer = (char *)buffer + status;
+        }
+
+        // if the A2DP sink runs abnormally fast, sleep a little so that the audioflinger
+        // mixer thread does not spin and starve other threads.
+        // NOTE: It is likely that the A2DP headset is being disconnected
+        nsecs_t now = systemTime();
+        if ((uint32_t)ns2us(now - mLastWriteTime) < (mBufferDurationUs >> 2)) {
+            LOGV("A2DP sink runs too fast");
+            usleep(mBufferDurationUs - (uint32_t)ns2us(now - mLastWriteTime));
+        }
+        mLastWriteTime = now;
+        return bytes;
+
     }
-
-    mStandby = false;
-
-    return bytes;
-
 Error:
+
+    standby();
+
     // Simulate audio output timing in case of error
-    usleep(((bytes * 1000 )/ frameSize() / sampleRate()) * 1000);
+    usleep(mBufferDurationUs);
 
     return status;
 }
@@ -324,19 +349,22 @@
 
 status_t A2dpAudioInterface::A2dpAudioStreamOut::standby()
 {
-    int result = 0;
-
-    if (mClosing) {
-        LOGV("Ignore standby, closing");
-        return result;
-    }
-
     Mutex::Autolock lock(mLock);
+    return standby_l();
+}
+
+status_t A2dpAudioInterface::A2dpAudioStreamOut::standby_l()
+{
+    int result = NO_ERROR;
 
     if (!mStandby) {
-        result = a2dp_stop(mData);
-        if (result == 0)
-            mStandby = true;
+        LOGV_IF(mClosing || !mBluetoothEnabled, "Standby skip stop: closing %d enabled %d",
+                mClosing, mBluetoothEnabled);
+        if (!mClosing && mBluetoothEnabled) {
+            result = a2dp_stop(mData);
+        }
+        release_wake_lock(sA2dpWakeLock);
+        mStandby = true;
     }
 
     return result;
@@ -362,6 +390,9 @@
     key = String8("closing");
     if (param.get(key, value) == NO_ERROR) {
         mClosing = (value == "true");
+        if (mClosing) {
+            standby();
+        }
         param.remove(key);
     }
     key = AudioParameter::keyRouting;
@@ -444,6 +475,7 @@
 
 status_t A2dpAudioInterface::A2dpAudioStreamOut::close_l()
 {
+    standby_l();
     if (mData) {
         LOGV("A2dpAudioStreamOut::close_l() calling a2dp_cleanup(mData)");
         a2dp_cleanup(mData);
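
The write() path above now derives mBufferDurationUs from the stream geometry and sleeps when a2dp_write() returns much faster than that nominal duration, so the mixer thread does not spin while the headset is disconnecting. A self-contained sketch of the same pacing arithmetic, with assumed example values for buffer size, frame size and sample rate:

// Standalone sketch of the write-pacing idea; names and numbers are
// illustrative, not the A2dpAudioStreamOut members.
#include <chrono>
#include <cstdio>
#include <thread>

int main() {
    using namespace std::chrono;
    // Assumed example stream geometry: 4096-byte buffer, 4-byte frames, 44100 Hz.
    const long long bufferSize = 4096, frameSize = 4, sampleRate = 44100;
    const long long bufferDurationUs = ((bufferSize * 1000) / frameSize / sampleRate) * 1000;

    auto lastWriteTime = steady_clock::now();
    // ... a write to the sink would normally happen here ...
    auto now = steady_clock::now();
    long long elapsedUs = duration_cast<microseconds>(now - lastWriteTime).count();
    if (elapsedUs < bufferDurationUs / 4) {  // sink consumed the buffer suspiciously fast
        std::this_thread::sleep_for(microseconds(bufferDurationUs - elapsedUs));
    }
    printf("nominal buffer duration: %lld us\n", bufferDurationUs);
    return 0;
}
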
diff --git a/services/audioflinger/A2dpAudioInterface.h b/services/audioflinger/A2dpAudioInterface.h
index 48154f9..dbe2c6a 100644
--- a/services/audioflinger/A2dpAudioInterface.h
+++ b/services/audioflinger/A2dpAudioInterface.h
@@ -103,6 +103,7 @@
                 status_t    setAddress(const char* address);
                 status_t    setBluetoothEnabled(bool enabled);
                 status_t    setSuspended(bool onOff);
+                status_t    standby_l();
 
     private:
                 int         mFd;
@@ -116,6 +117,8 @@
                 uint32_t    mDevice;
                 bool        mClosing;
                 bool        mSuspended;
+                nsecs_t     mLastWriteTime;
+                uint32_t    mBufferDurationUs;
     };
 
     friend class A2dpAudioStreamOut;
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index cd9b07e..51b5947 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -343,7 +343,7 @@
             lSessionId = *sessionId;
         } else {
             // if no audio session id is provided, create one here
-            lSessionId = nextUniqueId();
+            lSessionId = nextUniqueId_l();
             if (sessionId != NULL) {
                 *sessionId = lSessionId;
             }
@@ -3699,7 +3699,7 @@
         if (sessionId != NULL && *sessionId != AudioSystem::SESSION_OUTPUT_MIX) {
             lSessionId = *sessionId;
         } else {
-            lSessionId = nextUniqueId();
+            lSessionId = nextUniqueId_l();
             if (sessionId != NULL) {
                 *sessionId = lSessionId;
             }
@@ -4300,7 +4300,7 @@
 
     mHardwareStatus = AUDIO_HW_IDLE;
     if (output != 0) {
-        int id = nextUniqueId();
+        int id = nextUniqueId_l();
         if ((flags & AudioSystem::OUTPUT_FLAG_DIRECT) ||
             (format != AudioSystem::PCM_16_BIT) ||
             (channels != AudioSystem::CHANNEL_OUT_STEREO)) {
@@ -4348,7 +4348,7 @@
         return 0;
     }
 
-    int id = nextUniqueId();
+    int id = nextUniqueId_l();
     DuplicatingThread *thread = new DuplicatingThread(this, thread1, id);
     thread->addOutputTrack(thread2);
     mPlaybackThreads.add(id, thread);
@@ -4473,7 +4473,7 @@
     }
 
     if (input != 0) {
-        int id = nextUniqueId();
+        int id = nextUniqueId_l();
          // Start record thread
         thread = new RecordThread(this, input, reqSamplingRate, reqChannels, id);
         mRecordThreads.add(id, thread);
@@ -4543,7 +4543,8 @@
 
 int AudioFlinger::newAudioSessionId()
 {
-    return nextUniqueId();
+    AutoMutex _l(mLock);
+    return nextUniqueId_l();
 }
 
 // checkPlaybackThread_l() must be called with AudioFlinger::mLock held
@@ -4578,9 +4579,10 @@
     return thread;
 }
 
-int AudioFlinger::nextUniqueId()
+// nextUniqueId_l() must be called with AudioFlinger::mLock held
+int AudioFlinger::nextUniqueId_l()
 {
-    return android_atomic_inc(&mNextUniqueId);
+    return mNextUniqueId++;
 }
 
 // ----------------------------------------------------------------------------
@@ -4967,7 +4969,7 @@
         LOGV("createEffect_l() got effect %p on chain %p", effect == 0 ? 0 : effect.get(), chain.get());
 
         if (effect == 0) {
-            int id = mAudioFlinger->nextUniqueId();
+            int id = mAudioFlinger->nextUniqueId_l();
             // Check CPU and memory usage
             lStatus = AudioSystem::registerEffect(desc, mId, chain->strategy(), sessionId, id);
             if (lStatus != NO_ERROR) {
@@ -5808,7 +5810,8 @@
 const uint32_t AudioFlinger::EffectModule::sModeConvTable[] = {
     AUDIO_MODE_NORMAL,   // AudioSystem::MODE_NORMAL
     AUDIO_MODE_RINGTONE, // AudioSystem::MODE_RINGTONE
-    AUDIO_MODE_IN_CALL   // AudioSystem::MODE_IN_CALL
+    AUDIO_MODE_IN_CALL,  // AudioSystem::MODE_IN_CALL
+    AUDIO_MODE_IN_CALL   // AudioSystem::MODE_IN_COMMUNICATION, same conversion as for MODE_IN_CALL
 };
 
 int AudioFlinger::EffectModule::modeAudioSystemToEffectApi(uint32_t mode)
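
The hunks above replace the lock-free android_atomic_inc() with a plain increment and rename the method with the _l suffix, the convention for "caller must hold mLock". A standalone sketch of that convention (class and member names are illustrative, not AudioFlinger's):

// Standalone sketch of the "_l suffix" locking convention.
#include <cstdio>
#include <mutex>

class IdAllocator {
public:
    int newId() {                          // public entry point: acquires the lock itself
        std::lock_guard<std::mutex> _l(mLock);
        return nextUniqueId_l();
    }
private:
    int nextUniqueId_l() {                 // _l: must be called with mLock already held
        return mNextUniqueId++;
    }
    std::mutex mLock;
    int mNextUniqueId = 1;
};

int main() {
    IdAllocator alloc;
    int first = alloc.newId();
    int second = alloc.newId();
    printf("%d %d\n", first, second);      // prints "1 2"
    return 0;
}
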
diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h
index 5917632..f0ef867 100644
--- a/services/audioflinger/AudioFlinger.h
+++ b/services/audioflinger/AudioFlinger.h
@@ -785,7 +785,7 @@
               float streamVolumeInternal(int stream) const { return mStreamTypes[stream].volume; }
               void audioConfigChanged_l(int event, int ioHandle, void *param2);
 
-              int  nextUniqueId();
+              int  nextUniqueId_l();
               status_t moveEffectChain_l(int session,
                                      AudioFlinger::PlaybackThread *srcThread,
                                      AudioFlinger::PlaybackThread *dstThread,
diff --git a/services/audioflinger/AudioHardwareInterface.cpp b/services/audioflinger/AudioHardwareInterface.cpp
index 9a4a7f9..f58e4c0 100644
--- a/services/audioflinger/AudioHardwareInterface.cpp
+++ b/services/audioflinger/AudioHardwareInterface.cpp
@@ -48,14 +48,15 @@
     "CURRENT",
     "NORMAL",
     "RINGTONE",
-    "IN_CALL"
+    "IN_CALL",
+    "IN_COMMUNICATION"
 };
 
 static const char* routeNone = "NONE";
 
 static const char* displayMode(int mode)
 {
-    if ((mode < -2) || (mode > 2))
+    if ((mode < AudioSystem::MODE_INVALID) || (mode >= AudioSystem::NUM_MODES))
         return routingModeStrings[0];
     return routingModeStrings[mode+3];
 }
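
The new bounds check indexes the string table with mode + 3, so it is valid exactly for MODE_INVALID <= mode < NUM_MODES. A standalone sketch of that lookup follows; the enum values and the first two table entries are assumptions chosen to match the +3 offset, not copied from AudioSystem.h:

// Standalone sketch of the bounded mode-to-string lookup.
#include <cstdio>

enum { MODE_INVALID = -2, MODE_CURRENT = -1, MODE_NORMAL = 0,
       MODE_RINGTONE, MODE_IN_CALL, MODE_IN_COMMUNICATION, NUM_MODES };

static const char* kModeStrings[] = {
    "OUT_OF_RANGE", "INVALID", "CURRENT", "NORMAL", "RINGTONE", "IN_CALL", "IN_COMMUNICATION"
};

static const char* displayMode(int mode) {
    if (mode < MODE_INVALID || mode >= NUM_MODES)
        return kModeStrings[0];            // out-of-range modes map to a sentinel string
    return kModeStrings[mode + 3];         // table starts 3 entries before MODE_NORMAL
}

int main() {
    printf("%s %s\n", displayMode(MODE_IN_COMMUNICATION), displayMode(99));
    return 0;
}
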
diff --git a/services/audioflinger/AudioPolicyManagerBase.cpp b/services/audioflinger/AudioPolicyManagerBase.cpp
index 8d16ab4..eeca7ab 100644
--- a/services/audioflinger/AudioPolicyManagerBase.cpp
+++ b/services/audioflinger/AudioPolicyManagerBase.cpp
@@ -235,7 +235,7 @@
 
     // if leaving call state, handle special case of active streams
     // pertaining to sonification strategy see handleIncallSonification()
-    if (mPhoneState == AudioSystem::MODE_IN_CALL) {
+    if (isInCall()) {
         LOGV("setPhoneState() in call state management: new state is %d", state);
         for (int stream = 0; stream < AudioSystem::NUM_STREAM_TYPES; stream++) {
             handleIncallSonification(stream, false, true);
@@ -248,16 +248,21 @@
     bool force = false;
 
     // are we entering or starting a call
-    if ((oldState != AudioSystem::MODE_IN_CALL) && (state == AudioSystem::MODE_IN_CALL)) {
+    if (!isStateInCall(oldState) && isStateInCall(state)) {
         LOGV("  Entering call in setPhoneState()");
         // force routing command to audio hardware when starting a call
         // even if no device change is needed
         force = true;
-    } else if ((oldState == AudioSystem::MODE_IN_CALL) && (state != AudioSystem::MODE_IN_CALL)) {
+    } else if (isStateInCall(oldState) && !isStateInCall(state)) {
         LOGV("  Exiting call in setPhoneState()");
         // force routing command to audio hardware when exiting a call
         // even if no device change is needed
         force = true;
+    } else if (isStateInCall(state) && (state != oldState)) {
+        LOGV("  Switching between telephony and VoIP in setPhoneState()");
+        // force routing command to audio hardware when switching between telephony and VoIP
+        // even if no device change is needed
+        force = true;
     }
 
     // check for device and output changes triggered by new phone state
@@ -272,7 +277,7 @@
 
     // force routing command to audio hardware when ending call
     // even if no device change is needed
-    if (oldState == AudioSystem::MODE_IN_CALL && newDevice == 0) {
+    if (isStateInCall(oldState) && newDevice == 0) {
         newDevice = hwOutputDesc->device();
     }
 
@@ -280,7 +285,7 @@
     // immediately and delay the route change to avoid sending the ring tone
     // tail into the earpiece or headset.
     int delayMs = 0;
-    if (state == AudioSystem::MODE_IN_CALL && oldState == AudioSystem::MODE_RINGTONE) {
+    if (isStateInCall(state) && oldState == AudioSystem::MODE_RINGTONE) {
         // delay the device change command by twice the output latency to have some margin
         // and be sure that audio buffers not yet affected by the mute are out when
         // we actually apply the route change
@@ -293,7 +298,7 @@
 
     // if entering in call state, handle special case of active streams
     // pertaining to sonification strategy see handleIncallSonification()
-    if (state == AudioSystem::MODE_IN_CALL) {
+    if (isStateInCall(state)) {
         LOGV("setPhoneState() in call state management: new state is %d", state);
         // unmute the ringing tone after a sufficient delay if it was muted before
         // setting output device above
@@ -338,7 +343,9 @@
         break;
     case AudioSystem::FOR_MEDIA:
         if (config != AudioSystem::FORCE_HEADPHONES && config != AudioSystem::FORCE_BT_A2DP &&
-            config != AudioSystem::FORCE_WIRED_ACCESSORY && config != AudioSystem::FORCE_NONE) {
+            config != AudioSystem::FORCE_WIRED_ACCESSORY &&
+            config != AudioSystem::FORCE_ANALOG_DOCK &&
+            config != AudioSystem::FORCE_DIGITAL_DOCK && config != AudioSystem::FORCE_NONE) {
             LOGW("setForceUse() invalid config %d for FOR_MEDIA", config);
             return;
         }
@@ -354,7 +361,10 @@
         break;
     case AudioSystem::FOR_DOCK:
         if (config != AudioSystem::FORCE_NONE && config != AudioSystem::FORCE_BT_CAR_DOCK &&
-            config != AudioSystem::FORCE_BT_DESK_DOCK && config != AudioSystem::FORCE_WIRED_ACCESSORY) {
+            config != AudioSystem::FORCE_BT_DESK_DOCK &&
+            config != AudioSystem::FORCE_WIRED_ACCESSORY &&
+            config != AudioSystem::FORCE_ANALOG_DOCK &&
+            config != AudioSystem::FORCE_DIGITAL_DOCK) {
             LOGW("setForceUse() invalid config %d for FOR_DOCK", config);
         }
         forceVolumeReeval = true;
@@ -564,7 +574,7 @@
     setOutputDevice(output, getNewDevice(output));
 
     // handle special case for sonification while in call
-    if (mPhoneState == AudioSystem::MODE_IN_CALL) {
+    if (isInCall()) {
         handleIncallSonification(stream, true, false);
     }
 
@@ -589,7 +599,7 @@
     routing_strategy strategy = getStrategy((AudioSystem::stream_type)stream);
 
     // handle special case for sonification while in call
-    if (mPhoneState == AudioSystem::MODE_IN_CALL) {
+    if (isInCall()) {
         handleIncallSonification(stream, false, false);
     }
 
@@ -738,10 +748,8 @@
     AudioParameter param = AudioParameter();
     param.addInt(String8(AudioParameter::keyRouting), (int)inputDesc->mDevice);
 
-    // use Voice Recognition mode or not for this input based on input source
-    int vr_enabled = inputDesc->mInputSource == AUDIO_SOURCE_VOICE_RECOGNITION ? 1 : 0;
-    param.addInt(String8("vr_mode"), vr_enabled);
-    LOGV("AudioPolicyManager::startInput(%d), setting vr_mode to %d", inputDesc->mInputSource, vr_enabled);
+    param.addInt(String8(AudioParameter::keyInputSource), (int)inputDesc->mInputSource);
+    LOGV("AudioPolicyManager::startInput() input source = %d", inputDesc->mInputSource);
 
     mpClientInterface->setParameters(input, param.toString());
 
@@ -1344,6 +1352,7 @@
 
 void AudioPolicyManagerBase::closeA2dpOutputs()
 {
+
     LOGV("setDeviceConnectionState() closing A2DP and duplicated output!");
 
     if (mDuplicatedOutput != 0) {
@@ -1494,7 +1503,7 @@
     //      use device for strategy media
     // 4: the strategy DTMF is active on the hardware output:
     //      use device for strategy DTMF
-    if (mPhoneState == AudioSystem::MODE_IN_CALL ||
+    if (isInCall() ||
         outputDesc->isUsedByStrategy(STRATEGY_PHONE)) {
         device = getDeviceForStrategy(STRATEGY_PHONE, fromCache);
     } else if (outputDesc->isUsedByStrategy(STRATEGY_SONIFICATION)) {
@@ -1549,7 +1558,7 @@
 
     switch (strategy) {
     case STRATEGY_DTMF:
-        if (mPhoneState != AudioSystem::MODE_IN_CALL) {
+        if (!isInCall()) {
             // when off call, DTMF strategy follows the same rules as MEDIA strategy
             device = getDeviceForStrategy(STRATEGY_MEDIA, false);
             break;
@@ -1562,7 +1571,7 @@
         // of priority
         switch (mForceUse[AudioSystem::FOR_COMMUNICATION]) {
         case AudioSystem::FORCE_BT_SCO:
-            if (mPhoneState != AudioSystem::MODE_IN_CALL || strategy != STRATEGY_DTMF) {
+            if (!isInCall() || strategy != STRATEGY_DTMF) {
                 device = mAvailableOutputDevices & AudioSystem::DEVICE_OUT_BLUETOOTH_SCO_CARKIT;
                 if (device) break;
             }
@@ -1578,9 +1587,15 @@
             if (device) break;
             device = mAvailableOutputDevices & AudioSystem::DEVICE_OUT_WIRED_HEADSET;
             if (device) break;
+            device = mAvailableOutputDevices & AudioSystem::DEVICE_OUT_AUX_DIGITAL;
+            if (device) break;
+            device = mAvailableOutputDevices & AudioSystem::DEVICE_OUT_DGTL_DOCK_HEADSET;
+            if (device) break;
+            device = mAvailableOutputDevices & AudioSystem::DEVICE_OUT_ANLG_DOCK_HEADSET;
+            if (device) break;
 #ifdef WITH_A2DP
             // when not in a phone call, phone strategy should route STREAM_VOICE_CALL to A2DP
-            if (mPhoneState != AudioSystem::MODE_IN_CALL) {
+            if (!isInCall()) {
                 device = mAvailableOutputDevices & AudioSystem::DEVICE_OUT_BLUETOOTH_A2DP;
                 if (device) break;
                 device = mAvailableOutputDevices & AudioSystem::DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES;
@@ -1594,14 +1609,16 @@
             break;
 
         case AudioSystem::FORCE_SPEAKER:
-            if (mPhoneState != AudioSystem::MODE_IN_CALL || strategy != STRATEGY_DTMF) {
-                device = mAvailableOutputDevices & AudioSystem::DEVICE_OUT_BLUETOOTH_SCO_CARKIT;
-                if (device) break;
-            }
+            device = mAvailableOutputDevices & AudioSystem::DEVICE_OUT_AUX_DIGITAL;
+            if (device) break;
+            device = mAvailableOutputDevices & AudioSystem::DEVICE_OUT_DGTL_DOCK_HEADSET;
+            if (device) break;
+            device = mAvailableOutputDevices & AudioSystem::DEVICE_OUT_ANLG_DOCK_HEADSET;
+            if (device) break;
 #ifdef WITH_A2DP
             // when not in a phone call, phone strategy should route STREAM_VOICE_CALL to
             // A2DP speaker when forcing to speaker output
-            if (mPhoneState != AudioSystem::MODE_IN_CALL) {
+            if (!isInCall()) {
                 device = mAvailableOutputDevices & AudioSystem::DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER;
                 if (device) break;
             }
@@ -1618,7 +1635,7 @@
 
         // If incall, just select the STRATEGY_PHONE device: The rest of the behavior is handled by
         // handleIncallSonification().
-        if (mPhoneState == AudioSystem::MODE_IN_CALL) {
+        if (isInCall()) {
             device = getDeviceForStrategy(STRATEGY_PHONE, false);
             break;
         }
@@ -1630,13 +1647,19 @@
         // FALL THROUGH
 
     case STRATEGY_MEDIA: {
-        uint32_t device2 = mAvailableOutputDevices & AudioSystem::DEVICE_OUT_AUX_DIGITAL;
-        if (device2 == 0) {
-            device2 = mAvailableOutputDevices & AudioSystem::DEVICE_OUT_WIRED_HEADPHONE;
-        }
+        uint32_t device2 = mAvailableOutputDevices & AudioSystem::DEVICE_OUT_WIRED_HEADPHONE;
         if (device2 == 0) {
             device2 = mAvailableOutputDevices & AudioSystem::DEVICE_OUT_WIRED_HEADSET;
         }
+        if (device2 == 0) {
+            device2 = mAvailableOutputDevices & AudioSystem::DEVICE_OUT_AUX_DIGITAL;
+        }
+        if (device2 == 0) {
+            device2 = mAvailableOutputDevices & AudioSystem::DEVICE_OUT_DGTL_DOCK_HEADSET;
+        }
+        if (device2 == 0) {
+            device2 = mAvailableOutputDevices & AudioSystem::DEVICE_OUT_ANLG_DOCK_HEADSET;
+        }
 #ifdef WITH_A2DP
         if (mA2dpOutput != 0) {
             if (strategy == STRATEGY_SONIFICATION && !a2dpUsedForSonification()) {
@@ -1739,6 +1762,7 @@
     case AUDIO_SOURCE_DEFAULT:
     case AUDIO_SOURCE_MIC:
     case AUDIO_SOURCE_VOICE_RECOGNITION:
+    case AUDIO_SOURCE_VOICE_COMMUNICATION:
         if (mForceUse[AudioSystem::FOR_RECORD] == AudioSystem::FORCE_BT_SCO &&
             mAvailableInputDevices & AudioSystem::DEVICE_IN_BLUETOOTH_SCO_HEADSET) {
             device = AudioSystem::DEVICE_IN_BLUETOOTH_SCO_HEADSET;
@@ -1801,7 +1825,9 @@
         (AudioSystem::DEVICE_OUT_BLUETOOTH_A2DP |
         AudioSystem::DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES |
         AudioSystem::DEVICE_OUT_WIRED_HEADSET |
-        AudioSystem::DEVICE_OUT_WIRED_HEADPHONE)) &&
+        AudioSystem::DEVICE_OUT_WIRED_HEADPHONE |
+        AudioSystem::DEVICE_OUT_ANLG_DOCK_HEADSET |
+        AudioSystem::DEVICE_OUT_DGTL_DOCK_HEADSET)) &&
         (getStrategy((AudioSystem::stream_type)stream) == STRATEGY_SONIFICATION) &&
         streamDesc.mCanBeMuted) {
         volume *= SONIFICATION_HEADSET_VOLUME_FACTOR;
@@ -1960,6 +1986,16 @@
     }
 }
 
+bool AudioPolicyManagerBase::isInCall()
+{
+    return isStateInCall(mPhoneState);
+}
+
+bool AudioPolicyManagerBase::isStateInCall(int state) {
+    return ((state == AudioSystem::MODE_IN_CALL) ||
+            (state == AudioSystem::MODE_IN_COMMUNICATION));
+}
+
 bool AudioPolicyManagerBase::needsDirectOuput(AudioSystem::stream_type stream,
                                     uint32_t samplingRate,
                                     uint32_t format,
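
getDeviceForStrategy() above repeatedly applies the same pattern: test candidate devices in priority order against the available-device mask and stop at the first hit, with the dock and AUX digital outputs now inserted into that order. A standalone sketch of the pattern (the bit values are illustrative, not AudioSystem's):

// Standalone sketch of priority-ordered output device selection.
#include <cstdint>
#include <cstdio>

enum : uint32_t {
    DEVICE_OUT_WIRED_HEADPHONE   = 0x1,
    DEVICE_OUT_WIRED_HEADSET     = 0x2,
    DEVICE_OUT_AUX_DIGITAL       = 0x4,
    DEVICE_OUT_DGTL_DOCK_HEADSET = 0x8,
    DEVICE_OUT_ANLG_DOCK_HEADSET = 0x10,
    DEVICE_OUT_SPEAKER           = 0x20,
};

static uint32_t pickMediaDevice(uint32_t available) {
    const uint32_t priority[] = {
        DEVICE_OUT_WIRED_HEADPHONE, DEVICE_OUT_WIRED_HEADSET,
        DEVICE_OUT_AUX_DIGITAL, DEVICE_OUT_DGTL_DOCK_HEADSET,
        DEVICE_OUT_ANLG_DOCK_HEADSET, DEVICE_OUT_SPEAKER,
    };
    for (uint32_t candidate : priority) {
        if (available & candidate) return candidate;   // first available candidate wins
    }
    return 0;  // no suitable device
}

int main() {
    uint32_t avail = DEVICE_OUT_SPEAKER | DEVICE_OUT_ANLG_DOCK_HEADSET;
    printf("picked 0x%x\n", pickMediaDevice(avail));   // 0x10: analog dock beats speaker
    return 0;
}
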
diff --git a/services/camera/libcameraservice/Android.mk b/services/camera/libcameraservice/Android.mk
index 87975af..b52fc69 100644
--- a/services/camera/libcameraservice/Android.mk
+++ b/services/camera/libcameraservice/Android.mk
@@ -49,7 +49,8 @@
     libcutils \
     libmedia \
     libcamera_client \
-    libsurfaceflinger_client
+    libsurfaceflinger_client \
+    libgui
 
 LOCAL_MODULE:= libcameraservice
 
diff --git a/services/camera/libcameraservice/CameraHardwareStub.cpp b/services/camera/libcameraservice/CameraHardwareStub.cpp
index b3e0ee6..07b5a37 100644
--- a/services/camera/libcameraservice/CameraHardwareStub.cpp
+++ b/services/camera/libcameraservice/CameraHardwareStub.cpp
@@ -101,9 +101,9 @@
     mFakeCamera = 0; // paranoia
 }
 
-sp<IMemoryHeap> CameraHardwareStub::getPreviewHeap() const
+status_t CameraHardwareStub::setPreviewWindow(const sp<ANativeWindow>& buf)
 {
-    return mPreviewHeap;
+    return NO_ERROR;
 }
 
 sp<IMemoryHeap> CameraHardwareStub::getRawHeap() const
diff --git a/services/camera/libcameraservice/CameraHardwareStub.h b/services/camera/libcameraservice/CameraHardwareStub.h
index d3427ba..9b66a76 100644
--- a/services/camera/libcameraservice/CameraHardwareStub.h
+++ b/services/camera/libcameraservice/CameraHardwareStub.h
@@ -29,7 +29,7 @@
 
 class CameraHardwareStub : public CameraHardwareInterface {
 public:
-    virtual sp<IMemoryHeap> getPreviewHeap() const;
+    virtual status_t setPreviewWindow(const sp<ANativeWindow>& buf);
     virtual sp<IMemoryHeap> getRawHeap() const;
 
     virtual void        setCallbacks(notify_callback notify_cb,
diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp
index a64ddcf..3d8ca7a 100644
--- a/services/camera/libcameraservice/CameraService.cpp
+++ b/services/camera/libcameraservice/CameraService.cpp
@@ -26,11 +26,12 @@
 #include <binder/MemoryBase.h>
 #include <binder/MemoryHeapBase.h>
 #include <cutils/atomic.h>
+#include <cutils/properties.h>
+#include <gui/SurfaceTextureClient.h>
 #include <hardware/hardware.h>
 #include <media/AudioSystem.h>
 #include <media/mediaplayer.h>
 #include <surfaceflinger/ISurface.h>
-#include <ui/Overlay.h>
 #include <utils/Errors.h>
 #include <utils/Log.h>
 #include <utils/String16.h>
@@ -305,9 +306,9 @@
     mCameraId = cameraId;
     mCameraFacing = cameraFacing;
     mClientPid = clientPid;
-    mUseOverlay = mHardware->useOverlay();
     mMsgEnabled = 0;
-
+    mSurface = 0;
+    mPreviewWindow = 0;
     mHardware->setCallbacks(notifyCallback,
                             dataCallback,
                             dataCallbackTimestamp,
@@ -317,42 +318,21 @@
     enableMsgType(CAMERA_MSG_ERROR |
                   CAMERA_MSG_ZOOM |
                   CAMERA_MSG_FOCUS);
-    mOverlayW = 0;
-    mOverlayH = 0;
 
     // Callback is disabled by default
     mPreviewCallbackFlag = FRAME_CALLBACK_FLAG_NOOP;
     mOrientation = getOrientation(0, mCameraFacing == CAMERA_FACING_FRONT);
-    mOrientationChanged = false;
+    mPlayShutterSound = true;
     cameraService->setCameraBusy(cameraId);
     cameraService->loadSound();
     LOG1("Client::Client X (pid %d)", callingPid);
 }
 
-static void *unregister_surface(void *arg) {
-    ISurface *surface = (ISurface *)arg;
-    surface->unregisterBuffers();
-    IPCThreadState::self()->flushCommands();
-    return NULL;
-}
-
 // tear down the client
 CameraService::Client::~Client() {
     int callingPid = getCallingPid();
     LOG1("Client::~Client E (pid %d, this %p)", callingPid, this);
 
-    if (mSurface != 0 && !mUseOverlay) {
-        pthread_t thr;
-        // We unregister the buffers in a different thread because binder does
-        // not let us make sychronous transactions in a binder destructor (that
-        // is, upon our reaching a refcount of zero.)
-        pthread_create(&thr,
-                       NULL,  // attr
-                       unregister_surface,
-                       mSurface.get());
-        pthread_join(thr, NULL);
-    }
-
     // set mClientPid to let disconnet() tear down the hardware
     mClientPid = callingPid;
     disconnect();
@@ -466,9 +446,11 @@
     mHardware->cancelPicture();
     // Release the hardware resources.
     mHardware->release();
-    // Release the held overlay resources.
-    if (mUseOverlay) {
-        mOverlayRef = 0;
+
+    // Release the held ANativeWindow resources.
+    if (mPreviewWindow != 0) {
+        mPreviewWindow = 0;
+        mHardware->setPreviewWindow(mPreviewWindow);
     }
     mHardware.clear();
 
@@ -480,8 +462,8 @@
 
 // ----------------------------------------------------------------------------
 
-// set the ISurface that the preview will use
-status_t CameraService::Client::setPreviewDisplay(const sp<ISurface>& surface) {
+// set the Surface that the preview will use
+status_t CameraService::Client::setPreviewDisplay(const sp<Surface>& surface) {
     LOG1("setPreviewDisplay(%p) (pid %d)", surface.get(), getCallingPid());
     Mutex::Autolock lock(mLock);
     status_t result = checkPidAndHardware();
@@ -491,100 +473,64 @@
 
     // return if no change in surface.
     // asBinder() is safe on NULL (returns NULL)
-    if (surface->asBinder() == mSurface->asBinder()) {
+    if (getISurface(surface)->asBinder() == mSurface) {
         return result;
     }
 
     if (mSurface != 0) {
         LOG1("clearing old preview surface %p", mSurface.get());
-        if (mUseOverlay) {
-            // Force the destruction of any previous overlay
-            sp<Overlay> dummy;
-            mHardware->setOverlay(dummy);
-            mOverlayRef = 0;
-        } else {
-            mSurface->unregisterBuffers();
-        }
     }
-    mSurface = surface;
-    mOverlayRef = 0;
-    // If preview has been already started, set overlay or register preview
+    mSurface = getISurface(surface)->asBinder();
+    mPreviewWindow = surface;
+
+    // If preview has already been started, register preview
     // buffers now.
     if (mHardware->previewEnabled()) {
-        if (mUseOverlay) {
-            result = setOverlay();
-        } else if (mSurface != 0) {
-            result = registerPreviewBuffers();
+        if (mPreviewWindow != 0) {
+            native_window_set_buffers_transform(mPreviewWindow.get(),
+                                                mOrientation);
+            result = mHardware->setPreviewWindow(mPreviewWindow);
         }
     }
 
     return result;
 }
 
-status_t CameraService::Client::registerPreviewBuffers() {
-    int w, h;
-    CameraParameters params(mHardware->getParameters());
-    params.getPreviewSize(&w, &h);
+// set the SurfaceTexture that the preview will use
+status_t CameraService::Client::setPreviewTexture(
+        const sp<ISurfaceTexture>& surfaceTexture) {
+    LOG1("setPreviewTexture(%p) (pid %d)", surfaceTexture.get(),
+            getCallingPid());
+    Mutex::Autolock lock(mLock);
+    status_t result = checkPidAndHardware();
+    if (result != NO_ERROR) return result;
 
-    // FIXME: don't use a hardcoded format here.
-    ISurface::BufferHeap buffers(w, h, w, h,
-                                 HAL_PIXEL_FORMAT_YCrCb_420_SP,
-                                 mOrientation,
-                                 0,
-                                 mHardware->getPreviewHeap());
-
-    status_t result = mSurface->registerBuffers(buffers);
-    if (result != NO_ERROR) {
-        LOGE("registerBuffers failed with status %d", result);
-    }
-    return result;
-}
-
-status_t CameraService::Client::setOverlay() {
-    int w, h;
-    CameraParameters params(mHardware->getParameters());
-    params.getPreviewSize(&w, &h);
-
-    if (w != mOverlayW || h != mOverlayH || mOrientationChanged) {
-        // Force the destruction of any previous overlay
-        sp<Overlay> dummy;
-        mHardware->setOverlay(dummy);
-        mOverlayRef = 0;
-        mOrientationChanged = false;
-    }
-
-    status_t result = NO_ERROR;
-    if (mSurface == 0) {
-        result = mHardware->setOverlay(NULL);
-    } else {
-        if (mOverlayRef == 0) {
-            // FIXME:
-            // Surfaceflinger may hold onto the previous overlay reference for some
-            // time after we try to destroy it. retry a few times. In the future, we
-            // should make the destroy call block, or possibly specify that we can
-            // wait in the createOverlay call if the previous overlay is in the
-            // process of being destroyed.
-            for (int retry = 0; retry < 50; ++retry) {
-                mOverlayRef = mSurface->createOverlay(w, h, OVERLAY_FORMAT_DEFAULT,
-                                                      mOrientation);
-                if (mOverlayRef != 0) break;
-                LOGW("Overlay create failed - retrying");
-                usleep(20000);
-            }
-            if (mOverlayRef == 0) {
-                LOGE("Overlay Creation Failed!");
-                return -EINVAL;
-            }
-            result = mHardware->setOverlay(new Overlay(mOverlayRef));
-        }
-    }
-    if (result != NO_ERROR) {
-        LOGE("mHardware->setOverlay() failed with status %d\n", result);
+    // return if no change in surface.
+    // asBinder() is safe on NULL (returns NULL)
+    if (surfaceTexture->asBinder() == mSurface) {
         return result;
     }
 
-    mOverlayW = w;
-    mOverlayH = h;
+    if (mSurface != 0) {
+        LOG1("clearing old preview surface %p", mSurface.get());
+    }
+    mSurface = surfaceTexture->asBinder();
+    if (surfaceTexture != 0) {
+        mPreviewWindow = new SurfaceTextureClient(surfaceTexture);
+    } else {
+        mPreviewWindow = 0;
+    }
+
+    // If preview has already been started, register preview
+    // buffers now.
+    if (mHardware->previewEnabled()) {
+        // XXX: What if the new preview window is 0?
+        if (mPreviewWindow != 0) {
+            native_window_set_buffers_transform(mPreviewWindow.get(),
+                                                mOrientation);
+            result = mHardware->setPreviewWindow(mPreviewWindow);
+        }
+    }
 
     return result;
 }
@@ -597,16 +543,10 @@
     if (checkPidAndHardware() != NO_ERROR) return;
 
     mPreviewCallbackFlag = callback_flag;
-
-    // If we don't use overlay, we always need the preview frame for display.
-    // If we do use overlay, we only need the preview frame if the user
-    // wants the data.
-    if (mUseOverlay) {
-        if(mPreviewCallbackFlag & FRAME_CALLBACK_FLAG_ENABLE_MASK) {
-            enableMsgType(CAMERA_MSG_PREVIEW_FRAME);
-        } else {
-            disableMsgType(CAMERA_MSG_PREVIEW_FRAME);
-        }
+    if (mPreviewCallbackFlag & FRAME_CALLBACK_FLAG_ENABLE_MASK) {
+        enableMsgType(CAMERA_MSG_PREVIEW_FRAME);
+    } else {
+        disableMsgType(CAMERA_MSG_PREVIEW_FRAME);
     }
 }
 
@@ -631,14 +571,14 @@
 
     switch(mode) {
         case CAMERA_PREVIEW_MODE:
-            if (mSurface == 0) {
+            if (mSurface == 0 && mPreviewWindow == 0) {
                 LOG1("mSurface is not set yet.");
                 // still able to start preview in this case.
             }
             return startPreviewMode();
         case CAMERA_RECORDING_MODE:
-            if (mSurface == 0) {
-                LOGE("mSurface must be set before startRecordingMode.");
+            if (mSurface == 0 && mPreviewWindow == 0) {
+                LOGE("mSurface or mPreviewWindow must be set before startRecordingMode.");
                 return INVALID_OPERATION;
             }
             return startRecordingMode();
@@ -656,25 +596,13 @@
         return NO_ERROR;
     }
 
-    if (mUseOverlay) {
-        // If preview display has been set, set overlay now.
-        if (mSurface != 0) {
-            result = setOverlay();
-        }
-        if (result != NO_ERROR) return result;
-        result = mHardware->startPreview();
-    } else {
-        enableMsgType(CAMERA_MSG_PREVIEW_FRAME);
-        result = mHardware->startPreview();
-        if (result != NO_ERROR) return result;
-        // If preview display has been set, register preview buffers now.
-        if (mSurface != 0) {
-           // Unregister here because the surface may be previously registered
-           // with the raw (snapshot) heap.
-           mSurface->unregisterBuffers();
-           result = registerPreviewBuffers();
-        }
+    if (mPreviewWindow != 0) {
+        native_window_set_buffers_transform(mPreviewWindow.get(),
+                mOrientation);
     }
+    mHardware->setPreviewWindow(mPreviewWindow);
+    result = mHardware->startPreview();
+
     return result;
 }
 
@@ -711,13 +639,10 @@
     Mutex::Autolock lock(mLock);
     if (checkPidAndHardware() != NO_ERROR) return;
 
+
     disableMsgType(CAMERA_MSG_PREVIEW_FRAME);
     mHardware->stopPreview();
 
-    if (mSurface != 0 && !mUseOverlay) {
-        mSurface->unregisterBuffers();
-    }
-
     mPreviewBuffer.clear();
 }
 
@@ -741,6 +666,30 @@
     mHardware->releaseRecordingFrame(mem);
 }
 
+int32_t CameraService::Client::getNumberOfVideoBuffers() const {
+    LOG1("getNumberOfVideoBuffers");
+    Mutex::Autolock lock(mLock);
+    if (checkPidAndHardware() != NO_ERROR) return 0;
+    return mHardware->getNumberOfVideoBuffers();
+}
+
+sp<IMemory> CameraService::Client::getVideoBuffer(int32_t index) const {
+    LOG1("getVideoBuffer: %d", index);
+    Mutex::Autolock lock(mLock);
+    if (checkPidAndHardware() != NO_ERROR) return 0;
+    return mHardware->getVideoBuffer(index);
+}
+
+status_t CameraService::Client::storeMetaDataInBuffers(bool enabled)
+{
+    LOG1("storeMetaDataInBuffers: %s", enabled? "true": "false");
+    Mutex::Autolock lock(mLock);
+    if (checkPidAndHardware() != NO_ERROR) {
+        return UNKNOWN_ERROR;
+    }
+    return mHardware->storeMetaDataInBuffers(enabled);
+}
+
 bool CameraService::Client::previewEnabled() {
     LOG1("previewEnabled (pid %d)", getCallingPid());
 
@@ -815,6 +764,35 @@
     return params;
 }
 
+// enable shutter sound
+status_t CameraService::Client::enableShutterSound(bool enable) {
+    LOG1("enableShutterSound (pid %d)", getCallingPid());
+
+    status_t result = checkPidAndHardware();
+    if (result != NO_ERROR) return result;
+
+    if (enable) {
+        mPlayShutterSound = true;
+        return OK;
+    }
+
+    // Disabling shutter sound may not be allowed. In that case only
+    // allow the mediaserver process to disable the sound.
+    char value[PROPERTY_VALUE_MAX];
+    property_get("ro.camera.sound.forced", value, "0");
+    if (strcmp(value, "0") != 0) {
+        // Disabling shutter sound is not allowed. Deny if the current
+        // process is not mediaserver.
+        if (getCallingPid() != getpid()) {
+            LOGE("Failed to disable shutter sound. Permission denied (pid %d)", getCallingPid());
+            return PERMISSION_DENIED;
+        }
+    }
+
+    mPlayShutterSound = false;
+    return OK;
+}
+
 status_t CameraService::Client::sendCommand(int32_t cmd, int32_t arg1, int32_t arg2) {
     LOG1("sendCommand (pid %d)", getCallingPid());
     int orientation;
@@ -833,9 +811,22 @@
 
         if (mOrientation != orientation) {
             mOrientation = orientation;
-            if (mOverlayRef != 0) mOrientationChanged = true;
         }
         return OK;
+    } else if (cmd == CAMERA_CMD_ENABLE_SHUTTER_SOUND) {
+        switch (arg1) {
+            case 0:
+                enableShutterSound(false);
+                break;
+            case 1:
+                enableShutterSound(true);
+                break;
+            default:
+                return BAD_VALUE;
+        }
+        return OK;
+    } else if (cmd == CAMERA_CMD_PLAY_RECORDING_SOUND) {
+        mCameraService->playSound(SOUND_RECORDING);
     }
 
     return mHardware->sendCommand(cmd, arg1, arg2);
@@ -996,11 +987,8 @@
 // "size" is the width and height of yuv picture for registerBuffer.
 // If it is NULL, use the picture size from parameters.
 void CameraService::Client::handleShutter(image_rect_type *size) {
-    mCameraService->playSound(SOUND_SHUTTER);
-
-    // Screen goes black after the buffer is unregistered.
-    if (mSurface != 0 && !mUseOverlay) {
-        mSurface->unregisterBuffers();
+    if (mPlayShutterSound) {
+        mCameraService->playSound(SOUND_SHUTTER);
     }
 
     sp<ICameraClient> c = mCameraClient;
@@ -1011,29 +999,6 @@
     }
     disableMsgType(CAMERA_MSG_SHUTTER);
 
-    // It takes some time before yuvPicture callback to be called.
-    // Register the buffer for raw image here to reduce latency.
-    if (mSurface != 0 && !mUseOverlay) {
-        int w, h;
-        CameraParameters params(mHardware->getParameters());
-        if (size == NULL) {
-            params.getPictureSize(&w, &h);
-        } else {
-            w = size->width;
-            h = size->height;
-            w &= ~1;
-            h &= ~1;
-            LOG1("Snapshot image width=%d, height=%d", w, h);
-        }
-        // FIXME: don't use hardcoded format constants here
-        ISurface::BufferHeap buffers(w, h, w, h,
-            HAL_PIXEL_FORMAT_YCrCb_420_SP, mOrientation, 0,
-            mHardware->getRawHeap());
-
-        mSurface->registerBuffers(buffers);
-        IPCThreadState::self()->flushCommands();
-    }
-
     mLock.unlock();
 }
 
@@ -1043,12 +1008,6 @@
     size_t size;
     sp<IMemoryHeap> heap = mem->getMemory(&offset, &size);
 
-    if (!mUseOverlay) {
-        if (mSurface != 0) {
-            mSurface->postBuffer(offset);
-        }
-    }
-
     // local copy of the callback flags
     int flags = mPreviewCallbackFlag;
 
@@ -1069,9 +1028,7 @@
         mPreviewCallbackFlag &= ~(FRAME_CALLBACK_FLAG_ONE_SHOT_MASK |
                                   FRAME_CALLBACK_FLAG_COPY_OUT_MASK |
                                   FRAME_CALLBACK_FLAG_ENABLE_MASK);
-        if (mUseOverlay) {
-            disableMsgType(CAMERA_MSG_PREVIEW_FRAME);
-        }
+        disableMsgType(CAMERA_MSG_PREVIEW_FRAME);
     }
 
     if (c != 0) {
@@ -1108,11 +1065,6 @@
     size_t size;
     sp<IMemoryHeap> heap = mem->getMemory(&offset, &size);
 
-    // Put the YUV version of the snapshot in the preview display.
-    if (mSurface != 0 && !mUseOverlay) {
-        mSurface->postBuffer(offset);
-    }
-
     sp<ICameraClient> c = mCameraClient;
     mLock.unlock();
     if (c != 0) {
@@ -1292,4 +1244,12 @@
     return NO_ERROR;
 }
 
+sp<ISurface> CameraService::getISurface(const sp<Surface>& surface) {
+    if (surface != 0) {
+        return surface->getISurface();
+    } else {
+        return sp<ISurface>(0);
+    }
+}
+
 }; // namespace android
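
enableShutterSound() above only lets the shutter sound be disabled when the ro.camera.sound.forced property is unset, or when the caller is the mediaserver process itself. A standalone sketch of that decision, with boolean parameters standing in for the property_get() and pid checks:

// Standalone sketch of the shutter-sound policy; parameter names are
// illustrative stand-ins, not the CameraService code.
#include <cstdio>

enum class ShutterResult { Ok, PermissionDenied };

static ShutterResult setShutterSound(bool enable, bool isSoundForced,
                                     bool callerIsMediaserver, bool* playShutterSound) {
    if (enable) {
        *playShutterSound = true;
        return ShutterResult::Ok;
    }
    if (isSoundForced && !callerIsMediaserver) {
        return ShutterResult::PermissionDenied;   // apps may not silence a forced sound
    }
    *playShutterSound = false;
    return ShutterResult::Ok;
}

int main() {
    bool play = true;
    ShutterResult r = setShutterSound(false, /*isSoundForced=*/true,
                                      /*callerIsMediaserver=*/false, &play);
    printf("denied=%d playShutterSound=%d\n", r == ShutterResult::PermissionDenied, play);
    return 0;
}
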
diff --git a/services/camera/libcameraservice/CameraService.h b/services/camera/libcameraservice/CameraService.h
index f09773d..ccb9cf7 100644
--- a/services/camera/libcameraservice/CameraService.h
+++ b/services/camera/libcameraservice/CameraService.h
@@ -79,6 +79,12 @@
     sp<MediaPlayer>     mSoundPlayer[NUM_SOUNDS];
     int                 mSoundRef;  // reference count (release all MediaPlayer when 0)
 
+    // Used by Client objects to extract the ISurface from a Surface object.
+    // This is used because making Client a friend class of Surface would
+    // require including this header in Surface.h since Client is a nested
+    // class.
+    static sp<ISurface> getISurface(const sp<Surface>& surface);
+
     class Client : public BnCamera
     {
     public:
@@ -87,11 +93,15 @@
         virtual status_t        connect(const sp<ICameraClient>& client);
         virtual status_t        lock();
         virtual status_t        unlock();
-        virtual status_t        setPreviewDisplay(const sp<ISurface>& surface);
+        virtual status_t        setPreviewDisplay(const sp<Surface>& surface);
+        virtual status_t        setPreviewTexture(const sp<ISurfaceTexture>& surfaceTexture);
         virtual void            setPreviewCallbackFlag(int flag);
         virtual status_t        startPreview();
         virtual void            stopPreview();
         virtual bool            previewEnabled();
+        virtual int32_t         getNumberOfVideoBuffers() const;
+        virtual sp<IMemory>     getVideoBuffer(int32_t index) const;
+        virtual status_t        storeMetaDataInBuffers(bool enabled);
         virtual status_t        startRecording();
         virtual void            stopRecording();
         virtual bool            recordingEnabled();
@@ -121,7 +131,6 @@
 
         // these are internal functions used to set up preview buffers
         status_t                registerPreviewBuffers();
-        status_t                setOverlay();
 
         // camera operation mode
         enum camera_mode {
@@ -133,6 +142,9 @@
         status_t                startPreviewMode();
         status_t                startRecordingMode();
 
+        // internal function used by sendCommand to enable/disable shutter sound.
+        status_t                enableShutterSound(bool enable);
+
         // these are static callback functions
         static void             notifyCallback(int32_t msgType, int32_t ext1, int32_t ext2, void* user);
         static void             dataCallback(int32_t msgType, const sp<IMemory>& dataPtr, void* user);
@@ -163,18 +175,15 @@
         int                             mCameraFacing;   // immutable after constructor
         pid_t                           mClientPid;
         sp<CameraHardwareInterface>     mHardware;       // cleared after disconnect()
-        bool                            mUseOverlay;     // immutable after constructor
-        sp<OverlayRef>                  mOverlayRef;
-        int                             mOverlayW;
-        int                             mOverlayH;
         int                             mPreviewCallbackFlag;
         int                             mOrientation;     // Current display orientation
-        // True if display orientation has been changed. This is only used in overlay.
-        int                             mOrientationChanged;
+        bool                            mPlayShutterSound;
 
         // Ensures atomicity among the public methods
         mutable Mutex                   mLock;
-        sp<ISurface>                    mSurface;
+        // This is a binder of Surface or SurfaceTexture.
+        sp<IBinder>                     mSurface;
+        sp<ANativeWindow>               mPreviewWindow;
 
         // If the user want us to return a copy of the preview frame (instead
         // of the original one), we allocate mPreviewBuffer and reuse it if possible.