Implementation of a Java media codec interface and associated tools.

Change-Id: I13e54062d4de584355c5d82bb027a68aeaf2923b
diff --git a/media/java/android/media/MediaCodec.java b/media/java/android/media/MediaCodec.java
new file mode 100644
index 0000000..7f496ca
--- /dev/null
+++ b/media/java/android/media/MediaCodec.java
@@ -0,0 +1,210 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.view.Surface;
+import java.nio.ByteBuffer;
+import java.util.Map;
+
+/**
+ * The MediaCodec class can be used to access low-level media codecs, i.e.
+ * encoder/decoder components.
+ * @hide
+ */
+public class MediaCodec
+{
+    /** Per-buffer metadata includes an offset and size specifying
+        the range of valid data in the associated codec buffer.
+    */
+    public final static class BufferInfo {
+        public void set(
+                int offset, int size, long timeUs, int flags) {
+            mOffset = offset;
+            mSize = size;
+            mPresentationTimeUs = timeUs;
+            mFlags = flags;
+        }
+
+        public int mOffset;
+        public int mSize;
+        public long mPresentationTimeUs;
+        public int mFlags;
+    };
+
+    public static final int FLAG_SYNCFRAME   = 1;
+    public static final int FLAG_CODECCONFIG = 2;
+    public static final int FLAG_EOS         = 4;
+
+    /** Instantiate a codec component by mime type. For decoder components
+        this is the mime type of media that this decoder should be able to
+        decode; for encoder components it's the type of media this encoder
+        should encode _to_.
+    */
+    public static MediaCodec CreateByType(String type, boolean encoder) {
+        return new MediaCodec(type, true /* nameIsType */, encoder);
+    }
+
+    /** If you know the exact name of the component you want to instantiate
+        use this method to instantiate it. Use with caution.
+    */
+    public static MediaCodec CreateByComponentName(String name) {
+        return new MediaCodec(
+                name, false /* nameIsType */, false /* unused */);
+    }
+
+    private MediaCodec(
+            String name, boolean nameIsType, boolean encoder) {
+        native_setup(name, nameIsType, encoder);
+    }
+
+    @Override
+    protected void finalize() {
+        native_finalize();
+    }
+
+    // Make sure you call this when you're done to free up any opened
+    // component instance instead of relying on the garbage collector
+    // to do this for you at some point in the future.
+    public native final void release();
+
+    public static final int CONFIGURE_FLAG_ENCODE = 1;
+
+    /** Configures a component.
+     *  @param format A map of string/value pairs describing the input format
+     *                (decoder) or the desired output format.
+     *
+     *                Video formats have the following fields:
+     *                  "mime"          - String
+     *                  "width"         - Integer
+     *                  "height"        - Integer
+     *                  optional "max-input-size"       - Integer
+     *                  optional "csd-0", "csd-1" ...   - ByteBuffer
+     *
+     *                Audio formats have the following fields:
+     *                  "mime"          - String
+     *                  "channel-count" - Integer
+     *                  "sample-rate"   - Integer
+     *                  optional "max-input-size"       - Integer
+     *                  optional "csd-0", "csd-1" ...   - ByteBuffer
+     *
+     *                If the format is used to configure an encoder, additional
+     *                fields must be included:
+     *                  "bitrate" - Integer (in bits/sec)
+     *
+     *                for video formats:
+     *                  "color-format"          - Integer
+     *                  "frame-rate"            - Integer or Float
+     *                  "i-frame-interval"      - Integer
+     *                  optional "stride"       - Integer, defaults to "width"
+     *                  optional "slice-height" - Integer, defaults to "height"
+     *
+     *  @param surface Specify a surface on which to render the output of this
+     *                 decoder.
+     *  @param flags   Specify {@link #CONFIGURE_FLAG_ENCODE} to configure the
+     *                 component as an encoder.
+    */
+    public void configure(
+            Map<String, Object> format, Surface surface, int flags) {
+        String[] keys = null;
+        Object[] values = null;
+
+        if (format != null) {
+            keys = new String[format.size()];
+            values = new Object[format.size()];
+
+            int i = 0;
+            for (Map.Entry<String, Object> entry: format.entrySet()) {
+                keys[i] = entry.getKey();
+                values[i] = entry.getValue();
+                ++i;
+            }
+        }
+
+        native_configure(keys, values, surface, flags);
+    }
+
+    private native final void native_configure(
+            String[] keys, Object[] values, Surface surface, int flags);
+
+    /** After successfully configuring the component, call start. On return
+     *  you can query the component for its input/output buffers.
+    */
+    public native final void start();
+
+    public native final void stop();
+
+    /** Flush both input and output ports of the component; all indices
+     *  previously returned in calls to dequeueInputBuffer and
+     *  dequeueOutputBuffer become invalid.
+    */
+    public native final void flush();
+
+    /** After filling a range of the input buffer at the specified index,
+     *  submit it to the component.
+    */
+    public native final void queueInputBuffer(
+            int index,
+            int offset, int size, long presentationTimeUs, int flags);
+
+    // Returns the index of an input buffer to be filled with valid data
+    // or -1 if no such buffer is currently available.
+    // This method will return immediately if timeoutUs == 0, wait indefinitely
+    // for the availability of an input buffer if timeoutUs < 0 or wait up
+    // to "timeoutUs" microseconds if timeoutUs > 0.
+    public native final int dequeueInputBuffer(long timeoutUs);
+
+    // Returns the index of an output buffer that has been successfully
+    // decoded or one of the INFO_* constants below.
+    // The provided "info" will be filled with buffer meta data.
+    public static final int INFO_TRY_AGAIN_LATER        = -1;
+    public static final int INFO_OUTPUT_FORMAT_CHANGED  = -2;
+    public static final int INFO_OUTPUT_BUFFERS_CHANGED = -3;
+
+    /** Dequeue an output buffer, block at most "timeoutUs" microseconds. */
+    public native final int dequeueOutputBuffer(
+            BufferInfo info, long timeoutUs);
+
+    // If you are done with a buffer, use this call to return the buffer to
+    // the codec. If you previously specified a surface when configuring this
+    // video decoder, you can optionally render the buffer.
+    public native final void releaseOutputBuffer(int index, boolean render);
+
+    /** Call this after dequeueOutputBuffer signals a format change by returning
+     *  {@link #INFO_OUTPUT_FORMAT_CHANGED}.
+     */
+    public native final Map<String, Object> getOutputFormat();
+
+    /** Call this after start() returns and whenever dequeueOutputBuffer
+     *  signals an output buffer change by returning
+     *  {@link #INFO_OUTPUT_BUFFERS_CHANGED}.
+     */
+    public native final ByteBuffer[] getBuffers(boolean input);
+
+    private static native final void native_init();
+
+    private native final void native_setup(
+            String name, boolean nameIsType, boolean encoder);
+
+    private native final void native_finalize();
+
+    static {
+        System.loadLibrary("media_jni");
+        native_init();
+    }
+
+    private int mNativeContext;
+}
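
For illustration, a minimal sketch of how a client might drive the API declared above. The mime type, dimensions, timeout values and the SampleSource helper are placeholders invented for this sketch; the format keys follow the configure() documentation, and the loop assumes the codec propagates FLAG_EOS on its final output buffer.

import android.media.MediaCodec;
import android.view.Surface;

import java.nio.ByteBuffer;
import java.util.HashMap;
import java.util.Map;

public class DecodeSketch {
    // Hypothetical source of access units, e.g. backed by MediaExtractor below.
    interface SampleSource {
        int readSampleData(ByteBuffer buf);   // returns -1 at end of stream
        long getSampleTimeUs();
        boolean advance();
    }

    public static void decode(Surface surface, SampleSource source) {
        MediaCodec codec = MediaCodec.CreateByType("video/avc", false /* encoder */);

        Map<String, Object> format = new HashMap<String, Object>();
        format.put("mime", "video/avc");
        format.put("width", 1280);
        format.put("height", 720);
        // Optional "csd-0"/"csd-1" ByteBuffers would be added here if available.

        codec.configure(format, surface, 0 /* flags */);
        codec.start();

        ByteBuffer[] inputBuffers = codec.getBuffers(true /* input */);
        MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();

        boolean sawInputEOS = false;
        boolean sawOutputEOS = false;
        while (!sawOutputEOS) {
            if (!sawInputEOS) {
                int inIndex = codec.dequeueInputBuffer(10000 /* timeoutUs */);
                if (inIndex >= 0) {
                    int size = source.readSampleData(inputBuffers[inIndex]);
                    if (size < 0) {
                        sawInputEOS = true;
                        codec.queueInputBuffer(inIndex, 0, 0, 0, MediaCodec.FLAG_EOS);
                    } else {
                        codec.queueInputBuffer(
                                inIndex, 0, size, source.getSampleTimeUs(), 0);
                        source.advance();
                    }
                }
            }

            int outIndex = codec.dequeueOutputBuffer(info, 10000 /* timeoutUs */);
            if (outIndex >= 0) {
                // Render to the surface passed to configure(), then return the buffer.
                codec.releaseOutputBuffer(outIndex, true /* render */);
                if ((info.mFlags & MediaCodec.FLAG_EOS) != 0) {
                    sawOutputEOS = true;
                }
            } else if (outIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                // Output buffers changed; drop any cached references and re-query.
                codec.getBuffers(false /* input */);
            } else if (outIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                Map<String, Object> newFormat = codec.getOutputFormat();
            }
            // INFO_TRY_AGAIN_LATER: nothing to do, loop around.
        }

        codec.stop();
        codec.release();
    }
}
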
diff --git a/media/java/android/media/MediaExtractor.java b/media/java/android/media/MediaExtractor.java
new file mode 100644
index 0000000..6a7f2f5
--- /dev/null
+++ b/media/java/android/media/MediaExtractor.java
@@ -0,0 +1,78 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import java.nio.ByteBuffer;
+import java.util.Map;
+
+/**
+ * MediaExtractor facilitates extraction of encoded sample data from a media source.
+ * @hide
+ */
+public class MediaExtractor
+{
+    public MediaExtractor(String path) {
+        native_setup(path);
+    }
+
+    @Override
+    protected void finalize() {
+        native_finalize();
+    }
+
+    // Make sure you call this when you're done to free up any resources
+    // instead of relying on the garbage collector to do this for you at
+    // some point in the future.
+    public native final void release();
+
+    public native int countTracks();
+    public native Map<String, Object> getTrackFormat(int index);
+
+    // Subsequent calls to "readSampleData", "getSampleTrackIndex" and
+    // "getSampleTime" only retrieve information for the subset of tracks
+    // selected by the call below.
+    // Selecting the same track multiple times has no effect; the track
+    // is only selected once.
+    public native void selectTrack(int index);
+
+    // All selected tracks seek near the requested time. The next sample
+    // returned for each selected track will be a sync sample.
+    public native void seekTo(long timeUs);
+
+    public native boolean advance();
+
+    // Retrieve the current encoded sample and store it in the byte buffer
+    // starting at the given offset.
+    public native int readSampleData(ByteBuffer byteBuf, int offset);
+
+    // Returns the track index the current sample originates from.
+    public native int getSampleTrackIndex();
+
+    // Returns the current sample's presentation time in microseconds.
+    public native long getSampleTime();
+
+    private static native final void native_init();
+    private native final void native_setup(String path);
+    private native final void native_finalize();
+
+    static {
+        System.loadLibrary("media_jni");
+        native_init();
+    }
+
+    private int mNativeContext;
+}
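
A companion sketch of driving the extractor, assuming a placeholder path. The "mime" key comes from the track format Map, mirroring the keys documented for MediaCodec.configure() above, and readSampleData() expects a direct ByteBuffer (see the JNI implementation further down).

import android.media.MediaExtractor;

import java.nio.ByteBuffer;
import java.util.Map;

public class ExtractSketch {
    public static void dumpVideoSamples() {
        MediaExtractor extractor = new MediaExtractor("/sdcard/test.mp4");  // placeholder path

        // Select every video track; unselected tracks are skipped by readSampleData().
        for (int i = 0; i < extractor.countTracks(); ++i) {
            Map<String, Object> format = extractor.getTrackFormat(i);
            String mime = (String) format.get("mime");
            if (mime.startsWith("video/")) {
                extractor.selectTrack(i);
            }
        }

        // readSampleData() copies into a direct ByteBuffer and returns -1 at end of stream.
        ByteBuffer buffer = ByteBuffer.allocateDirect(1 << 20);

        int size;
        while ((size = extractor.readSampleData(buffer, 0)) >= 0) {
            int track = extractor.getSampleTrackIndex();
            long timeUs = extractor.getSampleTime();
            System.out.println(
                    "track " + track + ": " + size + " bytes at " + timeUs + "us");
            // A real client would hand the access unit to MediaCodec.queueInputBuffer() here.
            if (!extractor.advance()) {
                break;
            }
        }

        extractor.release();
    }
}
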
diff --git a/media/jni/Android.mk b/media/jni/Android.mk
index 23cc0e2..070d2d9 100644
--- a/media/jni/Android.mk
+++ b/media/jni/Android.mk
@@ -2,6 +2,8 @@
 include $(CLEAR_VARS)
 
 LOCAL_SRC_FILES:= \
+    android_media_MediaCodec.cpp \
+    android_media_MediaExtractor.cpp \
     android_media_MediaPlayer.cpp \
     android_media_MediaRecorder.cpp \
     android_media_MediaScanner.cpp \
@@ -25,6 +27,7 @@
     libcutils \
     libgui \
     libstagefright \
+    libstagefright_foundation \
     libcamera_client \
     libmtp \
     libusbhost \
@@ -39,10 +42,12 @@
     external/tremor/Tremor \
     frameworks/base/core/jni \
     frameworks/base/media/libmedia \
+    frameworks/base/media/libstagefright \
     frameworks/base/media/libstagefright/codecs/amrnb/enc/src \
     frameworks/base/media/libstagefright/codecs/amrnb/common \
     frameworks/base/media/libstagefright/codecs/amrnb/common/include \
     frameworks/base/media/mtp \
+    frameworks/base/include/media/stagefright/openmax \
     $(PV_INCLUDES) \
     $(JNI_H_INCLUDE) \
     $(call include-path-for, corecg graphics)
diff --git a/media/jni/android_media_MediaCodec.cpp b/media/jni/android_media_MediaCodec.cpp
new file mode 100644
index 0000000..43ca263
--- /dev/null
+++ b/media/jni/android_media_MediaCodec.cpp
@@ -0,0 +1,550 @@
+/*
+ * Copyright 2012, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "MediaCodec-JNI"
+#include <utils/Log.h>
+
+#include "android_media_MediaCodec.h"
+
+#include "android_media_Utils.h"
+#include "android_runtime/AndroidRuntime.h"
+#include "android_runtime/android_view_Surface.h"
+#include "jni.h"
+#include "JNIHelp.h"
+
+#include <media/stagefright/MediaCodec.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/ALooper.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/MediaErrors.h>
+#include <surfaceflinger/Surface.h>
+
+namespace android {
+
+// Keep these in sync with their equivalents in MediaCodec.java !!!
+enum {
+    DEQUEUE_INFO_TRY_AGAIN_LATER            = -1,
+    DEQUEUE_INFO_OUTPUT_FORMAT_CHANGED      = -2,
+    DEQUEUE_INFO_OUTPUT_BUFFERS_CHANGED     = -3,
+};
+
+struct fields_t {
+    jfieldID context;
+};
+
+static fields_t gFields;
+
+////////////////////////////////////////////////////////////////////////////////
+
+JMediaCodec::JMediaCodec(
+        JNIEnv *env, jobject thiz,
+        const char *name, bool nameIsType, bool encoder)
+    : mClass(NULL),
+      mObject(NULL) {
+    jclass clazz = env->GetObjectClass(thiz);
+    CHECK(clazz != NULL);
+
+    mClass = (jclass)env->NewGlobalRef(clazz);
+    mObject = env->NewWeakGlobalRef(thiz);
+
+    mLooper = new ALooper;
+    mLooper->setName("MediaCodec_looper");
+
+    mLooper->start(
+            false,      // runOnCallingThread
+            false,       // canCallJava
+            PRIORITY_DEFAULT);
+
+    if (nameIsType) {
+        mCodec = MediaCodec::CreateByType(mLooper, name, encoder);
+    } else {
+        mCodec = MediaCodec::CreateByComponentName(mLooper, name);
+    }
+}
+
+status_t JMediaCodec::initCheck() const {
+    return mCodec != NULL ? OK : NO_INIT;
+}
+
+JMediaCodec::~JMediaCodec() {
+    mCodec->stop();
+
+    JNIEnv *env = AndroidRuntime::getJNIEnv();
+
+    env->DeleteWeakGlobalRef(mObject);
+    mObject = NULL;
+    env->DeleteGlobalRef(mClass);
+    mClass = NULL;
+}
+
+status_t JMediaCodec::configure(
+        const sp<AMessage> &format,
+        const sp<ISurfaceTexture> &surfaceTexture,
+        int flags) {
+    sp<SurfaceTextureClient> client;
+    if (surfaceTexture != NULL) {
+        client = new SurfaceTextureClient(surfaceTexture);
+    }
+    return mCodec->configure(format, client, flags);
+}
+
+status_t JMediaCodec::start() {
+    return mCodec->start();
+}
+
+status_t JMediaCodec::stop() {
+    return mCodec->stop();
+}
+
+status_t JMediaCodec::flush() {
+    return mCodec->flush();
+}
+
+status_t JMediaCodec::queueInputBuffer(
+        size_t index,
+        size_t offset, size_t size, int64_t timeUs, uint32_t flags) {
+    return mCodec->queueInputBuffer(index, offset, size, timeUs, flags);
+}
+
+status_t JMediaCodec::dequeueInputBuffer(size_t *index, int64_t timeoutUs) {
+    return mCodec->dequeueInputBuffer(index, timeoutUs);
+}
+
+status_t JMediaCodec::dequeueOutputBuffer(
+        JNIEnv *env, jobject bufferInfo, size_t *index, int64_t timeoutUs) {
+    size_t size, offset;
+    int64_t timeUs;
+    uint32_t flags;
+    status_t err;
+    if ((err = mCodec->dequeueOutputBuffer(
+                    index, &size, &offset, &timeUs, &flags, timeoutUs)) != OK) {
+        return err;
+    }
+
+    jclass clazz = env->FindClass("android/media/MediaCodec$BufferInfo");
+
+    jmethodID method = env->GetMethodID(clazz, "set", "(IIJI)V");
+    env->CallVoidMethod(bufferInfo, method, offset, size, timeUs, flags);
+
+    return OK;
+}
+
+status_t JMediaCodec::releaseOutputBuffer(size_t index, bool render) {
+    return render
+        ? mCodec->renderOutputBufferAndRelease(index)
+        : mCodec->releaseOutputBuffer(index);
+}
+
+status_t JMediaCodec::getOutputFormat(JNIEnv *env, jobject *format) const {
+    sp<AMessage> msg;
+    status_t err;
+    if ((err = mCodec->getOutputFormat(&msg)) != OK) {
+        return err;
+    }
+
+    return ConvertMessageToMap(env, msg, format);
+}
+
+status_t JMediaCodec::getBuffers(
+        JNIEnv *env, bool input, jobjectArray *bufArray) const {
+    Vector<sp<ABuffer> > buffers;
+
+    status_t err =
+        input
+            ? mCodec->getInputBuffers(&buffers)
+            : mCodec->getOutputBuffers(&buffers);
+
+    if (err != OK) {
+        return err;
+    }
+
+    jclass byteBufferClass = env->FindClass("java/nio/ByteBuffer");
+
+    *bufArray = (jobjectArray)env->NewObjectArray(
+            buffers.size(), byteBufferClass, NULL);
+
+    for (size_t i = 0; i < buffers.size(); ++i) {
+        const sp<ABuffer> &buffer = buffers.itemAt(i);
+
+        jobject byteBuffer =
+            env->NewDirectByteBuffer(
+                buffer->base(),
+                buffer->capacity());
+
+        env->SetObjectArrayElement(
+                *bufArray, i, byteBuffer);
+
+        env->DeleteLocalRef(byteBuffer);
+        byteBuffer = NULL;
+    }
+
+    return OK;
+}
+
+}  // namespace android
+
+////////////////////////////////////////////////////////////////////////////////
+
+using namespace android;
+
+static sp<JMediaCodec> setMediaCodec(
+        JNIEnv *env, jobject thiz, const sp<JMediaCodec> &codec) {
+    sp<JMediaCodec> old = (JMediaCodec *)env->GetIntField(thiz, gFields.context);
+    if (codec != NULL) {
+        codec->incStrong(thiz);
+    }
+    if (old != NULL) {
+        old->decStrong(thiz);
+    }
+    env->SetIntField(thiz, gFields.context, (int)codec.get());
+
+    return old;
+}
+
+static sp<JMediaCodec> getMediaCodec(JNIEnv *env, jobject thiz) {
+    return (JMediaCodec *)env->GetIntField(thiz, gFields.context);
+}
+
+static void android_media_MediaCodec_release(JNIEnv *env, jobject thiz) {
+    setMediaCodec(env, thiz, NULL);
+}
+
+static jint throwExceptionAsNecessary(JNIEnv *env, status_t err) {
+    switch (err) {
+        case OK:
+            return 0;
+
+        case -EAGAIN:
+            return DEQUEUE_INFO_TRY_AGAIN_LATER;
+
+        case INFO_FORMAT_CHANGED:
+            return DEQUEUE_INFO_OUTPUT_FORMAT_CHANGED;
+
+        case INFO_OUTPUT_BUFFERS_CHANGED:
+            return DEQUEUE_INFO_OUTPUT_BUFFERS_CHANGED;
+
+        default:
+        {
+            jniThrowException(env, "java/lang/IllegalStateException", NULL);
+            break;
+        }
+    }
+
+    return 0;
+}
+
+static void android_media_MediaCodec_native_configure(
+        JNIEnv *env,
+        jobject thiz,
+        jobjectArray keys, jobjectArray values,
+        jobject jsurface,
+        jint flags) {
+    sp<JMediaCodec> codec = getMediaCodec(env, thiz);
+
+    if (codec == NULL) {
+        jniThrowException(env, "java/lang/IllegalStateException", NULL);
+        return;
+    }
+
+    sp<AMessage> format;
+    status_t err = ConvertKeyValueArraysToMessage(env, keys, values, &format);
+
+    if (err != OK) {
+        jniThrowException(env, "java/lang/IllegalArgumentException", NULL);
+        return;
+    }
+
+    sp<ISurfaceTexture> surfaceTexture;
+    if (jsurface != NULL) {
+        sp<Surface> surface(Surface_getSurface(env, jsurface));
+        if (surface != NULL) {
+            surfaceTexture = surface->getSurfaceTexture();
+        } else {
+            jniThrowException(
+                    env,
+                    "java/lang/IllegalArgumentException",
+                    "The surface has been released");
+            return;
+        }
+    }
+
+    err = codec->configure(format, surfaceTexture, flags);
+
+    throwExceptionAsNecessary(env, err);
+}
+
+static void android_media_MediaCodec_start(JNIEnv *env, jobject thiz) {
+    ALOGV("android_media_MediaCodec_start");
+
+    sp<JMediaCodec> codec = getMediaCodec(env, thiz);
+
+    if (codec == NULL) {
+        jniThrowException(env, "java/lang/IllegalStateException", NULL);
+        return;
+    }
+
+    status_t err = codec->start();
+
+    throwExceptionAsNecessary(env, err);
+}
+
+static void android_media_MediaCodec_stop(JNIEnv *env, jobject thiz) {
+    ALOGV("android_media_MediaCodec_stop");
+
+    sp<JMediaCodec> codec = getMediaCodec(env, thiz);
+
+    if (codec == NULL) {
+        jniThrowException(env, "java/lang/IllegalStateException", NULL);
+        return;
+    }
+
+    status_t err = codec->stop();
+
+    throwExceptionAsNecessary(env, err);
+}
+
+static void android_media_MediaCodec_flush(JNIEnv *env, jobject thiz) {
+    ALOGV("android_media_MediaCodec_flush");
+
+    sp<JMediaCodec> codec = getMediaCodec(env, thiz);
+
+    if (codec == NULL) {
+        jniThrowException(env, "java/lang/IllegalStateException", NULL);
+        return;
+    }
+
+    status_t err = codec->flush();
+
+    throwExceptionAsNecessary(env, err);
+}
+
+static void android_media_MediaCodec_queueInputBuffer(
+        JNIEnv *env,
+        jobject thiz,
+        jint index,
+        jint offset,
+        jint size,
+        jlong timestampUs,
+        jint flags) {
+    ALOGV("android_media_MediaCodec_queueInputBuffer");
+
+    sp<JMediaCodec> codec = getMediaCodec(env, thiz);
+
+    if (codec == NULL) {
+        jniThrowException(env, "java/lang/IllegalStateException", NULL);
+        return;
+    }
+
+    status_t err = codec->queueInputBuffer(
+            index, offset, size, timestampUs, flags);
+
+    throwExceptionAsNecessary(env, err);
+}
+
+static jint android_media_MediaCodec_dequeueInputBuffer(
+        JNIEnv *env, jobject thiz, jlong timeoutUs) {
+    ALOGV("android_media_MediaCodec_dequeueInputBuffer");
+
+    sp<JMediaCodec> codec = getMediaCodec(env, thiz);
+
+    if (codec == NULL) {
+        jniThrowException(env, "java/lang/IllegalStateException", NULL);
+        return -1;
+    }
+
+    size_t index;
+    status_t err = codec->dequeueInputBuffer(&index, timeoutUs);
+
+    if (err == OK) {
+        return index;
+    }
+
+    return throwExceptionAsNecessary(env, err);
+}
+
+static jint android_media_MediaCodec_dequeueOutputBuffer(
+        JNIEnv *env, jobject thiz, jobject bufferInfo, jlong timeoutUs) {
+    ALOGV("android_media_MediaCodec_dequeueOutputBuffer");
+
+    sp<JMediaCodec> codec = getMediaCodec(env, thiz);
+
+    if (codec == NULL) {
+        jniThrowException(env, "java/lang/IllegalStateException", NULL);
+        return NULL;
+    }
+
+    size_t index;
+    status_t err = codec->dequeueOutputBuffer(
+            env, bufferInfo, &index, timeoutUs);
+
+    if (err == OK) {
+        return index;
+    }
+
+    return throwExceptionAsNecessary(env, err);
+}
+
+static void android_media_MediaCodec_releaseOutputBuffer(
+        JNIEnv *env, jobject thiz, jint index, jboolean render) {
+    ALOGV("android_media_MediaCodec_renderOutputBufferAndRelease");
+
+    sp<JMediaCodec> codec = getMediaCodec(env, thiz);
+
+    if (codec == NULL) {
+        jniThrowException(env, "java/lang/IllegalStateException", NULL);
+        return;
+    }
+
+    status_t err = codec->releaseOutputBuffer(index, render);
+
+    throwExceptionAsNecessary(env, err);
+}
+
+static jobject android_media_MediaCodec_getOutputFormat(
+        JNIEnv *env, jobject thiz) {
+    ALOGV("android_media_MediaCodec_getOutputFormat");
+
+    sp<JMediaCodec> codec = getMediaCodec(env, thiz);
+
+    if (codec == NULL) {
+        jniThrowException(env, "java/lang/IllegalStateException", NULL);
+        return NULL;
+    }
+
+    jobject format;
+    status_t err = codec->getOutputFormat(env, &format);
+
+    if (err == OK) {
+        return format;
+    }
+
+    throwExceptionAsNecessary(env, err);
+
+    return NULL;
+}
+
+static jobjectArray android_media_MediaCodec_getBuffers(
+        JNIEnv *env, jobject thiz, jboolean input) {
+    ALOGV("android_media_MediaCodec_getBuffers");
+
+    sp<JMediaCodec> codec = getMediaCodec(env, thiz);
+
+    if (codec == NULL) {
+        jniThrowException(env, "java/lang/IllegalStateException", NULL);
+        return NULL;
+    }
+
+    jobjectArray buffers;
+    status_t err = codec->getBuffers(env, input, &buffers);
+
+    if (err == OK) {
+        return buffers;
+    }
+
+    throwExceptionAsNecessary(env, err);
+
+    return NULL;
+}
+
+static void android_media_MediaCodec_native_init(JNIEnv *env) {
+    jclass clazz = env->FindClass("android/media/MediaCodec");
+    CHECK(clazz != NULL);
+
+    gFields.context = env->GetFieldID(clazz, "mNativeContext", "I");
+    CHECK(gFields.context != NULL);
+}
+
+static void android_media_MediaCodec_native_setup(
+        JNIEnv *env, jobject thiz,
+        jstring name, jboolean nameIsType, jboolean encoder) {
+    if (name == NULL) {
+        jniThrowException(env, "java/lang/IllegalArgumentException", NULL);
+        return;
+    }
+
+    const char *tmp = env->GetStringUTFChars(name, NULL);
+
+    if (tmp == NULL) {
+        return;
+    }
+
+    sp<JMediaCodec> codec = new JMediaCodec(env, thiz, tmp, nameIsType, encoder);
+
+    status_t err = codec->initCheck();
+
+    env->ReleaseStringUTFChars(name, tmp);
+    tmp = NULL;
+
+    if (err != OK) {
+        jniThrowException(
+                env,
+                "java/io/IOException",
+                "Failed to allocate component instance");
+        return;
+    }
+
+    setMediaCodec(env, thiz, codec);
+}
+
+static void android_media_MediaCodec_native_finalize(
+        JNIEnv *env, jobject thiz) {
+    android_media_MediaCodec_release(env, thiz);
+}
+
+static JNINativeMethod gMethods[] = {
+    { "release", "()V", (void *)android_media_MediaCodec_release },
+
+    { "native_configure",
+      "([Ljava/lang/String;[Ljava/lang/Object;Landroid/view/Surface;I)V",
+      (void *)android_media_MediaCodec_native_configure },
+
+    { "start", "()V", (void *)android_media_MediaCodec_start },
+    { "stop", "()V", (void *)android_media_MediaCodec_stop },
+    { "flush", "()V", (void *)android_media_MediaCodec_flush },
+
+    { "queueInputBuffer", "(IIIJI)V",
+      (void *)android_media_MediaCodec_queueInputBuffer },
+
+    { "dequeueInputBuffer", "(J)I",
+      (void *)android_media_MediaCodec_dequeueInputBuffer },
+
+    { "dequeueOutputBuffer", "(Landroid/media/MediaCodec$BufferInfo;J)I",
+      (void *)android_media_MediaCodec_dequeueOutputBuffer },
+
+    { "releaseOutputBuffer", "(IZ)V",
+      (void *)android_media_MediaCodec_releaseOutputBuffer },
+
+    { "getOutputFormat", "()Ljava/util/Map;",
+      (void *)android_media_MediaCodec_getOutputFormat },
+
+    { "getBuffers", "(Z)[Ljava/nio/ByteBuffer;",
+      (void *)android_media_MediaCodec_getBuffers },
+
+    { "native_init", "()V", (void *)android_media_MediaCodec_native_init },
+
+    { "native_setup", "(Ljava/lang/String;ZZ)V",
+      (void *)android_media_MediaCodec_native_setup },
+
+    { "native_finalize", "()V",
+      (void *)android_media_MediaCodec_native_finalize },
+};
+
+int register_android_media_MediaCodec(JNIEnv *env) {
+    return AndroidRuntime::registerNativeMethods(env,
+                "android/media/MediaCodec", gMethods, NELEM(gMethods));
+}
diff --git a/media/jni/android_media_MediaCodec.h b/media/jni/android_media_MediaCodec.h
new file mode 100644
index 0000000..6b1257d
--- /dev/null
+++ b/media/jni/android_media_MediaCodec.h
@@ -0,0 +1,81 @@
+/*
+ * Copyright 2012, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _ANDROID_MEDIA_MEDIACODEC_H_
+#define _ANDROID_MEDIA_MEDIACODEC_H_
+
+#include "jni.h"
+
+#include <media/stagefright/foundation/ABase.h>
+#include <utils/Errors.h>
+#include <utils/RefBase.h>
+
+namespace android {
+
+struct ALooper;
+struct AMessage;
+struct ISurfaceTexture;
+struct MediaCodec;
+
+struct JMediaCodec : public RefBase {
+    JMediaCodec(
+            JNIEnv *env, jobject thiz,
+            const char *name, bool nameIsType, bool encoder);
+
+    status_t initCheck() const;
+
+    status_t configure(
+            const sp<AMessage> &format,
+            const sp<ISurfaceTexture> &surfaceTexture,
+            int flags);
+
+    status_t start();
+    status_t stop();
+
+    status_t flush();
+
+    status_t queueInputBuffer(
+            size_t index,
+            size_t offset, size_t size, int64_t timeUs, uint32_t flags);
+
+    status_t dequeueInputBuffer(size_t *index, int64_t timeoutUs);
+
+    status_t dequeueOutputBuffer(
+            JNIEnv *env, jobject bufferInfo, size_t *index, int64_t timeoutUs);
+
+    status_t releaseOutputBuffer(size_t index, bool render);
+
+    status_t getOutputFormat(JNIEnv *env, jobject *format) const;
+
+    status_t getBuffers(
+            JNIEnv *env, bool input, jobjectArray *bufArray) const;
+
+protected:
+    virtual ~JMediaCodec();
+
+private:
+    jclass mClass;
+    jweak mObject;
+
+    sp<ALooper> mLooper;
+    sp<MediaCodec> mCodec;
+
+    DISALLOW_EVIL_CONSTRUCTORS(JMediaCodec);
+};
+
+}  // namespace android
+
+#endif  // _ANDROID_MEDIA_MEDIACODEC_H_
diff --git a/media/jni/android_media_MediaExtractor.cpp b/media/jni/android_media_MediaExtractor.cpp
new file mode 100644
index 0000000..4757adf
--- /dev/null
+++ b/media/jni/android_media_MediaExtractor.cpp
@@ -0,0 +1,400 @@
+/*
+ * Copyright 2012, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "MediaExtractor-JNI"
+#include <utils/Log.h>
+
+#include "android_media_MediaExtractor.h"
+
+#include "android_media_Utils.h"
+#include "android_runtime/AndroidRuntime.h"
+#include "jni.h"
+#include "JNIHelp.h"
+
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/DataSource.h>
+#include <media/stagefright/MediaErrors.h>
+#include <media/stagefright/NuMediaExtractor.h>
+
+namespace android {
+
+struct fields_t {
+    jfieldID context;
+};
+
+static fields_t gFields;
+
+////////////////////////////////////////////////////////////////////////////////
+
+JMediaExtractor::JMediaExtractor(JNIEnv *env, jobject thiz)
+    : mClass(NULL),
+      mObject(NULL) {
+    jclass clazz = env->GetObjectClass(thiz);
+    CHECK(clazz != NULL);
+
+    mClass = (jclass)env->NewGlobalRef(clazz);
+    mObject = env->NewWeakGlobalRef(thiz);
+
+    mImpl = new NuMediaExtractor;
+}
+
+JMediaExtractor::~JMediaExtractor() {
+    JNIEnv *env = AndroidRuntime::getJNIEnv();
+
+    env->DeleteWeakGlobalRef(mObject);
+    mObject = NULL;
+    env->DeleteGlobalRef(mClass);
+    mClass = NULL;
+}
+
+status_t JMediaExtractor::setDataSource(const char *path) {
+    return mImpl->setDataSource(path);
+}
+
+size_t JMediaExtractor::countTracks() const {
+    return mImpl->countTracks();
+}
+
+status_t JMediaExtractor::getTrackFormat(size_t index, jobject *format) const {
+    sp<AMessage> msg;
+    status_t err;
+    if ((err = mImpl->getTrackFormat(index, &msg)) != OK) {
+        return err;
+    }
+
+    JNIEnv *env = AndroidRuntime::getJNIEnv();
+
+    return ConvertMessageToMap(env, msg, format);
+}
+
+status_t JMediaExtractor::selectTrack(size_t index) {
+    return mImpl->selectTrack(index);
+}
+
+status_t JMediaExtractor::seekTo(int64_t timeUs) {
+    return mImpl->seekTo(timeUs);
+}
+
+status_t JMediaExtractor::advance() {
+    return mImpl->advance();
+}
+
+status_t JMediaExtractor::readSampleData(
+        jobject byteBuf, size_t offset, size_t *sampleSize) {
+    JNIEnv *env = AndroidRuntime::getJNIEnv();
+
+    void *dst = env->GetDirectBufferAddress(byteBuf);
+
+    if (dst == NULL) {
+        // XXX if dst is NULL also fall back to "array()"
+        return INVALID_OPERATION;
+    }
+
+    jlong dstSize = env->GetDirectBufferCapacity(byteBuf);
+
+    if (dstSize < offset) {
+        return -ERANGE;
+    }
+
+    sp<ABuffer> buffer = new ABuffer((char *)dst + offset, dstSize - offset);
+
+    status_t err = mImpl->readSampleData(buffer);
+
+    if (err != OK) {
+        return err;
+    }
+
+    *sampleSize = buffer->size();
+
+    return OK;
+}
+
+status_t JMediaExtractor::getSampleTrackIndex(size_t *trackIndex) {
+    return mImpl->getSampleTrackIndex(trackIndex);
+}
+
+status_t JMediaExtractor::getSampleTime(int64_t *sampleTimeUs) {
+    return mImpl->getSampleTime(sampleTimeUs);
+}
+
+}  // namespace android
+
+////////////////////////////////////////////////////////////////////////////////
+
+using namespace android;
+
+static sp<JMediaExtractor> setMediaExtractor(
+        JNIEnv *env, jobject thiz, const sp<JMediaExtractor> &extractor) {
+    sp<JMediaExtractor> old =
+        (JMediaExtractor *)env->GetIntField(thiz, gFields.context);
+
+    if (extractor != NULL) {
+        extractor->incStrong(thiz);
+    }
+    if (old != NULL) {
+        old->decStrong(thiz);
+    }
+    env->SetIntField(thiz, gFields.context, (int)extractor.get());
+
+    return old;
+}
+
+static sp<JMediaExtractor> getMediaExtractor(JNIEnv *env, jobject thiz) {
+    return (JMediaExtractor *)env->GetIntField(thiz, gFields.context);
+}
+
+static void android_media_MediaExtractor_release(JNIEnv *env, jobject thiz) {
+    setMediaExtractor(env, thiz, NULL);
+}
+
+static jint android_media_MediaExtractor_countTracks(
+        JNIEnv *env, jobject thiz) {
+    sp<JMediaExtractor> extractor = getMediaExtractor(env, thiz);
+
+    if (extractor == NULL) {
+        jniThrowException(env, "java/lang/IllegalStateException", NULL);
+        return -1;
+    }
+
+    return extractor->countTracks();
+}
+
+static jobject android_media_MediaExtractor_getTrackFormat(
+        JNIEnv *env, jobject thiz, jint index) {
+    sp<JMediaExtractor> extractor = getMediaExtractor(env, thiz);
+
+    if (extractor == NULL) {
+        jniThrowException(env, "java/lang/IllegalStateException", NULL);
+        return NULL;
+    }
+
+    jobject format;
+    status_t err = extractor->getTrackFormat(index, &format);
+
+    if (err != OK) {
+        jniThrowException(env, "java/lang/IllegalArgumentException", NULL);
+        return NULL;
+    }
+
+    return format;
+}
+
+static void android_media_MediaExtractor_selectTrack(
+        JNIEnv *env, jobject thiz, jint index) {
+    sp<JMediaExtractor> extractor = getMediaExtractor(env, thiz);
+
+    if (extractor == NULL) {
+        jniThrowException(env, "java/lang/IllegalStateException", NULL);
+        return;
+    }
+
+    status_t err = extractor->selectTrack(index);
+
+    if (err != OK) {
+        jniThrowException(env, "java/lang/IllegalArgumentException", NULL);
+        return;
+    }
+}
+
+static void android_media_MediaExtractor_seekTo(
+        JNIEnv *env, jobject thiz, jlong timeUs) {
+    sp<JMediaExtractor> extractor = getMediaExtractor(env, thiz);
+
+    if (extractor == NULL) {
+        jniThrowException(env, "java/lang/IllegalStateException", NULL);
+        return;
+    }
+
+    status_t err = extractor->seekTo(timeUs);
+
+    if (err != OK) {
+        jniThrowException(env, "java/lang/IllegalArgumentException", NULL);
+        return;
+    }
+}
+
+static jboolean android_media_MediaExtractor_advance(
+        JNIEnv *env, jobject thiz) {
+    sp<JMediaExtractor> extractor = getMediaExtractor(env, thiz);
+
+    if (extractor == NULL) {
+        jniThrowException(env, "java/lang/IllegalStateException", NULL);
+        return false;
+    }
+
+    status_t err = extractor->advance();
+
+    if (err == ERROR_END_OF_STREAM) {
+        return false;
+    } else if (err != OK) {
+        jniThrowException(env, "java/lang/IllegalArgumentException", NULL);
+        return false;
+    }
+
+    return true;
+}
+
+static jint android_media_MediaExtractor_readSampleData(
+        JNIEnv *env, jobject thiz, jobject byteBuf, jint offset) {
+    sp<JMediaExtractor> extractor = getMediaExtractor(env, thiz);
+
+    if (extractor == NULL) {
+        jniThrowException(env, "java/lang/IllegalStateException", NULL);
+        return -1;
+    }
+
+    size_t sampleSize;
+    status_t err = extractor->readSampleData(byteBuf, offset, &sampleSize);
+
+    if (err == ERROR_END_OF_STREAM) {
+        return -1;
+    } else if (err != OK) {
+        jniThrowException(env, "java/lang/IllegalArgumentException", NULL);
+        return -1;
+    }
+
+    return sampleSize;
+}
+
+static jint android_media_MediaExtractor_getSampleTrackIndex(
+        JNIEnv *env, jobject thiz) {
+    sp<JMediaExtractor> extractor = getMediaExtractor(env, thiz);
+
+    if (extractor == NULL) {
+        jniThrowException(env, "java/lang/IllegalStateException", NULL);
+        return -1;
+    }
+
+    size_t trackIndex;
+    status_t err = extractor->getSampleTrackIndex(&trackIndex);
+
+    if (err == ERROR_END_OF_STREAM) {
+        return -1;
+    } else if (err != OK) {
+        jniThrowException(env, "java/lang/IllegalArgumentException", NULL);
+        return -1;
+    }
+
+    return trackIndex;
+}
+
+static jlong android_media_MediaExtractor_getSampleTime(
+        JNIEnv *env, jobject thiz) {
+    sp<JMediaExtractor> extractor = getMediaExtractor(env, thiz);
+
+    if (extractor == NULL) {
+        jniThrowException(env, "java/lang/IllegalStateException", NULL);
+        return -1ll;
+    }
+
+    int64_t sampleTimeUs;
+    status_t err = extractor->getSampleTime(&sampleTimeUs);
+
+    if (err == ERROR_END_OF_STREAM) {
+        return -1ll;
+    } else if (err != OK) {
+        jniThrowException(env, "java/lang/IllegalArgumentException", NULL);
+        return -1ll;
+    }
+
+    return sampleTimeUs;
+}
+
+static void android_media_MediaExtractor_native_init(JNIEnv *env) {
+    jclass clazz = env->FindClass("android/media/MediaExtractor");
+    CHECK(clazz != NULL);
+
+    gFields.context = env->GetFieldID(clazz, "mNativeContext", "I");
+    CHECK(gFields.context != NULL);
+
+    DataSource::RegisterDefaultSniffers();
+}
+
+static void android_media_MediaExtractor_native_setup(
+        JNIEnv *env, jobject thiz, jstring path) {
+    sp<JMediaExtractor> extractor = new JMediaExtractor(env, thiz);
+
+    if (path == NULL) {
+        jniThrowException(env, "java/lang/IllegalArgumentException", NULL);
+        return;
+    }
+
+    const char *tmp = env->GetStringUTFChars(path, NULL);
+
+    if (tmp == NULL) {
+        return;
+    }
+
+    status_t err = extractor->setDataSource(tmp);
+
+    env->ReleaseStringUTFChars(path, tmp);
+    tmp = NULL;
+
+    if (err != OK) {
+        jniThrowException(
+                env,
+                "java/io/IOException",
+                "Failed to instantiate extractor.");
+        return;
+    }
+
+    setMediaExtractor(env, thiz, extractor);
+}
+
+static void android_media_MediaExtractor_native_finalize(
+        JNIEnv *env, jobject thiz) {
+    android_media_MediaExtractor_release(env, thiz);
+}
+
+static JNINativeMethod gMethods[] = {
+    { "release", "()V", (void *)android_media_MediaExtractor_release },
+
+    { "countTracks", "()I", (void *)android_media_MediaExtractor_countTracks },
+
+    { "getTrackFormat", "(I)Ljava/util/Map;",
+        (void *)android_media_MediaExtractor_getTrackFormat },
+
+    { "selectTrack", "(I)V", (void *)android_media_MediaExtractor_selectTrack },
+
+    { "seekTo", "(J)V", (void *)android_media_MediaExtractor_seekTo },
+
+    { "advance", "()Z", (void *)android_media_MediaExtractor_advance },
+
+    { "readSampleData", "(Ljava/nio/ByteBuffer;I)I",
+        (void *)android_media_MediaExtractor_readSampleData },
+
+    { "getSampleTrackIndex", "()I",
+        (void *)android_media_MediaExtractor_getSampleTrackIndex },
+
+    { "getSampleTime", "()J",
+        (void *)android_media_MediaExtractor_getSampleTime },
+
+    { "native_init", "()V", (void *)android_media_MediaExtractor_native_init },
+
+    { "native_setup", "(Ljava/lang/String;)V",
+      (void *)android_media_MediaExtractor_native_setup },
+
+    { "native_finalize", "()V",
+      (void *)android_media_MediaExtractor_native_finalize },
+};
+
+int register_android_media_MediaExtractor(JNIEnv *env) {
+    return AndroidRuntime::registerNativeMethods(env,
+                "android/media/MediaExtractor", gMethods, NELEM(gMethods));
+}
diff --git a/media/jni/android_media_MediaExtractor.h b/media/jni/android_media_MediaExtractor.h
new file mode 100644
index 0000000..70e58c6
--- /dev/null
+++ b/media/jni/android_media_MediaExtractor.h
@@ -0,0 +1,60 @@
+/*
+ * Copyright 2012, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _ANDROID_MEDIA_MEDIAEXTRACTOR_H_
+#define _ANDROID_MEDIA_MEDIAEXTRACTOR_H_
+
+#include <media/stagefright/foundation/ABase.h>
+#include <utils/Errors.h>
+#include <utils/RefBase.h>
+
+#include "jni.h"
+
+namespace android {
+
+struct NuMediaExtractor;
+
+struct JMediaExtractor : public RefBase {
+    JMediaExtractor(JNIEnv *env, jobject thiz);
+
+    status_t setDataSource(const char *path);
+
+    size_t countTracks() const;
+    status_t getTrackFormat(size_t index, jobject *format) const;
+
+    status_t selectTrack(size_t index);
+
+    status_t seekTo(int64_t timeUs);
+
+    status_t advance();
+    status_t readSampleData(jobject byteBuf, size_t offset, size_t *sampleSize);
+    status_t getSampleTrackIndex(size_t *trackIndex);
+    status_t getSampleTime(int64_t *sampleTimeUs);
+
+protected:
+    virtual ~JMediaExtractor();
+
+private:
+    jclass mClass;
+    jweak mObject;
+    sp<NuMediaExtractor> mImpl;
+
+    DISALLOW_EVIL_CONSTRUCTORS(JMediaExtractor);
+};
+
+}  // namespace android
+
+#endif  // _ANDROID_MEDIA_MEDIAEXTRACTOR_H_
diff --git a/media/jni/android_media_MediaPlayer.cpp b/media/jni/android_media_MediaPlayer.cpp
index 8ff9dd3..199d56e4 100644
--- a/media/jni/android_media_MediaPlayer.cpp
+++ b/media/jni/android_media_MediaPlayer.cpp
@@ -810,6 +810,8 @@
                 "android/media/MediaPlayer", gMethods, NELEM(gMethods));
 }
 
+extern int register_android_media_MediaCodec(JNIEnv *env);
+extern int register_android_media_MediaExtractor(JNIEnv *env);
 extern int register_android_media_MediaMetadataRetriever(JNIEnv *env);
 extern int register_android_media_MediaRecorder(JNIEnv *env);
 extern int register_android_media_MediaScanner(JNIEnv *env);
@@ -881,6 +883,16 @@
         goto bail;
     }
 
+    if (register_android_media_MediaCodec(env) < 0) {
+        ALOGE("ERROR: MediaCodec native registration failed");
+        goto bail;
+    }
+
+    if (register_android_media_MediaExtractor(env) < 0) {
+        ALOGE("ERROR: MediaExtractor native registration failed");
+        goto bail;
+    }
+
     /* success -- return valid version number */
     result = JNI_VERSION_1_4;
 
diff --git a/media/jni/android_media_Utils.cpp b/media/jni/android_media_Utils.cpp
index 47963b1..7dacdcd 100644
--- a/media/jni/android_media_Utils.cpp
+++ b/media/jni/android_media_Utils.cpp
@@ -20,6 +20,10 @@
 #include <utils/Log.h>
 #include "android_media_Utils.h"
 
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/AMessage.h>
+
 namespace android {
 
 bool ConvertKeyValueArraysToKeyedVector(
@@ -71,5 +75,266 @@
     return true;
 }
 
+static jobject makeIntegerObject(JNIEnv *env, int32_t value) {
+    jclass clazz = env->FindClass("java/lang/Integer");
+    CHECK(clazz != NULL);
+
+    jmethodID integerConstructID = env->GetMethodID(clazz, "<init>", "(I)V");
+    CHECK(integerConstructID != NULL);
+
+    return env->NewObject(clazz, integerConstructID, value);
+}
+
+static jobject makeFloatObject(JNIEnv *env, float value) {
+    jclass clazz = env->FindClass("java/lang/Float");
+    CHECK(clazz != NULL);
+
+    jmethodID floatConstructID = env->GetMethodID(clazz, "<init>", "(F)V");
+    CHECK(floatConstructID != NULL);
+
+    return env->NewObject(clazz, floatConstructID, value);
+}
+
+static jobject makeByteBufferObject(
+        JNIEnv *env, const void *data, size_t size) {
+    jbyteArray byteArrayObj = env->NewByteArray(size);
+    env->SetByteArrayRegion(byteArrayObj, 0, size, (const jbyte *)data);
+
+    jclass clazz = env->FindClass("java/nio/ByteBuffer");
+    CHECK(clazz != NULL);
+
+    jmethodID byteBufWrapID =
+        env->GetStaticMethodID(clazz, "wrap", "([B)Ljava/nio/ByteBuffer;");
+    CHECK(byteBufWrapID != NULL);
+
+    jobject byteBufObj = env->CallStaticObjectMethod(
+            clazz, byteBufWrapID, byteArrayObj);
+
+    env->DeleteLocalRef(byteArrayObj); byteArrayObj = NULL;
+
+    return byteBufObj;
+}
+
+status_t ConvertMessageToMap(
+        JNIEnv *env, const sp<AMessage> &msg, jobject *map) {
+    jclass hashMapClazz = env->FindClass("java/util/HashMap");
+
+    if (hashMapClazz == NULL) {
+        return -EINVAL;
+    }
+
+    jmethodID hashMapConstructID =
+        env->GetMethodID(hashMapClazz, "<init>", "()V");
+
+    if (hashMapConstructID == NULL) {
+        return -EINVAL;
+    }
+
+    jmethodID hashMapPutID =
+        env->GetMethodID(
+                hashMapClazz,
+                "put",
+                "(Ljava/lang/Object;Ljava/lang/Object;)Ljava/lang/Object;");
+
+    if (hashMapPutID == NULL) {
+        return -EINVAL;
+    }
+
+    jobject hashMap = env->NewObject(hashMapClazz, hashMapConstructID);
+
+    for (size_t i = 0; i < msg->countEntries(); ++i) {
+        AMessage::Type valueType;
+        const char *key = msg->getEntryNameAt(i, &valueType);
+
+        jobject valueObj = NULL;
+
+        switch (valueType) {
+            case AMessage::kTypeInt32:
+            {
+                int32_t val;
+                CHECK(msg->findInt32(key, &val));
+
+                valueObj = makeIntegerObject(env, val);
+                break;
+            }
+
+            case AMessage::kTypeFloat:
+            {
+                float val;
+                CHECK(msg->findFloat(key, &val));
+
+                valueObj = makeFloatObject(env, val);
+                break;
+            }
+
+            case AMessage::kTypeString:
+            {
+                AString val;
+                CHECK(msg->findString(key, &val));
+
+                valueObj = env->NewStringUTF(val.c_str());
+                break;
+            }
+
+            case AMessage::kTypeObject:
+            {
+                sp<RefBase> obj;
+                CHECK(msg->findObject(key, &obj));
+
+                // XXX dangerous, object is not guaranteed to be a buffer.
+                sp<ABuffer> buffer = static_cast<ABuffer *>(obj.get());
+
+                valueObj = makeByteBufferObject(
+                        env, buffer->data(), buffer->size());
+                break;
+            }
+
+            default:
+                break;
+        }
+
+        if (valueObj != NULL) {
+            jstring keyObj = env->NewStringUTF(key);
+
+            jobject res = env->CallObjectMethod(
+                    hashMap, hashMapPutID, keyObj, valueObj);
+
+            env->DeleteLocalRef(keyObj); keyObj = NULL;
+            env->DeleteLocalRef(valueObj); valueObj = NULL;
+        }
+    }
+
+    *map = hashMap;
+
+    return OK;
+}
+
+status_t ConvertKeyValueArraysToMessage(
+        JNIEnv *env, jobjectArray keys, jobjectArray values,
+        sp<AMessage> *out) {
+    jclass stringClass = env->FindClass("java/lang/String");
+    CHECK(stringClass != NULL);
+
+    jclass integerClass = env->FindClass("java/lang/Integer");
+    CHECK(integerClass != NULL);
+
+    jclass floatClass = env->FindClass("java/lang/Float");
+    CHECK(floatClass != NULL);
+
+    jclass byteBufClass = env->FindClass("java/nio/ByteBuffer");
+    CHECK(byteBufClass != NULL);
+
+    sp<AMessage> msg = new AMessage;
+
+    jsize numEntries = 0;
+
+    if (keys != NULL) {
+        if (values == NULL) {
+            return -EINVAL;
+        }
+
+        numEntries = env->GetArrayLength(keys);
+
+        if (numEntries != env->GetArrayLength(values)) {
+            return -EINVAL;
+        }
+    } else if (values != NULL) {
+        return -EINVAL;
+    }
+
+    for (jsize i = 0; i < numEntries; ++i) {
+        jobject keyObj = env->GetObjectArrayElement(keys, i);
+
+        if (!env->IsInstanceOf(keyObj, stringClass)) {
+            return -EINVAL;
+        }
+
+        const char *tmp = env->GetStringUTFChars((jstring)keyObj, NULL);
+
+        if (tmp == NULL) {
+            return -ENOMEM;
+        }
+
+        AString key = tmp;
+
+        env->ReleaseStringUTFChars((jstring)keyObj, tmp);
+        tmp = NULL;
+
+        jobject valueObj = env->GetObjectArrayElement(values, i);
+
+        if (env->IsInstanceOf(valueObj, stringClass)) {
+            const char *value = env->GetStringUTFChars((jstring)valueObj, NULL);
+
+            if (value == NULL) {
+                return -ENOMEM;
+            }
+
+            msg->setString(key.c_str(), value);
+
+            env->ReleaseStringUTFChars((jstring)valueObj, value);
+            value = NULL;
+        } else if (env->IsInstanceOf(valueObj, integerClass)) {
+            jmethodID intValueID =
+                env->GetMethodID(integerClass, "intValue", "()I");
+            CHECK(intValueID != NULL);
+
+            jint value = env->CallIntMethod(valueObj, intValueID);
+
+            msg->setInt32(key.c_str(), value);
+        } else if (env->IsInstanceOf(valueObj, floatClass)) {
+            jmethodID floatValueID =
+                env->GetMethodID(floatClass, "floatValue", "()F");
+            CHECK(floatValueID != NULL);
+
+            jfloat value = env->CallFloatMethod(valueObj, floatValueID);
+
+            msg->setFloat(key.c_str(), value);
+        } else if (env->IsInstanceOf(valueObj, byteBufClass)) {
+            jmethodID positionID =
+                env->GetMethodID(byteBufClass, "position", "()I");
+            CHECK(positionID != NULL);
+
+            jmethodID limitID =
+                env->GetMethodID(byteBufClass, "limit", "()I");
+            CHECK(limitID != NULL);
+
+            jint position = env->CallIntMethod(valueObj, positionID);
+            jint limit = env->CallIntMethod(valueObj, limitID);
+
+            sp<ABuffer> buffer = new ABuffer(limit - position);
+
+            void *data = env->GetDirectBufferAddress(valueObj);
+
+            if (data != NULL) {
+                memcpy(buffer->data(),
+                       (const uint8_t *)data + position,
+                       buffer->size());
+            } else {
+                jmethodID arrayID =
+                    env->GetMethodID(byteBufClass, "array", "()[B");
+                CHECK(arrayID != NULL);
+
+                jbyteArray byteArray =
+                    (jbyteArray)env->CallObjectMethod(valueObj, arrayID);
+                CHECK(byteArray != NULL);
+
+                env->GetByteArrayRegion(
+                        byteArray,
+                        position,
+                        buffer->size(),
+                        (jbyte *)buffer->data());
+
+                env->DeleteLocalRef(byteArray); byteArray = NULL;
+            }
+
+            msg->setObject(key.c_str(), buffer);
+        }
+    }
+
+    *out = msg;
+
+    return OK;
+}
+
 }  // namespace android
 
diff --git a/media/jni/android_media_Utils.h b/media/jni/android_media_Utils.h
index a2c155a..635bceb 100644
--- a/media/jni/android_media_Utils.h
+++ b/media/jni/android_media_Utils.h
@@ -33,6 +33,14 @@
     JNIEnv *env, jobjectArray keys, jobjectArray values,
     KeyedVector<String8, String8>* vector);
 
+struct AMessage;
+status_t ConvertMessageToMap(
+        JNIEnv *env, const sp<AMessage> &msg, jobject *map);
+
+status_t ConvertKeyValueArraysToMessage(
+        JNIEnv *env, jobjectArray keys, jobjectArray values,
+        sp<AMessage> *msg);
+
 };  // namespace android
 
 #endif //  _ANDROID_MEDIA_UTILS_H_
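
For reference, a sketch of a Java-side format map that exercises each value type the new ConvertKeyValueArraysToMessage()/ConvertMessageToMap() helpers handle; the keys follow the MediaCodec.configure() documentation and the csd-0 bytes are placeholders.

import java.nio.ByteBuffer;
import java.util.HashMap;
import java.util.Map;

class FormatMapSketch {
    static Map<String, Object> videoDecoderFormat() {
        Map<String, Object> format = new HashMap<String, Object>();
        format.put("mime", "video/avc");     // String     -> AMessage::setString()
        format.put("width", 1280);           // Integer    -> AMessage::setInt32()
        format.put("height", 720);
        format.put("frame-rate", 30.0f);     // Float      -> AMessage::setFloat()
        // Codec specific data travels as a ByteBuffer and becomes an ABuffer via
        // AMessage::setObject(); ConvertMessageToMap() wraps it back into a
        // ByteBuffer on the way out. The bytes below are placeholders.
        format.put("csd-0", ByteBuffer.wrap(new byte[] { 0, 0, 0, 1 }));
        return format;
    }
}
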
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
index b731d0f..04415cd 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
@@ -387,10 +387,10 @@
                      audio ? "audio" : "video");
 
                 mRenderer->queueEOS(audio, UNKNOWN_ERROR);
-            } else {
-                CHECK_EQ((int)what, (int)ACodec::kWhatDrainThisBuffer);
-
+            } else if (what == ACodec::kWhatDrainThisBuffer) {
                 renderBuffer(audio, codecRequest);
+            } else {
+                ALOGV("Unhandled codec notification %d.", what);
             }
 
             break;
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp
index ca44ea3..605b497 100644
--- a/media/libstagefright/ACodec.cpp
+++ b/media/libstagefright/ACodec.cpp
@@ -171,6 +171,9 @@
 
 private:
     void onSetup(const sp<AMessage> &msg);
+    void onAllocateComponent(const sp<AMessage> &msg);
+    void onConfigureComponent(const sp<AMessage> &msg);
+    void onStart();
 
     DISALLOW_EVIL_CONSTRUCTORS(UninitializedState);
 };
@@ -265,6 +268,8 @@
 private:
     void changeStateIfWeOwnAllBuffers();
 
+    bool mComponentNowIdle;
+
     DISALLOW_EVIL_CONSTRUCTORS(ExecutingToIdleState);
 };
 
@@ -309,7 +314,8 @@
 
 ACodec::ACodec()
     : mNode(NULL),
-      mSentFormat(false) {
+      mSentFormat(false),
+      mIsEncoder(false) {
     mUninitializedState = new UninitializedState(this);
     mLoadedToIdleState = new LoadedToIdleState(this);
     mIdleToExecutingState = new IdleToExecutingState(this);
@@ -341,6 +347,22 @@
     msg->post();
 }
 
+void ACodec::initiateAllocateComponent(const sp<AMessage> &msg) {
+    msg->setWhat(kWhatAllocateComponent);
+    msg->setTarget(id());
+    msg->post();
+}
+
+void ACodec::initiateConfigureComponent(const sp<AMessage> &msg) {
+    msg->setWhat(kWhatConfigureComponent);
+    msg->setTarget(id());
+    msg->post();
+}
+
+void ACodec::initiateStart() {
+    (new AMessage(kWhatStart, id()))->post();
+}
+
 void ACodec::signalFlush() {
     ALOGV("[%s] signalFlush", mComponentName.c_str());
     (new AMessage(kWhatFlush, id()))->post();
@@ -360,62 +382,75 @@
     CHECK(mDealer[portIndex] == NULL);
     CHECK(mBuffers[portIndex].isEmpty());
 
+    status_t err;
     if (mNativeWindow != NULL && portIndex == kPortIndexOutput) {
-        return allocateOutputBuffersFromNativeWindow();
+        err = allocateOutputBuffersFromNativeWindow();
+    } else {
+        OMX_PARAM_PORTDEFINITIONTYPE def;
+        InitOMXParams(&def);
+        def.nPortIndex = portIndex;
+
+        err = mOMX->getParameter(
+                mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+
+        if (err == OK) {
+            ALOGV("[%s] Allocating %lu buffers of size %lu on %s port",
+                    mComponentName.c_str(),
+                    def.nBufferCountActual, def.nBufferSize,
+                    portIndex == kPortIndexInput ? "input" : "output");
+
+            size_t totalSize = def.nBufferCountActual * def.nBufferSize;
+            mDealer[portIndex] = new MemoryDealer(totalSize, "ACodec");
+
+            for (OMX_U32 i = 0; i < def.nBufferCountActual; ++i) {
+                sp<IMemory> mem = mDealer[portIndex]->allocate(def.nBufferSize);
+                CHECK(mem.get() != NULL);
+
+                IOMX::buffer_id buffer;
+
+                if (!strncasecmp(
+                            mComponentName.c_str(), "OMX.TI.DUCATI1.VIDEO.", 21)) {
+                    if (portIndex == kPortIndexInput && i == 0) {
+                        // Only log this warning once per allocation round.
+
+                        ALOGW("OMX.TI.DUCATI1.VIDEO.* components require the "
+                             "use of OMX_AllocateBuffer instead of the "
+                             "preferred OMX_UseBuffer. Vendor must fix this.");
+                    }
+
+                    err = mOMX->allocateBufferWithBackup(
+                            mNode, portIndex, mem, &buffer);
+                } else {
+                    err = mOMX->useBuffer(mNode, portIndex, mem, &buffer);
+                }
+
+                BufferInfo info;
+                info.mBufferID = buffer;
+                info.mStatus = BufferInfo::OWNED_BY_US;
+                info.mData = new ABuffer(mem->pointer(), def.nBufferSize);
+                mBuffers[portIndex].push(info);
+            }
+        }
     }
 
-    OMX_PARAM_PORTDEFINITIONTYPE def;
-    InitOMXParams(&def);
-    def.nPortIndex = portIndex;
-
-    status_t err = mOMX->getParameter(
-            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
-
     if (err != OK) {
         return err;
     }
 
-    ALOGV("[%s] Allocating %lu buffers of size %lu on %s port",
-            mComponentName.c_str(),
-            def.nBufferCountActual, def.nBufferSize,
-            portIndex == kPortIndexInput ? "input" : "output");
+    sp<AMessage> notify = mNotify->dup();
+    notify->setInt32("what", ACodec::kWhatBuffersAllocated);
 
-    size_t totalSize = def.nBufferCountActual * def.nBufferSize;
-    mDealer[portIndex] = new MemoryDealer(totalSize, "OMXCodec");
+    notify->setInt32("portIndex", portIndex);
+    for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) {
+        AString name = StringPrintf("buffer-id_%d", i);
+        notify->setPointer(name.c_str(), mBuffers[portIndex][i].mBufferID);
 
-    for (OMX_U32 i = 0; i < def.nBufferCountActual; ++i) {
-        sp<IMemory> mem = mDealer[portIndex]->allocate(def.nBufferSize);
-        CHECK(mem.get() != NULL);
-
-        IOMX::buffer_id buffer;
-
-        if (!strcasecmp(
-                    mComponentName.c_str(), "OMX.TI.DUCATI1.VIDEO.DECODER")) {
-            if (portIndex == kPortIndexInput && i == 0) {
-                // Only log this warning once per allocation round.
-
-                ALOGW("OMX.TI.DUCATI1.VIDEO.DECODER requires the use of "
-                     "OMX_AllocateBuffer instead of the preferred "
-                     "OMX_UseBuffer. Vendor must fix this.");
-            }
-
-            err = mOMX->allocateBufferWithBackup(
-                    mNode, portIndex, mem, &buffer);
-        } else {
-            err = mOMX->useBuffer(mNode, portIndex, mem, &buffer);
-        }
-
-        if (err != OK) {
-            return err;
-        }
-
-        BufferInfo info;
-        info.mBufferID = buffer;
-        info.mStatus = BufferInfo::OWNED_BY_US;
-        info.mData = new ABuffer(mem->pointer(), def.nBufferSize);
-        mBuffers[portIndex].push(info);
+        name = StringPrintf("data_%d", i);
+        notify->setObject(name.c_str(), mBuffers[portIndex][i].mData);
     }
 
+    notify->post();
+
     return OK;
 }
 
@@ -671,7 +706,7 @@
     return NULL;
 }
 
-void ACodec::setComponentRole(
+status_t ACodec::setComponentRole(
         bool isEncoder, const char *mime) {
     struct MimeToRole {
         const char *mime;
@@ -700,6 +735,8 @@
             "video_decoder.mpeg4", "video_encoder.mpeg4" },
         { MEDIA_MIMETYPE_VIDEO_H263,
             "video_decoder.h263", "video_encoder.h263" },
+        { MEDIA_MIMETYPE_VIDEO_VPX,
+            "video_decoder.vpx", "video_encoder.vpx" },
     };
 
     static const size_t kNumMimeToRole =
@@ -713,7 +750,7 @@
     }
 
     if (i == kNumMimeToRole) {
-        return;
+        return ERROR_UNSUPPORTED;
     }
 
     const char *role =
@@ -736,50 +773,83 @@
         if (err != OK) {
             ALOGW("[%s] Failed to set standard component role '%s'.",
                  mComponentName.c_str(), role);
+
+            return err;
         }
     }
+
+    return OK;
 }
 
-void ACodec::configureCodec(
+status_t ACodec::configureCodec(
         const char *mime, const sp<AMessage> &msg) {
-    setComponentRole(false /* isEncoder */, mime);
+    int32_t encoder;
+    if (!msg->findInt32("encoder", &encoder)) {
+        encoder = false;
+    }
+
+    mIsEncoder = encoder;
+
+    status_t err = setComponentRole(encoder /* isEncoder */, mime);
+
+    if (err != OK) {
+        return err;
+    }
+
+    int32_t bitRate = 0;
+    if (encoder && !msg->findInt32("bitrate", &bitRate)) {
+        return INVALID_OPERATION;
+    }
 
     if (!strncasecmp(mime, "video/", 6)) {
-        int32_t width, height;
-        CHECK(msg->findInt32("width", &width));
-        CHECK(msg->findInt32("height", &height));
-
-        CHECK_EQ(setupVideoDecoder(mime, width, height),
-                 (status_t)OK);
+        if (encoder) {
+            err = setupVideoEncoder(mime, msg);
+        } else {
+            int32_t width, height;
+            if (!msg->findInt32("width", &width)
+                    || !msg->findInt32("height", &height)) {
+                err = INVALID_OPERATION;
+            } else {
+                err = setupVideoDecoder(mime, width, height);
+            }
+        }
     } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AAC)) {
         int32_t numChannels, sampleRate;
-        CHECK(msg->findInt32("channel-count", &numChannels));
-        CHECK(msg->findInt32("sample-rate", &sampleRate));
-
-        CHECK_EQ(setupAACDecoder(numChannels, sampleRate), (status_t)OK);
+        if (!msg->findInt32("channel-count", &numChannels)
+                || !msg->findInt32("sample-rate", &sampleRate)) {
+            err = INVALID_OPERATION;
+        } else {
+            err = setupAACCodec(encoder, numChannels, sampleRate, bitRate);
+        }
     } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AMR_NB)) {
-        CHECK_EQ(setupAMRDecoder(false /* isWAMR */), (status_t)OK);
+        err = setupAMRCodec(encoder, false /* isWAMR */, bitRate);
     } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AMR_WB)) {
-        CHECK_EQ(setupAMRDecoder(true /* isWAMR */), (status_t)OK);
+        err = setupAMRCodec(encoder, true /* isWAMR */, bitRate);
     } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_G711_ALAW)
             || !strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_G711_MLAW)) {
         // These are PCM-like formats with a fixed sample rate but
         // a variable number of channels.
 
         int32_t numChannels;
-        CHECK(msg->findInt32("channel-count", &numChannels));
+        if (!msg->findInt32("channel-count", &numChannels)) {
+            err = INVALID_OPERATION;
+        } else {
+            err = setupG711Codec(encoder, numChannels);
+        }
+    }
 
-        CHECK_EQ(setupG711Decoder(numChannels), (status_t)OK);
+    if (err != OK) {
+        return err;
     }
 
     int32_t maxInputSize;
     if (msg->findInt32("max-input-size", &maxInputSize)) {
-        CHECK_EQ(setMinBufferSize(kPortIndexInput, (size_t)maxInputSize),
-                 (status_t)OK);
+        err = setMinBufferSize(kPortIndexInput, (size_t)maxInputSize);
     } else if (!strcmp("OMX.Nvidia.aac.decoder", mComponentName.c_str())) {
-        CHECK_EQ(setMinBufferSize(kPortIndexInput, 8192),  // XXX
-                 (status_t)OK);
+        err = setMinBufferSize(kPortIndexInput, 8192);  // XXX
     }
+
+    return err;
 }
 
 status_t ACodec::setMinBufferSize(OMX_U32 portIndex, size_t size) {
@@ -819,12 +889,113 @@
     return OK;
 }
 
-status_t ACodec::setupAACDecoder(int32_t numChannels, int32_t sampleRate) {
+status_t ACodec::selectAudioPortFormat(
+        OMX_U32 portIndex, OMX_AUDIO_CODINGTYPE desiredFormat) {
+    OMX_AUDIO_PARAM_PORTFORMATTYPE format;
+    InitOMXParams(&format);
+
+    format.nPortIndex = portIndex;
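+    // Enumerate the port's supported audio codings until the requested one
+    // is found; the matching entry is then committed via setParameter below.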
+    for (OMX_U32 index = 0;; ++index) {
+        format.nIndex = index;
+
+        status_t err = mOMX->getParameter(
+                mNode, OMX_IndexParamAudioPortFormat,
+                &format, sizeof(format));
+
+        if (err != OK) {
+            return err;
+        }
+
+        if (format.eEncoding == desiredFormat) {
+            break;
+        }
+    }
+
+    return mOMX->setParameter(
+            mNode, OMX_IndexParamAudioPortFormat, &format, sizeof(format));
+}
+
+status_t ACodec::setupAACCodec(
+        bool encoder,
+        int32_t numChannels, int32_t sampleRate, int32_t bitRate) {
+    status_t err = setupRawAudioFormat(
+            encoder ? kPortIndexInput : kPortIndexOutput,
+            sampleRate,
+            numChannels);
+
+    if (err != OK) {
+        return err;
+    }
+
+    if (encoder) {
+        err = selectAudioPortFormat(kPortIndexOutput, OMX_AUDIO_CodingAAC);
+
+        if (err != OK) {
+            return err;
+        }
+
+        OMX_PARAM_PORTDEFINITIONTYPE def;
+        InitOMXParams(&def);
+        def.nPortIndex = kPortIndexOutput;
+
+        err = mOMX->getParameter(
+                mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+
+        if (err != OK) {
+            return err;
+        }
+
+        def.format.audio.bFlagErrorConcealment = OMX_TRUE;
+        def.format.audio.eEncoding = OMX_AUDIO_CodingAAC;
+
+        err = mOMX->setParameter(
+                mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+
+        if (err != OK) {
+            return err;
+        }
+
+        OMX_AUDIO_PARAM_AACPROFILETYPE profile;
+        InitOMXParams(&profile);
+        profile.nPortIndex = kPortIndexOutput;
+
+        err = mOMX->getParameter(
+                mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile));
+
+        if (err != OK) {
+            return err;
+        }
+
+        profile.nChannels = numChannels;
+
+        profile.eChannelMode =
+            (numChannels == 1)
+                ? OMX_AUDIO_ChannelModeMono : OMX_AUDIO_ChannelModeStereo;
+
+        profile.nSampleRate = sampleRate;
+        profile.nBitRate = bitRate;
+        profile.nAudioBandWidth = 0;
+        profile.nFrameLength = 0;
+        profile.nAACtools = OMX_AUDIO_AACToolAll;
+        profile.nAACERtools = OMX_AUDIO_AACERNone;
+        profile.eAACProfile = OMX_AUDIO_AACObjectLC;
+        profile.eAACStreamFormat = OMX_AUDIO_AACStreamFormatMP4FF;
+
+        return mOMX->setParameter(
+                mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile));
+    }
+
     OMX_AUDIO_PARAM_AACPROFILETYPE profile;
     InitOMXParams(&profile);
     profile.nPortIndex = kPortIndexInput;
 
-    status_t err = mOMX->getParameter(
+    err = mOMX->getParameter(
             mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile));
 
     if (err != OK) {
@@ -835,16 +1006,59 @@
     profile.nSampleRate = sampleRate;
     profile.eAACStreamFormat = OMX_AUDIO_AACStreamFormatMP4ADTS;
 
-    err = mOMX->setParameter(
+    return mOMX->setParameter(
             mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile));
-
-    return err;
 }
 
-status_t ACodec::setupAMRDecoder(bool isWAMR) {
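+// Map the requested bitrate onto the lowest AMR band mode whose rate is at
+// least that high; requests above the top rate clamp to WB8 (23850 bps) or
+// NB7 (12200 bps).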
+static OMX_AUDIO_AMRBANDMODETYPE pickModeFromBitRate(
+        bool isAMRWB, int32_t bps) {
+    if (isAMRWB) {
+        if (bps <= 6600) {
+            return OMX_AUDIO_AMRBandModeWB0;
+        } else if (bps <= 8850) {
+            return OMX_AUDIO_AMRBandModeWB1;
+        } else if (bps <= 12650) {
+            return OMX_AUDIO_AMRBandModeWB2;
+        } else if (bps <= 14250) {
+            return OMX_AUDIO_AMRBandModeWB3;
+        } else if (bps <= 15850) {
+            return OMX_AUDIO_AMRBandModeWB4;
+        } else if (bps <= 18250) {
+            return OMX_AUDIO_AMRBandModeWB5;
+        } else if (bps <= 19850) {
+            return OMX_AUDIO_AMRBandModeWB6;
+        } else if (bps <= 23050) {
+            return OMX_AUDIO_AMRBandModeWB7;
+        }
+
+        // 23850 bps
+        return OMX_AUDIO_AMRBandModeWB8;
+    } else {  // AMRNB
+        if (bps <= 4750) {
+            return OMX_AUDIO_AMRBandModeNB0;
+        } else if (bps <= 5150) {
+            return OMX_AUDIO_AMRBandModeNB1;
+        } else if (bps <= 5900) {
+            return OMX_AUDIO_AMRBandModeNB2;
+        } else if (bps <= 6700) {
+            return OMX_AUDIO_AMRBandModeNB3;
+        } else if (bps <= 7400) {
+            return OMX_AUDIO_AMRBandModeNB4;
+        } else if (bps <= 7950) {
+            return OMX_AUDIO_AMRBandModeNB5;
+        } else if (bps <= 10200) {
+            return OMX_AUDIO_AMRBandModeNB6;
+        }
+
+        // 12200 bps
+        return OMX_AUDIO_AMRBandModeNB7;
+    }
+}
+
+status_t ACodec::setupAMRCodec(bool encoder, bool isWAMR, int32_t bitrate) {
     OMX_AUDIO_PARAM_AMRTYPE def;
     InitOMXParams(&def);
-    def.nPortIndex = kPortIndexInput;
+    def.nPortIndex = encoder ? kPortIndexOutput : kPortIndexInput;
 
     status_t err =
         mOMX->getParameter(mNode, OMX_IndexParamAudioAmr, &def, sizeof(def));
@@ -854,14 +1068,24 @@
     }
 
     def.eAMRFrameFormat = OMX_AUDIO_AMRFrameFormatFSF;
+    def.eAMRBandMode = pickModeFromBitRate(isWAMR, bitrate);
 
-    def.eAMRBandMode =
-        isWAMR ? OMX_AUDIO_AMRBandModeWB0 : OMX_AUDIO_AMRBandModeNB0;
+    err = mOMX->setParameter(
+            mNode, OMX_IndexParamAudioAmr, &def, sizeof(def));
 
-    return mOMX->setParameter(mNode, OMX_IndexParamAudioAmr, &def, sizeof(def));
+    if (err != OK) {
+        return err;
+    }
+
+    return setupRawAudioFormat(
+            encoder ? kPortIndexInput : kPortIndexOutput,
+            isWAMR ? 16000 : 8000 /* sampleRate */,
+            1 /* numChannels */);
 }
 
-status_t ACodec::setupG711Decoder(int32_t numChannels) {
+status_t ACodec::setupG711Codec(bool encoder, int32_t numChannels) {
+    CHECK(!encoder);  // XXX TODO
+
     return setupRawAudioFormat(
             kPortIndexInput, 8000 /* sampleRate */, numChannels);
 }
@@ -1001,22 +1225,36 @@
             &format, sizeof(format));
 }
 
-status_t ACodec::setupVideoDecoder(
-        const char *mime, int32_t width, int32_t height) {
-    OMX_VIDEO_CODINGTYPE compressionFormat = OMX_VIDEO_CodingUnused;
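+// Resolve a video mime type to the corresponding OMX coding type;
+// unsupported types yield ERROR_UNSUPPORTED rather than asserting.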
+static status_t GetVideoCodingTypeFromMime(
+        const char *mime, OMX_VIDEO_CODINGTYPE *codingType) {
     if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_AVC, mime)) {
-        compressionFormat = OMX_VIDEO_CodingAVC;
+        *codingType = OMX_VIDEO_CodingAVC;
     } else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_MPEG4, mime)) {
-        compressionFormat = OMX_VIDEO_CodingMPEG4;
+        *codingType = OMX_VIDEO_CodingMPEG4;
     } else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_H263, mime)) {
-        compressionFormat = OMX_VIDEO_CodingH263;
+        *codingType = OMX_VIDEO_CodingH263;
     } else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_MPEG2, mime)) {
-        compressionFormat = OMX_VIDEO_CodingMPEG2;
+        *codingType = OMX_VIDEO_CodingMPEG2;
+    } else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_VPX, mime)) {
+        *codingType = OMX_VIDEO_CodingVPX;
     } else {
-        TRESPASS();
+        *codingType = OMX_VIDEO_CodingUnused;
+        return ERROR_UNSUPPORTED;
     }
 
-    status_t err = setVideoPortFormatType(
+    return OK;
+}
+
+status_t ACodec::setupVideoDecoder(
+        const char *mime, int32_t width, int32_t height) {
+    OMX_VIDEO_CODINGTYPE compressionFormat;
+    status_t err = GetVideoCodingTypeFromMime(mime, &compressionFormat);
+
+    if (err != OK) {
+        return err;
+    }
+
+    err = setVideoPortFormatType(
             kPortIndexInput, compressionFormat, OMX_COLOR_FormatUnused);
 
     if (err != OK) {
@@ -1046,6 +1284,489 @@
     return OK;
 }
 
+status_t ACodec::setupVideoEncoder(const char *mime, const sp<AMessage> &msg) {
+    int32_t tmp;
+    if (!msg->findInt32("color-format", &tmp)) {
+        return INVALID_OPERATION;
+    }
+
+    OMX_COLOR_FORMATTYPE colorFormat =
+        static_cast<OMX_COLOR_FORMATTYPE>(tmp);
+
+    status_t err = setVideoPortFormatType(
+            kPortIndexInput, OMX_VIDEO_CodingUnused, colorFormat);
+
+    if (err != OK) {
+        ALOGE("[%s] does not support color format %d",
+              mComponentName.c_str(), colorFormat);
+
+        return err;
+    }
+
+    /* Input port configuration */
+
+    OMX_PARAM_PORTDEFINITIONTYPE def;
+    InitOMXParams(&def);
+
+    OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video;
+
+    def.nPortIndex = kPortIndexInput;
+
+    err = mOMX->getParameter(
+            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+
+    if (err != OK) {
+        return err;
+    }
+
+    int32_t width, height, bitrate;
+    if (!msg->findInt32("width", &width)
+            || !msg->findInt32("height", &height)
+            || !msg->findInt32("bitrate", &bitrate)) {
+        return INVALID_OPERATION;
+    }
+
+    video_def->nFrameWidth = width;
+    video_def->nFrameHeight = height;
+
+    int32_t stride;
+    if (!msg->findInt32("stride", &stride)) {
+        stride = width;
+    }
+
+    video_def->nStride = stride;
+
+    int32_t sliceHeight;
+    if (!msg->findInt32("slice-height", &sliceHeight)) {
+        sliceHeight = height;
+    }
+
+    video_def->nSliceHeight = sliceHeight;
+
+    def.nBufferSize = (video_def->nStride * video_def->nSliceHeight * 3) / 2;
+
+    float frameRate;
+    if (!msg->findFloat("frame-rate", &frameRate)) {
+        int32_t tmp;
+        if (!msg->findInt32("frame-rate", &tmp)) {
+            return INVALID_OPERATION;
+        }
+        frameRate = (float)tmp;
+    }
+
+    video_def->xFramerate = (OMX_U32)(frameRate * 65536.0f);
+    video_def->eCompressionFormat = OMX_VIDEO_CodingUnused;
+    video_def->eColorFormat = colorFormat;
+
+    err = mOMX->setParameter(
+            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+
+    if (err != OK) {
+        ALOGE("[%s] failed to set input port definition parameters.",
+              mComponentName.c_str());
+
+        return err;
+    }
+
+    /* Output port configuration */
+
+    OMX_VIDEO_CODINGTYPE compressionFormat;
+    err = GetVideoCodingTypeFromMime(mime, &compressionFormat);
+
+    if (err != OK) {
+        return err;
+    }
+
+    err = setVideoPortFormatType(
+            kPortIndexOutput, compressionFormat, OMX_COLOR_FormatUnused);
+
+    if (err != OK) {
+        ALOGE("[%s] does not support compression format %d",
+             mComponentName.c_str(), compressionFormat);
+
+        return err;
+    }
+
+    def.nPortIndex = kPortIndexOutput;
+
+    err = mOMX->getParameter(
+            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+
+    if (err != OK) {
+        return err;
+    }
+
+    video_def->nFrameWidth = width;
+    video_def->nFrameHeight = height;
+    video_def->xFramerate = 0;
+    video_def->nBitrate = bitrate;
+    video_def->eCompressionFormat = compressionFormat;
+    video_def->eColorFormat = OMX_COLOR_FormatUnused;
+
+    err = mOMX->setParameter(
+            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+
+    if (err != OK) {
+        ALOGE("[%s] failed to set output port definition parameters.",
+              mComponentName.c_str());
+
+        return err;
+    }
+
+    switch (compressionFormat) {
+        case OMX_VIDEO_CodingMPEG4:
+            err = setupMPEG4EncoderParameters(msg);
+            break;
+
+        case OMX_VIDEO_CodingH263:
+            err = setupH263EncoderParameters(msg);
+            break;
+
+        case OMX_VIDEO_CodingAVC:
+            err = setupAVCEncoderParameters(msg);
+            break;
+
+        default:
+            break;
+    }
+
+    if (err == OK) {
+        ALOGI("setupVideoEncoder succeeded");
+    }
+
+    return err;
+}
+
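+// Translate the requested I-frame interval (in seconds) into the number of
+// P-frames between consecutive I-frames: a negative interval yields the
+// maximum spacing (effectively a single leading I-frame), zero forces every
+// frame to be an I-frame.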
+static OMX_U32 setPFramesSpacing(int32_t iFramesInterval, int32_t frameRate) {
+    if (iFramesInterval < 0) {
+        return 0xFFFFFFFF;
+    } else if (iFramesInterval == 0) {
+        return 0;
+    }
+    OMX_U32 ret = frameRate * iFramesInterval;
+    CHECK(ret > 1);
+    return ret;
+}
+
+status_t ACodec::setupMPEG4EncoderParameters(const sp<AMessage> &msg) {
+    int32_t bitrate, iFrameInterval;
+    if (!msg->findInt32("bitrate", &bitrate)
+            || !msg->findInt32("i-frame-interval", &iFrameInterval)) {
+        return INVALID_OPERATION;
+    }
+
+    float frameRate;
+    if (!msg->findFloat("frame-rate", &frameRate)) {
+        int32_t tmp;
+        if (!msg->findInt32("frame-rate", &tmp)) {
+            return INVALID_OPERATION;
+        }
+        frameRate = (float)tmp;
+    }
+
+    OMX_VIDEO_PARAM_MPEG4TYPE mpeg4type;
+    InitOMXParams(&mpeg4type);
+    mpeg4type.nPortIndex = kPortIndexOutput;
+
+    status_t err = mOMX->getParameter(
+            mNode, OMX_IndexParamVideoMpeg4, &mpeg4type, sizeof(mpeg4type));
+
+    if (err != OK) {
+        return err;
+    }
+
+    mpeg4type.nSliceHeaderSpacing = 0;
+    mpeg4type.bSVH = OMX_FALSE;
+    mpeg4type.bGov = OMX_FALSE;
+
+    mpeg4type.nAllowedPictureTypes =
+        OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP;
+
+    mpeg4type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate);
+    if (mpeg4type.nPFrames == 0) {
+        mpeg4type.nAllowedPictureTypes = OMX_VIDEO_PictureTypeI;
+    }
+    mpeg4type.nBFrames = 0;
+    mpeg4type.nIDCVLCThreshold = 0;
+    mpeg4type.bACPred = OMX_TRUE;
+    mpeg4type.nMaxPacketSize = 256;
+    mpeg4type.nTimeIncRes = 1000;
+    mpeg4type.nHeaderExtension = 0;
+    mpeg4type.bReversibleVLC = OMX_FALSE;
+
+    int32_t profile;
+    if (msg->findInt32("profile", &profile)) {
+        int32_t level;
+        if (!msg->findInt32("level", &level)) {
+            return INVALID_OPERATION;
+        }
+
+        err = verifySupportForProfileAndLevel(profile, level);
+
+        if (err != OK) {
+            return err;
+        }
+
+        mpeg4type.eProfile = static_cast<OMX_VIDEO_MPEG4PROFILETYPE>(profile);
+        mpeg4type.eLevel = static_cast<OMX_VIDEO_MPEG4LEVELTYPE>(level);
+    }
+
+    err = mOMX->setParameter(
+            mNode, OMX_IndexParamVideoMpeg4, &mpeg4type, sizeof(mpeg4type));
+
+    if (err != OK) {
+        return err;
+    }
+
+    err = configureBitrate(bitrate);
+
+    if (err != OK) {
+        return err;
+    }
+
+    return setupErrorCorrectionParameters();
+}
+
+status_t ACodec::setupH263EncoderParameters(const sp<AMessage> &msg) {
+    int32_t bitrate, iFrameInterval;
+    if (!msg->findInt32("bitrate", &bitrate)
+            || !msg->findInt32("i-frame-interval", &iFrameInterval)) {
+        return INVALID_OPERATION;
+    }
+
+    float frameRate;
+    if (!msg->findFloat("frame-rate", &frameRate)) {
+        int32_t tmp;
+        if (!msg->findInt32("frame-rate", &tmp)) {
+            return INVALID_OPERATION;
+        }
+        frameRate = (float)tmp;
+    }
+
+    OMX_VIDEO_PARAM_H263TYPE h263type;
+    InitOMXParams(&h263type);
+    h263type.nPortIndex = kPortIndexOutput;
+
+    status_t err = mOMX->getParameter(
+            mNode, OMX_IndexParamVideoH263, &h263type, sizeof(h263type));
+
+    if (err != OK) {
+        return err;
+    }
+
+    h263type.nAllowedPictureTypes =
+        OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP;
+
+    h263type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate);
+    if (h263type.nPFrames == 0) {
+        h263type.nAllowedPictureTypes = OMX_VIDEO_PictureTypeI;
+    }
+    h263type.nBFrames = 0;
+
+    int32_t profile;
+    if (msg->findInt32("profile", &profile)) {
+        int32_t level;
+        if (!msg->findInt32("level", &level)) {
+            return INVALID_OPERATION;
+        }
+
+        err = verifySupportForProfileAndLevel(profile, level);
+
+        if (err != OK) {
+            return err;
+        }
+
+        h263type.eProfile = static_cast<OMX_VIDEO_H263PROFILETYPE>(profile);
+        h263type.eLevel = static_cast<OMX_VIDEO_H263LEVELTYPE>(level);
+    }
+
+    h263type.bPLUSPTYPEAllowed = OMX_FALSE;
+    h263type.bForceRoundingTypeToZero = OMX_FALSE;
+    h263type.nPictureHeaderRepetition = 0;
+    h263type.nGOBHeaderInterval = 0;
+
+    err = mOMX->setParameter(
+            mNode, OMX_IndexParamVideoH263, &h263type, sizeof(h263type));
+
+    if (err != OK) {
+        return err;
+    }
+
+    err = configureBitrate(bitrate);
+
+    if (err != OK) {
+        return err;
+    }
+
+    return setupErrorCorrectionParameters();
+}
+
+status_t ACodec::setupAVCEncoderParameters(const sp<AMessage> &msg) {
+    int32_t bitrate, iFrameInterval;
+    if (!msg->findInt32("bitrate", &bitrate)
+            || !msg->findInt32("i-frame-interval", &iFrameInterval)) {
+        return INVALID_OPERATION;
+    }
+
+    float frameRate;
+    if (!msg->findFloat("frame-rate", &frameRate)) {
+        int32_t tmp;
+        if (!msg->findInt32("frame-rate", &tmp)) {
+            return INVALID_OPERATION;
+        }
+        frameRate = (float)tmp;
+    }
+
+    OMX_VIDEO_PARAM_AVCTYPE h264type;
+    InitOMXParams(&h264type);
+    h264type.nPortIndex = kPortIndexOutput;
+
+    status_t err = mOMX->getParameter(
+            mNode, OMX_IndexParamVideoAvc, &h264type, sizeof(h264type));
+
+    if (err != OK) {
+        return err;
+    }
+
+    h264type.nAllowedPictureTypes =
+        OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP;
+
+    int32_t profile;
+    if (msg->findInt32("profile", &profile)) {
+        int32_t level;
+        if (!msg->findInt32("level", &level)) {
+            return INVALID_OPERATION;
+        }
+
+        err = verifySupportForProfileAndLevel(profile, level);
+
+        if (err != OK) {
+            return err;
+        }
+
+        h264type.eProfile = static_cast<OMX_VIDEO_AVCPROFILETYPE>(profile);
+        h264type.eLevel = static_cast<OMX_VIDEO_AVCLEVELTYPE>(level);
+    }
+
+    // XXX
+    if (!strncmp(mComponentName.c_str(), "OMX.TI.DUCATI1", 14)) {
+        h264type.eProfile = OMX_VIDEO_AVCProfileBaseline;
+    }
+
+    if (h264type.eProfile == OMX_VIDEO_AVCProfileBaseline) {
+        h264type.nSliceHeaderSpacing = 0;
+        h264type.bUseHadamard = OMX_TRUE;
+        h264type.nRefFrames = 1;
+        h264type.nBFrames = 0;
+        h264type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate);
+        if (h264type.nPFrames == 0) {
+            h264type.nAllowedPictureTypes = OMX_VIDEO_PictureTypeI;
+        }
+        h264type.nRefIdx10ActiveMinus1 = 0;
+        h264type.nRefIdx11ActiveMinus1 = 0;
+        h264type.bEntropyCodingCABAC = OMX_FALSE;
+        h264type.bWeightedPPrediction = OMX_FALSE;
+        h264type.bconstIpred = OMX_FALSE;
+        h264type.bDirect8x8Inference = OMX_FALSE;
+        h264type.bDirectSpatialTemporal = OMX_FALSE;
+        h264type.nCabacInitIdc = 0;
+    }
+
+    if (h264type.nBFrames != 0) {
+        h264type.nAllowedPictureTypes |= OMX_VIDEO_PictureTypeB;
+    }
+
+    h264type.bEnableUEP = OMX_FALSE;
+    h264type.bEnableFMO = OMX_FALSE;
+    h264type.bEnableASO = OMX_FALSE;
+    h264type.bEnableRS = OMX_FALSE;
+    h264type.bFrameMBsOnly = OMX_TRUE;
+    h264type.bMBAFF = OMX_FALSE;
+    h264type.eLoopFilterMode = OMX_VIDEO_AVCLoopFilterEnable;
+
+    if (!strcasecmp("OMX.Nvidia.h264.encoder", mComponentName.c_str())) {
+        h264type.eLevel = OMX_VIDEO_AVCLevelMax;
+    }
+
+    err = mOMX->setParameter(
+            mNode, OMX_IndexParamVideoAvc, &h264type, sizeof(h264type));
+
+    if (err != OK) {
+        return err;
+    }
+
+    return configureBitrate(bitrate);
+}
+
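+// Iterate the component's advertised profile/level pairs; return OK for the
+// first entry whose profile matches and whose level is at least the one
+// requested, otherwise propagate the error that terminates the query loop.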
+status_t ACodec::verifySupportForProfileAndLevel(
+        int32_t profile, int32_t level) {
+    OMX_VIDEO_PARAM_PROFILELEVELTYPE params;
+    InitOMXParams(&params);
+    params.nPortIndex = kPortIndexOutput;
+
+    for (params.nProfileIndex = 0;; ++params.nProfileIndex) {
+        status_t err = mOMX->getParameter(
+                mNode,
+                OMX_IndexParamVideoProfileLevelQuerySupported,
+                &params,
+                sizeof(params));
+
+        if (err != OK) {
+            return err;
+        }
+
+        int32_t supportedProfile = static_cast<int32_t>(params.eProfile);
+        int32_t supportedLevel = static_cast<int32_t>(params.eLevel);
+
+        if (profile == supportedProfile && level <= supportedLevel) {
+            return OK;
+        }
+    }
+}
+
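+// Request variable-rate control at the given target bitrate on the output
+// port.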
+status_t ACodec::configureBitrate(int32_t bitrate) {
+    OMX_VIDEO_PARAM_BITRATETYPE bitrateType;
+    InitOMXParams(&bitrateType);
+    bitrateType.nPortIndex = kPortIndexOutput;
+
+    status_t err = mOMX->getParameter(
+            mNode, OMX_IndexParamVideoBitrate,
+            &bitrateType, sizeof(bitrateType));
+
+    if (err != OK) {
+        return err;
+    }
+
+    bitrateType.eControlRate = OMX_Video_ControlRateVariable;
+    bitrateType.nTargetBitrate = bitrate;
+
+    return mOMX->setParameter(
+            mNode, OMX_IndexParamVideoBitrate,
+            &bitrateType, sizeof(bitrateType));
+}
+
+status_t ACodec::setupErrorCorrectionParameters() {
+    OMX_VIDEO_PARAM_ERRORCORRECTIONTYPE errorCorrectionType;
+    InitOMXParams(&errorCorrectionType);
+    errorCorrectionType.nPortIndex = kPortIndexOutput;
+
+    status_t err = mOMX->getParameter(
+            mNode, OMX_IndexParamVideoErrorCorrection,
+            &errorCorrectionType, sizeof(errorCorrectionType));
+
+    if (err != OK) {
+        return OK;  // Optional feature. Ignore this failure.
+    }
+
+    errorCorrectionType.bEnableHEC = OMX_FALSE;
+    errorCorrectionType.bEnableResync = OMX_TRUE;
+    errorCorrectionType.nResynchMarkerSpacing = 256;
+    errorCorrectionType.bEnableDataPartitioning = OMX_FALSE;
+    errorCorrectionType.bEnableRVLC = OMX_FALSE;
+
+    return mOMX->setParameter(
+            mNode, OMX_IndexParamVideoErrorCorrection,
+            &errorCorrectionType, sizeof(errorCorrectionType));
+}
+
 status_t ACodec::setVideoFormatOnPort(
         OMX_U32 portIndex,
         int32_t width, int32_t height, OMX_VIDEO_CODINGTYPE compressionFormat) {
@@ -1166,6 +1887,9 @@
             notify->setString("mime", MEDIA_MIMETYPE_VIDEO_RAW);
             notify->setInt32("width", videoDef->nFrameWidth);
             notify->setInt32("height", videoDef->nFrameHeight);
+            notify->setInt32("stride", videoDef->nStride);
+            notify->setInt32("slice-height", videoDef->nSliceHeight);
+            notify->setInt32("color-format", videoDef->eColorFormat);
 
             OMX_CONFIG_RECTTYPE rect;
             InitOMXParams(&rect);
@@ -1241,10 +1965,11 @@
     mSentFormat = true;
 }
 
-void ACodec::signalError(OMX_ERRORTYPE error) {
+void ACodec::signalError(OMX_ERRORTYPE error, status_t internalError) {
     sp<AMessage> notify = mNotify->dup();
     notify->setInt32("what", ACodec::kWhatError);
     notify->setInt32("omx-error", error);
+    notify->setInt32("err", internalError);
     notify->post();
 }
 
@@ -1435,6 +2160,8 @@
 
     sp<RefBase> obj;
     int32_t err = OK;
+    bool eos = false;
+
     if (!msg->findObject("buffer", &obj)) {
         CHECK(msg->findInt32("err", &err));
 
@@ -1442,10 +2169,18 @@
              mCodec->mComponentName.c_str(), err);
 
         obj.clear();
+
+        eos = true;
     }
 
     sp<ABuffer> buffer = static_cast<ABuffer *>(obj.get());
 
+    int32_t tmp;
+    if (buffer != NULL && buffer->meta()->findInt32("eos", &tmp) && tmp) {
+        eos = true;
+        err = ERROR_END_OF_STREAM;
+    }
+
     BufferInfo *info = mCodec->findBufferByID(kPortIndexInput, bufferID);
     CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_UPSTREAM);
 
@@ -1456,7 +2191,7 @@
     switch (mode) {
         case KEEP_BUFFERS:
         {
-            if (buffer == NULL) {
+            if (eos) {
                 if (!mCodec->mPortEOS[kPortIndexInput]) {
                     mCodec->mPortEOS[kPortIndexInput] = true;
                     mCodec->mInputEOSResult = err;
@@ -1467,9 +2202,7 @@
 
         case RESUBMIT_BUFFERS:
         {
-            if (buffer != NULL) {
-                CHECK(!mCodec->mPortEOS[kPortIndexInput]);
-
+            if (buffer != NULL && !mCodec->mPortEOS[kPortIndexInput]) {
                 int64_t timeUs;
                 CHECK(buffer->meta()->findInt64("timeUs", &timeUs));
 
@@ -1480,6 +2213,10 @@
                     flags |= OMX_BUFFERFLAG_CODECCONFIG;
                 }
 
+                if (eos) {
+                    flags |= OMX_BUFFERFLAG_EOS;
+                }
+
                 if (buffer != info->mData) {
                     if (0 && !(flags & OMX_BUFFERFLAG_CODECCONFIG)) {
                         ALOGV("[%s] Needs to copy input data.",
@@ -1493,6 +2230,9 @@
                 if (flags & OMX_BUFFERFLAG_CODECCONFIG) {
                     ALOGV("[%s] calling emptyBuffer %p w/ codec specific data",
                          mCodec->mComponentName.c_str(), bufferID);
+                } else if (flags & OMX_BUFFERFLAG_EOS) {
+                    ALOGV("[%s] calling emptyBuffer %p w/ EOS",
+                         mCodec->mComponentName.c_str(), bufferID);
                 } else {
                     ALOGV("[%s] calling emptyBuffer %p w/ time %lld us",
                          mCodec->mComponentName.c_str(), bufferID, timeUs);
@@ -1509,7 +2249,15 @@
 
                 info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
 
-                getMoreInputDataIfPossible();
+                if (!eos) {
+                    getMoreInputDataIfPossible();
+                } else {
+                    ALOGV("[%s] Signalled EOS on the input port",
+                         mCodec->mComponentName.c_str());
+
+                    mCodec->mPortEOS[kPortIndexInput] = true;
+                    mCodec->mInputEOSResult = err;
+                }
             } else if (!mCodec->mPortEOS[kPortIndexInput]) {
                 if (err != ERROR_END_OF_STREAM) {
                     ALOGV("[%s] Signalling EOS on the input port "
@@ -1582,8 +2330,8 @@
         int64_t timeUs,
         void *platformPrivate,
         void *dataPtr) {
-    ALOGV("[%s] onOMXFillBufferDone %p time %lld us",
-         mCodec->mComponentName.c_str(), bufferID, timeUs);
+    ALOGV("[%s] onOMXFillBufferDone %p time %lld us, flags = 0x%08lx",
+         mCodec->mComponentName.c_str(), bufferID, timeUs, flags);
 
     ssize_t index;
     BufferInfo *info =
@@ -1601,46 +2349,48 @@
 
         case RESUBMIT_BUFFERS:
         {
-            if (rangeLength == 0) {
-                if (!(flags & OMX_BUFFERFLAG_EOS)) {
-                    ALOGV("[%s] calling fillBuffer %p",
-                         mCodec->mComponentName.c_str(), info->mBufferID);
+            if (rangeLength == 0 && !(flags & OMX_BUFFERFLAG_EOS)) {
+                ALOGV("[%s] calling fillBuffer %p",
+                     mCodec->mComponentName.c_str(), info->mBufferID);
 
-                    CHECK_EQ(mCodec->mOMX->fillBuffer(
-                                mCodec->mNode, info->mBufferID),
-                             (status_t)OK);
+                CHECK_EQ(mCodec->mOMX->fillBuffer(
+                            mCodec->mNode, info->mBufferID),
+                         (status_t)OK);
 
-                    info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
-                }
-            } else {
-                if (!mCodec->mSentFormat) {
-                    mCodec->sendFormatChange();
-                }
-
-                if (mCodec->mNativeWindow == NULL) {
-                    info->mData->setRange(rangeOffset, rangeLength);
-                }
-
-                info->mData->meta()->setInt64("timeUs", timeUs);
-
-                sp<AMessage> notify = mCodec->mNotify->dup();
-                notify->setInt32("what", ACodec::kWhatDrainThisBuffer);
-                notify->setPointer("buffer-id", info->mBufferID);
-                notify->setObject("buffer", info->mData);
-
-                sp<AMessage> reply =
-                    new AMessage(kWhatOutputBufferDrained, mCodec->id());
-
-                reply->setPointer("buffer-id", info->mBufferID);
-
-                notify->setMessage("reply", reply);
-
-                notify->post();
-
-                info->mStatus = BufferInfo::OWNED_BY_DOWNSTREAM;
+                info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
+                break;
             }
 
+            if (!mCodec->mIsEncoder && !mCodec->mSentFormat) {
+                mCodec->sendFormatChange();
+            }
+
+            if (mCodec->mNativeWindow == NULL) {
+                info->mData->setRange(rangeOffset, rangeLength);
+            }
+
+            info->mData->meta()->setInt64("timeUs", timeUs);
+
+            sp<AMessage> notify = mCodec->mNotify->dup();
+            notify->setInt32("what", ACodec::kWhatDrainThisBuffer);
+            notify->setPointer("buffer-id", info->mBufferID);
+            notify->setObject("buffer", info->mData);
+            notify->setInt32("flags", flags);
+
+            sp<AMessage> reply =
+                new AMessage(kWhatOutputBufferDrained, mCodec->id());
+
+            reply->setPointer("buffer-id", info->mBufferID);
+
+            notify->setMessage("reply", reply);
+
+            notify->post();
+
+            info->mStatus = BufferInfo::OWNED_BY_DOWNSTREAM;
+
             if (flags & OMX_BUFFERFLAG_EOS) {
+                ALOGV("[%s] saw output EOS", mCodec->mComponentName.c_str());
+
                 sp<AMessage> notify = mCodec->mNotify->dup();
                 notify->setInt32("what", ACodec::kWhatEOS);
                 notify->setInt32("err", mCodec->mInputEOSResult);
@@ -1678,12 +2428,13 @@
             && msg->findInt32("render", &render) && render != 0) {
         // The client wants this buffer to be rendered.
 
-        if (mCodec->mNativeWindow->queueBuffer(
+        status_t err;
+        if ((err = mCodec->mNativeWindow->queueBuffer(
                     mCodec->mNativeWindow.get(),
-                    info->mGraphicBuffer.get()) == OK) {
+                    info->mGraphicBuffer.get())) == OK) {
             info->mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW;
         } else {
-            mCodec->signalError();
+            mCodec->signalError(OMX_ErrorUndefined, err);
             info->mStatus = BufferInfo::OWNED_BY_US;
         }
     } else {
@@ -1758,6 +2509,27 @@
             break;
         }
 
+        case ACodec::kWhatAllocateComponent:
+        {
+            onAllocateComponent(msg);
+            handled = true;
+            break;
+        }
+
+        case ACodec::kWhatConfigureComponent:
+        {
+            onConfigureComponent(msg);
+            handled = true;
+            break;
+        }
+
+        case ACodec::kWhatStart:
+        {
+            onStart();
+            handled = true;
+            break;
+        }
+
         case ACodec::kWhatShutdown:
         {
             sp<AMessage> notify = mCodec->mNotify->dup();
@@ -1787,27 +2559,54 @@
 
 void ACodec::UninitializedState::onSetup(
         const sp<AMessage> &msg) {
+    onAllocateComponent(msg);
+    onConfigureComponent(msg);
+    onStart();
+}
+
+void ACodec::UninitializedState::onAllocateComponent(const sp<AMessage> &msg) {
+    ALOGV("onAllocateComponent");
+
+    if (mCodec->mNode != NULL) {
+        CHECK_EQ(mCodec->mOMX->freeNode(mCodec->mNode), (status_t)OK);
+
+        mCodec->mNativeWindow.clear();
+        mCodec->mNode = NULL;
+        mCodec->mOMX.clear();
+        mCodec->mComponentName.clear();
+    }
+
     OMXClient client;
     CHECK_EQ(client.connect(), (status_t)OK);
 
     sp<IOMX> omx = client.interface();
 
-    AString mime;
-    CHECK(msg->findString("mime", &mime));
-
     Vector<String8> matchingCodecs;
-    OMXCodec::findMatchingCodecs(
-            mime.c_str(),
-            false, // createEncoder
-            NULL,  // matchComponentName
-            0,     // flags
-            &matchingCodecs);
+
+    AString mime;
+
+    AString componentName;
+    if (msg->findString("componentName", &componentName)) {
+        matchingCodecs.push_back(String8(componentName.c_str()));
+    } else {
+        CHECK(msg->findString("mime", &mime));
+
+        int32_t encoder;
+        if (!msg->findInt32("encoder", &encoder)) {
+            encoder = false;
+        }
+
+        OMXCodec::findMatchingCodecs(
+                mime.c_str(),
+                encoder, // createEncoder
+                NULL,  // matchComponentName
+                0,     // flags
+                &matchingCodecs);
+    }
 
     sp<CodecObserver> observer = new CodecObserver;
     IOMX::node_id node = NULL;
 
-    AString componentName;
-
     for (size_t matchIndex = 0; matchIndex < matchingCodecs.size();
             ++matchIndex) {
         componentName = matchingCodecs.itemAt(matchIndex).string();
@@ -1826,7 +2625,12 @@
     }
 
     if (node == NULL) {
-        ALOGE("Unable to instantiate a decoder for type '%s'.", mime.c_str());
+        if (!mime.empty()) {
+            ALOGE("Unable to instantiate a decoder for type '%s'.",
+                 mime.c_str());
+        } else {
+            ALOGE("Unable to instantiate decoder '%s'.", componentName.c_str());
+        }
 
         mCodec->signalError(OMX_ErrorComponentNotFound);
         return;
@@ -1844,20 +2648,52 @@
 
     mCodec->mInputEOSResult = OK;
 
-    mCodec->configureCodec(mime.c_str(), msg);
+    {
+        sp<AMessage> notify = mCodec->mNotify->dup();
+        notify->setInt32("what", ACodec::kWhatComponentAllocated);
+        notify->setString("componentName", mCodec->mComponentName.c_str());
+        notify->post();
+    }
+}
+
+void ACodec::UninitializedState::onConfigureComponent(
+        const sp<AMessage> &msg) {
+    ALOGV("onConfigureComponent");
+
+    CHECK(mCodec->mNode != NULL);
+
+    AString mime;
+    CHECK(msg->findString("mime", &mime));
+
+    status_t err = mCodec->configureCodec(mime.c_str(), msg);
+
+    if (err != OK) {
+        mCodec->signalError(OMX_ErrorUndefined, err);
+        return;
+    }
 
     sp<RefBase> obj;
     if (msg->findObject("native-window", &obj)
-            && strncmp("OMX.google.", componentName.c_str(), 11)) {
+            && strncmp("OMX.google.", mCodec->mComponentName.c_str(), 11)) {
         sp<NativeWindowWrapper> nativeWindow(
                 static_cast<NativeWindowWrapper *>(obj.get()));
         CHECK(nativeWindow != NULL);
         mCodec->mNativeWindow = nativeWindow->getNativeWindow();
     }
-
     CHECK_EQ((status_t)OK, mCodec->initNativeWindow());
 
-    CHECK_EQ(omx->sendCommand(node, OMX_CommandStateSet, OMX_StateIdle),
+    {
+        sp<AMessage> notify = mCodec->mNotify->dup();
+        notify->setInt32("what", ACodec::kWhatComponentConfigured);
+        notify->post();
+    }
+}
+
+void ACodec::UninitializedState::onStart() {
+    ALOGV("onStart");
+
+    CHECK_EQ(mCodec->mOMX->sendCommand(
+                mCodec->mNode, OMX_CommandStateSet, OMX_StateIdle),
              (status_t)OK);
 
     mCodec->changeState(mCodec->mLoadedToIdleState);
@@ -1878,7 +2714,7 @@
              "(error 0x%08x)",
              err);
 
-        mCodec->signalError();
+        mCodec->signalError(OMX_ErrorUndefined, err);
     }
 }
 
@@ -2202,7 +3038,7 @@
                          "port reconfiguration (error 0x%08x)",
                          err);
 
-                    mCodec->signalError();
+                    mCodec->signalError(OMX_ErrorUndefined, err);
 
                     // This is technically not correct, since we were unable
                     // to allocate output buffers and therefore the output port
@@ -2240,7 +3076,8 @@
 ////////////////////////////////////////////////////////////////////////////////
 
 ACodec::ExecutingToIdleState::ExecutingToIdleState(ACodec *codec)
-    : BaseState(codec) {
+    : BaseState(codec),
+      mComponentNowIdle(false) {
 }
 
 bool ACodec::ExecutingToIdleState::onMessageReceived(const sp<AMessage> &msg) {
@@ -2274,6 +3111,7 @@
 void ACodec::ExecutingToIdleState::stateEntered() {
     ALOGV("[%s] Now Executing->Idle", mCodec->mComponentName.c_str());
 
+    mComponentNowIdle = false;
     mCodec->mSentFormat = false;
 }
 
@@ -2285,6 +3123,8 @@
             CHECK_EQ(data1, (OMX_U32)OMX_CommandStateSet);
             CHECK_EQ(data2, (OMX_U32)OMX_StateIdle);
 
+            mComponentNowIdle = true;
+
             changeStateIfWeOwnAllBuffers();
 
             return true;
@@ -2303,7 +3143,7 @@
 }
 
 void ACodec::ExecutingToIdleState::changeStateIfWeOwnAllBuffers() {
-    if (mCodec->allYourBuffersAreBelongToUs()) {
+    if (mComponentNowIdle && mCodec->allYourBuffersAreBelongToUs()) {
         CHECK_EQ(mCodec->mOMX->sendCommand(
                     mCodec->mNode, OMX_CommandStateSet, OMX_StateLoaded),
                  (status_t)OK);
diff --git a/media/libstagefright/Android.mk b/media/libstagefright/Android.mk
index 3f9ba47..829f657 100644
--- a/media/libstagefright/Android.mk
+++ b/media/libstagefright/Android.mk
@@ -29,12 +29,14 @@
         MPEG4Writer.cpp                   \
         MediaBuffer.cpp                   \
         MediaBufferGroup.cpp              \
+        MediaCodec.cpp                    \
         MediaDefs.cpp                     \
         MediaExtractor.cpp                \
         MediaSource.cpp                   \
         MediaSourceSplitter.cpp           \
         MetaData.cpp                      \
         NuCachedSource2.cpp               \
+        NuMediaExtractor.cpp              \
         OMXClient.cpp                     \
         OMXCodec.cpp                      \
         OggExtractor.cpp                  \
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
new file mode 100644
index 0000000..6702a1c
--- /dev/null
+++ b/media/libstagefright/MediaCodec.cpp
@@ -0,0 +1,1185 @@
+/*
+ * Copyright 2012, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "MediaCodec"
+#include <utils/Log.h>
+
+#include <media/stagefright/MediaCodec.h>
+
+#include "include/SoftwareRenderer.h"
+
+#include <gui/SurfaceTextureClient.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/ACodec.h>
+#include <media/stagefright/MediaErrors.h>
+#include <media/stagefright/MetaData.h>
+#include <media/stagefright/NativeWindowWrapper.h>
+
+namespace android {
+
+// static
+sp<MediaCodec> MediaCodec::CreateByType(
+        const sp<ALooper> &looper, const char *mime, bool encoder) {
+    sp<MediaCodec> codec = new MediaCodec(looper);
+    if (codec->init(mime, true /* nameIsType */, encoder) != OK) {
+        return NULL;
+    }
+
+    return codec;
+}
+
+// static
+sp<MediaCodec> MediaCodec::CreateByComponentName(
+        const sp<ALooper> &looper, const char *name) {
+    sp<MediaCodec> codec = new MediaCodec(looper);
+    if (codec->init(name, false /* nameIsType */, false /* encoder */) != OK) {
+        return NULL;
+    }
+
+    return codec;
+}
+
+MediaCodec::MediaCodec(const sp<ALooper> &looper)
+    : mState(UNINITIALIZED),
+      mLooper(looper),
+      mCodec(new ACodec),
+      mFlags(0),
+      mSoftRenderer(NULL),
+      mDequeueInputTimeoutGeneration(0),
+      mDequeueInputReplyID(0),
+      mDequeueOutputTimeoutGeneration(0),
+      mDequeueOutputReplyID(0) {
+}
+
+MediaCodec::~MediaCodec() {
+    CHECK_EQ(mState, UNINITIALIZED);
+}
+
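+// Post 'msg' to its target, block until the reply arrives and fold an
+// optional "err" field in the reply into the returned status.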
+// static
+status_t MediaCodec::PostAndAwaitResponse(
+        const sp<AMessage> &msg, sp<AMessage> *response) {
+    status_t err = msg->postAndAwaitResponse(response);
+
+    if (err != OK) {
+        return err;
+    }
+
+    if (!(*response)->findInt32("err", &err)) {
+        err = OK;
+    }
+
+    return err;
+}
+
+status_t MediaCodec::init(const char *name, bool nameIsType, bool encoder) {
+    // Current video decoders do not return from OMX_FillThisBuffer
+    // quickly, violating the OpenMAX specs. Until that is remedied,
+    // we need to invest in an extra looper to free up the main event
+    // queue.
+    bool needDedicatedLooper = false;
+    if (nameIsType && !strncasecmp(name, "video/", 6)) {
+        needDedicatedLooper = true;
+    } else if (!nameIsType && !strncmp(name, "OMX.TI.DUCATI1.VIDEO.", 21)) {
+        needDedicatedLooper = true;
+    }
+
+    if (needDedicatedLooper) {
+        if (mCodecLooper == NULL) {
+            mCodecLooper = new ALooper;
+            mCodecLooper->setName("CodecLooper");
+            mCodecLooper->start(false, false, ANDROID_PRIORITY_AUDIO);
+        }
+
+        mCodecLooper->registerHandler(mCodec);
+    } else {
+        mLooper->registerHandler(mCodec);
+    }
+
+    mLooper->registerHandler(this);
+
+    mCodec->setNotificationMessage(new AMessage(kWhatCodecNotify, id()));
+
+    sp<AMessage> msg = new AMessage(kWhatInit, id());
+    msg->setString("name", name);
+    msg->setInt32("nameIsType", nameIsType);
+
+    if (nameIsType) {
+        msg->setInt32("encoder", encoder);
+    }
+
+    sp<AMessage> response;
+    return PostAndAwaitResponse(msg, &response);
+}
+
+status_t MediaCodec::configure(
+        const sp<AMessage> &format,
+        const sp<SurfaceTextureClient> &nativeWindow,
+        uint32_t flags) {
+    sp<AMessage> msg = new AMessage(kWhatConfigure, id());
+
+    msg->setMessage("format", format);
+    msg->setInt32("flags", flags);
+
+    if (nativeWindow != NULL) {
+        if (!(mFlags & kFlagIsSoftwareCodec)) {
+            msg->setObject(
+                    "native-window",
+                    new NativeWindowWrapper(nativeWindow));
+        } else {
+            mNativeWindow = nativeWindow;
+        }
+    }
+
+    sp<AMessage> response;
+    return PostAndAwaitResponse(msg, &response);
+}
+
+status_t MediaCodec::start() {
+    sp<AMessage> msg = new AMessage(kWhatStart, id());
+
+    sp<AMessage> response;
+    return PostAndAwaitResponse(msg, &response);
+}
+
+status_t MediaCodec::stop() {
+    sp<AMessage> msg = new AMessage(kWhatStop, id());
+
+    sp<AMessage> response;
+    return PostAndAwaitResponse(msg, &response);
+}
+
+status_t MediaCodec::queueInputBuffer(
+        size_t index,
+        size_t offset,
+        size_t size,
+        int64_t presentationTimeUs,
+        uint32_t flags) {
+    sp<AMessage> msg = new AMessage(kWhatQueueInputBuffer, id());
+    msg->setSize("index", index);
+    msg->setSize("offset", offset);
+    msg->setSize("size", size);
+    msg->setInt64("timeUs", presentationTimeUs);
+    msg->setInt32("flags", flags);
+
+    sp<AMessage> response;
+    return PostAndAwaitResponse(msg, &response);
+}
+
+status_t MediaCodec::dequeueInputBuffer(size_t *index, int64_t timeoutUs) {
+    sp<AMessage> msg = new AMessage(kWhatDequeueInputBuffer, id());
+    msg->setInt64("timeoutUs", timeoutUs);
+
+    sp<AMessage> response;
+    status_t err;
+    if ((err = PostAndAwaitResponse(msg, &response)) != OK) {
+        return err;
+    }
+
+    CHECK(response->findSize("index", index));
+
+    return OK;
+}
+
+status_t MediaCodec::dequeueOutputBuffer(
+        size_t *index,
+        size_t *offset,
+        size_t *size,
+        int64_t *presentationTimeUs,
+        uint32_t *flags,
+        int64_t timeoutUs) {
+    sp<AMessage> msg = new AMessage(kWhatDequeueOutputBuffer, id());
+    msg->setInt64("timeoutUs", timeoutUs);
+
+    sp<AMessage> response;
+    status_t err;
+    if ((err = PostAndAwaitResponse(msg, &response)) != OK) {
+        return err;
+    }
+
+    CHECK(response->findSize("index", index));
+    CHECK(response->findSize("offset", offset));
+    CHECK(response->findSize("size", size));
+    CHECK(response->findInt64("timeUs", presentationTimeUs));
+    CHECK(response->findInt32("flags", (int32_t *)flags));
+
+    return OK;
+}
+
+status_t MediaCodec::renderOutputBufferAndRelease(size_t index) {
+    sp<AMessage> msg = new AMessage(kWhatReleaseOutputBuffer, id());
+    msg->setSize("index", index);
+    msg->setInt32("render", true);
+
+    sp<AMessage> response;
+    return PostAndAwaitResponse(msg, &response);
+}
+
+status_t MediaCodec::releaseOutputBuffer(size_t index) {
+    sp<AMessage> msg = new AMessage(kWhatReleaseOutputBuffer, id());
+    msg->setSize("index", index);
+
+    sp<AMessage> response;
+    return PostAndAwaitResponse(msg, &response);
+}
+
+status_t MediaCodec::getOutputFormat(sp<AMessage> *format) const {
+    sp<AMessage> msg = new AMessage(kWhatGetOutputFormat, id());
+
+    sp<AMessage> response;
+    status_t err;
+    if ((err = PostAndAwaitResponse(msg, &response)) != OK) {
+        return err;
+    }
+
+    CHECK(response->findMessage("format", format));
+
+    return OK;
+}
+
+status_t MediaCodec::getInputBuffers(Vector<sp<ABuffer> > *buffers) const {
+    sp<AMessage> msg = new AMessage(kWhatGetBuffers, id());
+    msg->setInt32("portIndex", kPortIndexInput);
+    msg->setPointer("buffers", buffers);
+
+    sp<AMessage> response;
+    return PostAndAwaitResponse(msg, &response);
+}
+
+status_t MediaCodec::getOutputBuffers(Vector<sp<ABuffer> > *buffers) const {
+    sp<AMessage> msg = new AMessage(kWhatGetBuffers, id());
+    msg->setInt32("portIndex", kPortIndexOutput);
+    msg->setPointer("buffers", buffers);
+
+    sp<AMessage> response;
+    return PostAndAwaitResponse(msg, &response);
+}
+
+status_t MediaCodec::flush() {
+    sp<AMessage> msg = new AMessage(kWhatFlush, id());
+
+    sp<AMessage> response;
+    return PostAndAwaitResponse(msg, &response);
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
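+// Answer any pending dequeueInputBuffer/dequeueOutputBuffer request with
+// INVALID_OPERATION and bump the timeout generation counters so that any
+// outstanding timeout messages become stale.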
+void MediaCodec::cancelPendingDequeueOperations() {
+    if (mFlags & kFlagDequeueInputPending) {
+        sp<AMessage> response = new AMessage;
+        response->setInt32("err", INVALID_OPERATION);
+        response->postReply(mDequeueInputReplyID);
+
+        ++mDequeueInputTimeoutGeneration;
+        mDequeueInputReplyID = 0;
+        mFlags &= ~kFlagDequeueInputPending;
+    }
+
+    if (mFlags & kFlagDequeueOutputPending) {
+        sp<AMessage> response = new AMessage;
+        response->setInt32("err", INVALID_OPERATION);
+        response->postReply(mDequeueOutputReplyID);
+
+        ++mDequeueOutputTimeoutGeneration;
+        mDequeueOutputReplyID = 0;
+        mFlags &= ~kFlagDequeueOutputPending;
+    }
+}
+
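+// Returns true if a reply (a buffer index or an error) was posted to
+// 'replyID'; returns false when no input buffer is available yet and the
+// request remains pending.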
+bool MediaCodec::handleDequeueInputBuffer(uint32_t replyID, bool newRequest) {
+    if (mState != STARTED
+            || (mFlags & kFlagStickyError)
+            || (newRequest && (mFlags & kFlagDequeueInputPending))) {
+        sp<AMessage> response = new AMessage;
+        response->setInt32("err", INVALID_OPERATION);
+
+        response->postReply(replyID);
+
+        return true;
+    }
+
+    ssize_t index = dequeuePortBuffer(kPortIndexInput);
+
+    if (index < 0) {
+        CHECK_EQ(index, -EAGAIN);
+        return false;
+    }
+
+    sp<AMessage> response = new AMessage;
+    response->setSize("index", index);
+    response->postReply(replyID);
+
+    return true;
+}
+
+bool MediaCodec::handleDequeueOutputBuffer(uint32_t replyID, bool newRequest) {
+    sp<AMessage> response = new AMessage;
+
+    if (mState != STARTED
+            || (mFlags & kFlagStickyError)
+            || (newRequest && (mFlags & kFlagDequeueOutputPending))) {
+        response->setInt32("err", INVALID_OPERATION);
+    } else if (mFlags & kFlagOutputBuffersChanged) {
+        response->setInt32("err", INFO_OUTPUT_BUFFERS_CHANGED);
+        mFlags &= ~kFlagOutputBuffersChanged;
+    } else if (mFlags & kFlagOutputFormatChanged) {
+        response->setInt32("err", INFO_FORMAT_CHANGED);
+        mFlags &= ~kFlagOutputFormatChanged;
+    } else {
+        ssize_t index = dequeuePortBuffer(kPortIndexOutput);
+
+        if (index < 0) {
+            CHECK_EQ(index, -EAGAIN);
+            return false;
+        }
+
+        const sp<ABuffer> &buffer =
+            mPortBuffers[kPortIndexOutput].itemAt(index).mData;
+
+        response->setSize("index", index);
+        response->setSize("offset", buffer->offset());
+        response->setSize("size", buffer->size());
+
+        int64_t timeUs;
+        CHECK(buffer->meta()->findInt64("timeUs", &timeUs));
+
+        response->setInt64("timeUs", timeUs);
+
+        int32_t omxFlags;
+        CHECK(buffer->meta()->findInt32("omxFlags", &omxFlags));
+
+        uint32_t flags = 0;
+        if (omxFlags & OMX_BUFFERFLAG_SYNCFRAME) {
+            flags |= BUFFER_FLAG_SYNCFRAME;
+        }
+        if (omxFlags & OMX_BUFFERFLAG_CODECCONFIG) {
+            flags |= BUFFER_FLAG_CODECCONFIG;
+        }
+        if (omxFlags & OMX_BUFFERFLAG_EOS) {
+            flags |= BUFFER_FLAG_EOS;
+        }
+
+        response->setInt32("flags", flags);
+    }
+
+    response->postReply(replyID);
+
+    return true;
+}
+
+void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
+    switch (msg->what()) {
+        case kWhatCodecNotify:
+        {
+            int32_t what;
+            CHECK(msg->findInt32("what", &what));
+
+            switch (what) {
+                case ACodec::kWhatError:
+                {
+                    int32_t omxError, internalError;
+                    CHECK(msg->findInt32("omx-error", &omxError));
+                    CHECK(msg->findInt32("err", &internalError));
+
+                    ALOGE("Codec reported an error. "
+                          "(omx error 0x%08x, internalError %d)",
+                          omxError, internalError);
+
+                    bool sendErrorResponse = true;
+
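+                    // Roll the state machine back to the previous stable
+                    // state where possible; once STARTED the error becomes
+                    // sticky and pending dequeue requests are cancelled.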
+                    switch (mState) {
+                        case INITIALIZING:
+                        {
+                            setState(UNINITIALIZED);
+                            break;
+                        }
+
+                        case CONFIGURING:
+                        {
+                            setState(INITIALIZED);
+                            break;
+                        }
+
+                        case STARTING:
+                        {
+                            setState(CONFIGURED);
+                            break;
+                        }
+
+                        case STOPPING:
+                        {
+                            // Ignore the error, assuming we'll still get
+                            // the shutdown complete notification.
+
+                            sendErrorResponse = false;
+                            break;
+                        }
+
+                        case FLUSHING:
+                        {
+                            setState(STARTED);
+                            break;
+                        }
+
+                        case STARTED:
+                        {
+                            sendErrorResponse = false;
+
+                            mFlags |= kFlagStickyError;
+
+                            cancelPendingDequeueOperations();
+                            break;
+                        }
+
+                        default:
+                        {
+                            sendErrorResponse = false;
+
+                            mFlags |= kFlagStickyError;
+                            break;
+                        }
+                    }
+
+                    if (sendErrorResponse) {
+                        sp<AMessage> response = new AMessage;
+                        response->setInt32("err", UNKNOWN_ERROR);
+
+                        response->postReply(mReplyID);
+                    }
+                    break;
+                }
+
+                case ACodec::kWhatComponentAllocated:
+                {
+                    CHECK_EQ(mState, INITIALIZING);
+                    setState(INITIALIZED);
+
+                    AString componentName;
+                    CHECK(msg->findString("componentName", &componentName));
+
+                    if (componentName.startsWith("OMX.google.")) {
+                        mFlags |= kFlagIsSoftwareCodec;
+                    } else {
+                        mFlags &= ~kFlagIsSoftwareCodec;
+                    }
+
+                    (new AMessage)->postReply(mReplyID);
+                    break;
+                }
+
+                case ACodec::kWhatComponentConfigured:
+                {
+                    CHECK_EQ(mState, CONFIGURING);
+                    setState(CONFIGURED);
+
+                    (new AMessage)->postReply(mReplyID);
+                    break;
+                }
+
+                case ACodec::kWhatBuffersAllocated:
+                {
+                    int32_t portIndex;
+                    CHECK(msg->findInt32("portIndex", &portIndex));
+
+                    ALOGV("%s buffers allocated",
+                          portIndex == kPortIndexInput ? "input" : "output");
+
+                    CHECK(portIndex == kPortIndexInput
+                            || portIndex == kPortIndexOutput);
+
+                    mPortBuffers[portIndex].clear();
+
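+                    // The codec reports its buffers as paired entries named
+                    // "buffer-id_<i>" and "data_<i>"; collect them until the
+                    // next id is missing.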
+                    Vector<BufferInfo> *buffers = &mPortBuffers[portIndex];
+                    for (size_t i = 0;; ++i) {
+                        AString name = StringPrintf("buffer-id_%d", i);
+
+                        void *bufferID;
+                        if (!msg->findPointer(name.c_str(), &bufferID)) {
+                            break;
+                        }
+
+                        name = StringPrintf("data_%d", i);
+
+                        sp<RefBase> obj;
+                        CHECK(msg->findObject(name.c_str(), &obj));
+
+                        BufferInfo info;
+                        info.mBufferID = bufferID;
+                        info.mData = static_cast<ABuffer *>(obj.get());
+                        info.mOwnedByClient = false;
+
+                        buffers->push_back(info);
+                    }
+
+                    if (portIndex == kPortIndexOutput) {
+                        if (mState == STARTING) {
+                            // Output buffers are always allocated after the
+                            // input buffers, so this is a good indication
+                            // that all buffers have now been allocated.
+                            setState(STARTED);
+                            (new AMessage)->postReply(mReplyID);
+                        } else {
+                            mFlags |= kFlagOutputBuffersChanged;
+                        }
+                    }
+                    break;
+                }
+
+                case ACodec::kWhatOutputFormatChanged:
+                {
+                    ALOGV("codec output format changed");
+
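+                    // Software video decoders don't render to the native
+                    // window themselves, so (re)create a SoftwareRenderer
+                    // matching the new output dimensions and color format.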
+                    if ((mFlags & kFlagIsSoftwareCodec)
+                            && mNativeWindow != NULL) {
+                        AString mime;
+                        CHECK(msg->findString("mime", &mime));
+
+                        if (!strncasecmp("video/", mime.c_str(), 6)) {
+                            delete mSoftRenderer;
+                            mSoftRenderer = NULL;
+
+                            int32_t width, height;
+                            CHECK(msg->findInt32("width", &width));
+                            CHECK(msg->findInt32("height", &height));
+
+                            int32_t colorFormat;
+                            CHECK(msg->findInt32(
+                                        "color-format", &colorFormat));
+
+                            sp<MetaData> meta = new MetaData;
+                            meta->setInt32(kKeyWidth, width);
+                            meta->setInt32(kKeyHeight, height);
+                            meta->setInt32(kKeyColorFormat, colorFormat);
+
+                            mSoftRenderer =
+                                new SoftwareRenderer(mNativeWindow, meta);
+                        }
+                    }
+
+                    mOutputFormat = msg;
+                    mFlags |= kFlagOutputFormatChanged;
+                    break;
+                }
+
+                case ACodec::kWhatFillThisBuffer:
+                {
+                    /* size_t index = */ updateBuffers(kPortIndexInput, msg);
+
+                    if (mState == FLUSHING) {
+                        returnBuffersToCodecOnPort(kPortIndexInput);
+                        break;
+                    }
+
+                    if (mFlags & kFlagDequeueInputPending) {
+                        CHECK(handleDequeueInputBuffer(mDequeueInputReplyID));
+
+                        ++mDequeueInputTimeoutGeneration;
+                        mFlags &= ~kFlagDequeueInputPending;
+                        mDequeueInputReplyID = 0;
+                    }
+                    break;
+                }
+
+                case ACodec::kWhatDrainThisBuffer:
+                {
+                    /* size_t index = */ updateBuffers(kPortIndexOutput, msg);
+
+                    if (mState == FLUSHING) {
+                        returnBuffersToCodecOnPort(kPortIndexOutput);
+                        break;
+                    }
+
+                    sp<RefBase> obj;
+                    CHECK(msg->findObject("buffer", &obj));
+
+                    sp<ABuffer> buffer = static_cast<ABuffer *>(obj.get());
+
+                    int32_t omxFlags;
+                    CHECK(msg->findInt32("flags", &omxFlags));
+
+                    buffer->meta()->setInt32("omxFlags", omxFlags);
+
+                    if (mFlags & kFlagDequeueOutputPending) {
+                        CHECK(handleDequeueOutputBuffer(mDequeueOutputReplyID));
+
+                        ++mDequeueOutputTimeoutGeneration;
+                        mFlags &= ~kFlagDequeueOutputPending;
+                        mDequeueOutputReplyID = 0;
+                    }
+                    break;
+                }
+
+                case ACodec::kWhatEOS:
+                {
+                    // The client already learns about EOS through the
+                    // BUFFER_FLAG_EOS flag attached to the corresponding
+                    // output buffer, so there's nothing more to do here.
+                    break;
+                }
+
+                case ACodec::kWhatShutdownCompleted:
+                {
+                    CHECK_EQ(mState, STOPPING);
+                    setState(UNINITIALIZED);
+
+                    (new AMessage)->postReply(mReplyID);
+                    break;
+                }
+
+                case ACodec::kWhatFlushCompleted:
+                {
+                    CHECK_EQ(mState, FLUSHING);
+                    setState(STARTED);
+
+                    mCodec->signalResume();
+
+                    (new AMessage)->postReply(mReplyID);
+                    break;
+                }
+
+                default:
+                    TRESPASS();
+            }
+            break;
+        }
+
+        case kWhatInit:
+        {
+            uint32_t replyID;
+            CHECK(msg->senderAwaitsResponse(&replyID));
+
+            if (mState != UNINITIALIZED) {
+                sp<AMessage> response = new AMessage;
+                response->setInt32("err", INVALID_OPERATION);
+
+                response->postReply(replyID);
+                break;
+            }
+
+            mReplyID = replyID;
+            setState(INITIALIZING);
+
+            AString name;
+            CHECK(msg->findString("name", &name));
+
+            int32_t nameIsType;
+            int32_t encoder = false;
+            if (!msg->findInt32("nameIsType", &nameIsType)) {
+                nameIsType = false;
+            } else {
+                CHECK(msg->findInt32("encoder", &encoder));
+            }
+
+            sp<AMessage> format = new AMessage;
+
+            if (nameIsType) {
+                format->setString("mime", name.c_str());
+                format->setInt32("encoder", encoder);
+            } else {
+                format->setString("componentName", name.c_str());
+            }
+
+            mCodec->initiateAllocateComponent(format);
+            break;
+        }
+
+        case kWhatConfigure:
+        {
+            uint32_t replyID;
+            CHECK(msg->senderAwaitsResponse(&replyID));
+
+            if (mState != INITIALIZED) {
+                sp<AMessage> response = new AMessage;
+                response->setInt32("err", INVALID_OPERATION);
+
+                response->postReply(replyID);
+                break;
+            }
+
+            mReplyID = replyID;
+            setState(CONFIGURING);
+
+            sp<RefBase> obj;
+            if (!msg->findObject("native-window", &obj)) {
+                obj.clear();
+            }
+
+            sp<AMessage> format;
+            CHECK(msg->findMessage("format", &format));
+
+            if (obj != NULL) {
+                format->setObject("native-window", obj);
+            }
+
+            uint32_t flags;
+            CHECK(msg->findInt32("flags", (int32_t *)&flags));
+
+            if (flags & CONFIGURE_FLAG_ENCODE) {
+                format->setInt32("encoder", true);
+            }
+
+            mCodec->initiateConfigureComponent(format);
+            break;
+        }
+
+        case kWhatStart:
+        {
+            uint32_t replyID;
+            CHECK(msg->senderAwaitsResponse(&replyID));
+
+            if (mState != CONFIGURED) {
+                sp<AMessage> response = new AMessage;
+                response->setInt32("err", INVALID_OPERATION);
+
+                response->postReply(replyID);
+                break;
+            }
+
+            mReplyID = replyID;
+            setState(STARTING);
+
+            mCodec->initiateStart();
+            break;
+        }
+
+        case kWhatStop:
+        {
+            uint32_t replyID;
+            CHECK(msg->senderAwaitsResponse(&replyID));
+
+            if (mState != INITIALIZED
+                    && mState != CONFIGURED && mState != STARTED) {
+                sp<AMessage> response = new AMessage;
+                response->setInt32("err", INVALID_OPERATION);
+
+                response->postReply(replyID);
+                break;
+            }
+
+            mReplyID = replyID;
+            setState(STOPPING);
+
+            mCodec->initiateShutdown();
+            returnBuffersToCodec();
+            break;
+        }
+
+        case kWhatDequeueInputBuffer:
+        {
+            uint32_t replyID;
+            CHECK(msg->senderAwaitsResponse(&replyID));
+
+            if (handleDequeueInputBuffer(replyID, true /* new request */)) {
+                break;
+            }
+
+            int64_t timeoutUs;
+            CHECK(msg->findInt64("timeoutUs", &timeoutUs));
+
+            if (timeoutUs == 0ll) {
+                sp<AMessage> response = new AMessage;
+                response->setInt32("err", -EAGAIN);
+                response->postReply(replyID);
+                break;
+            }
+
+            mFlags |= kFlagDequeueInputPending;
+            mDequeueInputReplyID = replyID;
+
+            if (timeoutUs > 0ll) {
+                sp<AMessage> timeoutMsg =
+                    new AMessage(kWhatDequeueInputTimedOut, id());
+                timeoutMsg->setInt32(
+                        "generation", ++mDequeueInputTimeoutGeneration);
+                timeoutMsg->post(timeoutUs);
+            }
+            break;
+        }
+
+        case kWhatDequeueInputTimedOut:
+        {
+            int32_t generation;
+            CHECK(msg->findInt32("generation", &generation));
+
+            if (generation != mDequeueInputTimeoutGeneration) {
+                // Obsolete
+                break;
+            }
+
+            CHECK(mFlags & kFlagDequeueInputPending);
+
+            sp<AMessage> response = new AMessage;
+            response->setInt32("err", -EAGAIN);
+            response->postReply(mDequeueInputReplyID);
+
+            mFlags &= ~kFlagDequeueInputPending;
+            mDequeueInputReplyID = 0;
+            break;
+        }
+
+        case kWhatQueueInputBuffer:
+        {
+            uint32_t replyID;
+            CHECK(msg->senderAwaitsResponse(&replyID));
+
+            if (mState != STARTED || (mFlags & kFlagStickyError)) {
+                sp<AMessage> response = new AMessage;
+                response->setInt32("err", INVALID_OPERATION);
+
+                response->postReply(replyID);
+                break;
+            }
+
+            status_t err = onQueueInputBuffer(msg);
+
+            sp<AMessage> response = new AMessage;
+            response->setInt32("err", err);
+            response->postReply(replyID);
+            break;
+        }
+
+        case kWhatDequeueOutputBuffer:
+        {
+            uint32_t replyID;
+            CHECK(msg->senderAwaitsResponse(&replyID));
+
+            if (handleDequeueOutputBuffer(replyID, true /* new request */)) {
+                break;
+            }
+
+            int64_t timeoutUs;
+            CHECK(msg->findInt64("timeoutUs", &timeoutUs));
+
+            if (timeoutUs == 0ll) {
+                sp<AMessage> response = new AMessage;
+                response->setInt32("err", -EAGAIN);
+                response->postReply(replyID);
+                break;
+            }
+
+            mFlags |= kFlagDequeueOutputPending;
+            mDequeueOutputReplyID = replyID;
+
+            if (timeoutUs > 0ll) {
+                sp<AMessage> timeoutMsg =
+                    new AMessage(kWhatDequeueOutputTimedOut, id());
+                timeoutMsg->setInt32(
+                        "generation", ++mDequeueOutputTimeoutGeneration);
+                timeoutMsg->post(timeoutUs);
+            }
+            break;
+        }
+
+        case kWhatDequeueOutputTimedOut:
+        {
+            int32_t generation;
+            CHECK(msg->findInt32("generation", &generation));
+
+            if (generation != mDequeueOutputTimeoutGeneration) {
+                // Obsolete
+                break;
+            }
+
+            CHECK(mFlags & kFlagDequeueOutputPending);
+
+            sp<AMessage> response = new AMessage;
+            response->setInt32("err", -EAGAIN);
+            response->postReply(mDequeueOutputReplyID);
+
+            mFlags &= ~kFlagDequeueOutputPending;
+            mDequeueOutputReplyID = 0;
+            break;
+        }
+
+        case kWhatReleaseOutputBuffer:
+        {
+            uint32_t replyID;
+            CHECK(msg->senderAwaitsResponse(&replyID));
+
+            if (mState != STARTED || (mFlags & kFlagStickyError)) {
+                sp<AMessage> response = new AMessage;
+                response->setInt32("err", INVALID_OPERATION);
+
+                response->postReply(replyID);
+                break;
+            }
+
+            status_t err = onReleaseOutputBuffer(msg);
+
+            sp<AMessage> response = new AMessage;
+            response->setInt32("err", err);
+            response->postReply(replyID);
+            break;
+        }
+
+        case kWhatGetBuffers:
+        {
+            uint32_t replyID;
+            CHECK(msg->senderAwaitsResponse(&replyID));
+
+            if (mState != STARTED || (mFlags & kFlagStickyError)) {
+                sp<AMessage> response = new AMessage;
+                response->setInt32("err", INVALID_OPERATION);
+
+                response->postReply(replyID);
+                break;
+            }
+
+            int32_t portIndex;
+            CHECK(msg->findInt32("portIndex", &portIndex));
+
+            Vector<sp<ABuffer> > *dstBuffers;
+            CHECK(msg->findPointer("buffers", (void **)&dstBuffers));
+
+            dstBuffers->clear();
+            const Vector<BufferInfo> &srcBuffers = mPortBuffers[portIndex];
+
+            for (size_t i = 0; i < srcBuffers.size(); ++i) {
+                const BufferInfo &info = srcBuffers.itemAt(i);
+
+                dstBuffers->push_back(info.mData);
+            }
+
+            (new AMessage)->postReply(replyID);
+            break;
+        }
+
+        case kWhatFlush:
+        {
+            uint32_t replyID;
+            CHECK(msg->senderAwaitsResponse(&replyID));
+
+            if (mState != STARTED || (mFlags & kFlagStickyError)) {
+                sp<AMessage> response = new AMessage;
+                response->setInt32("err", INVALID_OPERATION);
+
+                response->postReply(replyID);
+                break;
+            }
+
+            mReplyID = replyID;
+            setState(FLUSHING);
+
+            mCodec->signalFlush();
+            returnBuffersToCodec();
+            break;
+        }
+
+        case kWhatGetOutputFormat:
+        {
+            uint32_t replyID;
+            CHECK(msg->senderAwaitsResponse(&replyID));
+
+            if ((mState != STARTED && mState != FLUSHING)
+                    || (mFlags & kFlagStickyError)) {
+                sp<AMessage> response = new AMessage;
+                response->setInt32("err", INVALID_OPERATION);
+
+                response->postReply(replyID);
+                break;
+            }
+
+            sp<AMessage> response = new AMessage;
+            response->setMessage("format", mOutputFormat);
+            response->postReply(replyID);
+            break;
+        }
+
+        default:
+            TRESPASS();
+    }
+}
+
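+// Entering UNINITIALIZED releases the software renderer, the native window and
+// the cached output format and clears the sticky flags; every state change
+// also cancels any pending dequeue requests.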
+void MediaCodec::setState(State newState) {
+    if (newState == UNINITIALIZED) {
+        delete mSoftRenderer;
+        mSoftRenderer = NULL;
+
+        mNativeWindow.clear();
+
+        mOutputFormat.clear();
+        mFlags &= ~kFlagOutputFormatChanged;
+        mFlags &= ~kFlagOutputBuffersChanged;
+        mFlags &= ~kFlagStickyError;
+    }
+
+    mState = newState;
+
+    cancelPendingDequeueOperations();
+}
+
+void MediaCodec::returnBuffersToCodec() {
+    returnBuffersToCodecOnPort(kPortIndexInput);
+    returnBuffersToCodecOnPort(kPortIndexOutput);
+}
+
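+// Posts the stored reply message of every buffer still held on the given port
+// back to the codec; input-port replies carry ERROR_END_OF_STREAM since no
+// data was queued. Also clears the list of buffers available for dequeueing.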
+void MediaCodec::returnBuffersToCodecOnPort(int32_t portIndex) {
+    CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);
+
+    Vector<BufferInfo> *buffers = &mPortBuffers[portIndex];
+
+    for (size_t i = 0; i < buffers->size(); ++i) {
+        BufferInfo *info = &buffers->editItemAt(i);
+
+        if (info->mNotify != NULL) {
+            sp<AMessage> msg = info->mNotify;
+            info->mNotify = NULL;
+            info->mOwnedByClient = false;
+
+            if (portIndex == kPortIndexInput) {
+                msg->setInt32("err", ERROR_END_OF_STREAM);
+            }
+            msg->post();
+        }
+    }
+
+    mAvailPortBuffers[portIndex].clear();
+}
+
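+// Looks up the buffer identified by "buffer-id" in the codec's notification,
+// stores the codec's reply message with it and adds its index to the list of
+// buffers available for dequeueing.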
+size_t MediaCodec::updateBuffers(
+        int32_t portIndex, const sp<AMessage> &msg) {
+    CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);
+
+    void *bufferID;
+    CHECK(msg->findPointer("buffer-id", &bufferID));
+
+    Vector<BufferInfo> *buffers = &mPortBuffers[portIndex];
+
+    for (size_t i = 0; i < buffers->size(); ++i) {
+        BufferInfo *info = &buffers->editItemAt(i);
+
+        if (info->mBufferID == bufferID) {
+            CHECK(info->mNotify == NULL);
+            CHECK(msg->findMessage("reply", &info->mNotify));
+
+            mAvailPortBuffers[portIndex].push_back(i);
+
+            return i;
+        }
+    }
+
+    TRESPASS();
+
+    return 0;
+}
+
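+// Validates the client-supplied index, offset and size, attaches the timestamp
+// and EOS/codec-config metadata to the buffer and returns it to the codec via
+// the stored reply message.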
+status_t MediaCodec::onQueueInputBuffer(const sp<AMessage> &msg) {
+    size_t index;
+    size_t offset;
+    size_t size;
+    int64_t timeUs;
+    uint32_t flags;
+    CHECK(msg->findSize("index", &index));
+    CHECK(msg->findSize("offset", &offset));
+    CHECK(msg->findSize("size", &size));
+    CHECK(msg->findInt64("timeUs", &timeUs));
+    CHECK(msg->findInt32("flags", (int32_t *)&flags));
+
+    if (index >= mPortBuffers[kPortIndexInput].size()) {
+        return -ERANGE;
+    }
+
+    BufferInfo *info = &mPortBuffers[kPortIndexInput].editItemAt(index);
+
+    if (info->mNotify == NULL || !info->mOwnedByClient) {
+        return -EACCES;
+    }
+
+    if (offset + size > info->mData->capacity()) {
+        return -EINVAL;
+    }
+
+    sp<AMessage> reply = info->mNotify;
+    info->mNotify = NULL;
+    info->mOwnedByClient = false;
+
+    info->mData->setRange(offset, size);
+    info->mData->meta()->setInt64("timeUs", timeUs);
+
+    if (flags & BUFFER_FLAG_EOS) {
+        info->mData->meta()->setInt32("eos", true);
+    }
+
+    if (flags & BUFFER_FLAG_CODECCONFIG) {
+        info->mData->meta()->setInt32("csd", true);
+    }
+
+    reply->setObject("buffer", info->mData);
+    reply->post();
+
+    return OK;
+}
+
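+// Returns an output buffer to the codec. If rendering was requested the reply
+// is flagged accordingly and, for software codecs, the frame is pushed to the
+// native window through the SoftwareRenderer first.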
+status_t MediaCodec::onReleaseOutputBuffer(const sp<AMessage> &msg) {
+    size_t index;
+    CHECK(msg->findSize("index", &index));
+
+    int32_t render;
+    if (!msg->findInt32("render", &render)) {
+        render = 0;
+    }
+
+    if (mState != STARTED) {
+        return -EINVAL;
+    }
+
+    if (index >= mPortBuffers[kPortIndexOutput].size()) {
+        return -ERANGE;
+    }
+
+    BufferInfo *info = &mPortBuffers[kPortIndexOutput].editItemAt(index);
+
+    if (info->mNotify == NULL || !info->mOwnedByClient) {
+        return -EACCES;
+    }
+
+    if (render) {
+        info->mNotify->setInt32("render", true);
+
+        if (mSoftRenderer != NULL) {
+            mSoftRenderer->render(
+                    info->mData->data(), info->mData->size(), NULL);
+        }
+    }
+
+    info->mNotify->post();
+    info->mNotify = NULL;
+    info->mOwnedByClient = false;
+
+    return OK;
+}
+
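+// Removes the first available buffer index on the given port, marks it as
+// owned by the client and returns it; returns -EAGAIN if no buffer is
+// currently available.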
+ssize_t MediaCodec::dequeuePortBuffer(int32_t portIndex) {
+    CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);
+
+    List<size_t> *availBuffers = &mAvailPortBuffers[portIndex];
+
+    if (availBuffers->empty()) {
+        return -EAGAIN;
+    }
+
+    size_t index = *availBuffers->begin();
+    availBuffers->erase(availBuffers->begin());
+
+    BufferInfo *info = &mPortBuffers[portIndex].editItemAt(index);
+    CHECK(!info->mOwnedByClient);
+    info->mOwnedByClient = true;
+
+    return index;
+}
+
+}  // namespace android
diff --git a/media/libstagefright/NuMediaExtractor.cpp b/media/libstagefright/NuMediaExtractor.cpp
new file mode 100644
index 0000000..f2f8436
--- /dev/null
+++ b/media/libstagefright/NuMediaExtractor.cpp
@@ -0,0 +1,433 @@
+/*
+ * Copyright 2012, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "NuMediaExtractor"
+#include <utils/Log.h>
+
+#include <media/stagefright/NuMediaExtractor.h>
+
+#include "include/ESDS.h"
+
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/DataSource.h>
+#include <media/stagefright/MediaBuffer.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MediaErrors.h>
+#include <media/stagefright/MediaExtractor.h>
+#include <media/stagefright/MediaSource.h>
+#include <media/stagefright/MetaData.h>
+#include <media/stagefright/Utils.h>
+
+namespace android {
+
+NuMediaExtractor::NuMediaExtractor() {
+}
+
+NuMediaExtractor::~NuMediaExtractor() {
+    releaseTrackSamples();
+
+    for (size_t i = 0; i < mSelectedTracks.size(); ++i) {
+        TrackInfo *info = &mSelectedTracks.editItemAt(i);
+
+        CHECK_EQ((status_t)OK, info->mSource->stop());
+    }
+
+    mSelectedTracks.clear();
+}
+
+status_t NuMediaExtractor::setDataSource(const char *path) {
+    sp<DataSource> dataSource = DataSource::CreateFromURI(path);
+
+    if (dataSource == NULL) {
+        return -ENOENT;
+    }
+
+    mImpl = MediaExtractor::Create(dataSource);
+
+    if (mImpl == NULL) {
+        return ERROR_UNSUPPORTED;
+    }
+
+    return OK;
+}
+
+size_t NuMediaExtractor::countTracks() const {
+    return mImpl == NULL ? 0 : mImpl->countTracks();
+}
+
+status_t NuMediaExtractor::getTrackFormat(
+        size_t index, sp<AMessage> *format) const {
+    *format = NULL;
+
+    if (mImpl == NULL) {
+        return -EINVAL;
+    }
+
+    if (index >= mImpl->countTracks()) {
+        return -ERANGE;
+    }
+
+    sp<MetaData> meta = mImpl->getTrackMetaData(index);
+
+    const char *mime;
+    CHECK(meta->findCString(kKeyMIMEType, &mime));
+
+    sp<AMessage> msg = new AMessage;
+    msg->setString("mime", mime);
+
+    if (!strncasecmp("video/", mime, 6)) {
+        int32_t width, height;
+        CHECK(meta->findInt32(kKeyWidth, &width));
+        CHECK(meta->findInt32(kKeyHeight, &height));
+
+        msg->setInt32("width", width);
+        msg->setInt32("height", height);
+    } else {
+        CHECK(!strncasecmp("audio/", mime, 6));
+
+        int32_t numChannels, sampleRate;
+        CHECK(meta->findInt32(kKeyChannelCount, &numChannels));
+        CHECK(meta->findInt32(kKeySampleRate, &sampleRate));
+
+        msg->setInt32("channel-count", numChannels);
+        msg->setInt32("sample-rate", sampleRate);
+    }
+
+    int32_t maxInputSize;
+    if (meta->findInt32(kKeyMaxInputSize, &maxInputSize)) {
+        msg->setInt32("max-input-size", maxInputSize);
+    }
+
+    uint32_t type;
+    const void *data;
+    size_t size;
+    if (meta->findData(kKeyAVCC, &type, &data, &size)) {
+        // Parse the AVCDecoderConfigurationRecord
+
+        const uint8_t *ptr = (const uint8_t *)data;
+
+        CHECK(size >= 7);
+        CHECK_EQ((unsigned)ptr[0], 1u);  // configurationVersion == 1
+        uint8_t profile = ptr[1];
+        uint8_t level = ptr[3];
+
+        // There is decodable content out there that fails the following
+        // assertion, let's be lenient for now...
+        // CHECK((ptr[4] >> 2) == 0x3f);  // reserved
+
+        size_t lengthSize = 1 + (ptr[4] & 3);
+
+        // The check below is commented out because H264_QVGA_500_NO_AUDIO.3gp
+        // violates it...
+        // CHECK((ptr[5] >> 5) == 7);  // reserved
+
+        size_t numSeqParameterSets = ptr[5] & 31;
+
+        ptr += 6;
+        size -= 6;
+
+        sp<ABuffer> buffer = new ABuffer(1024);
+        buffer->setRange(0, 0);
+
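+        // Convert each parameter set to Annex-B framing by prefixing a
+        // 0x00000001 start code; the 1024-byte buffer is assumed to be
+        // large enough to hold all parameter sets.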
+        for (size_t i = 0; i < numSeqParameterSets; ++i) {
+            CHECK(size >= 2);
+            size_t length = U16_AT(ptr);
+
+            ptr += 2;
+            size -= 2;
+
+            CHECK(size >= length);
+
+            memcpy(buffer->data() + buffer->size(), "\x00\x00\x00\x01", 4);
+            memcpy(buffer->data() + buffer->size() + 4, ptr, length);
+            buffer->setRange(0, buffer->size() + 4 + length);
+
+            ptr += length;
+            size -= length;
+        }
+
+        buffer->meta()->setInt32("csd", true);
+        buffer->meta()->setInt64("timeUs", 0);
+
+        msg->setObject("csd-0", buffer);
+
+        buffer = new ABuffer(1024);
+        buffer->setRange(0, 0);
+
+        CHECK(size >= 1);
+        size_t numPictureParameterSets = *ptr;
+        ++ptr;
+        --size;
+
+        for (size_t i = 0; i < numPictureParameterSets; ++i) {
+            CHECK(size >= 2);
+            size_t length = U16_AT(ptr);
+
+            ptr += 2;
+            size -= 2;
+
+            CHECK(size >= length);
+
+            memcpy(buffer->data() + buffer->size(), "\x00\x00\x00\x01", 4);
+            memcpy(buffer->data() + buffer->size() + 4, ptr, length);
+            buffer->setRange(0, buffer->size() + 4 + length);
+
+            ptr += length;
+            size -= length;
+        }
+
+        buffer->meta()->setInt32("csd", true);
+        buffer->meta()->setInt64("timeUs", 0);
+        msg->setObject("csd-1", buffer);
+    } else if (meta->findData(kKeyESDS, &type, &data, &size)) {
+        ESDS esds((const char *)data, size);
+        CHECK_EQ(esds.InitCheck(), (status_t)OK);
+
+        const void *codec_specific_data;
+        size_t codec_specific_data_size;
+        esds.getCodecSpecificInfo(
+                &codec_specific_data, &codec_specific_data_size);
+
+        sp<ABuffer> buffer = new ABuffer(codec_specific_data_size);
+
+        memcpy(buffer->data(), codec_specific_data,
+               codec_specific_data_size);
+
+        buffer->meta()->setInt32("csd", true);
+        buffer->meta()->setInt64("timeUs", 0);
+        msg->setObject("csd-0", buffer);
+    } else if (meta->findData(kKeyVorbisInfo, &type, &data, &size)) {
+        sp<ABuffer> buffer = new ABuffer(size);
+        memcpy(buffer->data(), data, size);
+
+        buffer->meta()->setInt32("csd", true);
+        buffer->meta()->setInt64("timeUs", 0);
+        msg->setObject("csd-0", buffer);
+
+        if (!meta->findData(kKeyVorbisBooks, &type, &data, &size)) {
+            return -EINVAL;
+        }
+
+        buffer = new ABuffer(size);
+        memcpy(buffer->data(), data, size);
+
+        buffer->meta()->setInt32("csd", true);
+        buffer->meta()->setInt64("timeUs", 0);
+        msg->setObject("csd-1", buffer);
+    }
+
+    *format = msg;
+
+    return OK;
+}
+
+status_t NuMediaExtractor::selectTrack(size_t index) {
+    if (mImpl == NULL) {
+        return -EINVAL;
+    }
+
+    if (index >= mImpl->countTracks()) {
+        return -ERANGE;
+    }
+
+    for (size_t i = 0; i < mSelectedTracks.size(); ++i) {
+        TrackInfo *info = &mSelectedTracks.editItemAt(i);
+
+        if (info->mTrackIndex == index) {
+            // This track has already been selected.
+            return OK;
+        }
+    }
+
+    sp<MediaSource> source = mImpl->getTrack(index);
+
+    CHECK_EQ((status_t)OK, source->start());
+
+    mSelectedTracks.push();
+    TrackInfo *info = &mSelectedTracks.editItemAt(mSelectedTracks.size() - 1);
+
+    info->mSource = source;
+    info->mTrackIndex = index;
+    info->mFinalResult = OK;
+    info->mSample = NULL;
+    info->mSampleTimeUs = -1ll;
+    info->mFlags = 0;
+
+    const char *mime;
+    CHECK(source->getFormat()->findCString(kKeyMIMEType, &mime));
+
+    if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_VORBIS)) {
+        info->mFlags |= kIsVorbis;
+    }
+
+    return OK;
+}
+
+void NuMediaExtractor::releaseTrackSamples() {
+    for (size_t i = 0; i < mSelectedTracks.size(); ++i) {
+        TrackInfo *info = &mSelectedTracks.editItemAt(i);
+
+        if (info->mSample != NULL) {
+            info->mSample->release();
+            info->mSample = NULL;
+
+            info->mSampleTimeUs = -1ll;
+        }
+    }
+}
+
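+// Makes sure every selected track has a sample buffered (re-reading after a
+// seek when seekTimeUs >= 0) and returns the index of the selected track whose
+// sample has the smallest timestamp, or -1 if no track has a sample left.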
+ssize_t NuMediaExtractor::fetchTrackSamples(int64_t seekTimeUs) {
+    TrackInfo *minInfo = NULL;
+    ssize_t minIndex = -1;
+
+    for (size_t i = 0; i < mSelectedTracks.size(); ++i) {
+        TrackInfo *info = &mSelectedTracks.editItemAt(i);
+
+        if (seekTimeUs >= 0ll) {
+            info->mFinalResult = OK;
+
+            if (info->mSample != NULL) {
+                info->mSample->release();
+                info->mSample = NULL;
+                info->mSampleTimeUs = -1ll;
+            }
+        } else if (info->mFinalResult != OK) {
+            continue;
+        }
+
+        if (info->mSample == NULL) {
+            MediaSource::ReadOptions options;
+            if (seekTimeUs >= 0ll) {
+                options.setSeekTo(seekTimeUs);
+            }
+            status_t err = info->mSource->read(&info->mSample, &options);
+
+            if (err != OK) {
+                CHECK(info->mSample == NULL);
+
+                info->mFinalResult = err;
+                info->mSampleTimeUs = -1ll;
+                continue;
+            } else {
+                CHECK(info->mSample != NULL);
+                CHECK(info->mSample->meta_data()->findInt64(
+                            kKeyTime, &info->mSampleTimeUs));
+            }
+        }
+
+        if (minInfo == NULL || info->mSampleTimeUs < minInfo->mSampleTimeUs) {
+            minInfo = info;
+            minIndex = i;
+        }
+    }
+
+    return minIndex;
+}
+
+status_t NuMediaExtractor::seekTo(int64_t timeUs) {
+    // fetchTrackSamples returns the index of the track with the earliest
+    // sample (or -1), so map the result to a status code instead of
+    // returning the index itself.
+    return fetchTrackSamples(timeUs) < 0 ? ERROR_END_OF_STREAM : OK;
+}
+
+status_t NuMediaExtractor::advance() {
+    ssize_t minIndex = fetchTrackSamples();
+
+    if (minIndex < 0) {
+        return ERROR_END_OF_STREAM;
+    }
+
+    TrackInfo *info = &mSelectedTracks.editItemAt(minIndex);
+
+    info->mSample->release();
+    info->mSample = NULL;
+    info->mSampleTimeUs = -1ll;
+
+    return OK;
+}
+
+status_t NuMediaExtractor::readSampleData(const sp<ABuffer> &buffer) {
+    ssize_t minIndex = fetchTrackSamples();
+
+    if (minIndex < 0) {
+        return ERROR_END_OF_STREAM;
+    }
+
+    TrackInfo *info = &mSelectedTracks.editItemAt(minIndex);
+
+    size_t sampleSize = info->mSample->range_length();
+
+    if (info->mFlags & kIsVorbis) {
+        // Each sample's data is suffixed by the number of page samples
+        // or -1 if not available.
+        sampleSize += sizeof(int32_t);
+    }
+
+    if (buffer->capacity() < sampleSize) {
+        return -ENOMEM;
+    }
+
+    const uint8_t *src =
+        (const uint8_t *)info->mSample->data()
+            + info->mSample->range_offset();
+
+    memcpy((uint8_t *)buffer->data(), src, info->mSample->range_length());
+
+    if (info->mFlags & kIsVorbis) {
+        int32_t numPageSamples;
+        if (!info->mSample->meta_data()->findInt32(
+                    kKeyValidSamples, &numPageSamples)) {
+            numPageSamples = -1;
+        }
+
+        memcpy((uint8_t *)buffer->data() + info->mSample->range_length(),
+               &numPageSamples,
+               sizeof(numPageSamples));
+    }
+
+    buffer->setRange(0, sampleSize);
+
+    return OK;
+}
+
+status_t NuMediaExtractor::getSampleTrackIndex(size_t *trackIndex) {
+    ssize_t minIndex = fetchTrackSamples();
+
+    if (minIndex < 0) {
+        return ERROR_END_OF_STREAM;
+    }
+
+    TrackInfo *info = &mSelectedTracks.editItemAt(minIndex);
+    *trackIndex = info->mTrackIndex;
+
+    return OK;
+}
+
+status_t NuMediaExtractor::getSampleTime(int64_t *sampleTimeUs) {
+    ssize_t minIndex = fetchTrackSamples();
+
+    if (minIndex < 0) {
+        return ERROR_END_OF_STREAM;
+    }
+
+    TrackInfo *info = &mSelectedTracks.editItemAt(minIndex);
+    *sampleTimeUs = info->mSampleTimeUs;
+
+    return OK;
+}
+
+}  // namespace android
diff --git a/media/libstagefright/OMXCodec.cpp b/media/libstagefright/OMXCodec.cpp
index 470f750..1325462 100755
--- a/media/libstagefright/OMXCodec.cpp
+++ b/media/libstagefright/OMXCodec.cpp
@@ -1541,6 +1541,8 @@
             "video_decoder.mpeg4", "video_encoder.mpeg4" },
         { MEDIA_MIMETYPE_VIDEO_H263,
             "video_decoder.h263", "video_encoder.h263" },
+        { MEDIA_MIMETYPE_VIDEO_VPX,
+            "video_decoder.vpx", "video_encoder.vpx" },
     };
 
     static const size_t kNumMimeToRole =
@@ -3556,6 +3558,7 @@
         //////////////// output port ////////////////////
         // format
         OMX_AUDIO_PARAM_PORTFORMATTYPE format;
+        InitOMXParams(&format);
         format.nPortIndex = kPortIndexOutput;
         format.nIndex = 0;
         status_t err = OMX_ErrorNone;
diff --git a/media/libstagefright/foundation/AMessage.cpp b/media/libstagefright/foundation/AMessage.cpp
index 0a6776e..0d6e07b 100644
--- a/media/libstagefright/foundation/AMessage.cpp
+++ b/media/libstagefright/foundation/AMessage.cpp
@@ -542,4 +542,20 @@
     }
 }
 
+size_t AMessage::countEntries() const {
+    return mNumItems;
+}
+
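+// Returns the name of the entry at the given index, or NULL (with *type set
+// to a default value) if the index is out of range.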
+const char *AMessage::getEntryNameAt(size_t index, Type *type) const {
+    if (index >= mNumItems) {
+        *type = kTypeInt32;
+
+        return NULL;
+    }
+
+    *type = mItems[index].mType;
+
+    return mItems[index].mName;
+}
+
 }  // namespace android