Merge branch 'dev/10/fp2/security-aosp-qt-release' into int/10/fp2

* dev/10/fp2/security-aosp-qt-release:
  move MediaCodec metrics processing to looper thread

Change-Id: I11ab6d19f3c48046724ce01642797787197be951
diff --git a/apex/ld.config.txt b/apex/ld.config.txt
index a5937fd..af8ec06 100644
--- a/apex/ld.config.txt
+++ b/apex/ld.config.txt
@@ -37,9 +37,11 @@
 
 namespace.platform.isolated = true
 
-namespace.platform.search.paths = /system/${LIB}
+namespace.platform.search.paths  = /system/${LIB}
+namespace.platform.search.paths += /apex/com.android.runtime/${LIB}
 namespace.platform.asan.search.paths  = /data/asan/system/${LIB}
 namespace.platform.asan.search.paths +=           /system/${LIB}
+namespace.platform.asan.search.paths += /apex/com.android.runtime/${LIB}
 
 # /system/lib/libc.so, etc are symlinks to /apex/com.android.lib/lib/bionic/libc.so, etc.
 # Add /apex/... path to the permitted paths because linker uses realpath(3)
diff --git a/apex/manifest.json b/apex/manifest.json
index 3011ee8..b11187d 100644
--- a/apex/manifest.json
+++ b/apex/manifest.json
@@ -1,4 +1,4 @@
 {
   "name": "com.android.media",
-  "version": 290000000
+  "version": 299900000
 }
diff --git a/apex/manifest_codec.json b/apex/manifest_codec.json
index 83a5178..09c436d 100644
--- a/apex/manifest_codec.json
+++ b/apex/manifest_codec.json
@@ -1,4 +1,4 @@
 {
   "name": "com.android.media.swcodec",
-  "version": 290000000
+  "version": 299900000
 }
diff --git a/camera/aidl/android/hardware/ICameraServiceListener.aidl b/camera/aidl/android/hardware/ICameraServiceListener.aidl
index e9dcbdb..47580f8 100644
--- a/camera/aidl/android/hardware/ICameraServiceListener.aidl
+++ b/camera/aidl/android/hardware/ICameraServiceListener.aidl
@@ -83,4 +83,12 @@
      * can retry after receiving this callback.
      */
     oneway void onCameraAccessPrioritiesChanged();
+
+    /**
+     * Notify registered clients about cameras being opened/closed.
+     * Only clients with android.permission.CAMERA_OPEN_CLOSE_LISTENER permission
+     * will receive such callbacks.
+     */
+    oneway void onCameraOpened(String cameraId, String clientPackageId);
+    oneway void onCameraClosed(String cameraId);
 }
diff --git a/camera/cameraserver/main_cameraserver.cpp b/camera/cameraserver/main_cameraserver.cpp
index 53b3d84..cef8ef5 100644
--- a/camera/cameraserver/main_cameraserver.cpp
+++ b/camera/cameraserver/main_cameraserver.cpp
@@ -34,6 +34,7 @@
     sp<IServiceManager> sm = defaultServiceManager();
     ALOGI("ServiceManager: %p", sm.get());
     CameraService::instantiate();
+    ALOGI("ServiceManager: %p done instantiate", sm.get());
     ProcessState::self()->startThreadPool();
     IPCThreadState::self()->joinThreadPool();
 }
diff --git a/camera/ndk/impl/ACameraCaptureSession.cpp b/camera/ndk/impl/ACameraCaptureSession.cpp
index d6f1412..68db233 100644
--- a/camera/ndk/impl/ACameraCaptureSession.cpp
+++ b/camera/ndk/impl/ACameraCaptureSession.cpp
@@ -33,7 +33,9 @@
         dev->unlockDevice();
     }
     // Fire onClosed callback
-    (*mUserSessionCallback.onClosed)(mUserSessionCallback.context, this);
+    if (mUserSessionCallback.onClosed != nullptr) {
+        (*mUserSessionCallback.onClosed)(mUserSessionCallback.context, this);
+    }
     ALOGV("~ACameraCaptureSession: %p is deleted", this);
 }
 
diff --git a/camera/ndk/impl/ACameraDevice.cpp b/camera/ndk/impl/ACameraDevice.cpp
index d24cb81..46a8dae 100644
--- a/camera/ndk/impl/ACameraDevice.cpp
+++ b/camera/ndk/impl/ACameraDevice.cpp
@@ -29,7 +29,7 @@
 #include "ACameraCaptureSession.inc"
 
 ACameraDevice::~ACameraDevice() {
-    mDevice->stopLooper();
+    mDevice->stopLooperAndDisconnect();
 }
 
 namespace android {
@@ -112,19 +112,7 @@
     }
 }
 
-// Device close implementaiton
-CameraDevice::~CameraDevice() {
-    sp<ACameraCaptureSession> session = mCurrentSession.promote();
-    {
-        Mutex::Autolock _l(mDeviceLock);
-        if (!isClosed()) {
-            disconnectLocked(session);
-        }
-        LOG_ALWAYS_FATAL_IF(mCbLooper != nullptr,
-                "CameraDevice looper should've been stopped before ~CameraDevice");
-        mCurrentSession = nullptr;
-    }
-}
+CameraDevice::~CameraDevice() { }
 
 void
 CameraDevice::postSessionMsgAndCleanup(sp<AMessage>& msg) {
@@ -892,8 +880,14 @@
     return;
 }
 
-void CameraDevice::stopLooper() {
+void CameraDevice::stopLooperAndDisconnect() {
     Mutex::Autolock _l(mDeviceLock);
+    sp<ACameraCaptureSession> session = mCurrentSession.promote();
+    if (!isClosed()) {
+        disconnectLocked(session);
+    }
+    mCurrentSession = nullptr;
+
     if (mCbLooper != nullptr) {
       mCbLooper->unregisterHandler(mHandler->id());
       mCbLooper->stop();
diff --git a/camera/ndk/impl/ACameraDevice.h b/camera/ndk/impl/ACameraDevice.h
index 7a35bf0..6c2ceb3 100644
--- a/camera/ndk/impl/ACameraDevice.h
+++ b/camera/ndk/impl/ACameraDevice.h
@@ -40,6 +40,7 @@
 
 #include <camera/NdkCameraManager.h>
 #include <camera/NdkCameraCaptureSession.h>
+
 #include "ACameraMetadata.h"
 
 namespace android {
@@ -110,7 +111,7 @@
     inline ACameraDevice* getWrapper() const { return mWrapper; };
 
     // Stop the looper thread and unregister the handler
-    void stopLooper();
+    void stopLooperAndDisconnect();
 
   private:
     friend ACameraCaptureSession;
diff --git a/camera/ndk/impl/ACameraManager.h b/camera/ndk/impl/ACameraManager.h
index 8c1da36..28fec82 100644
--- a/camera/ndk/impl/ACameraManager.h
+++ b/camera/ndk/impl/ACameraManager.h
@@ -92,6 +92,12 @@
         }
 
         virtual binder::Status onCameraAccessPrioritiesChanged();
+        virtual binder::Status onCameraOpened(const String16&, const String16&) {
+            return binder::Status::ok();
+        }
+        virtual binder::Status onCameraClosed(const String16&) {
+            return binder::Status::ok();
+        }
 
       private:
         const wp<CameraManagerGlobal> mCameraManager;
diff --git a/camera/ndk/ndk_vendor/impl/ACameraDevice.cpp b/camera/ndk/ndk_vendor/impl/ACameraDevice.cpp
index 35c8355..e511a3f 100644
--- a/camera/ndk/ndk_vendor/impl/ACameraDevice.cpp
+++ b/camera/ndk/ndk_vendor/impl/ACameraDevice.cpp
@@ -45,7 +45,7 @@
 using namespace android;
 
 ACameraDevice::~ACameraDevice() {
-    mDevice->stopLooper();
+    mDevice->stopLooperAndDisconnect();
 }
 
 namespace android {
@@ -125,19 +125,7 @@
     }
 }
 
-// Device close implementaiton
-CameraDevice::~CameraDevice() {
-    sp<ACameraCaptureSession> session = mCurrentSession.promote();
-    {
-        Mutex::Autolock _l(mDeviceLock);
-        if (!isClosed()) {
-            disconnectLocked(session);
-        }
-        mCurrentSession = nullptr;
-        LOG_ALWAYS_FATAL_IF(mCbLooper != nullptr,
-            "CameraDevice looper should've been stopped before ~CameraDevice");
-    }
-}
+CameraDevice::~CameraDevice() { }
 
 void
 CameraDevice::postSessionMsgAndCleanup(sp<AMessage>& msg) {
@@ -1388,6 +1376,7 @@
             // before cbh goes out of scope and causing we call the session
             // destructor while holding device lock
             cbh.mSession.clear();
+
             postSessionMsgAndCleanup(msg);
         }
 
@@ -1400,8 +1389,13 @@
     }
 }
 
-void CameraDevice::stopLooper() {
+void CameraDevice::stopLooperAndDisconnect() {
     Mutex::Autolock _l(mDeviceLock);
+    sp<ACameraCaptureSession> session = mCurrentSession.promote();
+    if (!isClosed()) {
+        disconnectLocked(session);
+    }
+    mCurrentSession = nullptr;
     if (mCbLooper != nullptr) {
       mCbLooper->unregisterHandler(mHandler->id());
       mCbLooper->stop();
diff --git a/camera/ndk/ndk_vendor/impl/ACameraDevice.h b/camera/ndk/ndk_vendor/impl/ACameraDevice.h
index 3328a85..7fc699e 100644
--- a/camera/ndk/ndk_vendor/impl/ACameraDevice.h
+++ b/camera/ndk/ndk_vendor/impl/ACameraDevice.h
@@ -36,6 +36,7 @@
 
 #include <camera/NdkCameraManager.h>
 #include <camera/NdkCameraCaptureSession.h>
+
 #include "ACameraMetadata.h"
 #include "utils.h"
 
@@ -134,10 +135,11 @@
     inline ACameraDevice* getWrapper() const { return mWrapper; };
 
     // Stop the looper thread and unregister the handler
-    void stopLooper();
+    void stopLooperAndDisconnect();
 
   private:
     friend ACameraCaptureSession;
+    friend ACameraDevice;
 
     camera_status_t checkCameraClosedOrErrorLocked() const;
 
@@ -387,7 +389,6 @@
             mDevice(new android::acam::CameraDevice(id, cb, std::move(chars), this)) {}
 
     ~ACameraDevice();
-
     /*******************
      * NDK public APIs *
      *******************/
diff --git a/camera/ndk/ndk_vendor/tests/AImageReaderVendorTest.cpp b/camera/ndk/ndk_vendor/tests/AImageReaderVendorTest.cpp
index 37de30a..938b5f5 100644
--- a/camera/ndk/ndk_vendor/tests/AImageReaderVendorTest.cpp
+++ b/camera/ndk/ndk_vendor/tests/AImageReaderVendorTest.cpp
@@ -24,6 +24,7 @@
 #include <algorithm>
 #include <mutex>
 #include <string>
+#include <variant>
 #include <vector>
 #include <stdio.h>
 #include <stdio.h>
@@ -49,6 +50,7 @@
 static constexpr int kTestImageFormat = AIMAGE_FORMAT_YUV_420_888;
 
 using android::hardware::camera::common::V1_0::helper::VendorTagDescriptorCache;
+using ConfiguredWindows = std::set<native_handle_t *>;
 
 class CameraHelper {
    public:
@@ -60,9 +62,12 @@
         const char* physicalCameraId;
         native_handle_t* anw;
     };
-    int initCamera(native_handle_t* imgReaderAnw,
+
+    // Retaining the error code in case the caller needs to analyze it.
+    std::variant<int, ConfiguredWindows> initCamera(native_handle_t* imgReaderAnw,
             const std::vector<PhysicalImgReaderInfo>& physicalImgReaders,
             bool usePhysicalSettings) {
+        ConfiguredWindows configuredWindows;
         if (imgReaderAnw == nullptr) {
             ALOGE("Cannot initialize camera before image reader get initialized.");
             return -1;
@@ -78,7 +83,7 @@
         ret = ACameraManager_openCamera(mCameraManager, mCameraId, &mDeviceCb, &mDevice);
         if (ret != AMEDIA_OK || mDevice == nullptr) {
             ALOGE("Failed to open camera, ret=%d, mDevice=%p.", ret, mDevice);
-            return -1;
+            return ret;
         }
 
         // Create capture session
@@ -97,8 +102,9 @@
             ALOGE("ACaptureSessionOutputContainer_add failed, ret=%d", ret);
             return ret;
         }
-
+        configuredWindows.insert(mImgReaderAnw);
         std::vector<const char*> idPointerList;
+        std::set<const native_handle_t*> physicalStreamMap;
         for (auto& physicalStream : physicalImgReaders) {
             ACaptureSessionOutput* sessionOutput = nullptr;
             ret = ACaptureSessionPhysicalOutput_create(physicalStream.anw,
@@ -112,21 +118,25 @@
                 ALOGE("ACaptureSessionOutputContainer_add failed, ret=%d", ret);
                 return ret;
             }
-            mExtraOutputs.push_back(sessionOutput);
+            ret = ACameraDevice_isSessionConfigurationSupported(mDevice, mOutputs);
+            if (ret != ACAMERA_OK && ret != ACAMERA_ERROR_UNSUPPORTED_OPERATION) {
+                ALOGW("ACameraDevice_isSessionConfigurationSupported failed, ret=%d camera id %s",
+                      ret, mCameraId);
+                ACaptureSessionOutputContainer_remove(mOutputs, sessionOutput);
+                ACaptureSessionOutput_free(sessionOutput);
+                continue;
+            }
+            configuredWindows.insert(physicalStream.anw);
             // Assume that at most one physical stream per physical camera.
             mPhysicalCameraIds.push_back(physicalStream.physicalCameraId);
             idPointerList.push_back(physicalStream.physicalCameraId);
+            physicalStreamMap.insert(physicalStream.anw);
+            mSessionPhysicalOutputs.push_back(sessionOutput);
         }
         ACameraIdList cameraIdList;
         cameraIdList.numCameras = idPointerList.size();
         cameraIdList.cameraIds = idPointerList.data();
 
-        ret = ACameraDevice_isSessionConfigurationSupported(mDevice, mOutputs);
-        if (ret != ACAMERA_OK && ret != ACAMERA_ERROR_UNSUPPORTED_OPERATION) {
-            ALOGE("ACameraDevice_isSessionConfigurationSupported failed, ret=%d", ret);
-            return ret;
-        }
-
         ret = ACameraDevice_createCaptureSession(mDevice, mOutputs, &mSessionCb, &mSession);
         if (ret != AMEDIA_OK) {
             ALOGE("ACameraDevice_createCaptureSession failed, ret=%d", ret);
@@ -157,6 +167,10 @@
         }
 
         for (auto& physicalStream : physicalImgReaders) {
+            if (physicalStreamMap.find(physicalStream.anw) == physicalStreamMap.end()) {
+                ALOGI("Skipping physicalStream anw=%p", physicalStream.anw);
+                continue;
+            }
             ACameraOutputTarget* outputTarget = nullptr;
             ret = ACameraOutputTarget_create(physicalStream.anw, &outputTarget);
             if (ret != AMEDIA_OK) {
@@ -168,11 +182,11 @@
                 ALOGE("ACaptureRequest_addTarget failed, ret=%d", ret);
                 return ret;
             }
-            mReqExtraOutputs.push_back(outputTarget);
+            mReqPhysicalOutputs.push_back(outputTarget);
         }
 
         mIsCameraReady = true;
-        return 0;
+        return configuredWindows;
     }
 
 
@@ -184,10 +198,10 @@
             ACameraOutputTarget_free(mReqImgReaderOutput);
             mReqImgReaderOutput = nullptr;
         }
-        for (auto& outputTarget : mReqExtraOutputs) {
+        for (auto& outputTarget : mReqPhysicalOutputs) {
             ACameraOutputTarget_free(outputTarget);
         }
-        mReqExtraOutputs.clear();
+        mReqPhysicalOutputs.clear();
         if (mStillRequest) {
             ACaptureRequest_free(mStillRequest);
             mStillRequest = nullptr;
@@ -201,10 +215,10 @@
             ACaptureSessionOutput_free(mImgReaderOutput);
             mImgReaderOutput = nullptr;
         }
-        for (auto& extraOutput : mExtraOutputs) {
+        for (auto& extraOutput : mSessionPhysicalOutputs) {
             ACaptureSessionOutput_free(extraOutput);
         }
-        mExtraOutputs.clear();
+        mSessionPhysicalOutputs.clear();
         if (mOutputs) {
             ACaptureSessionOutputContainer_free(mOutputs);
             mOutputs = nullptr;
@@ -239,21 +253,9 @@
         return true;
     }
 
-    static void onDeviceDisconnected(void* /*obj*/, ACameraDevice* /*device*/) {}
-
-    static void onDeviceError(void* /*obj*/, ACameraDevice* /*device*/, int /*errorCode*/) {}
-
-    static void onSessionClosed(void* /*obj*/, ACameraCaptureSession* /*session*/) {}
-
-    static void onSessionReady(void* /*obj*/, ACameraCaptureSession* /*session*/) {}
-
-    static void onSessionActive(void* /*obj*/, ACameraCaptureSession* /*session*/) {}
-
    private:
-    ACameraDevice_StateCallbacks mDeviceCb{this, onDeviceDisconnected,
-                                           onDeviceError};
-    ACameraCaptureSession_stateCallbacks mSessionCb{
-        this, onSessionClosed, onSessionReady, onSessionActive};
+    ACameraDevice_StateCallbacks mDeviceCb{this, nullptr, nullptr};
+    ACameraCaptureSession_stateCallbacks mSessionCb{ this, nullptr, nullptr, nullptr};
 
     native_handle_t* mImgReaderAnw = nullptr;  // not owned by us.
 
@@ -262,13 +264,13 @@
     // Capture session
     ACaptureSessionOutputContainer* mOutputs = nullptr;
     ACaptureSessionOutput* mImgReaderOutput = nullptr;
-    std::vector<ACaptureSessionOutput*> mExtraOutputs;
+    std::vector<ACaptureSessionOutput*> mSessionPhysicalOutputs;
 
     ACameraCaptureSession* mSession = nullptr;
     // Capture request
     ACaptureRequest* mStillRequest = nullptr;
     ACameraOutputTarget* mReqImgReaderOutput = nullptr;
-    std::vector<ACameraOutputTarget*> mReqExtraOutputs;
+    std::vector<ACameraOutputTarget*> mReqPhysicalOutputs;
 
     bool mIsCameraReady = false;
     const char* mCameraId;
@@ -581,9 +583,11 @@
         }
 
         CameraHelper cameraHelper(id, mCameraManager);
-        ret = cameraHelper.initCamera(testCase.getNativeWindow(),
-                {}/*physicalImageReaders*/, false/*usePhysicalSettings*/);
-        if (ret < 0) {
+        std::variant<int, ConfiguredWindows> retInit =
+                cameraHelper.initCamera(testCase.getNativeWindow(), {}/*physicalImageReaders*/,
+                                        false/*usePhysicalSettings*/);
+        int *retp = std::get_if<int>(&retInit);
+        if (retp) {
             ALOGE("Unable to initialize camera helper");
             return false;
         }
@@ -751,10 +755,15 @@
         physicalImgReaderInfo.push_back({physicalCameraIds[0], testCases[1]->getNativeWindow()});
         physicalImgReaderInfo.push_back({physicalCameraIds[1], testCases[2]->getNativeWindow()});
 
-        int ret = cameraHelper.initCamera(testCases[0]->getNativeWindow(),
-                physicalImgReaderInfo, usePhysicalSettings);
-        ASSERT_EQ(ret, 0);
-
+        std::variant<int, ConfiguredWindows> retInit =
+                cameraHelper.initCamera(testCases[0]->getNativeWindow(), physicalImgReaderInfo,
+                                        usePhysicalSettings);
+        int *retp = std::get_if<int>(&retInit);
+        ASSERT_EQ(retp, nullptr);
+        ConfiguredWindows *configuredWindowsp = std::get_if<ConfiguredWindows>(&retInit);
+        ASSERT_NE(configuredWindowsp, nullptr);
+        ASSERT_LE(configuredWindowsp->size(), testCases.size());
+        int ret = 0;
         if (!cameraHelper.isCameraReady()) {
             ALOGW("Camera is not ready after successful initialization. It's either due to camera "
                   "on board lacks BACKWARDS_COMPATIBLE capability or the device does not have "
@@ -776,9 +785,15 @@
                 break;
             }
         }
-        ASSERT_EQ(testCases[0]->getAcquiredImageCount(), pictureCount);
-        ASSERT_EQ(testCases[1]->getAcquiredImageCount(), pictureCount);
-        ASSERT_EQ(testCases[2]->getAcquiredImageCount(), pictureCount);
+        for(auto &testCase : testCases) {
+            auto it = configuredWindowsp->find(testCase->getNativeWindow());
+            if (it == configuredWindowsp->end()) {
+                continue;
+            }
+            ALOGI("Testing window %p", testCase->getNativeWindow());
+            ASSERT_EQ(testCase->getAcquiredImageCount(), pictureCount);
+        }
+
         ASSERT_TRUE(cameraHelper.checkCallbacks(pictureCount));
 
         ACameraMetadata_free(staticMetadata);
diff --git a/camera/tests/CameraBinderTests.cpp b/camera/tests/CameraBinderTests.cpp
index 8fe029a..dc5afc5 100644
--- a/camera/tests/CameraBinderTests.cpp
+++ b/camera/tests/CameraBinderTests.cpp
@@ -95,6 +95,17 @@
         return binder::Status::ok();
     }
 
+    virtual binder::Status onCameraOpened(const String16& /*cameraId*/,
+            const String16& /*clientPackageName*/) {
+        // No op
+        return binder::Status::ok();
+    }
+
+    virtual binder::Status onCameraClosed(const String16& /*cameraId*/) {
+        // No op
+        return binder::Status::ok();
+    }
+
     bool waitForNumCameras(size_t num) const {
         Mutex::Autolock l(mLock);
 
diff --git a/drm/libmediadrm/IDrm.cpp b/drm/libmediadrm/IDrm.cpp
index 51274d1..c2cc429 100644
--- a/drm/libmediadrm/IDrm.cpp
+++ b/drm/libmediadrm/IDrm.cpp
@@ -1071,6 +1071,7 @@
             Vector<uint8_t> keySetId;
             readVector(data, keySetId);
             DrmPlugin::OfflineLicenseState state;
+            state = DrmPlugin::OfflineLicenseState::kOfflineLicenseStateUnknown;
             status_t result = getOfflineLicenseState(keySetId, &state);
             reply->writeInt32(static_cast<DrmPlugin::OfflineLicenseState>(state));
             reply->writeInt32(result);
diff --git a/drm/mediadrm/plugins/clearkey/default/InitDataParser.cpp b/drm/mediadrm/plugins/clearkey/default/InitDataParser.cpp
index caff393..121a4e2 100644
--- a/drm/mediadrm/plugins/clearkey/default/InitDataParser.cpp
+++ b/drm/mediadrm/plugins/clearkey/default/InitDataParser.cpp
@@ -76,10 +76,21 @@
 
 android::status_t InitDataParser::parsePssh(const Vector<uint8_t>& initData,
         Vector<const uint8_t*>* keyIds) {
+    // Description of PSSH format:
+    // https://w3c.github.io/encrypted-media/format-registry/initdata/cenc.html
     size_t readPosition = 0;
 
-    // Validate size field
     uint32_t expectedSize = initData.size();
+    const char psshIdentifier[4] = {'p', 's', 's', 'h'};
+    const uint8_t psshVersion1[4] = {1, 0, 0, 0};
+    uint32_t keyIdCount = 0;
+    size_t headerSize = sizeof(expectedSize) + sizeof(psshIdentifier) +
+                        sizeof(psshVersion1) + kSystemIdSize + sizeof(keyIdCount);
+    if (initData.size() < headerSize) {
+        return android::ERROR_DRM_CANNOT_HANDLE;
+    }
+
+    // Validate size field
     expectedSize = htonl(expectedSize);
     if (memcmp(&initData[readPosition], &expectedSize,
                sizeof(expectedSize)) != 0) {
@@ -88,7 +99,6 @@
     readPosition += sizeof(expectedSize);
 
     // Validate PSSH box identifier
-    const char psshIdentifier[4] = {'p', 's', 's', 'h'};
     if (memcmp(&initData[readPosition], psshIdentifier,
                sizeof(psshIdentifier)) != 0) {
         return android::ERROR_DRM_CANNOT_HANDLE;
@@ -96,7 +106,6 @@
     readPosition += sizeof(psshIdentifier);
 
     // Validate EME version number
-    const uint8_t psshVersion1[4] = {1, 0, 0, 0};
     if (memcmp(&initData[readPosition], psshVersion1,
                sizeof(psshVersion1)) != 0) {
         return android::ERROR_DRM_CANNOT_HANDLE;
@@ -110,12 +119,14 @@
     readPosition += kSystemIdSize;
 
     // Read key ID count
-    uint32_t keyIdCount;
     memcpy(&keyIdCount, &initData[readPosition], sizeof(keyIdCount));
     keyIdCount = ntohl(keyIdCount);
     readPosition += sizeof(keyIdCount);
-    if (readPosition + ((uint64_t)keyIdCount * kKeyIdSize) !=
-            initData.size() - sizeof(uint32_t)) {
+
+    uint64_t psshSize = 0;
+    if (__builtin_mul_overflow(keyIdCount, kKeyIdSize, &psshSize) ||
+        __builtin_add_overflow(readPosition, psshSize, &psshSize) ||
+        psshSize != initData.size() - sizeof(uint32_t) /* DataSize(0) */) {
         return android::ERROR_DRM_CANNOT_HANDLE;
     }
 
diff --git a/drm/mediadrm/plugins/clearkey/hidl/DrmPlugin.cpp b/drm/mediadrm/plugins/clearkey/hidl/DrmPlugin.cpp
index 4ed505d..26457be 100644
--- a/drm/mediadrm/plugins/clearkey/hidl/DrmPlugin.cpp
+++ b/drm/mediadrm/plugins/clearkey/hidl/DrmPlugin.cpp
@@ -111,6 +111,8 @@
 // The content in this secure stop is implementation dependent, the clearkey
 // secureStop does not serve as a reference implementation.
 void DrmPlugin::installSecureStop(const hidl_vec<uint8_t>& sessionId) {
+    Mutex::Autolock lock(mSecureStopLock);
+
     ClearkeySecureStop clearkeySecureStop;
     clearkeySecureStop.id = uint32ToVector(++mNextSecureStopId);
     clearkeySecureStop.data.assign(sessionId.begin(), sessionId.end());
@@ -757,6 +759,7 @@
 }
 
 Return<void> DrmPlugin::getSecureStops(getSecureStops_cb _hidl_cb) {
+    mSecureStopLock.lock();
     std::vector<SecureStop> stops;
     for (auto itr = mSecureStops.begin(); itr != mSecureStops.end(); ++itr) {
         ClearkeySecureStop clearkeyStop = itr->second;
@@ -768,26 +771,32 @@
         stop.opaqueData = toHidlVec(stopVec);
         stops.push_back(stop);
     }
+    mSecureStopLock.unlock();
+
     _hidl_cb(Status::OK, stops);
     return Void();
 }
 
 Return<void> DrmPlugin::getSecureStop(const hidl_vec<uint8_t>& secureStopId,
         getSecureStop_cb _hidl_cb) {
-    SecureStop stop;
+    std::vector<uint8_t> stopVec;
+
+    mSecureStopLock.lock();
     auto itr = mSecureStops.find(toVector(secureStopId));
     if (itr != mSecureStops.end()) {
         ClearkeySecureStop clearkeyStop = itr->second;
-        std::vector<uint8_t> stopVec;
         stopVec.insert(stopVec.end(), clearkeyStop.id.begin(), clearkeyStop.id.end());
         stopVec.insert(stopVec.end(), clearkeyStop.data.begin(), clearkeyStop.data.end());
+    }
+    mSecureStopLock.unlock();
 
+    SecureStop stop;
+    if (!stopVec.empty()) {
         stop.opaqueData = toHidlVec(stopVec);
         _hidl_cb(Status::OK, stop);
     } else {
         _hidl_cb(Status::BAD_VALUE, stop);
     }
-
     return Void();
 }
 
@@ -800,23 +809,35 @@
 }
 
 Return<void> DrmPlugin::getSecureStopIds(getSecureStopIds_cb _hidl_cb) {
+    mSecureStopLock.lock();
     std::vector<SecureStopId> ids;
     for (auto itr = mSecureStops.begin(); itr != mSecureStops.end(); ++itr) {
         ids.push_back(itr->first);
     }
+    mSecureStopLock.unlock();
 
     _hidl_cb(Status::OK, toHidlVec(ids));
     return Void();
 }
 
 Return<Status> DrmPlugin::releaseSecureStops(const SecureStopRelease& ssRelease) {
-    if (ssRelease.opaqueData.size() == 0) {
+    // OpaqueData starts with 4 byte decimal integer string
+    const size_t kFourBytesOffset = 4;
+    if (ssRelease.opaqueData.size() < kFourBytesOffset) {
+        ALOGE("Invalid secureStopRelease length");
         return Status::BAD_VALUE;
     }
 
     Status status = Status::OK;
     std::vector<uint8_t> input = toVector(ssRelease.opaqueData);
 
+    if (input.size() < kSecureStopIdSize + kFourBytesOffset) {
+        // The minimum size of SecureStopRelease has to contain
+        // a 4-byte count and one secureStop id
+        ALOGE("Total size of secureStops is too short");
+        return Status::BAD_VALUE;
+    }
+
     // The format of opaqueData is shared between the server
     // and the drm service. The clearkey implementation consists of:
     //    count - number of secure stops
@@ -832,24 +853,35 @@
 
     // Avoid divide by 0 below.
     if (count == 0) {
+        ALOGE("Invalid 0 secureStop count");
         return Status::BAD_VALUE;
     }
 
-    size_t secureStopSize = (input.size() - countBufferSize) / count;
-    uint8_t buffer[secureStopSize];
-    size_t offset = countBufferSize; // skip the count
+    // Computes the fixed length secureStop size
+    size_t secureStopSize = (input.size() - kFourBytesOffset) / count;
+    if (secureStopSize < kSecureStopIdSize) {
+        // A valid secureStop contains the id plus data
+        ALOGE("Invalid secureStop size");
+        return Status::BAD_VALUE;
+    }
+    uint8_t* buffer = new uint8_t[secureStopSize];
+    size_t offset = kFourBytesOffset; // skip the count
     for (size_t i = 0; i < count; ++i, offset += secureStopSize) {
         memcpy(buffer, input.data() + offset, secureStopSize);
-        std::vector<uint8_t> id(buffer, buffer + kSecureStopIdSize);
 
+        // A secureStop contains id+data, we only use the id for removal
+        std::vector<uint8_t> id(buffer, buffer + kSecureStopIdSize);
         status = removeSecureStop(toHidlVec(id));
         if (Status::OK != status) break;
     }
 
+    delete[] buffer;
     return status;
 }
 
 Return<Status> DrmPlugin::removeSecureStop(const hidl_vec<uint8_t>& secureStopId) {
+    Mutex::Autolock lock(mSecureStopLock);
+
     if (1 != mSecureStops.erase(toVector(secureStopId))) {
         return Status::BAD_VALUE;
     }
@@ -857,6 +889,8 @@
 }
 
 Return<Status> DrmPlugin::removeAllSecureStops() {
+    Mutex::Autolock lock(mSecureStopLock);
+
     mSecureStops.clear();
     mNextSecureStopId = kSecureStopIdStart;
     return Status::OK;
diff --git a/drm/mediadrm/plugins/clearkey/hidl/include/DrmPlugin.h b/drm/mediadrm/plugins/clearkey/hidl/include/DrmPlugin.h
index edb71f5..392f105 100644
--- a/drm/mediadrm/plugins/clearkey/hidl/include/DrmPlugin.h
+++ b/drm/mediadrm/plugins/clearkey/hidl/include/DrmPlugin.h
@@ -419,6 +419,7 @@
 
     Mutex mFileHandleLock;
     DeviceFiles mFileHandle GUARDED_BY(mFileHandleLock);
+    Mutex mSecureStopLock;
     Mutex mSecurityLevelLock;
 
     CLEARKEY_DISALLOW_COPY_AND_ASSIGN_AND_NEW(DrmPlugin);
diff --git a/include/media/IHDCP.h b/include/media/IHDCP.h
new file mode 120000
index 0000000..9d4568e
--- /dev/null
+++ b/include/media/IHDCP.h
@@ -0,0 +1 @@
+../../media/libmedia/include/media/IHDCP.h
\ No newline at end of file
diff --git a/media/bufferpool/1.0/AccessorImpl.cpp b/media/bufferpool/1.0/AccessorImpl.cpp
index fa17f15..a5366f6 100644
--- a/media/bufferpool/1.0/AccessorImpl.cpp
+++ b/media/bufferpool/1.0/AccessorImpl.cpp
@@ -151,6 +151,7 @@
                 newConnection->initialize(accessor, id);
                 *connection = newConnection;
                 *pConnectionId = id;
+                mBufferPool.mConnectionIds.insert(id);
                 ++sSeqId;
             }
         }
@@ -305,7 +306,12 @@
         found->second->mSenderValidated = true;
         return true;
     }
-    // TODO: verify there is target connection Id
+    if (mConnectionIds.find(message.targetConnectionId) == mConnectionIds.end()) {
+        // N.B.: it could be a fake id, or the receiving connection may already be closed.
+        ALOGD("bufferpool %p receiver connection %lld is no longer valid",
+              this, (long long)message.targetConnectionId);
+        return false;
+    }
     mStats.onBufferSent();
     mTransactions.insert(std::make_pair(
             message.transactionId,
@@ -450,6 +456,7 @@
             }
         }
     }
+    mConnectionIds.erase(connectionId);
     return true;
 }
 
diff --git a/media/bufferpool/1.0/AccessorImpl.h b/media/bufferpool/1.0/AccessorImpl.h
index c04dbf3..84cb685 100644
--- a/media/bufferpool/1.0/AccessorImpl.h
+++ b/media/bufferpool/1.0/AccessorImpl.h
@@ -94,6 +94,7 @@
 
         std::map<BufferId, std::unique_ptr<InternalBuffer>> mBuffers;
         std::set<BufferId> mFreeBuffers;
+        std::set<ConnectionId> mConnectionIds;
 
         /// Buffer pool statistics which tracks allocation and transfer statistics.
         struct Stats {
diff --git a/media/bufferpool/2.0/AccessorImpl.cpp b/media/bufferpool/2.0/AccessorImpl.cpp
index 94cf006..0d591d7 100644
--- a/media/bufferpool/2.0/AccessorImpl.cpp
+++ b/media/bufferpool/2.0/AccessorImpl.cpp
@@ -37,7 +37,7 @@
     static constexpr int64_t kLogDurationUs = 5000000; // 5 secs
 
     static constexpr size_t kMinAllocBytesForEviction = 1024*1024*15;
-    static constexpr size_t kMinBufferCountForEviction = 40;
+    static constexpr size_t kMinBufferCountForEviction = 25;
 }
 
 // Buffer structure in bufferpool process
@@ -163,6 +163,7 @@
                 *connection = newConnection;
                 *pConnectionId = id;
                 *pMsgId = mBufferPool.mInvalidation.mInvalidationId;
+                mBufferPool.mConnectionIds.insert(id);
                 mBufferPool.mInvalidationChannel.getDesc(invDescPtr);
                 mBufferPool.mInvalidation.onConnect(id, observer);
                 ++sSeqId;
@@ -474,7 +475,12 @@
         found->second->mSenderValidated = true;
         return true;
     }
-    // TODO: verify there is target connection Id
+    if (mConnectionIds.find(message.targetConnectionId) == mConnectionIds.end()) {
+        // N.B.: it could be a fake id, or the receiving connection may already be closed.
+        ALOGD("bufferpool2 %p receiver connection %lld is no longer valid",
+              this, (long long)message.targetConnectionId);
+        return false;
+    }
     mStats.onBufferSent();
     mTransactions.insert(std::make_pair(
             message.transactionId,
@@ -644,6 +650,7 @@
             }
         }
     }
+    mConnectionIds.erase(connectionId);
     return true;
 }
 
@@ -711,8 +718,8 @@
                   mStats.mTotalFetches, mStats.mTotalTransfers);
         }
         for (auto freeIt = mFreeBuffers.begin(); freeIt != mFreeBuffers.end();) {
-            if (!clearCache && mStats.mSizeCached < kMinAllocBytesForEviction
-                    && mBuffers.size() < kMinBufferCountForEviction) {
+            if (!clearCache && (mStats.mSizeCached < kMinAllocBytesForEviction
+                    || mBuffers.size() < kMinBufferCountForEviction)) {
                 break;
             }
             auto it = mBuffers.find(*freeIt);
@@ -774,11 +781,19 @@
             std::mutex &mutex,
             std::condition_variable &cv,
             bool &ready) {
+    constexpr uint32_t NUM_SPIN_TO_INCREASE_SLEEP = 1024;
+    constexpr uint32_t NUM_SPIN_TO_LOG = 1024*8;
+    constexpr useconds_t MAX_SLEEP_US = 10000;
+    uint32_t numSpin = 0;
+    useconds_t sleepUs = 1;
+
     while(true) {
         std::map<uint32_t, const std::weak_ptr<Accessor::Impl>> copied;
         {
             std::unique_lock<std::mutex> lock(mutex);
             if (!ready) {
+                numSpin = 0;
+                sleepUs = 1;
                 cv.wait(lock);
             }
             copied.insert(accessors.begin(), accessors.end());
@@ -800,9 +815,20 @@
             if (accessors.size() == 0) {
                 ready = false;
             } else {
-                // prevent draining cpu.
+                // TODO Use an efficient way to wait over FMQ.
+                // N.B. Since there is no efficient way to wait over FMQ,
+                // polling over the FMQ is the current way to prevent draining
+                // CPU.
                 lock.unlock();
-                std::this_thread::yield();
+                ++numSpin;
+                if (numSpin % NUM_SPIN_TO_INCREASE_SLEEP == 0 &&
+                    sleepUs < MAX_SLEEP_US) {
+                    sleepUs *= 10;
+                }
+                if (numSpin % NUM_SPIN_TO_LOG == 0) {
+                    ALOGW("invalidator thread spinning");
+                }
+                ::usleep(sleepUs);
             }
         }
     }
diff --git a/media/bufferpool/2.0/AccessorImpl.h b/media/bufferpool/2.0/AccessorImpl.h
index eea72b9..807e0f1 100644
--- a/media/bufferpool/2.0/AccessorImpl.h
+++ b/media/bufferpool/2.0/AccessorImpl.h
@@ -111,6 +111,7 @@
 
         std::map<BufferId, std::unique_ptr<InternalBuffer>> mBuffers;
         std::set<BufferId> mFreeBuffers;
+        std::set<ConnectionId> mConnectionIds;
 
         struct Invalidation {
             static std::atomic<std::uint32_t> sInvSeqId;
diff --git a/media/bufferpool/2.0/Android.bp b/media/bufferpool/2.0/Android.bp
index c71ac17..e8a69c9 100644
--- a/media/bufferpool/2.0/Android.bp
+++ b/media/bufferpool/2.0/Android.bp
@@ -1,9 +1,5 @@
-cc_library {
-    name: "libstagefright_bufferpool@2.0",
-    vendor_available: true,
-    vndk: {
-        enabled: true,
-    },
+cc_defaults {
+    name: "libstagefright_bufferpool@2.0-default",
     srcs: [
         "Accessor.cpp",
         "AccessorImpl.cpp",
@@ -31,3 +27,23 @@
         "android.hardware.media.bufferpool@2.0",
     ],
 }
+
+cc_library {
+    name: "libstagefright_bufferpool@2.0.1",
+    defaults: ["libstagefright_bufferpool@2.0-default"],
+    vendor_available: true,
+    cflags: [
+        "-DBUFFERPOOL_CLONE_HANDLES",
+    ],
+}
+
+// Deprecated. Do not use. Use libstagefright_bufferpool@2.0.1 instead.
+cc_library {
+    name: "libstagefright_bufferpool@2.0",
+    defaults: ["libstagefright_bufferpool@2.0-default"],
+    vendor_available: true,
+    vndk: {
+        enabled: true,
+    },
+}
+
diff --git a/media/bufferpool/2.0/ClientManager.cpp b/media/bufferpool/2.0/ClientManager.cpp
index c31d313..87ee4e8 100644
--- a/media/bufferpool/2.0/ClientManager.cpp
+++ b/media/bufferpool/2.0/ClientManager.cpp
@@ -351,7 +351,21 @@
         }
         client = it->second;
     }
+#ifdef BUFFERPOOL_CLONE_HANDLES
+    native_handle_t *origHandle;
+    ResultStatus res = client->allocate(params, &origHandle, buffer);
+    if (res != ResultStatus::OK) {
+        return res;
+    }
+    *handle = native_handle_clone(origHandle);
+    if (*handle == NULL) {
+        buffer->reset();
+        return ResultStatus::NO_MEMORY;
+    }
+    return ResultStatus::OK;
+#else
     return client->allocate(params, handle, buffer);
+#endif
 }
 
 ResultStatus ClientManager::Impl::receive(
@@ -367,7 +381,22 @@
         }
         client = it->second;
     }
+#ifdef BUFFERPOOL_CLONE_HANDLES
+    native_handle_t *origHandle;
+    ResultStatus res = client->receive(
+            transactionId, bufferId, timestampUs, &origHandle, buffer);
+    if (res != ResultStatus::OK) {
+        return res;
+    }
+    *handle = native_handle_clone(origHandle);
+    if (*handle == NULL) {
+        buffer->reset();
+        return ResultStatus::NO_MEMORY;
+    }
+    return ResultStatus::OK;
+#else
     return client->receive(transactionId, bufferId, timestampUs, handle, buffer);
+#endif
 }
 
 ResultStatus ClientManager::Impl::postSend(
diff --git a/media/bufferpool/2.0/include/bufferpool/ClientManager.h b/media/bufferpool/2.0/include/bufferpool/ClientManager.h
index 953c304..24b61f4 100644
--- a/media/bufferpool/2.0/include/bufferpool/ClientManager.h
+++ b/media/bufferpool/2.0/include/bufferpool/ClientManager.h
@@ -104,7 +104,9 @@
     ResultStatus flush(ConnectionId connectionId);
 
     /**
-     * Allocates a buffer from the specified connection.
+     * Allocates a buffer from the specified connection. The output parameter
+     * handle is cloned from the internal handle. So it is safe to use directly,
+     * and it should be deleted and destroyed after use.
      *
      * @param connectionId  The id of the connection.
      * @param params        The allocation parameters.
@@ -123,7 +125,9 @@
                           std::shared_ptr<BufferPoolData> *buffer);
 
     /**
-     * Receives a buffer for the transaction.
+     * Receives a buffer for the transaction. The output parameter handle is
+     * cloned from the internal handle. So it is safe to use directly, and it
+     * should be deleted and destroyed after use.
      *
      * @param connectionId  The id of the receiving connection.
      * @param transactionId The id for the transaction.
diff --git a/media/codec2/components/aac/C2SoftAacEnc.cpp b/media/codec2/components/aac/C2SoftAacEnc.cpp
index 8e3852c..be52a1d 100644
--- a/media/codec2/components/aac/C2SoftAacEnc.cpp
+++ b/media/codec2/components/aac/C2SoftAacEnc.cpp
@@ -157,9 +157,10 @@
       mSentCodecSpecificData(false),
       mInputTimeSet(false),
       mInputSize(0),
-      mInputTimeUs(0),
+      mNextFrameTimestampUs(0),
       mSignalledError(false),
-      mOutIndex(0u) {
+      mOutIndex(0u),
+      mRemainderLen(0u) {
 }
 
 C2SoftAacEnc::~C2SoftAacEnc() {
@@ -183,8 +184,9 @@
     mSentCodecSpecificData = false;
     mInputTimeSet = false;
     mInputSize = 0u;
-    mInputTimeUs = 0;
+    mNextFrameTimestampUs = 0;
     mSignalledError = false;
+    mRemainderLen = 0;
     return C2_OK;
 }
 
@@ -201,7 +203,7 @@
     mSentCodecSpecificData = false;
     mInputTimeSet = false;
     mInputSize = 0u;
-    mInputTimeUs = 0;
+    mNextFrameTimestampUs = 0;
     return C2_OK;
 }
 
@@ -365,21 +367,25 @@
         capacity = view.capacity();
     }
     if (!mInputTimeSet && capacity > 0) {
-        mInputTimeUs = work->input.ordinal.timestamp;
+        mNextFrameTimestampUs = work->input.ordinal.timestamp;
         mInputTimeSet = true;
     }
 
-    size_t numFrames = (capacity + mInputSize + (eos ? mNumBytesPerInputFrame - 1 : 0))
-            / mNumBytesPerInputFrame;
-    ALOGV("capacity = %zu; mInputSize = %zu; numFrames = %zu mNumBytesPerInputFrame = %u",
-          capacity, mInputSize, numFrames, mNumBytesPerInputFrame);
+    size_t numFrames =
+        (mRemainderLen + capacity + mInputSize + (eos ? mNumBytesPerInputFrame - 1 : 0))
+        / mNumBytesPerInputFrame;
+    ALOGV("capacity = %zu; mInputSize = %zu; numFrames = %zu "
+          "mNumBytesPerInputFrame = %u inputTS = %lld remaining = %zu",
+          capacity, mInputSize, numFrames,
+          mNumBytesPerInputFrame, work->input.ordinal.timestamp.peekll(),
+          mRemainderLen);
 
     std::shared_ptr<C2LinearBlock> block;
-    std::shared_ptr<C2Buffer> buffer;
     std::unique_ptr<C2WriteView> wView;
     uint8_t *outPtr = temp;
     size_t outAvailable = 0u;
     uint64_t inputIndex = work->input.ordinal.frameIndex.peeku();
+    size_t bytesPerSample = channelCount * sizeof(int16_t);
 
     AACENC_InArgs inargs;
     AACENC_OutArgs outargs;
@@ -442,9 +448,31 @@
         const std::shared_ptr<C2Buffer> mBuffer;
     };
 
-    C2WorkOrdinalStruct outOrdinal = work->input.ordinal;
+    struct OutputBuffer {
+        std::shared_ptr<C2Buffer> buffer;
+        c2_cntr64_t timestampUs;
+    };
+    std::list<OutputBuffer> outputBuffers;
 
-    while (encoderErr == AACENC_OK && inargs.numInSamples > 0) {
+    if (mRemainderLen > 0) {
+        size_t offset = 0;
+        for (; mRemainderLen < bytesPerSample && offset < capacity; ++offset) {
+            mRemainder[mRemainderLen++] = data[offset];
+        }
+        data += offset;
+        capacity -= offset;
+        if (mRemainderLen == bytesPerSample) {
+            inBuffer[0] = mRemainder;
+            inBufferSize[0] = bytesPerSample;
+            inargs.numInSamples = channelCount;
+            mRemainderLen = 0;
+            ALOGV("Processing remainder");
+        } else {
+            // We have exhausted the input already
+            inargs.numInSamples = 0;
+        }
+    }
+    while (encoderErr == AACENC_OK && inargs.numInSamples >= channelCount) {
         if (numFrames && !block) {
             C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE };
             // TODO: error handling, proper usage, etc.
@@ -473,43 +501,40 @@
                                   &outargs);
 
         if (encoderErr == AACENC_OK) {
-            if (buffer) {
-                outOrdinal.frameIndex = mOutIndex++;
-                outOrdinal.timestamp = mInputTimeUs;
-                cloneAndSend(
-                        inputIndex,
-                        work,
-                        FillWork(C2FrameData::FLAG_INCOMPLETE, outOrdinal, buffer));
-                buffer.reset();
-            }
-
             if (outargs.numOutBytes > 0) {
                 mInputSize = 0;
                 int consumed = (capacity / sizeof(int16_t)) - inargs.numInSamples
                         + outargs.numInSamples;
-                mInputTimeUs = work->input.ordinal.timestamp
+                c2_cntr64_t currentFrameTimestampUs = mNextFrameTimestampUs;
+                mNextFrameTimestampUs = work->input.ordinal.timestamp
                         + (consumed * 1000000ll / channelCount / sampleRate);
-                buffer = createLinearBuffer(block, 0, outargs.numOutBytes);
-#if defined(LOG_NDEBUG) && !LOG_NDEBUG
+                std::shared_ptr<C2Buffer> buffer = createLinearBuffer(block, 0, outargs.numOutBytes);
+#if 0
                 hexdump(outPtr, std::min(outargs.numOutBytes, 256));
 #endif
                 outPtr = temp;
                 outAvailable = 0;
                 block.reset();
+
+                outputBuffers.push_back({buffer, currentFrameTimestampUs});
             } else {
                 mInputSize += outargs.numInSamples * sizeof(int16_t);
             }
 
-            if (outargs.numInSamples > 0) {
+            if (inBuffer[0] == mRemainder) {
+                inBuffer[0] = const_cast<uint8_t *>(data);
+                inBufferSize[0] = capacity;
+                inargs.numInSamples = capacity / sizeof(int16_t);
+            } else if (outargs.numInSamples > 0) {
                 inBuffer[0] = (int16_t *)inBuffer[0] + outargs.numInSamples;
                 inBufferSize[0] -= outargs.numInSamples * sizeof(int16_t);
                 inargs.numInSamples -= outargs.numInSamples;
             }
         }
-        ALOGV("encoderErr = %d mInputSize = %zu inargs.numInSamples = %d, mInputTimeUs = %lld",
-              encoderErr, mInputSize, inargs.numInSamples, mInputTimeUs.peekll());
+        ALOGV("encoderErr = %d mInputSize = %zu "
+              "inargs.numInSamples = %d, mNextFrameTimestampUs = %lld",
+              encoderErr, mInputSize, inargs.numInSamples, mNextFrameTimestampUs.peekll());
     }
-
     if (eos && inBufferSize[0] > 0) {
         if (numFrames && !block) {
             C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE };
@@ -540,12 +565,37 @@
                            &outBufDesc,
                            &inargs,
                            &outargs);
+        inBufferSize[0] = 0;
     }
 
-    outOrdinal.frameIndex = mOutIndex++;
-    outOrdinal.timestamp = mInputTimeUs;
+    if (inBufferSize[0] > 0) {
+        for (size_t i = 0; i < inBufferSize[0]; ++i) {
+            mRemainder[i] = static_cast<uint8_t *>(inBuffer[0])[i];
+        }
+        mRemainderLen = inBufferSize[0];
+    }
+
+    while (outputBuffers.size() > 1) {
+        const OutputBuffer& front = outputBuffers.front();
+        C2WorkOrdinalStruct ordinal = work->input.ordinal;
+        ordinal.frameIndex = mOutIndex++;
+        ordinal.timestamp = front.timestampUs;
+        cloneAndSend(
+                inputIndex,
+                work,
+                FillWork(C2FrameData::FLAG_INCOMPLETE, ordinal, front.buffer));
+        outputBuffers.pop_front();
+    }
+    std::shared_ptr<C2Buffer> buffer;
+    C2WorkOrdinalStruct ordinal = work->input.ordinal;
+    ordinal.frameIndex = mOutIndex++;
+    if (!outputBuffers.empty()) {
+        ordinal.timestamp = outputBuffers.front().timestampUs;
+        buffer = outputBuffers.front().buffer;
+    }
+    // Mark the end of frame
     FillWork((C2FrameData::flags_t)(eos ? C2FrameData::FLAG_END_OF_STREAM : 0),
-             outOrdinal, buffer)(work);
+             ordinal, buffer)(work);
 }
 
 c2_status_t C2SoftAacEnc::drain(
@@ -569,7 +619,7 @@
     mSentCodecSpecificData = false;
     mInputTimeSet = false;
     mInputSize = 0u;
-    mInputTimeUs = 0;
+    mNextFrameTimestampUs = 0;
 
     // TODO: we don't have any pending work at this time to drain.
     return C2_OK;
diff --git a/media/codec2/components/aac/C2SoftAacEnc.h b/media/codec2/components/aac/C2SoftAacEnc.h
index a38be19..6ecfbdd 100644
--- a/media/codec2/components/aac/C2SoftAacEnc.h
+++ b/media/codec2/components/aac/C2SoftAacEnc.h
@@ -56,11 +56,15 @@
     bool mSentCodecSpecificData;
     bool mInputTimeSet;
     size_t mInputSize;
-    c2_cntr64_t mInputTimeUs;
+    c2_cntr64_t mNextFrameTimestampUs;
 
     bool mSignalledError;
     std::atomic_uint64_t mOutIndex;
 
+    // We support max 6 channels
+    uint8_t mRemainder[6 * sizeof(int16_t)];
+    size_t mRemainderLen;
+
     status_t initEncoder();
 
     status_t setAudioParams();
diff --git a/media/codec2/components/aom/Android.bp b/media/codec2/components/aom/Android.bp
index 0fabf5c..61dbd4c 100644
--- a/media/codec2/components/aom/Android.bp
+++ b/media/codec2/components/aom/Android.bp
@@ -1,10 +1,16 @@
 cc_library_shared {
-    name: "libcodec2_soft_av1dec",
+    name: "libcodec2_soft_av1dec_aom",
     defaults: [
         "libcodec2_soft-defaults",
         "libcodec2_soft_sanitize_all-defaults",
     ],
 
+    // coordinated with frameworks/av/media/codec2/components/gav1/Android.bp
+    // so only 1 of them has the official c2.android.av1.decoder name
+    cflags: [
+        "-DCODECNAME=\"c2.android.av1-aom.decoder\"",
+    ],
+
     srcs: ["C2SoftAomDec.cpp"],
     static_libs: ["libaom"],
 
diff --git a/media/codec2/components/aom/C2SoftAomDec.cpp b/media/codec2/components/aom/C2SoftAomDec.cpp
index 769895c..36137e6 100644
--- a/media/codec2/components/aom/C2SoftAomDec.cpp
+++ b/media/codec2/components/aom/C2SoftAomDec.cpp
@@ -29,7 +29,8 @@
 
 namespace android {
 
-constexpr char COMPONENT_NAME[] = "c2.android.av1.decoder";
+// codecname set and passed in as a compile flag from Android.bp
+constexpr char COMPONENT_NAME[] = CODECNAME;
 
 class C2SoftAomDec::IntfImpl : public SimpleInterface<void>::BaseParams {
   public:
@@ -340,6 +341,7 @@
     aom_codec_flags_t flags;
     memset(&flags, 0, sizeof(aom_codec_flags_t));
 
+    ALOGV("Using libaom AV1 software decoder.");
     aom_codec_err_t err;
     if ((err = aom_codec_dec_init(mCodecCtx, aom_codec_av1_dx(), &cfg, 0))) {
         ALOGE("av1 decoder failed to initialize. (%d)", err);
diff --git a/media/codec2/components/avc/C2SoftAvcDec.cpp b/media/codec2/components/avc/C2SoftAvcDec.cpp
index 3f015b4..fa98178 100644
--- a/media/codec2/components/avc/C2SoftAvcDec.cpp
+++ b/media/codec2/components/avc/C2SoftAvcDec.cpp
@@ -33,7 +33,8 @@
 namespace {
 
 constexpr char COMPONENT_NAME[] = "c2.android.avc.decoder";
-
+constexpr uint32_t kDefaultOutputDelay = 8;
+constexpr uint32_t kMaxOutputDelay = 16;
 }  // namespace
 
 class C2SoftAvcDec::IntfImpl : public SimpleInterface<void>::BaseParams {
@@ -54,7 +55,9 @@
         // TODO: Proper support for reorder depth.
         addParameter(
                 DefineParam(mActualOutputDelay, C2_PARAMKEY_OUTPUT_DELAY)
-                .withConstValue(new C2PortActualDelayTuning::output(8u))
+                .withDefault(new C2PortActualDelayTuning::output(kDefaultOutputDelay))
+                .withFields({C2F(mActualOutputDelay, value).inRange(0, kMaxOutputDelay)})
+                .withSetter(Setter<decltype(*mActualOutputDelay)>::StrictValueWithNoDeps)
                 .build());
 
         // TODO: output latency and reordering
@@ -196,7 +199,6 @@
                                      0u, HAL_PIXEL_FORMAT_YCBCR_420_888))
                 .build());
     }
-
     static C2R SizeSetter(bool mayBlock, const C2P<C2StreamPictureSizeInfo::output> &oldMe,
                           C2P<C2StreamPictureSizeInfo::output> &me) {
         (void)mayBlock;
@@ -333,6 +335,7 @@
       mDecHandle(nullptr),
       mOutBufferFlush(nullptr),
       mIvColorFormat(IV_YUV_420P),
+      mOutputDelay(kDefaultOutputDelay),
       mWidth(320),
       mHeight(240),
       mHeaderDecoded(false),
@@ -882,6 +885,26 @@
             work->result = C2_CORRUPTED;
             return;
         }
+        if (s_decode_op.i4_reorder_depth >= 0 && mOutputDelay != s_decode_op.i4_reorder_depth) {
+            mOutputDelay = s_decode_op.i4_reorder_depth;
+            ALOGV("New Output delay %d ", mOutputDelay);
+
+            C2PortActualDelayTuning::output outputDelay(mOutputDelay);
+            std::vector<std::unique_ptr<C2SettingResult>> failures;
+            c2_status_t err =
+                mIntf->config({&outputDelay}, C2_MAY_BLOCK, &failures);
+            if (err == OK) {
+                work->worklets.front()->output.configUpdate.push_back(
+                    C2Param::Copy(outputDelay));
+            } else {
+                ALOGE("Cannot set output delay");
+                mSignalledError = true;
+                work->workletsProcessed = 1u;
+                work->result = C2_CORRUPTED;
+                return;
+            }
+            continue;
+        }
         if (0 < s_decode_op.u4_pic_wd && 0 < s_decode_op.u4_pic_ht) {
             if (mHeaderDecoded == false) {
                 mHeaderDecoded = true;
diff --git a/media/codec2/components/avc/C2SoftAvcDec.h b/media/codec2/components/avc/C2SoftAvcDec.h
index 72ee583..4414a26 100644
--- a/media/codec2/components/avc/C2SoftAvcDec.h
+++ b/media/codec2/components/avc/C2SoftAvcDec.h
@@ -157,7 +157,7 @@
 
     size_t mNumCores;
     IV_COLOR_FORMAT_T mIvColorFormat;
-
+    uint32_t mOutputDelay;
     uint32_t mWidth;
     uint32_t mHeight;
     uint32_t mStride;
diff --git a/media/codec2/components/g711/C2SoftG711Dec.cpp b/media/codec2/components/g711/C2SoftG711Dec.cpp
index 43b843a..4ff0793 100644
--- a/media/codec2/components/g711/C2SoftG711Dec.cpp
+++ b/media/codec2/components/g711/C2SoftG711Dec.cpp
@@ -74,7 +74,7 @@
         addParameter(
                 DefineParam(mChannelCount, C2_PARAMKEY_CHANNEL_COUNT)
                 .withDefault(new C2StreamChannelCountInfo::output(0u, 1))
-                .withFields({C2F(mChannelCount, value).equalTo(1)})
+                .withFields({C2F(mChannelCount, value).inRange(1, 6)})
                 .withSetter(Setter<decltype(*mChannelCount)>::StrictValueWithNoDeps)
                 .build());
 
diff --git a/media/codec2/components/gav1/Android.bp b/media/codec2/components/gav1/Android.bp
new file mode 100644
index 0000000..5c4abb7
--- /dev/null
+++ b/media/codec2/components/gav1/Android.bp
@@ -0,0 +1,20 @@
+cc_library_shared {
+    name: "libcodec2_soft_av1dec_gav1",
+    defaults: [
+        "libcodec2_soft-defaults",
+        "libcodec2_soft_sanitize_all-defaults",
+    ],
+
+    // coordinated with frameworks/av/media/codec2/components/aom/Android.bp
+    // so only 1 of them has the official c2.android.av1.decoder name
+    cflags: [
+        "-DCODECNAME=\"c2.android.av1.decoder\"",
+    ],
+
+    srcs: ["C2SoftGav1Dec.cpp"],
+    static_libs: ["libgav1"],
+
+    include_dirs: [
+        "external/libgav1/libgav1/",
+    ],
+}
diff --git a/media/codec2/components/gav1/C2SoftGav1Dec.cpp b/media/codec2/components/gav1/C2SoftGav1Dec.cpp
new file mode 100644
index 0000000..ec5f549
--- /dev/null
+++ b/media/codec2/components/gav1/C2SoftGav1Dec.cpp
@@ -0,0 +1,792 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "C2SoftGav1Dec"
+#include "C2SoftGav1Dec.h"
+
+#include <C2Debug.h>
+#include <C2PlatformSupport.h>
+#include <SimpleC2Interface.h>
+#include <log/log.h>
+#include <media/stagefright/foundation/AUtils.h>
+#include <media/stagefright/foundation/MediaDefs.h>
+
+namespace android {
+
+// codecname set and passed in as a compile flag from Android.bp
+constexpr char COMPONENT_NAME[] = CODECNAME;
+
+class C2SoftGav1Dec::IntfImpl : public SimpleInterface<void>::BaseParams {
+ public:
+  explicit IntfImpl(const std::shared_ptr<C2ReflectorHelper> &helper)
+      : SimpleInterface<void>::BaseParams(
+            helper, COMPONENT_NAME, C2Component::KIND_DECODER,
+            C2Component::DOMAIN_VIDEO, MEDIA_MIMETYPE_VIDEO_AV1) {
+    noPrivateBuffers();  // TODO: account for our buffers here.
+    noInputReferences();
+    noOutputReferences();
+    noInputLatency();
+    noTimeStretch();
+
+    addParameter(DefineParam(mAttrib, C2_PARAMKEY_COMPONENT_ATTRIBUTES)
+                     .withConstValue(new C2ComponentAttributesSetting(
+                         C2Component::ATTRIB_IS_TEMPORAL))
+                     .build());
+
+    addParameter(
+        DefineParam(mSize, C2_PARAMKEY_PICTURE_SIZE)
+            .withDefault(new C2StreamPictureSizeInfo::output(0u, 320, 240))
+            .withFields({
+                C2F(mSize, width).inRange(2, 2048, 2),
+                C2F(mSize, height).inRange(2, 2048, 2),
+            })
+            .withSetter(SizeSetter)
+            .build());
+
+    addParameter(DefineParam(mProfileLevel, C2_PARAMKEY_PROFILE_LEVEL)
+                     .withDefault(new C2StreamProfileLevelInfo::input(
+                         0u, C2Config::PROFILE_AV1_0, C2Config::LEVEL_AV1_2_1))
+                     .withFields({C2F(mProfileLevel, profile)
+                                      .oneOf({C2Config::PROFILE_AV1_0,
+                                              C2Config::PROFILE_AV1_1}),
+                                  C2F(mProfileLevel, level)
+                                      .oneOf({
+                                          C2Config::LEVEL_AV1_2,
+                                          C2Config::LEVEL_AV1_2_1,
+                                          C2Config::LEVEL_AV1_2_2,
+                                          C2Config::LEVEL_AV1_3,
+                                          C2Config::LEVEL_AV1_3_1,
+                                          C2Config::LEVEL_AV1_3_2,
+                                      })})
+                     .withSetter(ProfileLevelSetter, mSize)
+                     .build());
+
+    mHdr10PlusInfoInput = C2StreamHdr10PlusInfo::input::AllocShared(0);
+    addParameter(
+        DefineParam(mHdr10PlusInfoInput, C2_PARAMKEY_INPUT_HDR10_PLUS_INFO)
+            .withDefault(mHdr10PlusInfoInput)
+            .withFields({
+                C2F(mHdr10PlusInfoInput, m.value).any(),
+            })
+            .withSetter(Hdr10PlusInfoInputSetter)
+            .build());
+
+    mHdr10PlusInfoOutput = C2StreamHdr10PlusInfo::output::AllocShared(0);
+    addParameter(
+        DefineParam(mHdr10PlusInfoOutput, C2_PARAMKEY_OUTPUT_HDR10_PLUS_INFO)
+            .withDefault(mHdr10PlusInfoOutput)
+            .withFields({
+                C2F(mHdr10PlusInfoOutput, m.value).any(),
+            })
+            .withSetter(Hdr10PlusInfoOutputSetter)
+            .build());
+
+    addParameter(
+        DefineParam(mMaxSize, C2_PARAMKEY_MAX_PICTURE_SIZE)
+            .withDefault(new C2StreamMaxPictureSizeTuning::output(0u, 320, 240))
+            .withFields({
+                C2F(mSize, width).inRange(2, 2048, 2),
+                C2F(mSize, height).inRange(2, 2048, 2),
+            })
+            .withSetter(MaxPictureSizeSetter, mSize)
+            .build());
+
+    addParameter(DefineParam(mMaxInputSize, C2_PARAMKEY_INPUT_MAX_BUFFER_SIZE)
+                     .withDefault(new C2StreamMaxBufferSizeInfo::input(
+                         0u, 320 * 240 * 3 / 4))
+                     .withFields({
+                         C2F(mMaxInputSize, value).any(),
+                     })
+                     .calculatedAs(MaxInputSizeSetter, mMaxSize)
+                     .build());
+
+    C2ChromaOffsetStruct locations[1] = {C2ChromaOffsetStruct::ITU_YUV_420_0()};
+    std::shared_ptr<C2StreamColorInfo::output> defaultColorInfo =
+        C2StreamColorInfo::output::AllocShared(1u, 0u, 8u /* bitDepth */,
+                                               C2Color::YUV_420);
+    memcpy(defaultColorInfo->m.locations, locations, sizeof(locations));
+
+    defaultColorInfo = C2StreamColorInfo::output::AllocShared(
+        {C2ChromaOffsetStruct::ITU_YUV_420_0()}, 0u, 8u /* bitDepth */,
+        C2Color::YUV_420);
+    helper->addStructDescriptors<C2ChromaOffsetStruct>();
+
+    addParameter(DefineParam(mColorInfo, C2_PARAMKEY_CODED_COLOR_INFO)
+                     .withConstValue(defaultColorInfo)
+                     .build());
+
+    addParameter(
+        DefineParam(mDefaultColorAspects, C2_PARAMKEY_DEFAULT_COLOR_ASPECTS)
+            .withDefault(new C2StreamColorAspectsTuning::output(
+                0u, C2Color::RANGE_UNSPECIFIED, C2Color::PRIMARIES_UNSPECIFIED,
+                C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
+            .withFields(
+                {C2F(mDefaultColorAspects, range)
+                     .inRange(C2Color::RANGE_UNSPECIFIED, C2Color::RANGE_OTHER),
+                 C2F(mDefaultColorAspects, primaries)
+                     .inRange(C2Color::PRIMARIES_UNSPECIFIED,
+                              C2Color::PRIMARIES_OTHER),
+                 C2F(mDefaultColorAspects, transfer)
+                     .inRange(C2Color::TRANSFER_UNSPECIFIED,
+                              C2Color::TRANSFER_OTHER),
+                 C2F(mDefaultColorAspects, matrix)
+                     .inRange(C2Color::MATRIX_UNSPECIFIED,
+                              C2Color::MATRIX_OTHER)})
+            .withSetter(DefaultColorAspectsSetter)
+            .build());
+
+    // TODO: support more formats?
+    addParameter(DefineParam(mPixelFormat, C2_PARAMKEY_PIXEL_FORMAT)
+                     .withConstValue(new C2StreamPixelFormatInfo::output(
+                         0u, HAL_PIXEL_FORMAT_YCBCR_420_888))
+                     .build());
+  }
+
+  static C2R SizeSetter(bool mayBlock,
+                        const C2P<C2StreamPictureSizeInfo::output> &oldMe,
+                        C2P<C2StreamPictureSizeInfo::output> &me) {
+    (void)mayBlock;
+    C2R res = C2R::Ok();
+    if (!me.F(me.v.width).supportsAtAll(me.v.width)) {
+      res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.width)));
+      me.set().width = oldMe.v.width;
+    }
+    if (!me.F(me.v.height).supportsAtAll(me.v.height)) {
+      res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.height)));
+      me.set().height = oldMe.v.height;
+    }
+    return res;
+  }
+
+  static C2R MaxPictureSizeSetter(
+      bool mayBlock, C2P<C2StreamMaxPictureSizeTuning::output> &me,
+      const C2P<C2StreamPictureSizeInfo::output> &size) {
+    (void)mayBlock;
+    // TODO: get max width/height from the size's field helpers vs.
+    // hardcoding
+    me.set().width = c2_min(c2_max(me.v.width, size.v.width), 4096u);
+    me.set().height = c2_min(c2_max(me.v.height, size.v.height), 4096u);
+    return C2R::Ok();
+  }
+
+  static C2R MaxInputSizeSetter(
+      bool mayBlock, C2P<C2StreamMaxBufferSizeInfo::input> &me,
+      const C2P<C2StreamMaxPictureSizeTuning::output> &maxSize) {
+    (void)mayBlock;
+    // assume compression ratio of 2
+    me.set().value =
+        (((maxSize.v.width + 63) / 64) * ((maxSize.v.height + 63) / 64) * 3072);
+    return C2R::Ok();
+  }
+
+  static C2R DefaultColorAspectsSetter(
+      bool mayBlock, C2P<C2StreamColorAspectsTuning::output> &me) {
+    (void)mayBlock;
+    if (me.v.range > C2Color::RANGE_OTHER) {
+      me.set().range = C2Color::RANGE_OTHER;
+    }
+    if (me.v.primaries > C2Color::PRIMARIES_OTHER) {
+      me.set().primaries = C2Color::PRIMARIES_OTHER;
+    }
+    if (me.v.transfer > C2Color::TRANSFER_OTHER) {
+      me.set().transfer = C2Color::TRANSFER_OTHER;
+    }
+    if (me.v.matrix > C2Color::MATRIX_OTHER) {
+      me.set().matrix = C2Color::MATRIX_OTHER;
+    }
+    return C2R::Ok();
+  }
+
+  static C2R ProfileLevelSetter(
+      bool mayBlock, C2P<C2StreamProfileLevelInfo::input> &me,
+      const C2P<C2StreamPictureSizeInfo::output> &size) {
+    (void)mayBlock;
+    (void)size;
+    (void)me;  // TODO: validate
+    return C2R::Ok();
+  }
+
+  std::shared_ptr<C2StreamColorAspectsTuning::output>
+  getDefaultColorAspects_l() {
+    return mDefaultColorAspects;
+  }
+
+  static C2R Hdr10PlusInfoInputSetter(bool mayBlock,
+                                      C2P<C2StreamHdr10PlusInfo::input> &me) {
+    (void)mayBlock;
+    (void)me;  // TODO: validate
+    return C2R::Ok();
+  }
+
+  static C2R Hdr10PlusInfoOutputSetter(bool mayBlock,
+                                       C2P<C2StreamHdr10PlusInfo::output> &me) {
+    (void)mayBlock;
+    (void)me;  // TODO: validate
+    return C2R::Ok();
+  }
+
+ private:
+  std::shared_ptr<C2StreamProfileLevelInfo::input> mProfileLevel;
+  std::shared_ptr<C2StreamPictureSizeInfo::output> mSize;
+  std::shared_ptr<C2StreamMaxPictureSizeTuning::output> mMaxSize;
+  std::shared_ptr<C2StreamMaxBufferSizeInfo::input> mMaxInputSize;
+  std::shared_ptr<C2StreamColorInfo::output> mColorInfo;
+  std::shared_ptr<C2StreamPixelFormatInfo::output> mPixelFormat;
+  std::shared_ptr<C2StreamColorAspectsTuning::output> mDefaultColorAspects;
+  std::shared_ptr<C2StreamHdr10PlusInfo::input> mHdr10PlusInfoInput;
+  std::shared_ptr<C2StreamHdr10PlusInfo::output> mHdr10PlusInfoOutput;
+};
+
+C2SoftGav1Dec::C2SoftGav1Dec(const char *name, c2_node_id_t id,
+                             const std::shared_ptr<IntfImpl> &intfImpl)
+    : SimpleC2Component(
+          std::make_shared<SimpleInterface<IntfImpl>>(name, id, intfImpl)),
+      mIntf(intfImpl),
+      mCodecCtx(nullptr) {
+  gettimeofday(&mTimeStart, nullptr);
+  gettimeofday(&mTimeEnd, nullptr);
+}
+
+C2SoftGav1Dec::~C2SoftGav1Dec() { onRelease(); }
+
+c2_status_t C2SoftGav1Dec::onInit() {
+  return initDecoder() ? C2_OK : C2_CORRUPTED;
+}
+
+c2_status_t C2SoftGav1Dec::onStop() {
+  mSignalledError = false;
+  mSignalledOutputEos = false;
+  return C2_OK;
+}
+
+void C2SoftGav1Dec::onReset() {
+  (void)onStop();
+  c2_status_t err = onFlush_sm();
+  if (err != C2_OK) {
+    ALOGW("Failed to flush the av1 decoder. Trying to hard reset.");
+    destroyDecoder();
+    if (!initDecoder()) {
+      ALOGE("Hard reset failed.");
+    }
+  }
+}
+
+void C2SoftGav1Dec::onRelease() { destroyDecoder(); }
+
+c2_status_t C2SoftGav1Dec::onFlush_sm() {
+  Libgav1StatusCode status =
+      mCodecCtx->EnqueueFrame(/*data=*/nullptr, /*size=*/0,
+                              /*user_private_data=*/0);
+  if (status != kLibgav1StatusOk) {
+    ALOGE("Failed to flush av1 decoder. status: %d.", status);
+    return C2_CORRUPTED;
+  }
+
+  // Dequeue frame (if any) that was enqueued previously.
+  const libgav1::DecoderBuffer *buffer;
+  status = mCodecCtx->DequeueFrame(&buffer);
+  if (status != kLibgav1StatusOk) {
+    ALOGE("Failed to dequeue frame after flushing the av1 decoder. status: %d",
+          status);
+    return C2_CORRUPTED;
+  }
+
+  mSignalledError = false;
+  mSignalledOutputEos = false;
+
+  return C2_OK;
+}
+
+static int GetCPUCoreCount() {
+  int cpuCoreCount = 1;
+#if defined(_SC_NPROCESSORS_ONLN)
+  cpuCoreCount = sysconf(_SC_NPROCESSORS_ONLN);
+#else
+  // _SC_NPROC_ONLN must be defined...
+  cpuCoreCount = sysconf(_SC_NPROC_ONLN);
+#endif
+  CHECK(cpuCoreCount >= 1);
+  ALOGV("Number of CPU cores: %d", cpuCoreCount);
+  return cpuCoreCount;
+}
+
+bool C2SoftGav1Dec::initDecoder() {
+  mSignalledError = false;
+  mSignalledOutputEos = false;
+  mCodecCtx.reset(new libgav1::Decoder());
+
+  if (mCodecCtx == nullptr) {
+    ALOGE("mCodecCtx is null");
+    return false;
+  }
+
+  libgav1::DecoderSettings settings = {};
+  settings.threads = GetCPUCoreCount();
+
+  ALOGV("Using libgav1 AV1 software decoder.");
+  Libgav1StatusCode status = mCodecCtx->Init(&settings);
+  if (status != kLibgav1StatusOk) {
+    ALOGE("av1 decoder failed to initialize. status: %d.", status);
+    return false;
+  }
+
+  return true;
+}
+
+void C2SoftGav1Dec::destroyDecoder() { mCodecCtx = nullptr; }
+
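+// Fills |work| with an empty output (no buffers), propagating the
+// end-of-stream flag from the input if it was set.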
+void fillEmptyWork(const std::unique_ptr<C2Work> &work) {
+  uint32_t flags = 0;
+  if (work->input.flags & C2FrameData::FLAG_END_OF_STREAM) {
+    flags |= C2FrameData::FLAG_END_OF_STREAM;
+    ALOGV("signalling eos");
+  }
+  work->worklets.front()->output.flags = (C2FrameData::flags_t)flags;
+  work->worklets.front()->output.buffers.clear();
+  work->worklets.front()->output.ordinal = work->input.ordinal;
+  work->workletsProcessed = 1u;
+}
+
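+// Wraps |block| (cropped to mWidth x mHeight) in a C2Buffer and attaches it to
+// the work item with frame index |index|: either |work| itself when the frame
+// indices match, or a previously queued work item completed via finish().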
+void C2SoftGav1Dec::finishWork(uint64_t index,
+                               const std::unique_ptr<C2Work> &work,
+                               const std::shared_ptr<C2GraphicBlock> &block) {
+  std::shared_ptr<C2Buffer> buffer =
+      createGraphicBuffer(block, C2Rect(mWidth, mHeight));
+  auto fillWork = [buffer, index](const std::unique_ptr<C2Work> &work) {
+    uint32_t flags = 0;
+    if ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) &&
+        (c2_cntr64_t(index) == work->input.ordinal.frameIndex)) {
+      flags |= C2FrameData::FLAG_END_OF_STREAM;
+      ALOGV("signalling eos");
+    }
+    work->worklets.front()->output.flags = (C2FrameData::flags_t)flags;
+    work->worklets.front()->output.buffers.clear();
+    work->worklets.front()->output.buffers.push_back(buffer);
+    work->worklets.front()->output.ordinal = work->input.ordinal;
+    work->workletsProcessed = 1u;
+  };
+  if (work && c2_cntr64_t(index) == work->input.ordinal.frameIndex) {
+    fillWork(work);
+  } else {
+    finish(index, fillWork);
+  }
+}
+
+void C2SoftGav1Dec::process(const std::unique_ptr<C2Work> &work,
+                            const std::shared_ptr<C2BlockPool> &pool) {
+  work->result = C2_OK;
+  work->workletsProcessed = 0u;
+  work->worklets.front()->output.configUpdate.clear();
+  work->worklets.front()->output.flags = work->input.flags;
+  if (mSignalledError || mSignalledOutputEos) {
+    work->result = C2_BAD_VALUE;
+    return;
+  }
+
+  size_t inOffset = 0u;
+  size_t inSize = 0u;
+  C2ReadView rView = mDummyReadView;
+  if (!work->input.buffers.empty()) {
+    rView = work->input.buffers[0]->data().linearBlocks().front().map().get();
+    inSize = rView.capacity();
+    if (inSize && rView.error()) {
+      ALOGE("read view map failed %d", rView.error());
+      work->result = C2_CORRUPTED;
+      return;
+    }
+  }
+
+  bool codecConfig =
+      ((work->input.flags & C2FrameData::FLAG_CODEC_CONFIG) != 0);
+  bool eos = ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) != 0);
+
+  ALOGV("in buffer attr. size %zu timestamp %d frameindex %d, flags %x", inSize,
+        (int)work->input.ordinal.timestamp.peeku(),
+        (int)work->input.ordinal.frameIndex.peeku(), work->input.flags);
+
+  if (codecConfig) {
+    fillEmptyWork(work);
+    return;
+  }
+
+  int64_t frameIndex = work->input.ordinal.frameIndex.peekll();
+  if (inSize) {
+    uint8_t *bitstream = const_cast<uint8_t *>(rView.data() + inOffset);
+    int32_t decodeTime = 0;
+    int32_t delay = 0;
+
+    GETTIME(&mTimeStart, nullptr);
+    TIME_DIFF(mTimeEnd, mTimeStart, delay);
+
+    const Libgav1StatusCode status =
+        mCodecCtx->EnqueueFrame(bitstream, inSize, frameIndex);
+
+    GETTIME(&mTimeEnd, nullptr);
+    TIME_DIFF(mTimeStart, mTimeEnd, decodeTime);
+    ALOGV("decodeTime=%4d delay=%4d\n", decodeTime, delay);
+
+    if (status != kLibgav1StatusOk) {
+      ALOGE("av1 decoder failed to decode frame. status: %d.", status);
+      work->result = C2_CORRUPTED;
+      work->workletsProcessed = 1u;
+      mSignalledError = true;
+      return;
+    }
+
+  } else {
+    const Libgav1StatusCode status =
+        mCodecCtx->EnqueueFrame(/*data=*/nullptr, /*size=*/0,
+                                /*user_private_data=*/0);
+    if (status != kLibgav1StatusOk) {
+      ALOGE("Failed to flush av1 decoder. status: %d.", status);
+      work->result = C2_CORRUPTED;
+      work->workletsProcessed = 1u;
+      mSignalledError = true;
+      return;
+    }
+  }
+
+  (void)outputBuffer(pool, work);
+
+  if (eos) {
+    drainInternal(DRAIN_COMPONENT_WITH_EOS, pool, work);
+    mSignalledOutputEos = true;
+  } else if (!inSize) {
+    fillEmptyWork(work);
+  }
+}
+
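+// Copies 8-bit YUV420 planes into a contiguous YV12 destination (Y plane,
+// then V, then U) whose Y stride is align(width, 16) and whose chroma stride
+// is align(dstYStride / 2, 16).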
+static void copyOutputBufferToYV12Frame(uint8_t *dst, const uint8_t *srcY,
+                                        const uint8_t *srcU,
+                                        const uint8_t *srcV, size_t srcYStride,
+                                        size_t srcUStride, size_t srcVStride,
+                                        uint32_t width, uint32_t height) {
+  const size_t dstYStride = align(width, 16);
+  const size_t dstUVStride = align(dstYStride / 2, 16);
+  uint8_t *const dstStart = dst;
+
+  for (size_t i = 0; i < height; ++i) {
+    memcpy(dst, srcY, width);
+    srcY += srcYStride;
+    dst += dstYStride;
+  }
+
+  dst = dstStart + dstYStride * height;
+  for (size_t i = 0; i < height / 2; ++i) {
+    memcpy(dst, srcV, width / 2);
+    srcV += srcVStride;
+    dst += dstUVStride;
+  }
+
+  dst = dstStart + (dstYStride * height) + (dstUVStride * height / 2);
+  for (size_t i = 0; i < height / 2; ++i) {
+    memcpy(dst, srcU, width / 2);
+    srcU += srcUStride;
+    dst += dstUVStride;
+  }
+}
+
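+// Packs 10-bit YUV420 samples into 32-bit Y410-style words (2-bit alpha in
+// the top bits, then 10-bit V, Y, U), producing two output rows per
+// iteration; used for the HAL_PIXEL_FORMAT_RGBA_1010102 output path.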
+static void convertYUV420Planar16ToY410(uint32_t *dst, const uint16_t *srcY,
+                                        const uint16_t *srcU,
+                                        const uint16_t *srcV, size_t srcYStride,
+                                        size_t srcUStride, size_t srcVStride,
+                                        size_t dstStride, size_t width,
+                                        size_t height) {
+  // Converting two lines at a time, slightly faster
+  for (size_t y = 0; y < height; y += 2) {
+    uint32_t *dstTop = (uint32_t *)dst;
+    uint32_t *dstBot = (uint32_t *)(dst + dstStride);
+    uint16_t *ySrcTop = (uint16_t *)srcY;
+    uint16_t *ySrcBot = (uint16_t *)(srcY + srcYStride);
+    uint16_t *uSrc = (uint16_t *)srcU;
+    uint16_t *vSrc = (uint16_t *)srcV;
+
+    uint32_t u01, v01, y01, y23, y45, y67, uv0, uv1;
+    size_t x = 0;
+    for (; x < width - 3; x += 4) {
+      u01 = *((uint32_t *)uSrc);
+      uSrc += 2;
+      v01 = *((uint32_t *)vSrc);
+      vSrc += 2;
+
+      y01 = *((uint32_t *)ySrcTop);
+      ySrcTop += 2;
+      y23 = *((uint32_t *)ySrcTop);
+      ySrcTop += 2;
+      y45 = *((uint32_t *)ySrcBot);
+      ySrcBot += 2;
+      y67 = *((uint32_t *)ySrcBot);
+      ySrcBot += 2;
+
+      uv0 = (u01 & 0x3FF) | ((v01 & 0x3FF) << 20);
+      uv1 = (u01 >> 16) | ((v01 >> 16) << 20);
+
+      *dstTop++ = 3 << 30 | ((y01 & 0x3FF) << 10) | uv0;
+      *dstTop++ = 3 << 30 | ((y01 >> 16) << 10) | uv0;
+      *dstTop++ = 3 << 30 | ((y23 & 0x3FF) << 10) | uv1;
+      *dstTop++ = 3 << 30 | ((y23 >> 16) << 10) | uv1;
+
+      *dstBot++ = 3 << 30 | ((y45 & 0x3FF) << 10) | uv0;
+      *dstBot++ = 3 << 30 | ((y45 >> 16) << 10) | uv0;
+      *dstBot++ = 3 << 30 | ((y67 & 0x3FF) << 10) | uv1;
+      *dstBot++ = 3 << 30 | ((y67 >> 16) << 10) | uv1;
+    }
+
+    // There should be at most 2 more pixels to process. Note that we don't
+    // need to consider the odd case, as the buffer width is always even.
+    if (x < width) {
+      u01 = *uSrc;
+      v01 = *vSrc;
+      y01 = *((uint32_t *)ySrcTop);
+      y45 = *((uint32_t *)ySrcBot);
+      uv0 = (u01 & 0x3FF) | ((v01 & 0x3FF) << 20);
+      *dstTop++ = ((y01 & 0x3FF) << 10) | uv0;
+      *dstTop++ = ((y01 >> 16) << 10) | uv0;
+      *dstBot++ = ((y45 & 0x3FF) << 10) | uv0;
+      *dstBot++ = ((y45 >> 16) << 10) | uv0;
+    }
+
+    srcY += srcYStride * 2;
+    srcU += srcUStride;
+    srcV += srcVStride;
+    dst += dstStride * 2;
+  }
+}
+
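+// Converts 10-bit planar YUV420 to 8-bit planar YUV420 (Y, then V, then U in
+// the destination) by dropping the two least significant bits of each sample.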
+static void convertYUV420Planar16ToYUV420Planar(
+    uint8_t *dst, const uint16_t *srcY, const uint16_t *srcU,
+    const uint16_t *srcV, size_t srcYStride, size_t srcUStride,
+    size_t srcVStride, size_t dstStride, size_t width, size_t height) {
+  uint8_t *dstY = (uint8_t *)dst;
+  size_t dstYSize = dstStride * height;
+  size_t dstUVStride = align(dstStride / 2, 16);
+  size_t dstUVSize = dstUVStride * height / 2;
+  uint8_t *dstV = dstY + dstYSize;
+  uint8_t *dstU = dstV + dstUVSize;
+
+  for (size_t y = 0; y < height; ++y) {
+    for (size_t x = 0; x < width; ++x) {
+      dstY[x] = (uint8_t)(srcY[x] >> 2);
+    }
+
+    srcY += srcYStride;
+    dstY += dstStride;
+  }
+
+  for (size_t y = 0; y < (height + 1) / 2; ++y) {
+    for (size_t x = 0; x < (width + 1) / 2; ++x) {
+      dstU[x] = (uint8_t)(srcU[x] >> 2);
+      dstV[x] = (uint8_t)(srcV[x] >> 2);
+    }
+
+    srcU += srcUStride;
+    srcV += srcVStride;
+    dstU += dstUVStride;
+    dstV += dstUVStride;
+  }
+}
+
+bool C2SoftGav1Dec::outputBuffer(const std::shared_ptr<C2BlockPool> &pool,
+                                 const std::unique_ptr<C2Work> &work) {
+  if (!(work && pool)) return false;
+
+  const libgav1::DecoderBuffer *buffer;
+  const Libgav1StatusCode status = mCodecCtx->DequeueFrame(&buffer);
+
+  if (status != kLibgav1StatusOk) {
+    ALOGE("av1 decoder DequeueFrame failed. status: %d.", status);
+    return false;
+  }
+
+  // |buffer| can be NULL even when status is kLibgav1StatusOk. This is not
+  // an error. It could mean one of two things:
+  //  - The EnqueueFrame() call was a flush (called with nullptr).
+  //  - The enqueued frame did not have any displayable frames.
+  if (!buffer) {
+    return false;
+  }
+
+  const int width = buffer->displayed_width[0];
+  const int height = buffer->displayed_height[0];
+  if (width != mWidth || height != mHeight) {
+    mWidth = width;
+    mHeight = height;
+
+    C2StreamPictureSizeInfo::output size(0u, mWidth, mHeight);
+    std::vector<std::unique_ptr<C2SettingResult>> failures;
+    c2_status_t err = mIntf->config({&size}, C2_MAY_BLOCK, &failures);
+    if (err == C2_OK) {
+      work->worklets.front()->output.configUpdate.push_back(
+          C2Param::Copy(size));
+    } else {
+      ALOGE("Config update size failed");
+      mSignalledError = true;
+      work->result = C2_CORRUPTED;
+      work->workletsProcessed = 1u;
+      return false;
+    }
+  }
+
+  // TODO(vigneshv): Add support for monochrome videos since AV1 supports it.
+  CHECK(buffer->image_format == libgav1::kImageFormatYuv420);
+
+  std::shared_ptr<C2GraphicBlock> block;
+  uint32_t format = HAL_PIXEL_FORMAT_YV12;
+  if (buffer->bitdepth == 10) {
+    IntfImpl::Lock lock = mIntf->lock();
+    std::shared_ptr<C2StreamColorAspectsTuning::output> defaultColorAspects =
+        mIntf->getDefaultColorAspects_l();
+
+    if (defaultColorAspects->primaries == C2Color::PRIMARIES_BT2020 &&
+        defaultColorAspects->matrix == C2Color::MATRIX_BT2020 &&
+        defaultColorAspects->transfer == C2Color::TRANSFER_ST2084) {
+      format = HAL_PIXEL_FORMAT_RGBA_1010102;
+    }
+  }
+  C2MemoryUsage usage = {C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE};
+
+  c2_status_t err = pool->fetchGraphicBlock(align(mWidth, 16), mHeight, format,
+                                            usage, &block);
+
+  if (err != C2_OK) {
+    ALOGE("fetchGraphicBlock for Output failed with status %d", err);
+    work->result = err;
+    return false;
+  }
+
+  C2GraphicView wView = block->map().get();
+
+  if (wView.error()) {
+    ALOGE("graphic view map failed %d", wView.error());
+    work->result = C2_CORRUPTED;
+    return false;
+  }
+
+  ALOGV("provided (%dx%d) required (%dx%d), out frameindex %d", block->width(),
+        block->height(), mWidth, mHeight, (int)buffer->user_private_data);
+
+  uint8_t *dst = const_cast<uint8_t *>(wView.data()[C2PlanarLayout::PLANE_Y]);
+  size_t srcYStride = buffer->stride[0];
+  size_t srcUStride = buffer->stride[1];
+  size_t srcVStride = buffer->stride[2];
+
+  if (buffer->bitdepth == 10) {
+    const uint16_t *srcY = (const uint16_t *)buffer->plane[0];
+    const uint16_t *srcU = (const uint16_t *)buffer->plane[1];
+    const uint16_t *srcV = (const uint16_t *)buffer->plane[2];
+
+    if (format == HAL_PIXEL_FORMAT_RGBA_1010102) {
+      convertYUV420Planar16ToY410(
+          (uint32_t *)dst, srcY, srcU, srcV, srcYStride / 2, srcUStride / 2,
+          srcVStride / 2, align(mWidth, 16), mWidth, mHeight);
+    } else {
+      convertYUV420Planar16ToYUV420Planar(dst, srcY, srcU, srcV, srcYStride / 2,
+                                          srcUStride / 2, srcVStride / 2,
+                                          align(mWidth, 16), mWidth, mHeight);
+    }
+  } else {
+    const uint8_t *srcY = (const uint8_t *)buffer->plane[0];
+    const uint8_t *srcU = (const uint8_t *)buffer->plane[1];
+    const uint8_t *srcV = (const uint8_t *)buffer->plane[2];
+    copyOutputBufferToYV12Frame(dst, srcY, srcU, srcV, srcYStride, srcUStride,
+                                srcVStride, mWidth, mHeight);
+  }
+  finishWork(buffer->user_private_data, work, std::move(block));
+  block = nullptr;
+  return true;
+}
+
+c2_status_t C2SoftGav1Dec::drainInternal(
+    uint32_t drainMode, const std::shared_ptr<C2BlockPool> &pool,
+    const std::unique_ptr<C2Work> &work) {
+  if (drainMode == NO_DRAIN) {
+    ALOGW("drain with NO_DRAIN: no-op");
+    return C2_OK;
+  }
+  if (drainMode == DRAIN_CHAIN) {
+    ALOGW("DRAIN_CHAIN not supported");
+    return C2_OMITTED;
+  }
+
+  Libgav1StatusCode status =
+      mCodecCtx->EnqueueFrame(/*data=*/nullptr, /*size=*/0,
+                              /*user_private_data=*/0);
+  if (status != kLibgav1StatusOk) {
+    ALOGE("Failed to flush av1 decoder. status: %d.", status);
+    return C2_CORRUPTED;
+  }
+
+  while (outputBuffer(pool, work)) {
+  }
+
+  if (drainMode == DRAIN_COMPONENT_WITH_EOS && work &&
+      work->workletsProcessed == 0u) {
+    fillEmptyWork(work);
+  }
+
+  return C2_OK;
+}
+
+c2_status_t C2SoftGav1Dec::drain(uint32_t drainMode,
+                                 const std::shared_ptr<C2BlockPool> &pool) {
+  return drainInternal(drainMode, pool, nullptr);
+}
+
+class C2SoftGav1Factory : public C2ComponentFactory {
+ public:
+  C2SoftGav1Factory()
+      : mHelper(std::static_pointer_cast<C2ReflectorHelper>(
+            GetCodec2PlatformComponentStore()->getParamReflector())) {}
+
+  virtual c2_status_t createComponent(
+      c2_node_id_t id, std::shared_ptr<C2Component> *const component,
+      std::function<void(C2Component *)> deleter) override {
+    *component = std::shared_ptr<C2Component>(
+        new C2SoftGav1Dec(COMPONENT_NAME, id,
+                          std::make_shared<C2SoftGav1Dec::IntfImpl>(mHelper)),
+        deleter);
+    return C2_OK;
+  }
+
+  virtual c2_status_t createInterface(
+      c2_node_id_t id, std::shared_ptr<C2ComponentInterface> *const interface,
+      std::function<void(C2ComponentInterface *)> deleter) override {
+    *interface = std::shared_ptr<C2ComponentInterface>(
+        new SimpleInterface<C2SoftGav1Dec::IntfImpl>(
+            COMPONENT_NAME, id,
+            std::make_shared<C2SoftGav1Dec::IntfImpl>(mHelper)),
+        deleter);
+    return C2_OK;
+  }
+
+  virtual ~C2SoftGav1Factory() override = default;
+
+ private:
+  std::shared_ptr<C2ReflectorHelper> mHelper;
+};
+
+}  // namespace android
+
+extern "C" ::C2ComponentFactory *CreateCodec2Factory() {
+  ALOGV("in %s", __func__);
+  return new ::android::C2SoftGav1Factory();
+}
+
+extern "C" void DestroyCodec2Factory(::C2ComponentFactory *factory) {
+  ALOGV("in %s", __func__);
+  delete factory;
+}
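
Note: the flush path above (onFlush_sm() and drainInternal()) relies on the libgav1 convention that enqueueing a null frame signals a flush and that DequeueFrame() is then called until no displayable buffer remains. The following is a minimal sketch of that pattern, not part of the patch; it assumes the same libgav1 headers used by the component, and the function and variable names are illustrative only.

    #include "libgav1/src/decoder.h"

    void drainAllDisplayableFrames(libgav1::Decoder &decoder) {
      // Enqueueing a null frame signals a flush to libgav1.
      Libgav1StatusCode status = decoder.EnqueueFrame(/*data=*/nullptr, /*size=*/0,
                                                      /*user_private_data=*/0);
      if (status != kLibgav1StatusOk) return;

      // Dequeue until libgav1 returns a null buffer; a null |buffer| with an OK
      // status is not an error, it just means nothing is left to display.
      const libgav1::DecoderBuffer *buffer = nullptr;
      while (decoder.DequeueFrame(&buffer) == kLibgav1StatusOk && buffer != nullptr) {
        // In the component, each |buffer| here is copied out via outputBuffer().
      }
    }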
diff --git a/media/codec2/components/gav1/C2SoftGav1Dec.h b/media/codec2/components/gav1/C2SoftGav1Dec.h
new file mode 100644
index 0000000..a7c08bb
--- /dev/null
+++ b/media/codec2/components/gav1/C2SoftGav1Dec.h
@@ -0,0 +1,77 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_C2_SOFT_GAV1_DEC_H_
+#define ANDROID_C2_SOFT_GAV1_DEC_H_
+
+#include <SimpleC2Component.h>
+#include "libgav1/src/decoder.h"
+#include "libgav1/src/decoder_settings.h"
+
+#define GETTIME(a, b) gettimeofday(a, b);
+#define TIME_DIFF(start, end, diff)     \
+    diff = (((end).tv_sec - (start).tv_sec) * 1000000) + \
+            ((end).tv_usec - (start).tv_usec);
+
+namespace android {
+
+struct C2SoftGav1Dec : public SimpleC2Component {
+  class IntfImpl;
+
+  C2SoftGav1Dec(const char* name, c2_node_id_t id,
+                const std::shared_ptr<IntfImpl>& intfImpl);
+  ~C2SoftGav1Dec();
+
+  // Begin SimpleC2Component overrides.
+  c2_status_t onInit() override;
+  c2_status_t onStop() override;
+  void onReset() override;
+  void onRelease() override;
+  c2_status_t onFlush_sm() override;
+  void process(const std::unique_ptr<C2Work>& work,
+               const std::shared_ptr<C2BlockPool>& pool) override;
+  c2_status_t drain(uint32_t drainMode,
+                    const std::shared_ptr<C2BlockPool>& pool) override;
+  // End SimpleC2Component overrides.
+
+ private:
+  std::shared_ptr<IntfImpl> mIntf;
+  std::unique_ptr<libgav1::Decoder> mCodecCtx;
+
+  uint32_t mWidth;
+  uint32_t mHeight;
+  bool mSignalledOutputEos;
+  bool mSignalledError;
+
+  struct timeval mTimeStart;  // Time at the start of decode()
+  struct timeval mTimeEnd;    // Time at the end of decode()
+
+  bool initDecoder();
+  void destroyDecoder();
+  void finishWork(uint64_t index, const std::unique_ptr<C2Work>& work,
+                  const std::shared_ptr<C2GraphicBlock>& block);
+  bool outputBuffer(const std::shared_ptr<C2BlockPool>& pool,
+                    const std::unique_ptr<C2Work>& work);
+  c2_status_t drainInternal(uint32_t drainMode,
+                            const std::shared_ptr<C2BlockPool>& pool,
+                            const std::unique_ptr<C2Work>& work);
+
+  C2_DO_NOT_COPY(C2SoftGav1Dec);
+};
+
+}  // namespace android
+
+#endif  // ANDROID_C2_SOFT_GAV1_DEC_H_
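
Note: the GETTIME / TIME_DIFF helpers above back the decode-latency logging in process(). A minimal usage sketch, not part of the patch, assuming the macros from this header and <sys/time.h>; names are illustrative only.

    #include <sys/time.h>
    #include <cstdint>

    void timedSection() {
      struct timeval start, end;
      int32_t elapsedUs = 0;
      GETTIME(&start, nullptr);
      // ... the section being timed, e.g. the EnqueueFrame() call in process() ...
      GETTIME(&end, nullptr);
      TIME_DIFF(start, end, elapsedUs);  // microseconds between the two samples
    }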
diff --git a/media/codec2/components/hevc/C2SoftHevcDec.cpp b/media/codec2/components/hevc/C2SoftHevcDec.cpp
index 7232572..df677c2 100644
--- a/media/codec2/components/hevc/C2SoftHevcDec.cpp
+++ b/media/codec2/components/hevc/C2SoftHevcDec.cpp
@@ -33,7 +33,8 @@
 namespace {
 
 constexpr char COMPONENT_NAME[] = "c2.android.hevc.decoder";
-
+constexpr uint32_t kDefaultOutputDelay = 8;
+constexpr uint32_t kMaxOutputDelay = 16;
 }  // namespace
 
 class C2SoftHevcDec::IntfImpl : public SimpleInterface<void>::BaseParams {
@@ -54,7 +55,9 @@
         // TODO: Proper support for reorder depth.
         addParameter(
                 DefineParam(mActualOutputDelay, C2_PARAMKEY_OUTPUT_DELAY)
-                .withConstValue(new C2PortActualDelayTuning::output(8u))
+                .withDefault(new C2PortActualDelayTuning::output(kDefaultOutputDelay))
+                .withFields({C2F(mActualOutputDelay, value).inRange(0, kMaxOutputDelay)})
+                .withSetter(Setter<decltype(*mActualOutputDelay)>::StrictValueWithNoDeps)
                 .build());
 
         addParameter(
@@ -327,6 +330,7 @@
         mDecHandle(nullptr),
         mOutBufferFlush(nullptr),
         mIvColorformat(IV_YUV_420P),
+        mOutputDelay(kDefaultOutputDelay),
         mWidth(320),
         mHeight(240),
         mHeaderDecoded(false),
@@ -877,6 +881,26 @@
             work->result = C2_CORRUPTED;
             return;
         }
+        if (s_decode_op.i4_reorder_depth >= 0 && mOutputDelay != s_decode_op.i4_reorder_depth) {
+            mOutputDelay = s_decode_op.i4_reorder_depth;
+            ALOGV("New Output delay %d ", mOutputDelay);
+
+            C2PortActualDelayTuning::output outputDelay(mOutputDelay);
+            std::vector<std::unique_ptr<C2SettingResult>> failures;
+            c2_status_t err =
+                mIntf->config({&outputDelay}, C2_MAY_BLOCK, &failures);
+            if (err == OK) {
+                work->worklets.front()->output.configUpdate.push_back(
+                    C2Param::Copy(outputDelay));
+            } else {
+                ALOGE("Cannot set output delay");
+                mSignalledError = true;
+                work->workletsProcessed = 1u;
+                work->result = C2_CORRUPTED;
+                return;
+            }
+            continue;
+        }
         if (0 < s_decode_op.u4_pic_wd && 0 < s_decode_op.u4_pic_ht) {
             if (mHeaderDecoded == false) {
                 mHeaderDecoded = true;
diff --git a/media/codec2/components/hevc/C2SoftHevcDec.h b/media/codec2/components/hevc/C2SoftHevcDec.h
index b7664e6..ce63a6c 100644
--- a/media/codec2/components/hevc/C2SoftHevcDec.h
+++ b/media/codec2/components/hevc/C2SoftHevcDec.h
@@ -115,7 +115,7 @@
 
     size_t mNumCores;
     IV_COLOR_FORMAT_T mIvColorformat;
-
+    uint32_t mOutputDelay;
     uint32_t mWidth;
     uint32_t mHeight;
     uint32_t mStride;
diff --git a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Enc.cpp b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Enc.cpp
index 36053f6..54c8c47 100644
--- a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Enc.cpp
+++ b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Enc.cpp
@@ -517,9 +517,11 @@
             if (layout.planes[layout.PLANE_Y].colInc == 1
                     && layout.planes[layout.PLANE_U].colInc == 1
                     && layout.planes[layout.PLANE_V].colInc == 1
+                    && yStride == align(width, 16)
                     && uStride == vStride
                     && yStride == 2 * vStride) {
-                // I420 compatible - planes are already set up above
+                // I420-compatible, with yStride equal to the 16-aligned width;
+                // planes are already set up above
                 break;
             }
 
diff --git a/media/codec2/components/raw/C2SoftRawDec.cpp b/media/codec2/components/raw/C2SoftRawDec.cpp
index 95eb909..7b6f21a 100644
--- a/media/codec2/components/raw/C2SoftRawDec.cpp
+++ b/media/codec2/components/raw/C2SoftRawDec.cpp
@@ -58,7 +58,7 @@
         addParameter(
                 DefineParam(mSampleRate, C2_PARAMKEY_SAMPLE_RATE)
                 .withDefault(new C2StreamSampleRateInfo::output(0u, 44100))
-                .withFields({C2F(mSampleRate, value).inRange(8000, 384000)})
+                .withFields({C2F(mSampleRate, value).greaterThan(0)})
                 .withSetter((Setter<decltype(*mSampleRate)>::StrictValueWithNoDeps))
                 .build());
 
diff --git a/media/codec2/components/vpx/C2SoftVpxDec.cpp b/media/codec2/components/vpx/C2SoftVpxDec.cpp
index a52ca15..a759e8f 100644
--- a/media/codec2/components/vpx/C2SoftVpxDec.cpp
+++ b/media/codec2/components/vpx/C2SoftVpxDec.cpp
@@ -593,12 +593,10 @@
         }
     }
 
-    int64_t frameIndex = work->input.ordinal.frameIndex.peekll();
-
     if (inSize) {
         uint8_t *bitstream = const_cast<uint8_t *>(rView.data() + inOffset);
         vpx_codec_err_t err = vpx_codec_decode(
-                mCodecCtx, bitstream, inSize, &frameIndex, 0);
+                mCodecCtx, bitstream, inSize, &work->input.ordinal.frameIndex, 0);
         if (err != VPX_CODEC_OK) {
             ALOGE("on2 decoder failed to decode frame. err: %d", err);
             mSignalledError = true;
@@ -608,7 +606,20 @@
         }
     }
 
-    (void)outputBuffer(pool, work);
+    status_t err = outputBuffer(pool, work);
+    if (err == NOT_ENOUGH_DATA) {
+        if (inSize > 0) {
+            ALOGV("Maybe non-display frame at %lld.",
+                  work->input.ordinal.frameIndex.peekll());
+            // send the work back with empty buffer.
+            inSize = 0;
+        }
+    } else if (err != OK) {
+        ALOGD("Error while getting the output frame out");
+        // work->result would be already filled; do fillEmptyWork() below to
+        // send the work back.
+        inSize = 0;
+    }
 
     if (eos) {
         drainInternal(DRAIN_COMPONENT_WITH_EOS, pool, work);
@@ -742,16 +753,16 @@
     }
     return;
 }
-bool C2SoftVpxDec::outputBuffer(
+status_t C2SoftVpxDec::outputBuffer(
         const std::shared_ptr<C2BlockPool> &pool,
         const std::unique_ptr<C2Work> &work)
 {
-    if (!(work && pool)) return false;
+    if (!(work && pool)) return BAD_VALUE;
 
     vpx_codec_iter_t iter = nullptr;
     vpx_image_t *img = vpx_codec_get_frame(mCodecCtx, &iter);
 
-    if (!img) return false;
+    if (!img) return NOT_ENOUGH_DATA;
 
     if (img->d_w != mWidth || img->d_h != mHeight) {
         mWidth = img->d_w;
@@ -768,7 +779,7 @@
             mSignalledError = true;
             work->workletsProcessed = 1u;
             work->result = C2_CORRUPTED;
-            return false;
+            return UNKNOWN_ERROR;
         }
 
     }
@@ -791,18 +802,19 @@
     if (err != C2_OK) {
         ALOGE("fetchGraphicBlock for Output failed with status %d", err);
         work->result = err;
-        return false;
+        return UNKNOWN_ERROR;
     }
 
     C2GraphicView wView = block->map().get();
     if (wView.error()) {
         ALOGE("graphic view map failed %d", wView.error());
         work->result = C2_CORRUPTED;
-        return false;
+        return UNKNOWN_ERROR;
     }
 
-    ALOGV("provided (%dx%d) required (%dx%d), out frameindex %d",
-           block->width(), block->height(), mWidth, mHeight, (int)*(int64_t *)img->user_priv);
+    ALOGV("provided (%dx%d) required (%dx%d), out frameindex %lld",
+           block->width(), block->height(), mWidth, mHeight,
+           ((c2_cntr64_t *)img->user_priv)->peekll());
 
     uint8_t *dst = const_cast<uint8_t *>(wView.data()[C2PlanarLayout::PLANE_Y]);
     size_t srcYStride = img->stride[VPX_PLANE_Y];
@@ -858,8 +870,8 @@
                 dstYStride, dstUVStride,
                 mWidth, mHeight);
     }
-    finishWork(*(int64_t *)img->user_priv, work, std::move(block));
-    return true;
+    finishWork(((c2_cntr64_t *)img->user_priv)->peekull(), work, std::move(block));
+    return OK;
 }
 
 c2_status_t C2SoftVpxDec::drainInternal(
@@ -875,7 +887,7 @@
         return C2_OMITTED;
     }
 
-    while ((outputBuffer(pool, work))) {
+    while (outputBuffer(pool, work) == OK) {
     }
 
     if (drainMode == DRAIN_COMPONENT_WITH_EOS &&
diff --git a/media/codec2/components/vpx/C2SoftVpxDec.h b/media/codec2/components/vpx/C2SoftVpxDec.h
index e51bcee..2065165 100644
--- a/media/codec2/components/vpx/C2SoftVpxDec.h
+++ b/media/codec2/components/vpx/C2SoftVpxDec.h
@@ -85,7 +85,7 @@
     status_t destroyDecoder();
     void finishWork(uint64_t index, const std::unique_ptr<C2Work> &work,
                     const std::shared_ptr<C2GraphicBlock> &block);
-    bool outputBuffer(
+    status_t outputBuffer(
             const std::shared_ptr<C2BlockPool> &pool,
             const std::unique_ptr<C2Work> &work);
     c2_status_t drainInternal(
diff --git a/media/codec2/hidl/1.0/utils/Android.bp b/media/codec2/hidl/1.0/utils/Android.bp
index 63fe36b..f1f1536 100644
--- a/media/codec2/hidl/1.0/utils/Android.bp
+++ b/media/codec2/hidl/1.0/utils/Android.bp
@@ -24,7 +24,7 @@
         "libgui",
         "libhidlbase",
         "liblog",
-        "libstagefright_bufferpool@2.0",
+        "libstagefright_bufferpool@2.0.1",
         "libui",
         "libutils",
     ],
@@ -37,7 +37,7 @@
         "android.hardware.media.c2@1.0",
         "libcodec2",
         "libgui",
-        "libstagefright_bufferpool@2.0",
+        "libstagefright_bufferpool@2.0.1",
         "libui",
     ],
 }
@@ -83,7 +83,7 @@
         "libhidltransport",
         "libhwbinder",
         "liblog",
-        "libstagefright_bufferpool@2.0",
+        "libstagefright_bufferpool@2.0.1",
         "libstagefright_bufferqueue_helper",
         "libui",
         "libutils",
@@ -98,7 +98,7 @@
         "libcodec2",
         "libcodec2_vndk",
         "libhidlbase",
-        "libstagefright_bufferpool@2.0",
+        "libstagefright_bufferpool@2.0.1",
         "libui",
     ],
 }
diff --git a/media/codec2/hidl/1.0/utils/InputBufferManager.cpp b/media/codec2/hidl/1.0/utils/InputBufferManager.cpp
index a023a05..8c0d0a4 100644
--- a/media/codec2/hidl/1.0/utils/InputBufferManager.cpp
+++ b/media/codec2/hidl/1.0/utils/InputBufferManager.cpp
@@ -70,7 +70,7 @@
                  << ".";
     std::lock_guard<std::mutex> lock(mMutex);
 
-    std::set<TrackedBuffer> &bufferIds =
+    std::set<TrackedBuffer*> &bufferIds =
             mTrackedBuffersMap[listener][frameIndex];
 
     for (size_t i = 0; i < input.buffers.size(); ++i) {
@@ -79,13 +79,14 @@
                          << "Input buffer at index " << i << " is null.";
             continue;
         }
-        const TrackedBuffer &bufferId =
-                *bufferIds.emplace(listener, frameIndex, i, input.buffers[i]).
-                first;
+        TrackedBuffer *bufferId =
+            new TrackedBuffer(listener, frameIndex, i, input.buffers[i]);
+        mTrackedBufferCache.emplace(bufferId);
+        bufferIds.emplace(bufferId);
 
         c2_status_t status = input.buffers[i]->registerOnDestroyNotify(
                 onBufferDestroyed,
-                const_cast<void*>(reinterpret_cast<const void*>(&bufferId)));
+                reinterpret_cast<void*>(bufferId));
         if (status != C2_OK) {
             LOG(DEBUG) << "InputBufferManager::_registerFrameData -- "
                        << "registerOnDestroyNotify() failed "
@@ -119,31 +120,32 @@
 
     auto findListener = mTrackedBuffersMap.find(listener);
     if (findListener != mTrackedBuffersMap.end()) {
-        std::map<uint64_t, std::set<TrackedBuffer>> &frameIndex2BufferIds
+        std::map<uint64_t, std::set<TrackedBuffer*>> &frameIndex2BufferIds
                 = findListener->second;
         auto findFrameIndex = frameIndex2BufferIds.find(frameIndex);
         if (findFrameIndex != frameIndex2BufferIds.end()) {
-            std::set<TrackedBuffer> &bufferIds = findFrameIndex->second;
-            for (const TrackedBuffer& bufferId : bufferIds) {
-                std::shared_ptr<C2Buffer> buffer = bufferId.buffer.lock();
+            std::set<TrackedBuffer*> &bufferIds = findFrameIndex->second;
+            for (TrackedBuffer* bufferId : bufferIds) {
+                std::shared_ptr<C2Buffer> buffer = bufferId->buffer.lock();
                 if (buffer) {
                     c2_status_t status = buffer->unregisterOnDestroyNotify(
                             onBufferDestroyed,
-                            const_cast<void*>(
-                            reinterpret_cast<const void*>(&bufferId)));
+                            reinterpret_cast<void*>(bufferId));
                     if (status != C2_OK) {
                         LOG(DEBUG) << "InputBufferManager::_unregisterFrameData "
                                    << "-- unregisterOnDestroyNotify() failed "
                                    << "(listener @ 0x"
                                         << std::hex
-                                        << bufferId.listener.unsafe_get()
+                                        << bufferId->listener.unsafe_get()
                                    << ", frameIndex = "
-                                        << std::dec << bufferId.frameIndex
-                                   << ", bufferIndex = " << bufferId.bufferIndex
+                                        << std::dec << bufferId->frameIndex
+                                   << ", bufferIndex = " << bufferId->bufferIndex
                                    << ") => status = " << status
                                    << ".";
                     }
                 }
+                mTrackedBufferCache.erase(bufferId);
+                delete bufferId;
             }
 
             frameIndex2BufferIds.erase(findFrameIndex);
@@ -179,31 +181,32 @@
 
     auto findListener = mTrackedBuffersMap.find(listener);
     if (findListener != mTrackedBuffersMap.end()) {
-        std::map<uint64_t, std::set<TrackedBuffer>> &frameIndex2BufferIds =
+        std::map<uint64_t, std::set<TrackedBuffer*>> &frameIndex2BufferIds =
                 findListener->second;
         for (auto findFrameIndex = frameIndex2BufferIds.begin();
                 findFrameIndex != frameIndex2BufferIds.end();
                 ++findFrameIndex) {
-            std::set<TrackedBuffer> &bufferIds = findFrameIndex->second;
-            for (const TrackedBuffer& bufferId : bufferIds) {
-                std::shared_ptr<C2Buffer> buffer = bufferId.buffer.lock();
+            std::set<TrackedBuffer*> &bufferIds = findFrameIndex->second;
+            for (TrackedBuffer* bufferId : bufferIds) {
+                std::shared_ptr<C2Buffer> buffer = bufferId->buffer.lock();
                 if (buffer) {
                     c2_status_t status = buffer->unregisterOnDestroyNotify(
                             onBufferDestroyed,
-                            const_cast<void*>(
-                            reinterpret_cast<const void*>(&bufferId)));
+                            reinterpret_cast<void*>(bufferId));
                     if (status != C2_OK) {
                         LOG(DEBUG) << "InputBufferManager::_unregisterFrameData "
                                    << "-- unregisterOnDestroyNotify() failed "
                                    << "(listener @ 0x"
                                         << std::hex
-                                        << bufferId.listener.unsafe_get()
+                                        << bufferId->listener.unsafe_get()
                                    << ", frameIndex = "
-                                        << std::dec << bufferId.frameIndex
-                                   << ", bufferIndex = " << bufferId.bufferIndex
+                                        << std::dec << bufferId->frameIndex
+                                   << ", bufferIndex = " << bufferId->bufferIndex
                                    << ") => status = " << status
                                    << ".";
                     }
+                    mTrackedBufferCache.erase(bufferId);
+                    delete bufferId;
                 }
             }
         }
@@ -236,50 +239,59 @@
                      << std::dec << ".";
         return;
     }
-    TrackedBuffer id(*reinterpret_cast<TrackedBuffer*>(arg));
+
+    std::lock_guard<std::mutex> lock(mMutex);
+    TrackedBuffer *bufferId = reinterpret_cast<TrackedBuffer*>(arg);
+
+    if (mTrackedBufferCache.find(bufferId) == mTrackedBufferCache.end()) {
+        LOG(VERBOSE) << "InputBufferManager::_onBufferDestroyed -- called with "
+                     << "unregistered buffer: "
+                     << "buf @ 0x" << std::hex << buf
+                     << ", arg @ 0x" << std::hex << arg
+                     << std::dec << ".";
+        return;
+    }
+
     LOG(VERBOSE) << "InputBufferManager::_onBufferDestroyed -- called with "
                  << "buf @ 0x" << std::hex << buf
                  << ", arg @ 0x" << std::hex << arg
                  << std::dec << " -- "
-                 << "listener @ 0x" << std::hex << id.listener.unsafe_get()
-                 << ", frameIndex = " << std::dec << id.frameIndex
-                 << ", bufferIndex = " << id.bufferIndex
+                 << "listener @ 0x" << std::hex << bufferId->listener.unsafe_get()
+                 << ", frameIndex = " << std::dec << bufferId->frameIndex
+                 << ", bufferIndex = " << bufferId->bufferIndex
                  << ".";
-
-    std::lock_guard<std::mutex> lock(mMutex);
-
-    auto findListener = mTrackedBuffersMap.find(id.listener);
+    auto findListener = mTrackedBuffersMap.find(bufferId->listener);
     if (findListener == mTrackedBuffersMap.end()) {
-        LOG(DEBUG) << "InputBufferManager::_onBufferDestroyed -- "
-                   << "received invalid listener: "
-                   << "listener @ 0x" << std::hex << id.listener.unsafe_get()
-                   << " (frameIndex = " << std::dec << id.frameIndex
-                   << ", bufferIndex = " << id.bufferIndex
-                   << ").";
+        LOG(VERBOSE) << "InputBufferManager::_onBufferDestroyed -- "
+                     << "received invalid listener: "
+                     << "listener @ 0x" << std::hex << bufferId->listener.unsafe_get()
+                     << " (frameIndex = " << std::dec << bufferId->frameIndex
+                     << ", bufferIndex = " << bufferId->bufferIndex
+                     << ").";
         return;
     }
 
-    std::map<uint64_t, std::set<TrackedBuffer>> &frameIndex2BufferIds
+    std::map<uint64_t, std::set<TrackedBuffer*>> &frameIndex2BufferIds
             = findListener->second;
-    auto findFrameIndex = frameIndex2BufferIds.find(id.frameIndex);
+    auto findFrameIndex = frameIndex2BufferIds.find(bufferId->frameIndex);
     if (findFrameIndex == frameIndex2BufferIds.end()) {
         LOG(DEBUG) << "InputBufferManager::_onBufferDestroyed -- "
                    << "received invalid frame index: "
-                   << "frameIndex = " << id.frameIndex
-                   << " (listener @ 0x" << std::hex << id.listener.unsafe_get()
-                   << ", bufferIndex = " << std::dec << id.bufferIndex
+                   << "frameIndex = " << bufferId->frameIndex
+                   << " (listener @ 0x" << std::hex << bufferId->listener.unsafe_get()
+                   << ", bufferIndex = " << std::dec << bufferId->bufferIndex
                    << ").";
         return;
     }
 
-    std::set<TrackedBuffer> &bufferIds = findFrameIndex->second;
-    auto findBufferId = bufferIds.find(id);
+    std::set<TrackedBuffer*> &bufferIds = findFrameIndex->second;
+    auto findBufferId = bufferIds.find(bufferId);
     if (findBufferId == bufferIds.end()) {
         LOG(DEBUG) << "InputBufferManager::_onBufferDestroyed -- "
                    << "received invalid buffer index: "
-                   << "bufferIndex = " << id.bufferIndex
-                   << " (frameIndex = " << id.frameIndex
-                   << ", listener @ 0x" << std::hex << id.listener.unsafe_get()
+                   << "bufferIndex = " << bufferId->bufferIndex
+                   << " (frameIndex = " << bufferId->frameIndex
+                   << ", listener @ 0x" << std::hex << bufferId->listener.unsafe_get()
                    << std::dec << ").";
         return;
     }
@@ -292,10 +304,13 @@
         }
     }
 
-    DeathNotifications &deathNotifications = mDeathNotifications[id.listener];
-    deathNotifications.indices[id.frameIndex].emplace_back(id.bufferIndex);
+    DeathNotifications &deathNotifications = mDeathNotifications[bufferId->listener];
+    deathNotifications.indices[bufferId->frameIndex].emplace_back(bufferId->bufferIndex);
     ++deathNotifications.count;
     mOnBufferDestroyed.notify_one();
+
+    mTrackedBufferCache.erase(bufferId);
+    delete bufferId;
 }
 
 // Notify the clients about buffer destructions.
diff --git a/media/codec2/hidl/1.0/utils/include/codec2/hidl/1.0/InputBufferManager.h b/media/codec2/hidl/1.0/utils/include/codec2/hidl/1.0/InputBufferManager.h
index b6857d5..42fa557 100644
--- a/media/codec2/hidl/1.0/utils/include/codec2/hidl/1.0/InputBufferManager.h
+++ b/media/codec2/hidl/1.0/utils/include/codec2/hidl/1.0/InputBufferManager.h
@@ -196,13 +196,9 @@
                 frameIndex(frameIndex),
                 bufferIndex(bufferIndex),
                 buffer(buffer) {}
-        TrackedBuffer(const TrackedBuffer&) = default;
-        bool operator<(const TrackedBuffer& other) const {
-            return bufferIndex < other.bufferIndex;
-        }
     };
 
-    // Map: listener -> frameIndex -> set<TrackedBuffer>.
+    // Map: listener -> frameIndex -> set<TrackedBuffer*>.
     // Essentially, this is used to store triples (listener, frameIndex,
     // bufferIndex) that's searchable by listener and (listener, frameIndex).
     // However, the value of the innermost map is TrackedBuffer, which also
@@ -210,7 +206,7 @@
     // because onBufferDestroyed() needs to know listener and frameIndex too.
     typedef std::map<wp<IComponentListener>,
                      std::map<uint64_t,
-                              std::set<TrackedBuffer>>> TrackedBuffersMap;
+                              std::set<TrackedBuffer*>>> TrackedBuffersMap;
 
     // Storage for pending (unsent) death notifications for one listener.
     // Each pair in member named "indices" are (frameIndex, bufferIndex) from
@@ -247,6 +243,16 @@
     // Mutex for the management of all input buffers.
     std::mutex mMutex;
 
+    // Cache for all TrackedBuffers.
+    //
+    // Whenever registerOnDestroyNotify() is called, a TrackedBuffer is
+    // allocated on the heap, stored into this cache, and passed as the
+    // callback argument. Whenever unregisterOnDestroyNotify() or
+    // onBufferDestroyed() is called, it is removed from this cache and freed.
+    //
+    // mTrackedBuffersMap stores pointers to TrackedBuffers inside this cache.
+    std::set<TrackedBuffer*> mTrackedBufferCache;
+
     // Tracked input buffers.
     TrackedBuffersMap mTrackedBuffersMap;
 
diff --git a/media/codec2/hidl/1.0/utils/types.cpp b/media/codec2/hidl/1.0/utils/types.cpp
index 07dbf67..04fa59c 100644
--- a/media/codec2/hidl/1.0/utils/types.cpp
+++ b/media/codec2/hidl/1.0/utils/types.cpp
@@ -1434,6 +1434,11 @@
                 d->type = C2BaseBlock::GRAPHIC;
                 return true;
             }
+            if (cHandle) {
+                // Though we got a cloned handle, creating the block failed.
+                native_handle_close(cHandle);
+                native_handle_delete(cHandle);
+            }
 
             LOG(ERROR) << "Unknown handle type in BaseBlock::pooledBlock.";
             return false;
diff --git a/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioDecTest.cpp b/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioDecTest.cpp
index 6469735..a8a552c 100644
--- a/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioDecTest.cpp
+++ b/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioDecTest.cpp
@@ -547,6 +547,10 @@
     if (mCompName == raw) {
         bitStreamInfo[0] = 8000;
         bitStreamInfo[1] = 1;
+    } else if (mCompName == g711alaw || mCompName == g711mlaw) {
+        // g711 test data is all 1-channel and has no embedded config info.
+        bitStreamInfo[0] = 8000;
+        bitStreamInfo[1] = 1;
     } else {
         ASSERT_NO_FATAL_FAILURE(
             getInputChannelInfo(mComponent, mCompName, bitStreamInfo));
diff --git a/media/codec2/hidl/1.0/vts/functional/common/README.md b/media/codec2/hidl/1.0/vts/functional/common/README.md
index 50e8356..f2f579c 100644
--- a/media/codec2/hidl/1.0/vts/functional/common/README.md
+++ b/media/codec2/hidl/1.0/vts/functional/common/README.md
@@ -1,31 +1,36 @@
-## Codec2 VTS Hal @ 1.0 tests ##
----
-#### master :
+# Codec2 VTS Hal @ 1.0 tests #
+
+## master :
 Functionality of master is to enumerate all the Codec2 components available in C2 media service.
 
-usage: VtsHalMediaC2V1\_0TargetMasterTest -I default
+usage: `VtsHalMediaC2V1_0TargetMasterTest -I default`
 
-#### component :
+## component :
 Functionality of component test is to validate common functionality across all the Codec2 components available in C2 media service. For a standard C2 component, these tests are expected to pass.
 
-usage: VtsHalMediaC2V1\_0TargetComponentTest -I software -C <comp name>
-example: VtsHalMediaC2V1\_0TargetComponentTest -I software -C c2.android.vorbis.decoder
+usage: `VtsHalMediaC2V1_0TargetComponentTest -I software -C <comp name>`
 
-#### audio :
-Functionality of audio test is to validate audio specific functionality Codec2 components. The resource files for this test are taken from media/codec2/hidl/1.0/vts/functional/res. The path to these files on the device is required to be given for bitstream tests.
+example: `VtsHalMediaC2V1_0TargetComponentTest -I software -C c2.android.vorbis.decoder`
 
-usage: VtsHalMediaC2V1\_0TargetAudioDecTest -I default -C <comp name> -P /sdcard/media/
-usage: VtsHalMediaC2V1\_0TargetAudioEncTest -I software -C <comp name> -P /sdcard/media/
+## audio :
+Functionality of the audio test is to validate audio-specific functionality of Codec2 components. The resource files for this test are taken from `frameworks/av/media/codec2/hidl/1.0/vts/functional/res`. The path to these files on the device can be specified with `-P`. (If the device path is omitted, `/data/local/tmp/media/` is the default value.)
 
-example: VtsHalMediaC2V1\_0TargetAudioDecTest -I software -C c2.android.flac.decoder -P /sdcard/media/
-example: VtsHalMediaC2V1\_0TargetAudioEncTest -I software -C c2.android.opus.encoder -P /sdcard/media/
+usage: `VtsHalMediaC2V1_0TargetAudioDecTest -I default -C <comp name> -P <path to resource files>`
 
-#### video :
-Functionality of video test is to validate video specific functionality Codec2 components. The resource files for this test are taken from media/codec2/hidl/1.0/vts/functional/res. The path to these files on the device is required to be given for bitstream tests.
+usage: `VtsHalMediaC2V1_0TargetAudioEncTest -I software -C <comp name> -P <path to resource files>`
 
-usage: VtsHalMediaC2V1\_0TargetVideoDecTest -I default -C <comp name> -P /sdcard/media/
-usage: VtsHalMediaC2V1\_0TargetVideoEncTest -I software -C <comp name> -P /sdcard/media/
+example: `VtsHalMediaC2V1_0TargetAudioDecTest -I software -C c2.android.flac.decoder -P /data/local/tmp/media/`
 
-example: VtsHalMediaC2V1\_0TargetVideoDecTest -I software -C c2.android.avc.decoder -P /sdcard/media/
-example: VtsHalMediaC2V1\_0TargetVideoEncTest -I software -C c2.android.vp9.encoder -P /sdcard/media/
+example: `VtsHalMediaC2V1_0TargetAudioEncTest -I software -C c2.android.opus.encoder -P /data/local/tmp/media/`
+
+## video :
+Functionality of the video test is to validate video-specific functionality of Codec2 components. The resource files for this test are taken from `frameworks/av/media/codec2/hidl/1.0/vts/functional/res`. The path to these files on the device can be specified with `-P`. (If the device path is omitted, `/data/local/tmp/media/` is the default value.)
+
+usage: `VtsHalMediaC2V1_0TargetVideoDecTest -I default -C <comp name> -P <path to resource files>`
+
+usage: `VtsHalMediaC2V1_0TargetVideoEncTest -I software -C <comp name> -P <path to resource files>`
+
+example: `VtsHalMediaC2V1_0TargetVideoDecTest -I software -C c2.android.avc.decoder -P /data/local/tmp/media/`
+
+example: `VtsHalMediaC2V1_0TargetVideoEncTest -I software -C c2.android.vp9.encoder -P /data/local/tmp/media/`
 
diff --git a/media/codec2/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.h b/media/codec2/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.h
index c577dac..db59e54 100644
--- a/media/codec2/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.h
+++ b/media/codec2/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.h
@@ -118,7 +118,7 @@
         registerTestService<IComponentStore>();
     }
 
-    ComponentTestEnvironment() : res("/sdcard/media/") {}
+    ComponentTestEnvironment() : res("/data/local/tmp/media/") {}
 
     void setComponent(const char* _component) { component = _component; }
 
diff --git a/media/codec2/hidl/client/Android.bp b/media/codec2/hidl/client/Android.bp
index 6038a40..e184223 100644
--- a/media/codec2/hidl/client/Android.bp
+++ b/media/codec2/hidl/client/Android.bp
@@ -19,7 +19,7 @@
         "libhidlbase",
         "libhidltransport",
         "liblog",
-        "libstagefright_bufferpool@2.0",
+        "libstagefright_bufferpool@2.0.1",
         "libui",
         "libutils",
     ],
diff --git a/media/codec2/hidl/client/client.cpp b/media/codec2/hidl/client/client.cpp
index 5ed54f1..c747190 100644
--- a/media/codec2/hidl/client/client.cpp
+++ b/media/codec2/hidl/client/client.cpp
@@ -125,6 +125,9 @@
         if (!mClient) {
             mClient = Codec2Client::_CreateFromIndex(mIndex);
         }
+        CHECK(mClient) << "Failed to create Codec2Client to service \""
+                       << GetServiceNames()[mIndex] << "\". (Index = "
+                       << mIndex << ").";
         return mClient;
     }
 
@@ -832,6 +835,7 @@
 
 c2_status_t Codec2Client::ForAllServices(
         const std::string &key,
+        size_t numberOfAttempts,
         std::function<c2_status_t(const std::shared_ptr<Codec2Client>&)>
             predicate) {
     c2_status_t status = C2_NO_INIT;  // no IComponentStores present
@@ -860,33 +864,45 @@
 
     for (size_t index : indices) {
         Cache& cache = Cache::List()[index];
-        std::shared_ptr<Codec2Client> client{cache.getClient()};
-        if (client) {
+        for (size_t tries = numberOfAttempts; tries > 0; --tries) {
+            std::shared_ptr<Codec2Client> client{cache.getClient()};
             status = predicate(client);
             if (status == C2_OK) {
                 std::scoped_lock lock{key2IndexMutex};
                 key2Index[key] = index; // update last known client index
                 return C2_OK;
+            } else if (status == C2_TRANSACTION_FAILED) {
+                LOG(WARNING) << "\"" << key << "\" failed for service \""
+                             << client->getName()
+                             << "\" due to transaction failure. "
+                             << "(Service may have crashed.)"
+                             << (tries > 1 ? " Retrying..." : "");
+                cache.invalidate();
+                continue;
             }
-        }
-        if (wasMapped) {
-            LOG(INFO) << "Could not find \"" << key << "\""
-                         " in the last instance. Retrying...";
-            wasMapped = false;
-            cache.invalidate();
+            if (wasMapped) {
+                LOG(INFO) << "\"" << key << "\" became invalid in service \""
+                          << client->getName() << "\". Retrying...";
+                wasMapped = false;
+            }
+            break;
         }
     }
-    return status;  // return the last status from a valid client
+    return status; // return the last status from a valid client
 }
 
 std::shared_ptr<Codec2Client::Component>
         Codec2Client::CreateComponentByName(
         const char* componentName,
         const std::shared_ptr<Listener>& listener,
-        std::shared_ptr<Codec2Client>* owner) {
+        std::shared_ptr<Codec2Client>* owner,
+        size_t numberOfAttempts) {
+    std::string key{"create:"};
+    key.append(componentName);
     std::shared_ptr<Component> component;
     c2_status_t status = ForAllServices(
-            componentName,
+            key,
+            numberOfAttempts,
             [owner, &component, componentName, &listener](
                     const std::shared_ptr<Codec2Client> &client)
                         -> c2_status_t {
@@ -907,8 +923,9 @@
                 return status;
             });
     if (status != C2_OK) {
-        LOG(DEBUG) << "Could not create component \"" << componentName << "\". "
-                      "Status = " << status << ".";
+        LOG(DEBUG) << "Failed to create component \"" << componentName
+                   << "\" from all known services. "
+                      "Last returned status = " << status << ".";
     }
     return component;
 }
@@ -916,10 +933,14 @@
 std::shared_ptr<Codec2Client::Interface>
         Codec2Client::CreateInterfaceByName(
         const char* interfaceName,
-        std::shared_ptr<Codec2Client>* owner) {
+        std::shared_ptr<Codec2Client>* owner,
+        size_t numberOfAttempts) {
+    std::string key{"create:"};
+    key.append(interfaceName);
     std::shared_ptr<Interface> interface;
     c2_status_t status = ForAllServices(
-            interfaceName,
+            key,
+            numberOfAttempts,
             [owner, &interface, interfaceName](
                     const std::shared_ptr<Codec2Client> &client)
                         -> c2_status_t {
@@ -939,8 +960,9 @@
                 return status;
             });
     if (status != C2_OK) {
-        LOG(DEBUG) << "Could not create interface \"" << interfaceName << "\". "
-                      "Status = " << status << ".";
+        LOG(DEBUG) << "Failed to create interface \"" << interfaceName
+                   << "\" from all known services. "
+                      "Last returned status = " << status << ".";
     }
     return interface;
 }
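
The retry flow added to `ForAllServices` above can be distilled into a small standalone sketch. The names below (`forAllServices`, the toy `Status` enum, `numberOfAttempts`) are illustrative stand-ins, not the Codec2 types; the point is only the control flow: retry the same service on a transaction failure, return on the first success, and move to the next service on any other error.

```cpp
#include <cstdio>
#include <functional>
#include <vector>

// Illustrative status codes standing in for c2_status_t (assumption, not the real enum).
enum Status { OK, TRANSACTION_FAILED, NOT_FOUND };

// Try `predicate` on every service; on TRANSACTION_FAILED retry the same
// service up to `numberOfAttempts` times, mirroring the loop in client.cpp.
Status forAllServices(const std::vector<int>& services,
                      size_t numberOfAttempts,
                      const std::function<Status(int)>& predicate) {
    Status status = NOT_FOUND;
    for (int service : services) {
        for (size_t tries = numberOfAttempts; tries > 0; --tries) {
            status = predicate(service);
            if (status == OK) {
                return OK;                     // first success wins
            } else if (status == TRANSACTION_FAILED) {
                std::printf("service %d: transaction failed%s\n",
                            service, tries > 1 ? ", retrying" : "");
                continue;                      // retry the same service
            }
            break;                             // other errors: try the next service
        }
    }
    return status;                             // last status from the final attempt
}

int main() {
    int callCount = 0;
    // The second call to service 0 "recovers" after one simulated crash.
    Status s = forAllServices({0, 1}, /*numberOfAttempts=*/3, [&](int) {
        return ++callCount == 1 ? TRANSACTION_FAILED : OK;
    });
    std::printf("result = %d after %d calls\n", s, callCount);
    return 0;
}
```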
diff --git a/media/codec2/hidl/client/include/codec2/hidl/client.h b/media/codec2/hidl/client/include/codec2/hidl/client.h
index b8a7fb5..c37407f 100644
--- a/media/codec2/hidl/client/include/codec2/hidl/client.h
+++ b/media/codec2/hidl/client/include/codec2/hidl/client.h
@@ -179,17 +179,21 @@
     static std::vector<std::shared_ptr<Codec2Client>> CreateFromAllServices();
 
     // Try to create a component with a given name from all known
-    // IComponentStore services.
+    // IComponentStore services. numberOfAttempts determines the number of times
+    // to retry the HIDL call if the transaction fails.
     static std::shared_ptr<Component> CreateComponentByName(
             char const* componentName,
             std::shared_ptr<Listener> const& listener,
-            std::shared_ptr<Codec2Client>* owner = nullptr);
+            std::shared_ptr<Codec2Client>* owner = nullptr,
+            size_t numberOfAttempts = 10);
 
     // Try to create a component interface with a given name from all known
-    // IComponentStore services.
+    // IComponentStore services. numberOfAttempts determines the number of times
+    // to retry the HIDL call if the transaction fails.
     static std::shared_ptr<Interface> CreateInterfaceByName(
             char const* interfaceName,
-            std::shared_ptr<Codec2Client>* owner = nullptr);
+            std::shared_ptr<Codec2Client>* owner = nullptr,
+            size_t numberOfAttempts = 10);
 
     // List traits from all known IComponentStore services.
     static std::vector<C2Component::Traits> const& ListComponents();
@@ -204,11 +208,25 @@
 protected:
     sp<Base> mBase;
 
-    // Finds the first store where the predicate returns OK, and returns the last
-    // predicate result. Uses key to remember the last store found, and if cached,
-    // it tries that store before trying all stores (one retry).
+    // Finds the first store where the predicate returns C2_OK and returns the
+    // last predicate result. The predicate will be tried on all stores. The
+    // function will return C2_OK the first time the predicate returns C2_OK,
+    // or it will return the value from the last time that predicate is tried.
+    // (The latter case corresponds to a failure on every store.) The order of
+    // the stores to try is the same as the return value of GetServiceNames().
+    //
+    // key is used to remember the last store with which the predicate last
+    // succeeded. If the last successful store is cached, it will be tried
+    // first before all the stores are tried. Note that the last successful
+    // store will be tried twice---first before all the stores, and another time
+    // with all the stores.
+    //
+    // If an attempt to evaluate the predicate results in a transaction failure,
+    // repeated attempts will be made until the predicate returns without a
+    // transaction failure or numberOfAttempts attempts have been made.
     static c2_status_t ForAllServices(
             const std::string& key,
+            size_t numberOfAttempts,
             std::function<c2_status_t(std::shared_ptr<Codec2Client> const&)>
                 predicate);
 
diff --git a/media/codec2/sfplugin/C2OMXNode.cpp b/media/codec2/sfplugin/C2OMXNode.cpp
index 78d221e..c7588e9 100644
--- a/media/codec2/sfplugin/C2OMXNode.cpp
+++ b/media/codec2/sfplugin/C2OMXNode.cpp
@@ -62,7 +62,7 @@
             android::base::unique_fd &&fd0,
             android::base::unique_fd &&fd1) {
         Mutexed<Jobs>::Locked jobs(mJobs);
-        auto it = jobs->queues.try_emplace(comp, comp, systemTime()).first;
+        auto it = jobs->queues.try_emplace(comp, comp).first;
         it->second.workList.emplace_back(
                 std::move(work), fenceFd, std::move(fd0), std::move(fd1));
         jobs->cond.broadcast();
@@ -79,7 +79,8 @@
             for (auto it = jobs->queues.begin(); it != jobs->queues.end(); ) {
                 Queue &queue = it->second;
                 if (queue.workList.empty()
-                        || nowNs - queue.lastQueuedTimestampNs < kIntervalNs) {
+                        || (queue.lastQueuedTimestampNs != 0 &&
+                            nowNs - queue.lastQueuedTimestampNs < kIntervalNs)) {
                     ++it;
                     continue;
                 }
@@ -104,6 +105,7 @@
                     sp<Fence> fence(new Fence(fenceFd));
                     fence->waitForever(LOG_TAG);
                 }
+                queue.lastQueuedTimestampNs = nowNs;
                 comp->queue(&items);
                 for (android::base::unique_fd &ufd : uniqueFds) {
                     (void)ufd.release();
@@ -143,8 +145,8 @@
         android::base::unique_fd fd1;
     };
     struct Queue {
-        Queue(const std::shared_ptr<Codec2Client::Component> &comp, nsecs_t timestamp)
-            : component(comp), lastQueuedTimestampNs(timestamp) {}
+        Queue(const std::shared_ptr<Codec2Client::Component> &comp)
+            : component(comp), lastQueuedTimestampNs(0) {}
         Queue(const Queue &) = delete;
         Queue &operator =(const Queue &) = delete;
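
The C2OMXNode change above drops the constructor timestamp so a freshly created queue is never throttled: only queues that have already been flushed once are held back by the submission interval, and the timestamp is stamped after each submission. A minimal sketch of that decision, with an illustrative `kIntervalNs` and a simplified `Queue`:

```cpp
#include <cstdint>
#include <cstdio>

// Illustrative constant; the real interval lives in C2OMXNode.cpp.
constexpr int64_t kIntervalNs = 10'000'000;  // 10 ms between submissions

struct Queue {
    int pendingWork = 0;
    int64_t lastQueuedTimestampNs = 0;       // 0 means "never queued yet"
};

// Returns true when the worker loop should submit this queue's work now.
// A queue that has never been flushed (timestamp == 0) is never throttled,
// which is what removing the constructor timestamp achieves.
bool shouldSubmit(const Queue& q, int64_t nowNs) {
    if (q.pendingWork == 0) return false;
    if (q.lastQueuedTimestampNs != 0 &&
            nowNs - q.lastQueuedTimestampNs < kIntervalNs) {
        return false;                        // throttled: queued too recently
    }
    return true;
}

int main() {
    Queue q;
    q.pendingWork = 1;
    int64_t now = 1'000'000'000;
    std::printf("first submit allowed:      %d\n", shouldSubmit(q, now));                 // 1
    q.lastQueuedTimestampNs = now;           // stamp only after actually queueing
    std::printf("immediate resubmit allowed: %d\n", shouldSubmit(q, now + 1'000'000));    // 0
    std::printf("after interval allowed:     %d\n", shouldSubmit(q, now + kIntervalNs));  // 1
    return 0;
}
```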
 
diff --git a/media/codec2/sfplugin/CCodec.cpp b/media/codec2/sfplugin/CCodec.cpp
index 9d1cc60..78ddd6d 100644
--- a/media/codec2/sfplugin/CCodec.cpp
+++ b/media/codec2/sfplugin/CCodec.cpp
@@ -375,7 +375,11 @@
 
         // consumer usage is queried earlier.
 
-        ALOGD("ISConfig%s", status.str().c_str());
+        if (status.str().empty()) {
+            ALOGD("ISConfig not changed");
+        } else {
+            ALOGD("ISConfig%s", status.str().c_str());
+        }
         return err;
     }
 
@@ -810,9 +814,17 @@
             }
 
             {
-                double value;
-                if (msg->findDouble("time-lapse-fps", &value)) {
-                    config->mISConfig->mCaptureFps = value;
+                bool captureFpsFound = false;
+                double timeLapseFps;
+                float captureRate;
+                if (msg->findDouble("time-lapse-fps", &timeLapseFps)) {
+                    config->mISConfig->mCaptureFps = timeLapseFps;
+                    captureFpsFound = true;
+                } else if (msg->findAsFloat(KEY_CAPTURE_RATE, &captureRate)) {
+                    config->mISConfig->mCaptureFps = captureRate;
+                    captureFpsFound = true;
+                }
+                if (captureFpsFound) {
                     (void)msg->findAsFloat(KEY_FRAME_RATE, &config->mISConfig->mCodedFps);
                 }
             }
@@ -1274,7 +1286,8 @@
     {
         Mutexed<Config>::Locked config(mConfig);
         inputFormat = config->mInputFormat;
-        outputFormat = config->mOutputFormat;
+        // start triggers format dup
+        outputFormat = config->mOutputFormat = config->mOutputFormat->dup();
         if (config->mInputSurface) {
             err2 = config->mInputSurface->start();
         }
@@ -1283,6 +1296,8 @@
         mCallback->onError(err2, ACTION_CODE_FATAL);
         return;
     }
+    // We're not starting after flush.
+    (void)mSentConfigAfterResume.test_and_set();
     err2 = mChannel->start(inputFormat, outputFormat);
     if (err2 != OK) {
         mCallback->onError(err2, ACTION_CODE_FATAL);
@@ -1511,18 +1526,26 @@
 }
 
 void CCodec::signalResume() {
-    auto setResuming = [this] {
+    std::shared_ptr<Codec2Client::Component> comp;
+    auto setResuming = [this, &comp] {
         Mutexed<State>::Locked state(mState);
         if (state->get() != FLUSHED) {
             return UNKNOWN_ERROR;
         }
         state->set(RESUMING);
+        comp = state->comp;
         return OK;
     };
     if (tryAndReportOnError(setResuming) != OK) {
         return;
     }
 
+    mSentConfigAfterResume.clear();
+    {
+        Mutexed<Config>::Locked config(mConfig);
+        config->queryConfiguration(comp);
+    }
+
     (void)mChannel->start(nullptr, nullptr);
 
     {
@@ -1718,7 +1741,7 @@
 
             // handle configuration changes in work done
             Mutexed<Config>::Locked config(mConfig);
-            bool changed = false;
+            bool changed = !mSentConfigAfterResume.test_and_set();
             Config::Watcher<C2StreamInitDataInfo::output> initData =
                 config->watch<C2StreamInitDataInfo::output>();
             if (!work->worklets.empty()
@@ -1750,7 +1773,9 @@
                     ++stream;
                 }
 
-                changed = config->updateConfiguration(updates, config->mOutputDomain);
+                if (config->updateConfiguration(updates, config->mOutputDomain)) {
+                    changed = true;
+                }
 
                 // copy standard infos to graphic buffers if not already present (otherwise, we
                 // may overwrite the actual intermediate value with a final value)
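
The new `mSentConfigAfterResume` flag follows a small `std::atomic_flag` idiom: `start()` sets it so the normal path is a no-op, `signalResume()` clears it, and the first work-done after resume sees `test_and_set()` return false and forces a format-change report. A minimal sketch of that pattern, with hypothetical function names standing in for the CCodec callbacks:

```cpp
#include <atomic>
#include <cstdio>

// The flag starts in an unspecified state, so non-resume paths call
// test_and_set() once up front, just as CCodec::start() does.
std::atomic_flag sentConfigAfterResume = ATOMIC_FLAG_INIT;

void onStart() {
    // Not resuming: pre-set the flag so nothing is forced later.
    (void)sentConfigAfterResume.test_and_set();
}

void onResume() {
    // Resuming: clear the flag so the next work-done forces a config update.
    sentConfigAfterResume.clear();
}

void onWorkDone() {
    // test_and_set() returns the previous value; it is false exactly once
    // after clear(), so only the first work-done after resume reports a change.
    bool changed = !sentConfigAfterResume.test_and_set();
    std::printf("config changed: %s\n", changed ? "yes" : "no");
}

int main() {
    onStart();
    onWorkDone();   // "no"
    onResume();
    onWorkDone();   // "yes": first work-done after resume
    onWorkDone();   // "no"
    return 0;
}
```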
diff --git a/media/codec2/sfplugin/CCodec.h b/media/codec2/sfplugin/CCodec.h
index b0b3c4f..a580d1d 100644
--- a/media/codec2/sfplugin/CCodec.h
+++ b/media/codec2/sfplugin/CCodec.h
@@ -17,6 +17,7 @@
 #ifndef C_CODEC_H_
 #define C_CODEC_H_
 
+#include <atomic>
 #include <chrono>
 #include <list>
 #include <memory>
@@ -175,6 +176,7 @@
     typedef CCodecConfig Config;
     Mutexed<Config> mConfig;
     Mutexed<std::list<std::unique_ptr<C2Work>>> mWorkDoneQueue;
+    std::atomic_flag mSentConfigAfterResume;
 
     friend class CCodecCallbackImpl;
 
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.cpp b/media/codec2/sfplugin/CCodecBufferChannel.cpp
index 8308292..4ee6c1c 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.cpp
+++ b/media/codec2/sfplugin/CCodecBufferChannel.cpp
@@ -948,7 +948,11 @@
         uint32_t outputGeneration;
         {
             Mutexed<OutputSurface>::Locked output(mOutputSurface);
-            output->maxDequeueBuffers = numOutputSlots + reorderDepth.value + kRenderingDepth;
+            output->maxDequeueBuffers = numOutputSlots +
+                    reorderDepth.value + kRenderingDepth;
+            if (!secure) {
+                output->maxDequeueBuffers += numInputSlots;
+            }
             outputSurface = output->surface ?
                     output->surface->getIGraphicBufferProducer() : nullptr;
             if (outputSurface) {
@@ -1068,7 +1072,7 @@
         } else {
             output->buffers.reset(new LinearOutputBuffers(mName));
         }
-        output->buffers->setFormat(outputFormat->dup());
+        output->buffers->setFormat(outputFormat);
 
 
         // Try to set output surface to created block pool if given.
@@ -1272,6 +1276,24 @@
         std::unique_ptr<C2Work> work,
         const sp<AMessage> &outputFormat,
         const C2StreamInitDataInfo::output *initData) {
+    if (outputFormat != nullptr) {
+        Mutexed<Output>::Locked output(mOutput);
+        ALOGD("[%s] onWorkDone: output format changed to %s",
+                mName, outputFormat->debugString().c_str());
+        output->buffers->setFormat(outputFormat);
+
+        AString mediaType;
+        if (outputFormat->findString(KEY_MIME, &mediaType)
+                && mediaType == MIMETYPE_AUDIO_RAW) {
+            int32_t channelCount;
+            int32_t sampleRate;
+            if (outputFormat->findInt32(KEY_CHANNEL_COUNT, &channelCount)
+                    && outputFormat->findInt32(KEY_SAMPLE_RATE, &sampleRate)) {
+                output->buffers->updateSkipCutBuffer(sampleRate, channelCount);
+            }
+        }
+    }
+
     if ((work->input.ordinal.frameIndex - mFirstValidFrameIndex.load()).peek() < 0) {
         // Discard frames from previous generation.
         ALOGD("[%s] Discard frames from previous generation.", mName);
@@ -1328,13 +1350,18 @@
             case C2PortReorderBufferDepthTuning::CORE_INDEX: {
                 C2PortReorderBufferDepthTuning::output reorderDepth;
                 if (reorderDepth.updateFrom(*param)) {
+                    bool secure = mComponent->getName().find(".secure") != std::string::npos;
                     mReorderStash.lock()->setDepth(reorderDepth.value);
                     ALOGV("[%s] onWorkDone: updated reorder depth to %u",
                           mName, reorderDepth.value);
                     size_t numOutputSlots = mOutput.lock()->numSlots;
+                    size_t numInputSlots = mInput.lock()->numSlots;
                     Mutexed<OutputSurface>::Locked output(mOutputSurface);
-                    output->maxDequeueBuffers =
-                        numOutputSlots + reorderDepth.value + kRenderingDepth;
+                    output->maxDequeueBuffers = numOutputSlots +
+                            reorderDepth.value + kRenderingDepth;
+                    if (!secure) {
+                        output->maxDequeueBuffers += numInputSlots;
+                    }
                     if (output->surface) {
                         output->surface->setMaxDequeuedBufferCount(output->maxDequeueBuffers);
                     }
@@ -1378,10 +1405,12 @@
                     if (outputDelay.updateFrom(*param)) {
                         ALOGV("[%s] onWorkDone: updating output delay %u",
                               mName, outputDelay.value);
+                        bool secure = mComponent->getName().find(".secure") != std::string::npos;
                         (void)mPipelineWatcher.lock()->outputDelay(outputDelay.value);
 
                         bool outputBuffersChanged = false;
                         size_t numOutputSlots = 0;
+                        size_t numInputSlots = mInput.lock()->numSlots;
                         {
                             Mutexed<Output>::Locked output(mOutput);
                             output->outputDelay = outputDelay.value;
@@ -1407,6 +1436,9 @@
                         uint32_t depth = mReorderStash.lock()->depth();
                         Mutexed<OutputSurface>::Locked output(mOutputSurface);
                         output->maxDequeueBuffers = numOutputSlots + depth + kRenderingDepth;
+                        if (!secure) {
+                            output->maxDequeueBuffers += numInputSlots;
+                        }
                         if (output->surface) {
                             output->surface->setMaxDequeuedBufferCount(output->maxDequeueBuffers);
                         }
@@ -1439,24 +1471,6 @@
         }
     }
 
-    if (outputFormat != nullptr) {
-        Mutexed<Output>::Locked output(mOutput);
-        ALOGD("[%s] onWorkDone: output format changed to %s",
-                mName, outputFormat->debugString().c_str());
-        output->buffers->setFormat(outputFormat);
-
-        AString mediaType;
-        if (outputFormat->findString(KEY_MIME, &mediaType)
-                && mediaType == MIMETYPE_AUDIO_RAW) {
-            int32_t channelCount;
-            int32_t sampleRate;
-            if (outputFormat->findInt32(KEY_CHANNEL_COUNT, &channelCount)
-                    && outputFormat->findInt32(KEY_SAMPLE_RATE, &sampleRate)) {
-                output->buffers->updateSkipCutBuffer(sampleRate, channelCount);
-            }
-        }
-    }
-
     int32_t flags = 0;
     if (worklet->output.flags & C2FrameData::FLAG_END_OF_STREAM) {
         flags |= MediaCodec::BUFFER_FLAG_EOS;
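
The buffer-channel changes above apply the same formula in all three places the surface's dequeue budget is computed: output slots plus reorder depth plus rendering depth, plus the input slots for non-secure codecs. A one-function sketch of that formula; `kRenderingDepth` mirrors the constant used by CCodecBufferChannel, and the slot counts are example values rather than numbers queried from a real codec:

```cpp
#include <cstddef>
#include <cstdint>
#include <cstdio>

constexpr size_t kRenderingDepth = 3;

size_t maxDequeueBuffers(size_t numOutputSlots, size_t numInputSlots,
                         uint32_t reorderDepth, bool secure) {
    size_t count = numOutputSlots + reorderDepth + kRenderingDepth;
    if (!secure) {
        count += numInputSlots;   // non-secure codecs also cover the input slots
    }
    return count;
}

int main() {
    std::printf("non-secure: %zu\n", maxDequeueBuffers(8, 4, 2, false));  // 17
    std::printf("secure:     %zu\n", maxDequeueBuffers(8, 4, 2, true));   // 13
    return 0;
}
```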
diff --git a/media/codec2/sfplugin/CCodecBuffers.cpp b/media/codec2/sfplugin/CCodecBuffers.cpp
index 26c702d..ed8b832 100644
--- a/media/codec2/sfplugin/CCodecBuffers.cpp
+++ b/media/codec2/sfplugin/CCodecBuffers.cpp
@@ -878,9 +878,10 @@
     switch (c2buffer->data().type()) {
         case C2BufferData::LINEAR: {
             uint32_t size = kLinearBufferSize;
-            const C2ConstLinearBlock &block = c2buffer->data().linearBlocks().front();
-            if (block.size() < kMaxLinearBufferSize / 2) {
-                size = block.size() * 2;
+            const std::vector<C2ConstLinearBlock> &linear_blocks = c2buffer->data().linearBlocks();
+            const uint32_t block_size = linear_blocks.front().size();
+            if (block_size < kMaxLinearBufferSize / 2) {
+                size = block_size * 2;
             } else {
                 size = kMaxLinearBufferSize;
             }
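
The CCodecBuffers change keeps the same sizing rule while reading the block size once: double the incoming linear block, capped at the maximum linear buffer size. A small sketch of that rule; the two limits below are illustrative numbers, not the values defined in CCodecBuffers.cpp:

```cpp
#include <cstdint>
#include <cstdio>

constexpr uint32_t kLinearBufferSize    = 1 << 20;  // example default: 1 MiB
constexpr uint32_t kMaxLinearBufferSize = 4 << 20;  // example cap: 4 MiB

// Double the incoming block, but never exceed the cap.
uint32_t chooseClientBufferSize(uint32_t blockSize) {
    uint32_t size = kLinearBufferSize;
    if (blockSize < kMaxLinearBufferSize / 2) {
        size = blockSize * 2;                // cannot overflow: blockSize < cap / 2
    } else {
        size = kMaxLinearBufferSize;
    }
    return size;
}

int main() {
    std::printf("%u\n", chooseClientBufferSize(64 * 1024));      // 131072
    std::printf("%u\n", chooseClientBufferSize(3 * (1 << 20)));  // capped at 4194304
    return 0;
}
```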
diff --git a/media/codec2/sfplugin/CCodecConfig.cpp b/media/codec2/sfplugin/CCodecConfig.cpp
index 1cfdc19..5adcd94 100644
--- a/media/codec2/sfplugin/CCodecConfig.cpp
+++ b/media/codec2/sfplugin/CCodecConfig.cpp
@@ -235,7 +235,10 @@
     const std::vector<ConfigMapper> &getConfigMappersForSdkKey(std::string key) const {
         auto it = mConfigMappers.find(key);
         if (it == mConfigMappers.end()) {
-            ALOGD("no c2 equivalents for %s", key.c_str());
+            if (mComplained.count(key) == 0) {
+                ALOGD("no c2 equivalents for %s", key.c_str());
+                mComplained.insert(key);
+            }
             return NO_MAPPERS;
         }
         ALOGV("found %zu eqs for %s", it->second.size(), key.c_str());
@@ -304,6 +307,7 @@
 
 private:
     std::map<SdkKey, std::vector<ConfigMapper>> mConfigMappers;
+    mutable std::set<std::string> mComplained;
 };
 
 const std::vector<ConfigMapper> StandardParams::NO_MAPPERS;
@@ -508,7 +512,8 @@
                .limitTo(D::ENCODER & D::VIDEO));
     // convert to timestamp base
     add(ConfigMapper(KEY_I_FRAME_INTERVAL, C2_PARAMKEY_SYNC_FRAME_INTERVAL, "value")
-        .withMappers([](C2Value v) -> C2Value {
+        .limitTo(D::VIDEO & D::ENCODER & D::CONFIG)
+        .withMapper([](C2Value v) -> C2Value {
             // convert from i32 to float
             int32_t i32Value;
             float fpValue;
@@ -518,12 +523,6 @@
                 return int64_t(c2_min(1000000 * fpValue + 0.5, (double)INT64_MAX));
             }
             return C2Value();
-        }, [](C2Value v) -> C2Value {
-            int64_t i64;
-            if (v.get(&i64)) {
-                return float(i64) / 1000000;
-            }
-            return C2Value();
         }));
     // remove when codecs switch to proper coding.gop (add support for calculating gop)
     deprecated(ConfigMapper("i-frame-period", "coding.gop", "intra-period")
@@ -1033,7 +1032,25 @@
     }
 
     ReflectedParamUpdater::Dict reflected = mParamUpdater->getParams(paramPointers);
-    ALOGD("c2 config is %s", reflected.debugString().c_str());
+    std::string config = reflected.debugString();
+    std::set<std::string> configLines;
+    std::string diff;
+    for (size_t start = 0; start != std::string::npos; ) {
+        size_t end = config.find('\n', start);
+        size_t count = (end == std::string::npos)
+                ? std::string::npos
+                : end - start + 1;
+        std::string line = config.substr(start, count);
+        configLines.insert(line);
+        if (mLastConfig.count(line) == 0) {
+            diff.append(line);
+        }
+        start = (end == std::string::npos) ? std::string::npos : end + 1;
+    }
+    if (!diff.empty()) {
+        ALOGD("c2 config diff is %s", diff.c_str());
+    }
+    mLastConfig.swap(configLines);
 
     bool changed = false;
     if (domain & mInputDomain) {
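
The CCodecConfig change replaces the full "c2 config is ..." dump with a per-line diff against the previous dump: each line of the debug string goes into a set, and only lines absent from the last set are logged. A self-contained sketch of the same set-based line diff; `logConfigDiff` and `gLastConfig` are illustrative names, and `printf` stands in for `ALOGD`:

```cpp
#include <cstdio>
#include <set>
#include <string>

// Keep the lines of the previous dump; log only lines not seen last time.
std::set<std::string> gLastConfig;

void logConfigDiff(const std::string& config) {
    std::set<std::string> configLines;
    std::string diff;
    for (size_t start = 0; start != std::string::npos; ) {
        size_t end = config.find('\n', start);
        size_t count = (end == std::string::npos) ? std::string::npos
                                                  : end - start + 1;
        std::string line = config.substr(start, count);
        configLines.insert(line);
        if (gLastConfig.count(line) == 0) {
            diff.append(line);                   // new or changed line
        }
        start = (end == std::string::npos) ? std::string::npos : end + 1;
    }
    if (!diff.empty()) {
        std::printf("c2 config diff is %s", diff.c_str());
    }
    gLastConfig.swap(configLines);
}

int main() {
    logConfigDiff("bitrate = 1000000\nframe-rate = 30\n");   // both lines logged
    logConfigDiff("bitrate = 2000000\nframe-rate = 30\n");   // only the bitrate line logged
    return 0;
}
```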
diff --git a/media/codec2/sfplugin/CCodecConfig.h b/media/codec2/sfplugin/CCodecConfig.h
index 3bafe3f..a61c8b7 100644
--- a/media/codec2/sfplugin/CCodecConfig.h
+++ b/media/codec2/sfplugin/CCodecConfig.h
@@ -134,6 +134,8 @@
     /// For now support a validation function.
     std::map<C2Param::Index, LocalParamValidator> mLocalParams;
 
+    std::set<std::string> mLastConfig;
+
     CCodecConfig();
 
     /// initializes the members required to manage the format: descriptors, reflector,
diff --git a/media/codec2/sfplugin/PipelineWatcher.cpp b/media/codec2/sfplugin/PipelineWatcher.cpp
index 74d14e8..0ee9056 100644
--- a/media/codec2/sfplugin/PipelineWatcher.cpp
+++ b/media/codec2/sfplugin/PipelineWatcher.cpp
@@ -146,7 +146,7 @@
               std::chrono::duration_cast<std::chrono::milliseconds>(elapsed).count());
         durations.push_back(elapsed);
     }
-    std::nth_element(durations.begin(), durations.end(), durations.begin() + n,
+    std::nth_element(durations.begin(), durations.begin() + n, durations.end(),
                      std::greater<Clock::duration>());
     return durations[n];
 }
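
The PipelineWatcher fix corrects the argument order of `std::nth_element`, whose signature is `(first, nth, last, comp)`; the previous call passed the iterators as `(first, last, nth, comp)`, which is undefined behavior. A short example of the corrected call selecting the n-th longest duration, using example values rather than measured pipeline timings:

```cpp
#include <algorithm>
#include <chrono>
#include <cstdio>
#include <functional>
#include <vector>

int main() {
    using Clock = std::chrono::steady_clock;
    std::vector<Clock::duration> durations = {
        std::chrono::milliseconds(30), std::chrono::milliseconds(5),
        std::chrono::milliseconds(80), std::chrono::milliseconds(20),
    };

    size_t n = 1;  // index wanted when sorted in descending order

    // Correct argument order: (first, nth, last, comparator).  With
    // std::greater<> this partitions the range so that durations[n] holds
    // the (n+1)-th largest element.
    std::nth_element(durations.begin(), durations.begin() + n, durations.end(),
                     std::greater<Clock::duration>());

    auto ms = std::chrono::duration_cast<std::chrono::milliseconds>(durations[n]);
    std::printf("second-longest duration: %lld ms\n",
                static_cast<long long>(ms.count()));   // 30 ms
    return 0;
}
```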
diff --git a/media/codec2/vndk/Android.bp b/media/codec2/vndk/Android.bp
index b6ddfab..52cc7ad 100644
--- a/media/codec2/vndk/Android.bp
+++ b/media/codec2/vndk/Android.bp
@@ -66,7 +66,7 @@
         "liblog",
         "libnativewindow",
         "libstagefright_foundation",
-        "libstagefright_bufferpool@2.0",
+        "libstagefright_bufferpool@2.0.1",
         "libui",
         "libutils",
     ],
diff --git a/media/codec2/vndk/C2Buffer.cpp b/media/codec2/vndk/C2Buffer.cpp
index 710b536..2d99b53 100644
--- a/media/codec2/vndk/C2Buffer.cpp
+++ b/media/codec2/vndk/C2Buffer.cpp
@@ -413,17 +413,14 @@
 
     std::shared_ptr<C2LinearAllocation> alloc;
     if (C2AllocatorIon::isValid(cHandle)) {
-        native_handle_t *handle = native_handle_clone(cHandle);
-        if (handle) {
-            c2_status_t err = sAllocator->priorLinearAllocation(handle, &alloc);
-            const std::shared_ptr<C2PooledBlockPoolData> poolData =
-                    std::make_shared<C2PooledBlockPoolData>(data);
-            if (err == C2_OK && poolData) {
-                // TODO: config params?
-                std::shared_ptr<C2LinearBlock> block =
-                        _C2BlockFactory::CreateLinearBlock(alloc, poolData);
-                return block;
-            }
+        c2_status_t err = sAllocator->priorLinearAllocation(cHandle, &alloc);
+        const std::shared_ptr<C2PooledBlockPoolData> poolData =
+                std::make_shared<C2PooledBlockPoolData>(data);
+        if (err == C2_OK && poolData) {
+            // TODO: config params?
+            std::shared_ptr<C2LinearBlock> block =
+                    _C2BlockFactory::CreateLinearBlock(alloc, poolData);
+            return block;
         }
     }
     return nullptr;
@@ -674,17 +671,14 @@
         ResultStatus status = mBufferPoolManager->allocate(
                 mConnectionId, params, &cHandle, &bufferPoolData);
         if (status == ResultStatus::OK) {
-            native_handle_t *handle = native_handle_clone(cHandle);
-            if (handle) {
-                std::shared_ptr<C2LinearAllocation> alloc;
-                std::shared_ptr<C2PooledBlockPoolData> poolData =
-                        std::make_shared<C2PooledBlockPoolData>(bufferPoolData);
-                c2_status_t err = mAllocator->priorLinearAllocation(handle, &alloc);
-                if (err == C2_OK && poolData && alloc) {
-                    *block = _C2BlockFactory::CreateLinearBlock(alloc, poolData, 0, capacity);
-                    if (*block) {
-                        return C2_OK;
-                    }
+            std::shared_ptr<C2LinearAllocation> alloc;
+            std::shared_ptr<C2PooledBlockPoolData> poolData =
+                    std::make_shared<C2PooledBlockPoolData>(bufferPoolData);
+            c2_status_t err = mAllocator->priorLinearAllocation(cHandle, &alloc);
+            if (err == C2_OK && poolData && alloc) {
+                *block = _C2BlockFactory::CreateLinearBlock(alloc, poolData, 0, capacity);
+                if (*block) {
+                    return C2_OK;
                 }
             }
             return C2_NO_MEMORY;
@@ -710,19 +704,16 @@
         ResultStatus status = mBufferPoolManager->allocate(
                 mConnectionId, params, &cHandle, &bufferPoolData);
         if (status == ResultStatus::OK) {
-            native_handle_t *handle = native_handle_clone(cHandle);
-            if (handle) {
-                std::shared_ptr<C2GraphicAllocation> alloc;
-                std::shared_ptr<C2PooledBlockPoolData> poolData =
-                    std::make_shared<C2PooledBlockPoolData>(bufferPoolData);
-                c2_status_t err = mAllocator->priorGraphicAllocation(
-                        handle, &alloc);
-                if (err == C2_OK && poolData && alloc) {
-                    *block = _C2BlockFactory::CreateGraphicBlock(
-                            alloc, poolData, C2Rect(width, height));
-                    if (*block) {
-                        return C2_OK;
-                    }
+            std::shared_ptr<C2GraphicAllocation> alloc;
+            std::shared_ptr<C2PooledBlockPoolData> poolData =
+                std::make_shared<C2PooledBlockPoolData>(bufferPoolData);
+            c2_status_t err = mAllocator->priorGraphicAllocation(
+                    cHandle, &alloc);
+            if (err == C2_OK && poolData && alloc) {
+                *block = _C2BlockFactory::CreateGraphicBlock(
+                        alloc, poolData, C2Rect(width, height));
+                if (*block) {
+                    return C2_OK;
                 }
             }
             return C2_NO_MEMORY;
@@ -1117,17 +1108,14 @@
 
     std::shared_ptr<C2GraphicAllocation> alloc;
     if (C2AllocatorGralloc::isValid(cHandle)) {
-        native_handle_t *handle = native_handle_clone(cHandle);
-        if (handle) {
-            c2_status_t err = sAllocator->priorGraphicAllocation(handle, &alloc);
-            const std::shared_ptr<C2PooledBlockPoolData> poolData =
-                    std::make_shared<C2PooledBlockPoolData>(data);
-            if (err == C2_OK && poolData) {
-                // TODO: config setup?
-                std::shared_ptr<C2GraphicBlock> block =
-                        _C2BlockFactory::CreateGraphicBlock(alloc, poolData);
-                return block;
-            }
+        c2_status_t err = sAllocator->priorGraphicAllocation(cHandle, &alloc);
+        const std::shared_ptr<C2PooledBlockPoolData> poolData =
+                std::make_shared<C2PooledBlockPoolData>(data);
+        if (err == C2_OK && poolData) {
+            // TODO: config setup?
+            std::shared_ptr<C2GraphicBlock> block =
+                    _C2BlockFactory::CreateGraphicBlock(alloc, poolData);
+            return block;
         }
     }
     return nullptr;
diff --git a/media/codec2/vndk/C2Store.cpp b/media/codec2/vndk/C2Store.cpp
index f8afa7c..5b2bd7b 100644
--- a/media/codec2/vndk/C2Store.cpp
+++ b/media/codec2/vndk/C2Store.cpp
@@ -848,7 +848,8 @@
     emplace("libcodec2_soft_amrnbenc.so");
     emplace("libcodec2_soft_amrwbdec.so");
     emplace("libcodec2_soft_amrwbenc.so");
-    emplace("libcodec2_soft_av1dec.so");
+    //emplace("libcodec2_soft_av1dec_aom.so"); // deprecated for the gav1 implementation
+    emplace("libcodec2_soft_av1dec_gav1.so");
     emplace("libcodec2_soft_avcdec.so");
     emplace("libcodec2_soft_avcenc.so");
     emplace("libcodec2_soft_flacdec.so");
diff --git a/media/extractors/midi/Android.bp b/media/extractors/midi/Android.bp
index 7d42e70..91ce78e 100644
--- a/media/extractors/midi/Android.bp
+++ b/media/extractors/midi/Android.bp
@@ -14,7 +14,9 @@
     static_libs: [
         "libmedia_midiiowrapper",
         "libsonivox",
-        "libstagefright_foundation"
+        "libstagefright_foundation",
+        "libwatchdog",
+        "libbase",
     ],
     name: "libmidiextractor",
     relative_install_path: "extractors",
@@ -35,5 +37,4 @@
             "signed-integer-overflow",
         ],
     },
-
 }
diff --git a/media/extractors/midi/MidiExtractor.cpp b/media/extractors/midi/MidiExtractor.cpp
index 9f4f9e6..d0efb2f 100644
--- a/media/extractors/midi/MidiExtractor.cpp
+++ b/media/extractors/midi/MidiExtractor.cpp
@@ -26,6 +26,7 @@
 #include <media/stagefright/MediaDefs.h>
 #include <media/stagefright/MediaErrors.h>
 #include <libsonivox/eas_reverb.h>
+#include <watchdog/Watchdog.h>
 
 namespace android {
 
@@ -116,6 +117,7 @@
         MediaBufferHelper **outBuffer, const ReadOptions *options)
 {
     ALOGV("MidiSource::read");
+
     MediaBufferHelper *buffer;
     // process an optional seek request
     int64_t seekTimeUs;
@@ -139,6 +141,8 @@
 }
 
 // MidiEngine
+using namespace std::chrono_literals;
+static constexpr auto kTimeout = 10s;
 
 MidiEngine::MidiEngine(CDataSource *dataSource,
         AMediaFormat *fileMetadata,
@@ -147,6 +151,8 @@
             mEasHandle(NULL),
             mEasConfig(NULL),
             mIsInitialized(false) {
+    Watchdog watchdog(kTimeout);
+
     mIoWrapper = new MidiIoWrapper(dataSource);
     // spin up a new EAS engine
     EAS_I32 temp;
@@ -186,6 +192,8 @@
 }
 
 MidiEngine::~MidiEngine() {
+    Watchdog watchdog(kTimeout);
+
     if (mEasHandle) {
         EAS_CloseFile(mEasData, mEasHandle);
     }
@@ -217,12 +225,16 @@
 }
 
 status_t MidiEngine::seekTo(int64_t positionUs) {
+    Watchdog watchdog(kTimeout);
+
     ALOGV("seekTo %lld", (long long)positionUs);
     EAS_RESULT result = EAS_Locate(mEasData, mEasHandle, positionUs / 1000, false);
     return result == EAS_SUCCESS ? OK : UNKNOWN_ERROR;
 }
 
 MediaBufferHelper* MidiEngine::readBuffer() {
+    Watchdog watchdog(kTimeout);
+
     EAS_STATE state;
     EAS_State(mEasData, mEasHandle, &state);
     if ((state == EAS_STATE_STOPPED) || (state == EAS_STATE_ERROR)) {
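
Each EAS call in MidiEngine is now guarded by a stack-allocated `Watchdog` with a 10-second timeout, supplied by the new `libwatchdog` dependency. Below is a hedged, illustrative RAII sketch of the same idea (abort the process if the guarded scope runs past its deadline); it is not the libwatchdog implementation, only a stand-in showing why a scoped guard fits this use:

```cpp
#include <chrono>
#include <condition_variable>
#include <cstdlib>
#include <mutex>
#include <thread>

// Illustrative RAII watchdog: if the scope it guards does not finish within
// `timeout`, the process is aborted.
class ScopedWatchdog {
public:
    explicit ScopedWatchdog(std::chrono::steady_clock::duration timeout)
        : mThread([this, timeout] {
              std::unique_lock<std::mutex> lock(mMutex);
              if (!mCv.wait_for(lock, timeout, [this] { return mDone; })) {
                  std::abort();                 // the scope overran its deadline
              }
          }) {}

    ~ScopedWatchdog() {
        {
            std::lock_guard<std::mutex> lock(mMutex);
            mDone = true;                       // the scope finished in time
        }
        mCv.notify_one();
        mThread.join();
    }

private:
    std::mutex mMutex;
    std::condition_variable mCv;
    bool mDone = false;
    std::thread mThread;                        // declared last so the others exist first
};

int main() {
    using namespace std::chrono_literals;
    {
        ScopedWatchdog watchdog(10s);           // same pattern as MidiEngine::readBuffer()
        // ... bounded work goes here; finishing releases the watchdog ...
    }
    return 0;
}
```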
diff --git a/media/extractors/mp4/MPEG4Extractor.cpp b/media/extractors/mp4/MPEG4Extractor.cpp
index 49c684e..bd4d4fe 100644
--- a/media/extractors/mp4/MPEG4Extractor.cpp
+++ b/media/extractors/mp4/MPEG4Extractor.cpp
@@ -1061,6 +1061,8 @@
                     // drop it now to reduce our footprint
                     free(mLastTrack->mTx3gBuffer);
                     mLastTrack->mTx3gBuffer = NULL;
+                    mLastTrack->mTx3gFilled = 0;
+                    mLastTrack->mTx3gSize = 0;
                 }
 
             } else if (chunk_type == FOURCC("moov")) {
@@ -2622,6 +2624,10 @@
             // if those apps are compensating for it, we'd break them with such a change
             //
 
+            if (mLastTrack->mTx3gBuffer == NULL) {
+                mLastTrack->mTx3gSize = 0;
+                mLastTrack->mTx3gFilled = 0;
+            }
             if (mLastTrack->mTx3gSize - mLastTrack->mTx3gFilled < chunk_size) {
                 size_t growth = kTx3gGrowth;
                 if (growth < chunk_size) {
@@ -4997,8 +5003,11 @@
 }
 
 status_t MPEG4Source::parseSampleAuxiliaryInformationSizes(
-        off64_t offset, off64_t /* size */) {
+        off64_t offset, off64_t size) {
     ALOGV("parseSampleAuxiliaryInformationSizes");
+    if (size < 9) {
+        return -EINVAL;
+    }
     // 14496-12 8.7.12
     uint8_t version;
     if (mDataSource->readAt(
@@ -5011,25 +5020,32 @@
         return ERROR_UNSUPPORTED;
     }
     offset++;
+    size--;
 
     uint32_t flags;
     if (!mDataSource->getUInt24(offset, &flags)) {
         return ERROR_IO;
     }
     offset += 3;
+    size -= 3;
 
     if (flags & 1) {
+        if (size < 13) {
+            return -EINVAL;
+        }
         uint32_t tmp;
         if (!mDataSource->getUInt32(offset, &tmp)) {
             return ERROR_MALFORMED;
         }
         mCurrentAuxInfoType = tmp;
         offset += 4;
+        size -= 4;
         if (!mDataSource->getUInt32(offset, &tmp)) {
             return ERROR_MALFORMED;
         }
         mCurrentAuxInfoTypeParameter = tmp;
         offset += 4;
+        size -= 4;
     }
 
     uint8_t defsize;
@@ -5038,6 +5054,7 @@
     }
     mCurrentDefaultSampleInfoSize = defsize;
     offset++;
+    size--;
 
     uint32_t smplcnt;
     if (!mDataSource->getUInt32(offset, &smplcnt)) {
@@ -5045,11 +5062,16 @@
     }
     mCurrentSampleInfoCount = smplcnt;
     offset += 4;
-
+    size -= 4;
     if (mCurrentDefaultSampleInfoSize != 0) {
         ALOGV("@@@@ using default sample info size of %d", mCurrentDefaultSampleInfoSize);
         return OK;
     }
+    if(smplcnt > size) {
+        ALOGW("b/124525515 - smplcnt(%u) > size(%ld)", (unsigned int)smplcnt, (unsigned long)size);
+        android_errorWriteLog(0x534e4554, "124525515");
+        return -EINVAL;
+    }
     if (smplcnt > mCurrentSampleInfoAllocSize) {
         uint8_t * newPtr =  (uint8_t*) realloc(mCurrentSampleInfoSizes, smplcnt);
         if (newPtr == NULL) {
@@ -5065,26 +5087,32 @@
 }
 
 status_t MPEG4Source::parseSampleAuxiliaryInformationOffsets(
-        off64_t offset, off64_t /* size */) {
+        off64_t offset, off64_t size) {
     ALOGV("parseSampleAuxiliaryInformationOffsets");
+    if (size < 8) {
+        return -EINVAL;
+    }
     // 14496-12 8.7.13
     uint8_t version;
     if (mDataSource->readAt(offset, &version, sizeof(version)) != 1) {
         return ERROR_IO;
     }
     offset++;
+    size--;
 
     uint32_t flags;
     if (!mDataSource->getUInt24(offset, &flags)) {
         return ERROR_IO;
     }
     offset += 3;
+    size -= 3;
 
     uint32_t entrycount;
     if (!mDataSource->getUInt32(offset, &entrycount)) {
         return ERROR_IO;
     }
     offset += 4;
+    size -= 4;
     if (entrycount == 0) {
         return OK;
     }
@@ -5110,19 +5138,31 @@
 
     for (size_t i = 0; i < entrycount; i++) {
         if (version == 0) {
+            if (size < 4) {
+                ALOGW("b/124526959");
+                android_errorWriteLog(0x534e4554, "124526959");
+                return -EINVAL;
+            }
             uint32_t tmp;
             if (!mDataSource->getUInt32(offset, &tmp)) {
                 return ERROR_IO;
             }
             mCurrentSampleInfoOffsets[i] = tmp;
             offset += 4;
+            size -= 4;
         } else {
+            if (size < 8) {
+                ALOGW("b/124526959");
+                android_errorWriteLog(0x534e4554, "124526959");
+                return -EINVAL;
+            }
             uint64_t tmp;
             if (!mDataSource->getUInt64(offset, &tmp)) {
                 return ERROR_IO;
             }
             mCurrentSampleInfoOffsets[i] = tmp;
             offset += 8;
+            size -= 8;
         }
     }
 
@@ -5444,16 +5484,12 @@
 
         // apply some sanity (vs strict legality) checks
         //
-        // clamp the count of entries in the trun box, to avoid spending forever parsing
-        // this box. Clamping (vs error) lets us play *something*.
-        // 1 million is about 400 msecs on a Pixel3, should be no more than a couple seconds
-        // on the slowest devices.
-        static constexpr uint32_t kMaxTrunSampleCount = 1000000;
+        static constexpr uint32_t kMaxTrunSampleCount = 10000;
         if (sampleCount > kMaxTrunSampleCount) {
-            ALOGW("b/123389881 clamp sampleCount(%u) @ kMaxTrunSampleCount(%u)",
+            ALOGW("b/123389881 sampleCount(%u) > kMaxTrunSampleCount(%u)",
                   sampleCount, kMaxTrunSampleCount);
             android_errorWriteLog(0x534e4554, "124389881 count");
-
+            return -EINVAL;
         }
     }
 
@@ -5497,7 +5533,12 @@
         tmp.duration = sampleDuration;
         tmp.compositionOffset = sampleCtsOffset;
         memset(tmp.iv, 0, sizeof(tmp.iv));
-        mCurrentSamples.add(tmp);
+        if (mCurrentSamples.add(tmp) < 0) {
+            ALOGW("b/123389881 failed saving sample(n=%zu)", mCurrentSamples.size());
+            android_errorWriteLog(0x534e4554, "124389881 allocation");
+            mCurrentSamples.clear();
+            return NO_MEMORY;
+        }
 
         dataOffset += sampleSize;
     }
@@ -5923,7 +5964,9 @@
                 //if nallength abnormal,ignore it.
                 ALOGW("abnormal nallength, ignore this NAL");
                 srcOffset = size;
-                break;
+
+                ALOGE("FP2: Discarding file with malformed NAL!");
+                return AMEDIA_ERROR_MALFORMED;
             }
 
             if (nalLength == 0) {
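
The `saiz`/`saio` parsers above now receive the box size and shrink it as each field is consumed, rejecting boxes that are too small for what the version and flags promise. A compact sketch of that consume-and-check pattern over an in-memory buffer; the `BoxReader` type and the field layout are illustrative, not the full ISO 14496-12 syntax:

```cpp
#include <cstdint>
#include <cstdio>

// Minimal bounds-checked reader: every read verifies the remaining size first
// and then consumes it, so a short box can never push reads past its end.
struct BoxReader {
    const uint8_t* data;
    size_t size;     // bytes remaining

    bool readU8(uint8_t* out) {
        if (size < 1) return false;            // would read past the box
        *out = data[0];
        data += 1; size -= 1;
        return true;
    }
    bool readU32(uint32_t* out) {
        if (size < 4) return false;
        *out = (uint32_t(data[0]) << 24) | (uint32_t(data[1]) << 16) |
               (uint32_t(data[2]) << 8)  |  uint32_t(data[3]);
        data += 4; size -= 4;
        return true;
    }
};

int main() {
    const uint8_t box[] = {0x00, 0x00, 0x00, 0x02, 0x07};  // count = 2, then 1 byte
    BoxReader reader{box, sizeof(box)};

    uint32_t entryCount = 0;
    if (!reader.readU32(&entryCount)) return 1;

    // Reject counts that cannot possibly fit in the remaining bytes,
    // the same check that now guards smplcnt and entrycount.
    if (entryCount > reader.size) {
        std::printf("malformed box: %u entries but only %zu bytes left\n",
                    entryCount, reader.size);
        return 1;
    }
    std::printf("box is well-formed\n");
    return 0;
}
```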
diff --git a/media/extractors/mp4/SampleTable.cpp b/media/extractors/mp4/SampleTable.cpp
index bf29bf1..e7e8901 100644
--- a/media/extractors/mp4/SampleTable.cpp
+++ b/media/extractors/mp4/SampleTable.cpp
@@ -391,20 +391,11 @@
     }
 
     mTimeToSampleCount = U32_AT(&header[4]);
-    if (mTimeToSampleCount > UINT32_MAX / (2 * sizeof(uint32_t))) {
-        // Choose this bound because
-        // 1) 2 * sizeof(uint32_t) is the amount of memory needed for one
-        //    time-to-sample entry in the time-to-sample table.
-        // 2) mTimeToSampleCount is the number of entries of the time-to-sample
-        //    table.
-        // 3) We hope that the table size does not exceed UINT32_MAX.
+    if (mTimeToSampleCount > (data_size - 8) / (2 * sizeof(uint32_t))) {
         ALOGE("Time-to-sample table size too large.");
         return ERROR_OUT_OF_RANGE;
     }
 
-    // Note: At this point, we know that mTimeToSampleCount * 2 will not
-    // overflow because of the above condition.
-
     uint64_t allocSize = (uint64_t)mTimeToSampleCount * 2 * sizeof(uint32_t);
     mTotalSize += allocSize;
     if (mTotalSize > kMaxTotalSize) {
@@ -540,6 +531,12 @@
     }
 
     uint64_t allocSize = (uint64_t)numSyncSamples * sizeof(uint32_t);
+    if (allocSize > data_size - 8) {
+        ALOGW("b/124771364 - allocSize(%lu) > size(%lu)",
+                (unsigned long)allocSize, (unsigned long)(data_size - 8));
+        android_errorWriteLog(0x534e4554, "124771364");
+        return ERROR_MALFORMED;
+    }
     if (allocSize > kMaxTotalSize) {
         ALOGE("Sync sample table size too large.");
         return ERROR_OUT_OF_RANGE;
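
The SampleTable changes bound the entry count against the bytes actually present in the box before allocating, instead of only guarding against 32-bit overflow. A short sketch of that sizing check; `validateTableSize` and the `kMaxTotalSize` value here are illustrative, not the SampleTable internals:

```cpp
#include <cstdint>
#include <cstdio>

constexpr uint64_t kMaxTotalSize = 100 * 1024 * 1024;  // example cap

bool validateTableSize(uint32_t entryCount, uint64_t dataSize,
                       uint64_t bytesPerEntry, uint64_t* allocSize) {
    if (dataSize < 8) return false;                     // no room for the box header
    if (entryCount > (dataSize - 8) / bytesPerEntry) {  // more entries than the box holds
        return false;
    }
    *allocSize = uint64_t(entryCount) * bytesPerEntry;  // cannot overflow after the check
    return *allocSize <= kMaxTotalSize;
}

int main() {
    uint64_t allocSize = 0;
    // 16 bytes of payload can hold at most two 8-byte time-to-sample entries.
    std::printf("%d\n", validateTableSize(2, 24, 8, &allocSize));   // 1
    std::printf("%d\n", validateTableSize(3, 24, 8, &allocSize));   // 0
    return 0;
}
```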
diff --git a/media/libaaudio/src/binding/IAAudioService.cpp b/media/libaaudio/src/binding/IAAudioService.cpp
index 97ad2b0..e017b3a 100644
--- a/media/libaaudio/src/binding/IAAudioService.cpp
+++ b/media/libaaudio/src/binding/IAAudioService.cpp
@@ -237,12 +237,12 @@
 
 status_t BnAAudioService::onTransact(uint32_t code, const Parcel& data,
                                         Parcel* reply, uint32_t flags) {
-    aaudio_handle_t streamHandle;
+    aaudio_handle_t streamHandle = 0;
     aaudio::AAudioStreamRequest request;
     aaudio::AAudioStreamConfiguration configuration;
-    pid_t tid;
-    int64_t nanoseconds;
-    aaudio_result_t result;
+    pid_t tid = 0;
+    int64_t nanoseconds = 0;
+    aaudio_result_t result = AAUDIO_OK;
     status_t status = NO_ERROR;
     ALOGV("BnAAudioService::onTransact(%i) %i", code, flags);
 
@@ -285,7 +285,11 @@
 
         case CLOSE_STREAM: {
             CHECK_INTERFACE(IAAudioService, data, reply);
-            data.readInt32(&streamHandle);
+            status = data.readInt32(&streamHandle);
+            if (status != NO_ERROR) {
+                ALOGE("BnAAudioService::%s(CLOSE_STREAM) streamHandle failed!", __func__);
+                return status;
+            }
             result = closeStream(streamHandle);
             //ALOGD("BnAAudioService::onTransact CLOSE_STREAM 0x%08X, result = %d",
             //      streamHandle, result);
@@ -297,6 +301,7 @@
             CHECK_INTERFACE(IAAudioService, data, reply);
             status = data.readInt32(&streamHandle);
             if (status != NO_ERROR) {
+                ALOGE("BnAAudioService::%s(GET_STREAM_DESCRIPTION) streamHandle failed!", __func__);
                 return status;
             }
             aaudio::AudioEndpointParcelable parcelable;
@@ -313,7 +318,11 @@
 
         case START_STREAM: {
             CHECK_INTERFACE(IAAudioService, data, reply);
-            data.readInt32(&streamHandle);
+            status = data.readInt32(&streamHandle);
+            if (status != NO_ERROR) {
+                ALOGE("BnAAudioService::%s(START_STREAM) streamHandle failed!", __func__);
+                return status;
+            }
             result = startStream(streamHandle);
             ALOGV("BnAAudioService::onTransact START_STREAM 0x%08X, result = %d",
                     streamHandle, result);
@@ -323,7 +332,11 @@
 
         case PAUSE_STREAM: {
             CHECK_INTERFACE(IAAudioService, data, reply);
-            data.readInt32(&streamHandle);
+            status = data.readInt32(&streamHandle);
+            if (status != NO_ERROR) {
+                ALOGE("BnAAudioService::%s(PAUSE_STREAM) streamHandle failed!", __func__);
+                return status;
+            }
             result = pauseStream(streamHandle);
             ALOGV("BnAAudioService::onTransact PAUSE_STREAM 0x%08X, result = %d",
                   streamHandle, result);
@@ -333,7 +346,11 @@
 
         case STOP_STREAM: {
             CHECK_INTERFACE(IAAudioService, data, reply);
-            data.readInt32(&streamHandle);
+            status = data.readInt32(&streamHandle);
+            if (status != NO_ERROR) {
+                ALOGE("BnAAudioService::%s(STOP_STREAM) streamHandle failed!", __func__);
+                return status;
+            }
             result = stopStream(streamHandle);
             ALOGV("BnAAudioService::onTransact STOP_STREAM 0x%08X, result = %d",
                   streamHandle, result);
@@ -343,7 +360,11 @@
 
         case FLUSH_STREAM: {
             CHECK_INTERFACE(IAAudioService, data, reply);
-            data.readInt32(&streamHandle);
+            status = data.readInt32(&streamHandle);
+            if (status != NO_ERROR) {
+                ALOGE("BnAAudioService::%s(FLUSH_STREAM) streamHandle failed!", __func__);
+                return status;
+            }
             result = flushStream(streamHandle);
             ALOGV("BnAAudioService::onTransact FLUSH_STREAM 0x%08X, result = %d",
                     streamHandle, result);
@@ -353,20 +374,40 @@
 
         case REGISTER_AUDIO_THREAD: {
             CHECK_INTERFACE(IAAudioService, data, reply);
-            data.readInt32(&streamHandle);
-            data.readInt32(&tid);
-            data.readInt64(&nanoseconds);
+            status = data.readInt32(&streamHandle);
+            if (status != NO_ERROR) {
+                ALOGE("BnAAudioService::%s(REGISTER_AUDIO_THREAD) streamHandle failed!", __func__);
+                return status;
+            }
+            status = data.readInt32(&tid);
+            if (status != NO_ERROR) {
+                ALOGE("BnAAudioService::%s(REGISTER_AUDIO_THREAD) tid failed!", __func__);
+                return status;
+            }
+            status = data.readInt64(&nanoseconds);
+            if (status != NO_ERROR) {
+                ALOGE("BnAAudioService::%s(REGISTER_AUDIO_THREAD) nanoseconds failed!", __func__);
+                return status;
+            }
             result = registerAudioThread(streamHandle, tid, nanoseconds);
-            ALOGV("BnAAudioService::onTransact REGISTER_AUDIO_THREAD 0x%08X, result = %d",
-                    streamHandle, result);
+            ALOGV("BnAAudioService::%s(REGISTER_AUDIO_THREAD) 0x%08X, result = %d",
+                    __func__, streamHandle, result);
             reply->writeInt32(result);
             return NO_ERROR;
         } break;
 
         case UNREGISTER_AUDIO_THREAD: {
             CHECK_INTERFACE(IAAudioService, data, reply);
-            data.readInt32(&streamHandle);
-            data.readInt32(&tid);
+            status = data.readInt32(&streamHandle);
+            if (status != NO_ERROR) {
+                ALOGE("BnAAudioService::%s(UNREGISTER_AUDIO_THREAD) streamHandle failed!", __func__);
+                return status;
+            }
+            status = data.readInt32(&tid);
+            if (status != NO_ERROR) {
+                ALOGE("BnAAudioService::%s(UNREGISTER_AUDIO_THREAD) tid failed!", __func__);
+                return status;
+            }
             result = unregisterAudioThread(streamHandle, tid);
             ALOGV("BnAAudioService::onTransact UNREGISTER_AUDIO_THREAD 0x%08X, result = %d",
                     streamHandle, result);
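
Every `readInt32` in `onTransact` is now checked before its value is used, and the locals are initialized so a skipped read can no longer leave garbage in a handle. The real `Parcel` API is Android's binder interface; the sketch below uses a mock parcel purely to show the check-every-read pattern:

```cpp
#include <cstdint>
#include <cstdio>
#include <vector>

// Mock stand-ins for status_t / Parcel, only to illustrate the pattern
// (this is not the binder API).
using status_t = int32_t;
constexpr status_t NO_ERROR = 0;
constexpr status_t NOT_ENOUGH_DATA = -61;

struct MockParcel {
    std::vector<int32_t> values;
    size_t pos = 0;
    status_t readInt32(int32_t* out) {
        if (pos >= values.size()) return NOT_ENOUGH_DATA;
        *out = values[pos++];
        return NO_ERROR;
    }
};

status_t handleCloseStream(MockParcel& data) {
    int32_t streamHandle = 0;                  // initialized, as in the patch
    status_t status = data.readInt32(&streamHandle);
    if (status != NO_ERROR) {
        std::fprintf(stderr, "CLOSE_STREAM: reading streamHandle failed\n");
        return status;                         // bail out, never act on garbage
    }
    std::printf("closing stream 0x%08X\n", streamHandle);
    return NO_ERROR;
}

int main() {
    MockParcel good{{0x1234}};
    MockParcel empty{};
    handleCloseStream(good);                   // prints the handle
    handleCloseStream(empty);                  // reports the failed read
    return 0;
}
```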
diff --git a/media/libaaudio/src/client/AudioStreamInternalCapture.cpp b/media/libaaudio/src/client/AudioStreamInternalCapture.cpp
index a6cc45b..366cc87 100644
--- a/media/libaaudio/src/client/AudioStreamInternalCapture.cpp
+++ b/media/libaaudio/src/client/AudioStreamInternalCapture.cpp
@@ -89,7 +89,11 @@
     if (mAudioEndpoint.isFreeRunning()) {
         //ALOGD("AudioStreamInternalCapture::processDataNow() - update remote counter");
         // Update data queue based on the timing model.
-        int64_t estimatedRemoteCounter = mClockModel.convertTimeToPosition(currentNanoTime);
+        // Jitter in the DSP can cause late writes to the FIFO.
+        // This might be caused by resampling.
+        // We want to read the FIFO after the latest possible time
+        // that the DSP could have written the data.
+        int64_t estimatedRemoteCounter = mClockModel.convertLatestTimeToPosition(currentNanoTime);
         // TODO refactor, maybe use setRemoteCounter()
         mAudioEndpoint.setDataWriteCounter(estimatedRemoteCounter);
     }
@@ -139,7 +143,7 @@
                 // the writeCounter might have just advanced in the background,
                 // causing us to sleep until a later burst.
                 int64_t nextPosition = mAudioEndpoint.getDataReadCounter() + mFramesPerBurst;
-                wakeTime = mClockModel.convertPositionToTime(nextPosition);
+                wakeTime = mClockModel.convertPositionToLatestTime(nextPosition);
             }
                 break;
             default:
diff --git a/media/libaaudio/src/client/IsochronousClockModel.cpp b/media/libaaudio/src/client/IsochronousClockModel.cpp
index 747d0e1..9abdf53 100644
--- a/media/libaaudio/src/client/IsochronousClockModel.cpp
+++ b/media/libaaudio/src/client/IsochronousClockModel.cpp
@@ -19,12 +19,11 @@
 #include <log/log.h>
 
 #include <stdint.h>
+#include <algorithm>
 
 #include "utility/AudioClock.h"
 #include "IsochronousClockModel.h"
 
-#define MIN_LATENESS_NANOS (10 * AAUDIO_NANOS_PER_MICROSECOND)
-
 using namespace aaudio;
 
 IsochronousClockModel::IsochronousClockModel()
@@ -32,7 +31,7 @@
         , mMarkerNanoTime(0)
         , mSampleRate(48000)
         , mFramesPerBurst(64)
-        , mMaxLatenessInNanos(0)
+        , mMaxMeasuredLatenessNanos(0)
         , mState(STATE_STOPPED)
 {
 }
@@ -41,8 +40,7 @@
 }
 
 void IsochronousClockModel::setPositionAndTime(int64_t framePosition, int64_t nanoTime) {
-    ALOGV("setPositionAndTime(%lld, %lld)",
-          (long long) framePosition, (long long) nanoTime);
+    ALOGV("setPositionAndTime, %lld, %lld", (long long) framePosition, (long long) nanoTime);
     mMarkerFramePosition = framePosition;
     mMarkerNanoTime = nanoTime;
 }
@@ -54,7 +52,9 @@
 }
 
 void IsochronousClockModel::stop(int64_t nanoTime) {
-    ALOGV("stop(nanos = %lld)\n", (long long) nanoTime);
+    ALOGD("stop(nanos = %lld) max lateness = %d micros\n",
+        (long long) nanoTime,
+        (int) (mMaxMeasuredLatenessNanos / 1000));
     setPositionAndTime(convertTimeToPosition(nanoTime), nanoTime);
     // TODO should we set position?
     mState = STATE_STOPPED;
@@ -69,9 +69,10 @@
 }
 
 void IsochronousClockModel::processTimestamp(int64_t framePosition, int64_t nanoTime) {
-//    ALOGD("processTimestamp() - framePosition = %lld at nanoTime %llu",
-//         (long long)framePosition,
-//         (long long)nanoTime);
+    mTimestampCount++;
+// Log position and time in CSV format so we can import it easily into spreadsheets.
+    //ALOGD("%s() CSV, %d, %lld, %lld", __func__,
+          //mTimestampCount, (long long)framePosition, (long long)nanoTime);
     int64_t framesDelta = framePosition - mMarkerFramePosition;
     int64_t nanosDelta = nanoTime - mMarkerNanoTime;
     if (nanosDelta < 1000) {
@@ -108,17 +109,56 @@
     case STATE_RUNNING:
         if (nanosDelta < expectedNanosDelta) {
             // Earlier than expected timestamp.
-            // This data is probably more accurate so use it.
-            // or we may be drifting due to a slow HW clock.
-//            ALOGD("processTimestamp() - STATE_RUNNING - %d < %d micros - EARLY",
-//                 (int) (nanosDelta / 1000), (int)(expectedNanosDelta / 1000));
+            // This data is probably more accurate, so use it.
+            // Or we may be drifting due to a fast HW clock.
+            //int microsDelta = (int) (nanosDelta / 1000);
+            //int expectedMicrosDelta = (int) (expectedNanosDelta / 1000);
+            //ALOGD("%s() - STATE_RUNNING - #%d, %4d micros EARLY",
+                //__func__, mTimestampCount, expectedMicrosDelta - microsDelta);
+
             setPositionAndTime(framePosition, nanoTime);
-        } else if (nanosDelta > (expectedNanosDelta + mMaxLatenessInNanos)) {
-            // Later than expected timestamp.
-//            ALOGD("processTimestamp() - STATE_RUNNING - %d > %d + %d micros - LATE",
-//                 (int) (nanosDelta / 1000), (int)(expectedNanosDelta / 1000),
-//                 (int) (mMaxLatenessInNanos / 1000));
-            setPositionAndTime(framePosition - mFramesPerBurst,  nanoTime - mMaxLatenessInNanos);
+        } else if (nanosDelta > (expectedNanosDelta + (2 * mBurstPeriodNanos))) {
+            // In this case we do not update mMaxMeasuredLatenessNanos because it
+            // would force it too high.
+            // mMaxMeasuredLatenessNanos should range from 1 to 2 * mBurstPeriodNanos
+            //int32_t measuredLatenessNanos = (int32_t)(nanosDelta - expectedNanosDelta);
+            //ALOGD("%s() - STATE_RUNNING - #%d, lateness %d - max %d = %4d micros VERY LATE",
+                  //__func__,
+                  //mTimestampCount,
+                  //measuredLatenessNanos / 1000,
+                  //mMaxMeasuredLatenessNanos / 1000,
+                  //(measuredLatenessNanos - mMaxMeasuredLatenessNanos) / 1000
+                  //);
+
+            // This typically happens when we are modelling a service instead of a DSP.
+            setPositionAndTime(framePosition,  nanoTime - (2 * mBurstPeriodNanos));
+        } else if (nanosDelta > (expectedNanosDelta + mMaxMeasuredLatenessNanos)) {
+            //int32_t previousLatenessNanos = mMaxMeasuredLatenessNanos;
+            mMaxMeasuredLatenessNanos = (int32_t)(nanosDelta - expectedNanosDelta);
+
+            //ALOGD("%s() - STATE_RUNNING - #%d, newmax %d - oldmax %d = %4d micros LATE",
+                  //__func__,
+                  //mTimestampCount,
+                  //mMaxMeasuredLatenessNanos / 1000,
+                  //previousLatenessNanos / 1000,
+                  //(mMaxMeasuredLatenessNanos - previousLatenessNanos) / 1000
+                  //);
+
+            // When we are late, it may be because of preemption in the kernel,
+            // or timing jitter caused by resampling in the DSP,
+            // or we may be drifting due to a slow HW clock.
+            // We add slight drift value just in case there is actual long term drift
+            // forward caused by a slower clock.
+            // If the clock is faster than the model will get pushed earlier
+            // by the code in the preceding branch.
+            // The two opposing forces should allow the model to track the real clock
+            // over a long time.
+            int64_t driftingTime = mMarkerNanoTime + expectedNanosDelta + kDriftNanos;
+            setPositionAndTime(framePosition,  driftingTime);
+            //ALOGD("%s() - #%d, max lateness = %d micros",
+                  //__func__,
+                  //mTimestampCount,
+                  //(int) (mMaxMeasuredLatenessNanos / 1000));
         }
         break;
     default:
@@ -138,9 +178,12 @@
     update();
 }
 
+// Update expected lateness based on sampleRate and framesPerBurst
 void IsochronousClockModel::update() {
-    int64_t nanosLate = convertDeltaPositionToTime(mFramesPerBurst); // uses mSampleRate
-    mMaxLatenessInNanos = (nanosLate > MIN_LATENESS_NANOS) ? nanosLate : MIN_LATENESS_NANOS;
+    mBurstPeriodNanos = convertDeltaPositionToTime(mFramesPerBurst); // uses mSampleRate
+    // Timestamps may be late by up to a burst because we are randomly sampling the time period
+    // after the DSP position is actually updated.
+    mMaxMeasuredLatenessNanos = mBurstPeriodNanos;
 }
 
 int64_t IsochronousClockModel::convertDeltaPositionToTime(int64_t framesDelta) const {
@@ -183,11 +226,25 @@
     return position;
 }
 
+int32_t IsochronousClockModel::getLateTimeOffsetNanos() const {
+    // This will never be < 0 because mMaxMeasuredLatenessNanos starts at
+    // mBurstPeriodNanos and only gets bigger.
+    return (mMaxMeasuredLatenessNanos - mBurstPeriodNanos) + kExtraLatenessNanos;
+}
+
+int64_t IsochronousClockModel::convertPositionToLatestTime(int64_t framePosition) const {
+    return convertPositionToTime(framePosition) + getLateTimeOffsetNanos();
+}
+
+int64_t IsochronousClockModel::convertLatestTimeToPosition(int64_t nanoTime) const {
+    return convertTimeToPosition(nanoTime - getLateTimeOffsetNanos());
+}
+
 void IsochronousClockModel::dump() const {
     ALOGD("mMarkerFramePosition = %lld", (long long) mMarkerFramePosition);
     ALOGD("mMarkerNanoTime      = %lld", (long long) mMarkerNanoTime);
     ALOGD("mSampleRate          = %6d", mSampleRate);
     ALOGD("mFramesPerBurst      = %6d", mFramesPerBurst);
-    ALOGD("mMaxLatenessInNanos  = %6d", mMaxLatenessInNanos);
+    ALOGD("mMaxMeasuredLatenessNanos = %6d", mMaxMeasuredLatenessNanos);
     ALOGD("mState               = %6d", mState);
 }
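
For reference, the lateness handling introduced above amounts to the following bookkeeping: the measured lateness is kept between one and two burst periods, a timestamp later than two bursts snaps the model forward without inflating the measurement, and a moderately late timestamp grows the measurement and drifts the model forward by kDriftNanos. The sketch below is a simplified, hypothetical illustration (SimpleClockModel is not the real AAudio class, and the early-timestamp branch is assumed):

    #include <cstdint>

    // Hypothetical, simplified model mirroring the STATE_RUNNING branches above.
    class SimpleClockModel {
    public:
        void start(int64_t framePosition, int64_t nanoTime,
                   int32_t sampleRate, int32_t framesPerBurst) {
            mMarkerPosition = framePosition;
            mMarkerNanoTime = nanoTime;
            mSampleRate = sampleRate;
            mBurstPeriodNanos = (int32_t) framesToNanos(framesPerBurst);
            mMaxMeasuredLatenessNanos = mBurstPeriodNanos;   // lower bound: one burst
        }

        void processTimestamp(int64_t framePosition, int64_t nanoTime) {
            const int64_t expectedNanos =
                    mMarkerNanoTime + framesToNanos(framePosition - mMarkerPosition);
            const int64_t latenessNanos = nanoTime - expectedNanos;
            if (latenessNanos < 0) {
                // Earlier than predicted: trust the timestamp (assumed branch).
                setPositionAndTime(framePosition, nanoTime);
            } else if (latenessNanos > 2 * mBurstPeriodNanos) {
                // Very late: snap forward, but do not poison the measured lateness.
                setPositionAndTime(framePosition, nanoTime - 2 * mBurstPeriodNanos);
            } else if (latenessNanos > mMaxMeasuredLatenessNanos) {
                // Moderately late: remember the new worst case and drift slightly forward.
                mMaxMeasuredLatenessNanos = (int32_t) latenessNanos;
                setPositionAndTime(framePosition, expectedNanos + kDriftNanos);
            }
        }

        int64_t convertPositionToLatestTime(int64_t framePosition) const {
            // Latest plausible time = ideal time + jitter measured beyond one burst
            // (the real code also adds kExtraLatenessNanos on top of this).
            return mMarkerNanoTime + framesToNanos(framePosition - mMarkerPosition)
                    + (mMaxMeasuredLatenessNanos - mBurstPeriodNanos);
        }

    private:
        static constexpr int32_t kDriftNanos = 10 * 1000;   // 10 us, as in the patch

        int64_t framesToNanos(int64_t frames) const {
            return frames * 1000000000LL / mSampleRate;
        }
        void setPositionAndTime(int64_t position, int64_t nanoTime) {
            mMarkerPosition = position;
            mMarkerNanoTime = nanoTime;
        }

        int64_t mMarkerPosition = 0;
        int64_t mMarkerNanoTime = 0;
        int32_t mSampleRate = 48000;
        int32_t mBurstPeriodNanos = 0;
        int32_t mMaxMeasuredLatenessNanos = 0;
    };
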
diff --git a/media/libaaudio/src/client/IsochronousClockModel.h b/media/libaaudio/src/client/IsochronousClockModel.h
index 46ca48e..582bf4e 100644
--- a/media/libaaudio/src/client/IsochronousClockModel.h
+++ b/media/libaaudio/src/client/IsochronousClockModel.h
@@ -18,6 +18,7 @@
 #define ANDROID_AAUDIO_ISOCHRONOUS_CLOCK_MODEL_H
 
 #include <stdint.h>
+#include "utility/AudioClock.h"
 
 namespace aaudio {
 
@@ -79,6 +80,15 @@
     int64_t convertPositionToTime(int64_t framePosition) const;
 
     /**
+     * Calculate the latest estimated time that the stream will be at that position.
+     * The more jittery the clock, the later this estimate will be.
+     *
+     * @param framePosition
+     * @return time in nanoseconds
+     */
+    int64_t convertPositionToLatestTime(int64_t framePosition) const;
+
+    /**
      * Calculate an estimated position where the stream will be at the specified time.
      *
      * @param nanoTime time of interest
@@ -87,6 +97,18 @@
     int64_t convertTimeToPosition(int64_t nanoTime) const;
 
     /**
+     * Calculate the estimated position corresponding to the specified time, treating
+     * that time as the latest possible time.
+     *
+     * For the same nanoTime, this may return an earlier position than
+     * convertTimeToPosition().
+     *
+     * @param nanoTime
+     * @return position in frames
+     */
+    int64_t convertLatestTimeToPosition(int64_t nanoTime) const;
+
+    /**
      * @param framesDelta difference in frames
      * @return duration in nanoseconds
      */
@@ -101,6 +123,9 @@
     void dump() const;
 
 private:
+
+    int32_t getLateTimeOffsetNanos() const;
+
     enum clock_model_state_t {
         STATE_STOPPED,
         STATE_STARTING,
@@ -108,13 +133,23 @@
         STATE_RUNNING
     };
 
+    // Amount of time to drift forward when we get a late timestamp.
+    // This value was calculated to allow tracking of a clock with 50 ppm error.
+    static constexpr int32_t   kDriftNanos         =  10 * 1000;
+    // TODO review value of kExtraLatenessNanos
+    static constexpr int32_t   kExtraLatenessNanos = 100 * 1000;
+
     int64_t             mMarkerFramePosition;
     int64_t             mMarkerNanoTime;
     int32_t             mSampleRate;
     int32_t             mFramesPerBurst;
-    int32_t             mMaxLatenessInNanos;
+    int32_t             mBurstPeriodNanos;
+    // Includes mBurstPeriodNanos because we sample randomly over time.
+    int32_t             mMaxMeasuredLatenessNanos;
     clock_model_state_t mState;
 
+    int32_t             mTimestampCount = 0;
+
     void update();
 };
 
diff --git a/media/libaaudio/src/core/AudioStream.cpp b/media/libaaudio/src/core/AudioStream.cpp
index 9b77223..f5c97d8 100644
--- a/media/libaaudio/src/core/AudioStream.cpp
+++ b/media/libaaudio/src/core/AudioStream.cpp
@@ -473,7 +473,7 @@
 
 void AudioStream::MyPlayerBase::registerWithAudioManager() {
     if (!mRegistered) {
-        init(android::PLAYER_TYPE_AAUDIO, AUDIO_USAGE_MEDIA);
+        init(android::PLAYER_TYPE_AAUDIO, AAudioConvert_usageToInternal(mParent->getUsage()));
         mRegistered = true;
     }
 }
diff --git a/media/libaudioclient/AudioRecord.cpp b/media/libaudioclient/AudioRecord.cpp
index a1b04ca..271e186 100644
--- a/media/libaudioclient/AudioRecord.cpp
+++ b/media/libaudioclient/AudioRecord.cpp
@@ -884,7 +884,6 @@
 {
     // previous and new IAudioRecord sequence numbers are used to detect track re-creation
     uint32_t oldSequence = 0;
-    uint32_t newSequence;
 
     Proxy::Buffer buffer;
     status_t status = NO_ERROR;
@@ -902,7 +901,7 @@
             // start of lock scope
             AutoMutex lock(mLock);
 
-            newSequence = mSequence;
+            uint32_t newSequence = mSequence;
             // did previous obtainBuffer() fail due to media server death or voluntary invalidation?
             if (status == DEAD_OBJECT) {
                 // re-create track, unless someone else has already done so
@@ -939,6 +938,7 @@
     audioBuffer->frameCount = buffer.mFrameCount;
     audioBuffer->size = buffer.mFrameCount * mFrameSize;
     audioBuffer->raw = buffer.mRaw;
+    audioBuffer->sequence = oldSequence;
     if (nonContig != NULL) {
         *nonContig = buffer.mNonContig;
     }
@@ -959,6 +959,12 @@
     buffer.mRaw = audioBuffer->raw;
 
     AutoMutex lock(mLock);
+    if (audioBuffer->sequence != mSequence) {
+        // This Buffer came from a different IAudioRecord instance, so ignore the releaseBuffer
+        ALOGD("%s is no-op due to IAudioRecord sequence mismatch %u != %u",
+                __func__, audioBuffer->sequence, mSequence);
+        return;
+    }
     mInOverrun = false;
     mProxy->releaseBuffer(&buffer);
 
diff --git a/media/libaudioclient/AudioTrack.cpp b/media/libaudioclient/AudioTrack.cpp
index 4a80cd3..9a66d48 100644
--- a/media/libaudioclient/AudioTrack.cpp
+++ b/media/libaudioclient/AudioTrack.cpp
@@ -1665,7 +1665,6 @@
 {
     // previous and new IAudioTrack sequence numbers are used to detect track re-creation
     uint32_t oldSequence = 0;
-    uint32_t newSequence;
 
     Proxy::Buffer buffer;
     status_t status = NO_ERROR;
@@ -1682,7 +1681,7 @@
         {   // start of lock scope
             AutoMutex lock(mLock);
 
-            newSequence = mSequence;
+            uint32_t newSequence = mSequence;
             // did previous obtainBuffer() fail due to media server death or voluntary invalidation?
             if (status == DEAD_OBJECT) {
                 // re-create track, unless someone else has already done so
@@ -1729,6 +1728,7 @@
     audioBuffer->frameCount = buffer.mFrameCount;
     audioBuffer->size = buffer.mFrameCount * mFrameSize;
     audioBuffer->raw = buffer.mRaw;
+    audioBuffer->sequence = oldSequence;
     if (nonContig != NULL) {
         *nonContig = buffer.mNonContig;
     }
@@ -1752,6 +1752,12 @@
     buffer.mRaw = audioBuffer->raw;
 
     AutoMutex lock(mLock);
+    if (audioBuffer->sequence != mSequence) {
+        // This Buffer came from a different IAudioTrack instance, so ignore the releaseBuffer
+        ALOGD("%s is no-op due to IAudioTrack sequence mismatch %u != %u",
+                __func__, audioBuffer->sequence, mSequence);
+        return;
+    }
     mReleased += stepCount;
     mInUnderrun = false;
     mProxy->releaseBuffer(&buffer);
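
The AudioRecord and AudioTrack hunks above tag every buffer returned by obtainBuffer() with the IAudioRecord/IAudioTrack sequence number, so that a releaseBuffer() issued after the track was invalidated and re-created is detected and turned into a no-op instead of corrupting the new proxy's accounting. A minimal, hypothetical sketch of the same idea, outside of any Android class:

    #include <cstdint>
    #include <cstdio>
    #include <vector>

    // Every buffer handed out records the pool "generation".  If the pool is
    // re-created (generation bumped) before the buffer is returned, the release
    // is ignored, mirroring the sequence check added above.
    struct Buffer {
        std::vector<uint8_t> data;
        uint32_t sequence = 0;          // generation at obtain() time
    };

    class Pool {
    public:
        void invalidate() { ++mSequence; mOutstanding = 0; }   // e.g. server died

        Buffer obtain(size_t bytes) {
            ++mOutstanding;
            Buffer b;
            b.data.resize(bytes);
            b.sequence = mSequence;
            return b;
        }

        void release(const Buffer& b) {
            if (b.sequence != mSequence) {
                // Stale buffer from a previous generation: releasing it against the
                // current generation would unbalance the accounting, so do nothing.
                std::printf("release ignored: sequence %u != %u\n", b.sequence, mSequence);
                return;
            }
            --mOutstanding;
        }

    private:
        uint32_t mSequence = 0;
        int mOutstanding = 0;
    };
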
diff --git a/media/libaudioclient/IAudioFlinger.cpp b/media/libaudioclient/IAudioFlinger.cpp
index dd95e34..efa0512 100644
--- a/media/libaudioclient/IAudioFlinger.cpp
+++ b/media/libaudioclient/IAudioFlinger.cpp
@@ -1339,10 +1339,14 @@
         }
         case GET_EFFECT_DESCRIPTOR: {
             CHECK_INTERFACE(IAudioFlinger, data, reply);
-            effect_uuid_t uuid;
-            data.read(&uuid, sizeof(effect_uuid_t));
-            effect_uuid_t type;
-            data.read(&type, sizeof(effect_uuid_t));
+            effect_uuid_t uuid = {};
+            if (data.read(&uuid, sizeof(effect_uuid_t)) != NO_ERROR) {
+                android_errorWriteLog(0x534e4554, "139417189");
+            }
+            effect_uuid_t type = {};
+            if (data.read(&type, sizeof(effect_uuid_t)) != NO_ERROR) {
+                android_errorWriteLog(0x534e4554, "139417189");
+            }
             uint32_t preferredTypeFlag = data.readUint32();
             effect_descriptor_t desc = {};
             status_t status = getEffectDescriptor(&uuid, &type, preferredTypeFlag, &desc);
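
The GET_EFFECT_DESCRIPTOR hardening above follows a common Binder-service pattern: zero-initialize the destination, check the return value of Parcel::read(), and log the malformed transaction (android_errorWriteLog() records the referenced bug ID in the security event log) rather than acting on uninitialized stack memory. A generic sketch of that defensive-read shape, with a made-up payload type standing in for effect_uuid_t:

    #include <cstdint>
    #include <cstdio>
    #include <cstring>

    struct Uuid { uint8_t bytes[16]; };     // hypothetical stand-in for effect_uuid_t

    // Never read past the end of the buffer and never leave the destination
    // uninitialized, even on failure.
    bool readExact(const uint8_t* data, size_t size, size_t& offset,
                   void* out, size_t len) {
        std::memset(out, 0, len);           // value-initialize, like "= {}" above
        if (offset > size || size - offset < len) {
            std::fprintf(stderr, "malformed parcel: need %zu bytes at %zu/%zu\n",
                         len, offset, size);   // stands in for android_errorWriteLog()
            return false;
        }
        std::memcpy(out, data + offset, len);
        offset += len;
        return true;
    }

    int main() {
        uint8_t wire[8] = {};               // deliberately too short for a Uuid
        size_t offset = 0;
        Uuid uuid;
        if (!readExact(wire, sizeof(wire), offset, &uuid, sizeof(uuid))) {
            // uuid is still all zeros here, exactly like the "= {}" initializers above.
        }
        return 0;
    }
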
diff --git a/media/libaudioclient/IAudioPolicyService.cpp b/media/libaudioclient/IAudioPolicyService.cpp
index 64f0aca..2fb9491 100644
--- a/media/libaudioclient/IAudioPolicyService.cpp
+++ b/media/libaudioclient/IAudioPolicyService.cpp
@@ -1346,7 +1346,8 @@
         case GET_OFFLOAD_FORMATS_A2DP:
         case LIST_AUDIO_VOLUME_GROUPS:
         case GET_VOLUME_GROUP_FOR_ATTRIBUTES:
-        case SET_RTT_ENABLED: {
+        case SET_RTT_ENABLED:
+        case SET_ALLOWED_CAPTURE_POLICY: {
             if (!isServiceUid(IPCThreadState::self()->getCallingUid())) {
                 ALOGW("%s: transaction %d received from PID %d unauthorized UID %d",
                       __func__, code, IPCThreadState::self()->getCallingPid(),
diff --git a/media/libaudioclient/include/media/AudioRecord.h b/media/libaudioclient/include/media/AudioRecord.h
index a3c0fe4..574302b 100644
--- a/media/libaudioclient/include/media/AudioRecord.h
+++ b/media/libaudioclient/include/media/AudioRecord.h
@@ -92,6 +92,11 @@
             int8_t*     i8;         // unsigned 8-bit, offset by 0x80
                                     // input to obtainBuffer(): unused, output: pointer to buffer
         };
+
+        uint32_t    sequence;       // IAudioRecord instance sequence number, as of obtainBuffer().
+                                    // It is set by obtainBuffer() and confirmed by releaseBuffer().
+                                    // Not "user-serviceable".
+                                    // TODO Consider sp<IMemory> instead, or in addition to this.
     };
 
     /* As a convenience, if a callback is supplied, a handler thread
@@ -420,14 +425,17 @@
      *  frameCount  number of frames requested
      *  size        ignored
      *  raw         ignored
+     *  sequence    ignored
      * After error return:
      *  frameCount  0
      *  size        0
      *  raw         undefined
+     *  sequence    undefined
      * After successful return:
      *  frameCount  actual number of frames available, <= number requested
      *  size        actual number of bytes available
      *  raw         pointer to the buffer
+     *  sequence    IAudioRecord instance sequence number, as of obtainBuffer()
      */
 
             status_t    obtainBuffer(Buffer* audioBuffer, int32_t waitCount,
diff --git a/media/libaudioclient/include/media/AudioTrack.h b/media/libaudioclient/include/media/AudioTrack.h
index df5eabc..c607918 100644
--- a/media/libaudioclient/include/media/AudioTrack.h
+++ b/media/libaudioclient/include/media/AudioTrack.h
@@ -107,6 +107,11 @@
             int16_t*    i16;      // signed 16-bit
             int8_t*     i8;       // unsigned 8-bit, offset by 0x80
         };                        // input to obtainBuffer(): unused, output: pointer to buffer
+
+        uint32_t    sequence;       // IAudioTrack instance sequence number, as of obtainBuffer().
+                                    // It is set by obtainBuffer() and confirmed by releaseBuffer().
+                                    // Not "user-serviceable".
+                                    // TODO Consider sp<IMemory> instead, or in addition to this.
     };
 
     /* As a convenience, if a callback is supplied, a handler thread
@@ -692,14 +697,17 @@
      *  frameCount  number of [empty slots for] frames requested
      *  size        ignored
      *  raw         ignored
+     *  sequence    ignored
      * After error return:
      *  frameCount  0
      *  size        0
      *  raw         undefined
+     *  sequence    undefined
      * After successful return:
      *  frameCount  actual number of [empty slots for] frames available, <= number requested
      *  size        actual number of bytes available
      *  raw         pointer to the buffer
+     *  sequence    IAudioTrack instance sequence number, as of obtainBuffer()
      */
             status_t    obtainBuffer(Buffer* audioBuffer, int32_t waitCount,
                                 size_t *nonContig = NULL);
diff --git a/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp b/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp
index 3fbbc09..10dda19 100644
--- a/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp
+++ b/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp
@@ -302,6 +302,8 @@
         for (int i = 0; i < FIVEBAND_NUMBANDS; i++) {
             pContext->pBundledContext->bandGaindB[i] = EQNB_5BandSoftPresets[i];
         }
+        pContext->pBundledContext->effectProcessCalled      = 0;
+        pContext->pBundledContext->effectInDrain            = 0;
 
         ALOGV("\tEffectCreate - Calling LvmBundle_init");
         ret = LvmBundle_init(pContext);
@@ -394,6 +396,8 @@
 
     // Clear the instantiated flag for the effect
     // protect against the case where an effect is un-instantiated without being disabled
+
+    int &effectInDrain = pContext->pBundledContext->effectInDrain;
     if(pContext->EffectType == LVM_BASS_BOOST) {
         ALOGV("\tEffectRelease LVM_BASS_BOOST Clearing global instantiated flag");
         pSessionContext->bBassInstantiated = LVM_FALSE;
@@ -418,12 +422,16 @@
     } else if(pContext->EffectType == LVM_VOLUME) {
         ALOGV("\tEffectRelease LVM_VOLUME Clearing global instantiated flag");
         pSessionContext->bVolumeInstantiated = LVM_FALSE;
-        if (pContext->pBundledContext->bVolumeEnabled == LVM_TRUE){
+        // There is no samplesToExitCount for volume, so we also use the drain flag to check
+        // whether we should decrement the number of effects enabled.
+        if (pContext->pBundledContext->bVolumeEnabled == LVM_TRUE
+                || (effectInDrain & 1 << LVM_VOLUME) != 0) {
             pContext->pBundledContext->NumberEffectsEnabled--;
         }
     } else {
         ALOGV("\tLVM_ERROR : EffectRelease : Unsupported effect\n\n\n\n\n\n\n");
     }
+    effectInDrain &= ~(1 << pContext->EffectType); // no need to drain if released
 
     // Disable effect, in this case ignore errors (return codes)
     // if an effect has already been disabled
@@ -3124,8 +3132,9 @@
 
 int Effect_setEnabled(EffectContext *pContext, bool enabled)
 {
-    ALOGV("\tEffect_setEnabled() type %d, enabled %d", pContext->EffectType, enabled);
-
+    ALOGV("%s effectType %d, enabled %d, currently enabled %d", __func__,
+            pContext->EffectType, enabled, pContext->pBundledContext->NumberEffectsEnabled);
+    int &effectInDrain = pContext->pBundledContext->effectInDrain;
     if (enabled) {
         // Bass boost or Virtualizer can be temporarily disabled if playing over device speaker due
         // to their nature.
@@ -3139,6 +3148,7 @@
                 if(pContext->pBundledContext->SamplesToExitCountBb <= 0){
                     pContext->pBundledContext->NumberEffectsEnabled++;
                 }
+                effectInDrain &= ~(1 << LVM_BASS_BOOST);
                 pContext->pBundledContext->SamplesToExitCountBb =
                      (LVM_INT32)(pContext->pBundledContext->SamplesPerSecond*0.1);
                 pContext->pBundledContext->bBassEnabled = LVM_TRUE;
@@ -3152,6 +3162,7 @@
                 if(pContext->pBundledContext->SamplesToExitCountEq <= 0){
                     pContext->pBundledContext->NumberEffectsEnabled++;
                 }
+                effectInDrain &= ~(1 << LVM_EQUALIZER);
                 pContext->pBundledContext->SamplesToExitCountEq =
                      (LVM_INT32)(pContext->pBundledContext->SamplesPerSecond*0.1);
                 pContext->pBundledContext->bEqualizerEnabled = LVM_TRUE;
@@ -3164,6 +3175,7 @@
                 if(pContext->pBundledContext->SamplesToExitCountVirt <= 0){
                     pContext->pBundledContext->NumberEffectsEnabled++;
                 }
+                effectInDrain &= ~(1 << LVM_VIRTUALIZER);
                 pContext->pBundledContext->SamplesToExitCountVirt =
                      (LVM_INT32)(pContext->pBundledContext->SamplesPerSecond*0.1);
                 pContext->pBundledContext->bVirtualizerEnabled = LVM_TRUE;
@@ -3174,7 +3186,10 @@
                     ALOGV("\tEffect_setEnabled() LVM_VOLUME is already enabled");
                     return -EINVAL;
                 }
-                pContext->pBundledContext->NumberEffectsEnabled++;
+                if ((effectInDrain & 1 << LVM_VOLUME) == 0) {
+                    pContext->pBundledContext->NumberEffectsEnabled++;
+                }
+                effectInDrain &= ~(1 << LVM_VOLUME);
                 pContext->pBundledContext->bVolumeEnabled = LVM_TRUE;
                 break;
             default:
@@ -3192,6 +3207,7 @@
                     return -EINVAL;
                 }
                 pContext->pBundledContext->bBassEnabled = LVM_FALSE;
+                effectInDrain |= 1 << LVM_BASS_BOOST;
                 break;
             case LVM_EQUALIZER:
                 if (pContext->pBundledContext->bEqualizerEnabled == LVM_FALSE) {
@@ -3199,6 +3215,7 @@
                     return -EINVAL;
                 }
                 pContext->pBundledContext->bEqualizerEnabled = LVM_FALSE;
+                effectInDrain |= 1 << LVM_EQUALIZER;
                 break;
             case LVM_VIRTUALIZER:
                 if (pContext->pBundledContext->bVirtualizerEnabled == LVM_FALSE) {
@@ -3206,6 +3223,7 @@
                     return -EINVAL;
                 }
                 pContext->pBundledContext->bVirtualizerEnabled = LVM_FALSE;
+                effectInDrain |= 1 << LVM_VIRTUALIZER;
                 break;
             case LVM_VOLUME:
                 if (pContext->pBundledContext->bVolumeEnabled == LVM_FALSE) {
@@ -3213,6 +3231,7 @@
                     return -EINVAL;
                 }
                 pContext->pBundledContext->bVolumeEnabled = LVM_FALSE;
+                effectInDrain |= 1 << LVM_VOLUME;
                 break;
             default:
                 ALOGV("\tEffect_setEnabled() invalid effect type");
@@ -3283,6 +3302,38 @@
         ALOGV("\tLVM_ERROR : Effect_process() ERROR NULL INPUT POINTER OR FRAME COUNT IS WRONG");
         return -EINVAL;
     }
+
+    int &effectProcessCalled = pContext->pBundledContext->effectProcessCalled;
+    int &effectInDrain = pContext->pBundledContext->effectInDrain;
+    if ((effectProcessCalled & 1 << pContext->EffectType) != 0) {
+        ALOGW("Effect %d already called", pContext->EffectType);
+        const int undrainedEffects = effectInDrain & ~effectProcessCalled;
+        if ((undrainedEffects & 1 << LVM_BASS_BOOST) != 0) {
+            ALOGW("Draining BASS_BOOST");
+            pContext->pBundledContext->SamplesToExitCountBb = 0;
+            --pContext->pBundledContext->NumberEffectsEnabled;
+            effectInDrain &= ~(1 << LVM_BASS_BOOST);
+        }
+        if ((undrainedEffects & 1 << LVM_EQUALIZER) != 0) {
+            ALOGW("Draining EQUALIZER");
+            pContext->pBundledContext->SamplesToExitCountEq = 0;
+            --pContext->pBundledContext->NumberEffectsEnabled;
+            effectInDrain &= ~(1 << LVM_EQUALIZER);
+        }
+        if ((undrainedEffects & 1 << LVM_VIRTUALIZER) != 0) {
+            ALOGW("Draining VIRTUALIZER");
+            pContext->pBundledContext->SamplesToExitCountVirt = 0;
+            --pContext->pBundledContext->NumberEffectsEnabled;
+            effectInDrain &= ~(1 << LVM_VIRTUALIZER);
+        }
+        if ((undrainedEffects & 1 << LVM_VOLUME) != 0) {
+            ALOGW("Draining VOLUME");
+            --pContext->pBundledContext->NumberEffectsEnabled;
+            effectInDrain &= ~(1 << LVM_VOLUME);
+        }
+    }
+    effectProcessCalled |= 1 << pContext->EffectType;
+
     if ((pContext->pBundledContext->bBassEnabled == LVM_FALSE)&&
         (pContext->EffectType == LVM_BASS_BOOST)){
         //ALOGV("\tEffect_process() LVM_BASS_BOOST Effect is not enabled");
@@ -3291,9 +3342,12 @@
             //ALOGV("\tEffect_process: Waiting to turn off BASS_BOOST, %d samples left",
             //    pContext->pBundledContext->SamplesToExitCountBb);
         }
-        if(pContext->pBundledContext->SamplesToExitCountBb <= 0) {
+        if (pContext->pBundledContext->SamplesToExitCountBb <= 0) {
             status = -ENODATA;
-            pContext->pBundledContext->NumberEffectsEnabled--;
+            if ((effectInDrain & 1 << LVM_BASS_BOOST) != 0) {
+                pContext->pBundledContext->NumberEffectsEnabled--;
+                effectInDrain &= ~(1 << LVM_BASS_BOOST);
+            }
             ALOGV("\tEffect_process() this is the last frame for LVM_BASS_BOOST");
         }
     }
@@ -3301,7 +3355,10 @@
         (pContext->EffectType == LVM_VOLUME)){
         //ALOGV("\tEffect_process() LVM_VOLUME Effect is not enabled");
         status = -ENODATA;
-        pContext->pBundledContext->NumberEffectsEnabled--;
+        if ((effectInDrain & 1 << LVM_VOLUME) != 0) {
+            pContext->pBundledContext->NumberEffectsEnabled--;
+            effectInDrain &= ~(1 << LVM_VOLUME);
+        }
     }
     if ((pContext->pBundledContext->bEqualizerEnabled == LVM_FALSE)&&
         (pContext->EffectType == LVM_EQUALIZER)){
@@ -3311,9 +3368,12 @@
             //ALOGV("\tEffect_process: Waiting to turn off EQUALIZER, %d samples left",
             //    pContext->pBundledContext->SamplesToExitCountEq);
         }
-        if(pContext->pBundledContext->SamplesToExitCountEq <= 0) {
+        if (pContext->pBundledContext->SamplesToExitCountEq <= 0) {
             status = -ENODATA;
-            pContext->pBundledContext->NumberEffectsEnabled--;
+            if ((effectInDrain & 1 << LVM_EQUALIZER) != 0) {
+                pContext->pBundledContext->NumberEffectsEnabled--;
+                effectInDrain &= ~(1 << LVM_EQUALIZER);
+            }
             ALOGV("\tEffect_process() this is the last frame for LVM_EQUALIZER");
         }
     }
@@ -3326,9 +3386,12 @@
             //ALOGV("\tEffect_process: Waiting for to turn off VIRTUALIZER, %d samples left",
             //    pContext->pBundledContext->SamplesToExitCountVirt);
         }
-        if(pContext->pBundledContext->SamplesToExitCountVirt <= 0) {
+        if (pContext->pBundledContext->SamplesToExitCountVirt <= 0) {
             status = -ENODATA;
-            pContext->pBundledContext->NumberEffectsEnabled--;
+            if ((effectInDrain & 1 << LVM_VIRTUALIZER) != 0) {
+                pContext->pBundledContext->NumberEffectsEnabled--;
+                effectInDrain &= ~(1 << LVM_VIRTUALIZER);
+            }
             ALOGV("\tEffect_process() this is the last frame for LVM_VIRTUALIZER");
         }
     }
@@ -3337,8 +3400,18 @@
         pContext->pBundledContext->NumberEffectsCalled++;
     }
 
-    if(pContext->pBundledContext->NumberEffectsCalled ==
-       pContext->pBundledContext->NumberEffectsEnabled){
+    if (pContext->pBundledContext->NumberEffectsCalled >=
+            pContext->pBundledContext->NumberEffectsEnabled) {
+
+        // We expect the number of effects called to equal the number enabled in sequence
+        // (including draining effects). Warn if this is not the case due to inconsistent calls.
+        ALOGW_IF(pContext->pBundledContext->NumberEffectsCalled >
+                pContext->pBundledContext->NumberEffectsEnabled,
+                "%s Number of effects called %d is greater than number of effects enabled %d",
+                __func__, pContext->pBundledContext->NumberEffectsCalled,
+                pContext->pBundledContext->NumberEffectsEnabled);
+        effectProcessCalled = 0; // reset our consistency check.
+
         //ALOGV("\tEffect_process     Calling process with %d effects enabled, %d called: Effect %d",
         //pContext->pBundledContext->NumberEffectsEnabled,
         //pContext->pBundledContext->NumberEffectsCalled, pContext->EffectType);
diff --git a/media/libeffects/lvm/wrapper/Bundle/EffectBundle.h b/media/libeffects/lvm/wrapper/Bundle/EffectBundle.h
index 6af4554..e4aacd0 100644
--- a/media/libeffects/lvm/wrapper/Bundle/EffectBundle.h
+++ b/media/libeffects/lvm/wrapper/Bundle/EffectBundle.h
@@ -110,6 +110,14 @@
 #ifdef SUPPORT_MC
     LVM_INT32                       ChMask;
 #endif
+
+    /* Bitmask indicating whether a drain is in progress because the effect was disabled.
+       The bit corresponding to an effect is 1 << lvm_effect_en. */
+    int                             effectInDrain;
+
+    /* Bitmask indicating whether process() was called for a particular effect.
+       The bit corresponding to an effect is 1 << lvm_effect_en. */
+    int                             effectProcessCalled;
 };
 
 /* SessionContext : One session */
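
The two new bundle fields work as small per-effect bitmasks indexed by lvm_effect_en: a bit is set in effectInDrain when an effect is disabled and cleared when the drain is accounted for (or the effect is re-enabled or released), so NumberEffectsEnabled is decremented exactly once per disable even if Effect_process() is never called again for that effect. A stripped-down sketch of that accounting, with a hypothetical EffectType enum:

    #include <cstdio>

    enum EffectType { BASS_BOOST = 0, VIRTUALIZER, EQUALIZER, VOLUME };  // illustrative ids

    struct BundleState {
        int numberEffectsEnabled = 0;
        int effectInDrain = 0;           // bit (1 << effect) set while draining after disable

        void enable(EffectType e) {
            if ((effectInDrain & (1 << e)) == 0) {
                ++numberEffectsEnabled;  // only count it if it was not still draining
            }
            effectInDrain &= ~(1 << e);  // re-enabling cancels any pending drain
        }

        void disable(EffectType e) {
            effectInDrain |= 1 << e;     // defer the decrement until the drain completes
        }

        void drainComplete(EffectType e) {
            if ((effectInDrain & (1 << e)) != 0) {
                --numberEffectsEnabled;  // decrement exactly once per disable
                effectInDrain &= ~(1 << e);
            }
        }
    };

    int main() {
        BundleState s;
        s.enable(VOLUME);         // enabled: 1
        s.disable(VOLUME);        // still 1, drain pending
        s.enable(VOLUME);         // re-enabled before the drain: no double count
        s.disable(VOLUME);
        s.drainComplete(VOLUME);  // back to 0
        std::printf("enabled = %d\n", s.numberEffectsEnabled);
        return 0;
    }
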
diff --git a/media/libmedia/Android.bp b/media/libmedia/Android.bp
index 1d33590..b7dc033 100644
--- a/media/libmedia/Android.bp
+++ b/media/libmedia/Android.bp
@@ -225,6 +225,7 @@
 
     srcs: [
         "IDataSource.cpp",
+        "IHDCP.cpp",
         "BufferingSettings.cpp",
         "mediaplayer.cpp",
         "IMediaHTTPConnection.cpp",
@@ -290,6 +291,7 @@
         "libcamera_client",
         "libstagefright_foundation",
         "libgui",
+        "libui",
         "libdl",
         "libaudioutils",
         "libaudioclient",
diff --git a/media/libmedia/IHDCP.cpp b/media/libmedia/IHDCP.cpp
new file mode 100644
index 0000000..a46017f
--- /dev/null
+++ b/media/libmedia/IHDCP.cpp
@@ -0,0 +1,359 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "IHDCP"
+#include <utils/Log.h>
+
+#include <binder/Parcel.h>
+#include <media/IHDCP.h>
+#include <media/stagefright/MediaErrors.h>
+#include <media/stagefright/foundation/ADebug.h>
+
+namespace android {
+
+enum {
+    OBSERVER_NOTIFY = IBinder::FIRST_CALL_TRANSACTION,
+    HDCP_SET_OBSERVER,
+    HDCP_INIT_ASYNC,
+    HDCP_SHUTDOWN_ASYNC,
+    HDCP_GET_CAPS,
+    HDCP_ENCRYPT,
+    HDCP_ENCRYPT_NATIVE,
+    HDCP_DECRYPT,
+};
+
+struct BpHDCPObserver : public BpInterface<IHDCPObserver> {
+    explicit BpHDCPObserver(const sp<IBinder> &impl)
+        : BpInterface<IHDCPObserver>(impl) {
+    }
+
+    virtual void notify(
+            int msg, int ext1, int ext2, const Parcel *obj) {
+        Parcel data, reply;
+        data.writeInterfaceToken(IHDCPObserver::getInterfaceDescriptor());
+        data.writeInt32(msg);
+        data.writeInt32(ext1);
+        data.writeInt32(ext2);
+        if (obj && obj->dataSize() > 0) {
+            data.appendFrom(const_cast<Parcel *>(obj), 0, obj->dataSize());
+        }
+        remote()->transact(OBSERVER_NOTIFY, data, &reply, IBinder::FLAG_ONEWAY);
+    }
+};
+
+IMPLEMENT_META_INTERFACE(HDCPObserver, "android.hardware.IHDCPObserver");
+
+struct BpHDCP : public BpInterface<IHDCP> {
+    explicit BpHDCP(const sp<IBinder> &impl)
+        : BpInterface<IHDCP>(impl) {
+    }
+
+    virtual status_t setObserver(const sp<IHDCPObserver> &observer) {
+        Parcel data, reply;
+        data.writeInterfaceToken(IHDCP::getInterfaceDescriptor());
+        data.writeStrongBinder(IInterface::asBinder(observer));
+        remote()->transact(HDCP_SET_OBSERVER, data, &reply);
+        return reply.readInt32();
+    }
+
+    virtual status_t initAsync(const char *host, unsigned port) {
+        Parcel data, reply;
+        data.writeInterfaceToken(IHDCP::getInterfaceDescriptor());
+        data.writeCString(host);
+        data.writeInt32(port);
+        remote()->transact(HDCP_INIT_ASYNC, data, &reply);
+        return reply.readInt32();
+    }
+
+    virtual status_t shutdownAsync() {
+        Parcel data, reply;
+        data.writeInterfaceToken(IHDCP::getInterfaceDescriptor());
+        remote()->transact(HDCP_SHUTDOWN_ASYNC, data, &reply);
+        return reply.readInt32();
+    }
+
+    virtual uint32_t getCaps() {
+        Parcel data, reply;
+        data.writeInterfaceToken(IHDCP::getInterfaceDescriptor());
+        remote()->transact(HDCP_GET_CAPS, data, &reply);
+        return reply.readInt32();
+    }
+
+    virtual status_t encrypt(
+            const void *inData, size_t size, uint32_t streamCTR,
+            uint64_t *outInputCTR, void *outData) {
+        Parcel data, reply;
+        data.writeInterfaceToken(IHDCP::getInterfaceDescriptor());
+        data.writeInt32(size);
+        data.write(inData, size);
+        data.writeInt32(streamCTR);
+        remote()->transact(HDCP_ENCRYPT, data, &reply);
+
+        status_t err = reply.readInt32();
+
+        if (err != OK) {
+            *outInputCTR = 0;
+
+            return err;
+        }
+
+        *outInputCTR = reply.readInt64();
+        reply.read(outData, size);
+
+        return err;
+    }
+
+    virtual status_t encryptNative(
+            const sp<GraphicBuffer> &graphicBuffer,
+            size_t offset, size_t size, uint32_t streamCTR,
+            uint64_t *outInputCTR, void *outData) {
+        Parcel data, reply;
+        data.writeInterfaceToken(IHDCP::getInterfaceDescriptor());
+        data.write(*graphicBuffer);
+        data.writeInt32(offset);
+        data.writeInt32(size);
+        data.writeInt32(streamCTR);
+        remote()->transact(HDCP_ENCRYPT_NATIVE, data, &reply);
+
+        status_t err = reply.readInt32();
+
+        if (err != OK) {
+            *outInputCTR = 0;
+            return err;
+        }
+
+        *outInputCTR = reply.readInt64();
+        reply.read(outData, size);
+
+        return err;
+    }
+
+    virtual status_t decrypt(
+            const void *inData, size_t size,
+            uint32_t streamCTR, uint64_t inputCTR,
+            void *outData) {
+        Parcel data, reply;
+        data.writeInterfaceToken(IHDCP::getInterfaceDescriptor());
+        data.writeInt32(size);
+        data.write(inData, size);
+        data.writeInt32(streamCTR);
+        data.writeInt64(inputCTR);
+        remote()->transact(HDCP_DECRYPT, data, &reply);
+
+        status_t err = reply.readInt32();
+
+        if (err != OK) {
+            return err;
+        }
+
+        reply.read(outData, size);
+
+        return err;
+    }
+};
+
+IMPLEMENT_META_INTERFACE(HDCP, "android.hardware.IHDCP");
+
+status_t BnHDCPObserver::onTransact(
+        uint32_t code, const Parcel &data, Parcel *reply, uint32_t flags) {
+    switch (code) {
+        case OBSERVER_NOTIFY:
+        {
+            CHECK_INTERFACE(IHDCPObserver, data, reply);
+
+            int msg = data.readInt32();
+            int ext1 = data.readInt32();
+            int ext2 = data.readInt32();
+
+            Parcel obj;
+            if (data.dataAvail() > 0) {
+                obj.appendFrom(
+                        const_cast<Parcel *>(&data),
+                        data.dataPosition(),
+                        data.dataAvail());
+            }
+
+            notify(msg, ext1, ext2, &obj);
+
+            return OK;
+        }
+
+        default:
+            return BBinder::onTransact(code, data, reply, flags);
+    }
+}
+
+status_t BnHDCP::onTransact(
+        uint32_t code, const Parcel &data, Parcel *reply, uint32_t flags) {
+    switch (code) {
+        case HDCP_SET_OBSERVER:
+        {
+            CHECK_INTERFACE(IHDCP, data, reply);
+
+            sp<IHDCPObserver> observer =
+                interface_cast<IHDCPObserver>(data.readStrongBinder());
+
+            reply->writeInt32(setObserver(observer));
+            return OK;
+        }
+
+        case HDCP_INIT_ASYNC:
+        {
+            CHECK_INTERFACE(IHDCP, data, reply);
+
+            const char *host = data.readCString();
+            unsigned port = data.readInt32();
+
+            reply->writeInt32(initAsync(host, port));
+            return OK;
+        }
+
+        case HDCP_SHUTDOWN_ASYNC:
+        {
+            CHECK_INTERFACE(IHDCP, data, reply);
+
+            reply->writeInt32(shutdownAsync());
+            return OK;
+        }
+
+        case HDCP_GET_CAPS:
+        {
+            CHECK_INTERFACE(IHDCP, data, reply);
+
+            reply->writeInt32(getCaps());
+            return OK;
+        }
+
+        case HDCP_ENCRYPT:
+        {
+            CHECK_INTERFACE(IHDCP, data, reply);
+
+            size_t size = data.readInt32();
+            void *inData = NULL;
+            // watch out for overflow
+            if (size <= SIZE_MAX / 2) {
+                inData = malloc(2 * size);
+            }
+            if (inData == NULL) {
+                reply->writeInt32(ERROR_OUT_OF_RANGE);
+                return OK;
+            }
+
+            void *outData = (uint8_t *)inData + size;
+
+            status_t err = data.read(inData, size);
+            if (err != OK) {
+                free(inData);
+                reply->writeInt32(err);
+                return OK;
+            }
+
+            uint32_t streamCTR = data.readInt32();
+            uint64_t inputCTR;
+            err = encrypt(inData, size, streamCTR, &inputCTR, outData);
+
+            reply->writeInt32(err);
+
+            if (err == OK) {
+                reply->writeInt64(inputCTR);
+                reply->write(outData, size);
+            }
+
+            free(inData);
+            inData = outData = NULL;
+
+            return OK;
+        }
+
+        case HDCP_ENCRYPT_NATIVE:
+        {
+            CHECK_INTERFACE(IHDCP, data, reply);
+
+            sp<GraphicBuffer> graphicBuffer = new GraphicBuffer();
+            data.read(*graphicBuffer);
+            size_t offset = data.readInt32();
+            size_t size = data.readInt32();
+            uint32_t streamCTR = data.readInt32();
+            void *outData = NULL;
+            uint64_t inputCTR;
+
+            status_t err = ERROR_OUT_OF_RANGE;
+
+            outData = malloc(size);
+
+            if (outData != NULL) {
+                err = encryptNative(graphicBuffer, offset, size,
+                                             streamCTR, &inputCTR, outData);
+            }
+
+            reply->writeInt32(err);
+
+            if (err == OK) {
+                reply->writeInt64(inputCTR);
+                reply->write(outData, size);
+            }
+
+            free(outData);
+            outData = NULL;
+
+            return OK;
+        }
+
+        case HDCP_DECRYPT:
+        {
+            CHECK_INTERFACE(IHDCP, data, reply);
+
+            size_t size = data.readInt32();
+            size_t bufSize = 2 * size;
+
+            // watch out for overflow
+            void *inData = NULL;
+            if (bufSize > size) {
+                inData = malloc(bufSize);
+            }
+
+            if (inData == NULL) {
+                reply->writeInt32(ERROR_OUT_OF_RANGE);
+                return OK;
+            }
+
+            void *outData = (uint8_t *)inData + size;
+
+            data.read(inData, size);
+
+            uint32_t streamCTR = data.readInt32();
+            uint64_t inputCTR = data.readInt64();
+            status_t err = decrypt(inData, size, streamCTR, inputCTR, outData);
+
+            reply->writeInt32(err);
+
+            if (err == OK) {
+                reply->write(outData, size);
+            }
+
+            free(inData);
+            inData = outData = NULL;
+
+            return OK;
+        }
+
+        default:
+            return BBinder::onTransact(code, data, reply, flags);
+    }
+}
+
+}  // namespace android
diff --git a/media/libmedia/IMediaPlayerService.cpp b/media/libmedia/IMediaPlayerService.cpp
index bd18a40..fd19647 100644
--- a/media/libmedia/IMediaPlayerService.cpp
+++ b/media/libmedia/IMediaPlayerService.cpp
@@ -20,6 +20,7 @@
 
 #include <binder/Parcel.h>
 #include <binder/IMemory.h>
+#include <media/IHDCP.h>
 #include <media/IMediaCodecList.h>
 #include <media/IMediaHTTPService.h>
 #include <media/IMediaPlayerService.h>
@@ -39,6 +40,7 @@
     CREATE = IBinder::FIRST_CALL_TRANSACTION,
     CREATE_MEDIA_RECORDER,
     CREATE_METADATA_RETRIEVER,
+    MAKE_HDCP,
     ADD_BATTERY_DATA,
     PULL_BATTERY_DATA,
     LISTEN_FOR_REMOTE_DISPLAY,
@@ -81,6 +83,14 @@
         return interface_cast<IMediaRecorder>(reply.readStrongBinder());
     }
 
+    virtual sp<IHDCP> makeHDCP(bool createEncryptionModule) {
+        Parcel data, reply;
+        data.writeInterfaceToken(IMediaPlayerService::getInterfaceDescriptor());
+        data.writeInt32(createEncryptionModule);
+        remote()->transact(MAKE_HDCP, data, &reply);
+        return interface_cast<IHDCP>(reply.readStrongBinder());
+    }
+
     virtual void addBatteryData(uint32_t params) {
         Parcel data, reply;
         data.writeInterfaceToken(IMediaPlayerService::getInterfaceDescriptor());
@@ -144,6 +154,13 @@
             reply->writeStrongBinder(IInterface::asBinder(retriever));
             return NO_ERROR;
         } break;
+        case MAKE_HDCP: {
+            CHECK_INTERFACE(IMediaPlayerService, data, reply);
+            bool createEncryptionModule = data.readInt32();
+            sp<IHDCP> hdcp = makeHDCP(createEncryptionModule);
+            reply->writeStrongBinder(IInterface::asBinder(hdcp));
+            return NO_ERROR;
+        } break;
         case ADD_BATTERY_DATA: {
             CHECK_INTERFACE(IMediaPlayerService, data, reply);
             uint32_t params = data.readInt32();
diff --git a/media/libmedia/IResourceManagerService.cpp b/media/libmedia/IResourceManagerService.cpp
index 9724fc1..f8a0a14 100644
--- a/media/libmedia/IResourceManagerService.cpp
+++ b/media/libmedia/IResourceManagerService.cpp
@@ -32,6 +32,7 @@
     CONFIG = IBinder::FIRST_CALL_TRANSACTION,
     ADD_RESOURCE,
     REMOVE_RESOURCE,
+    REMOVE_CLIENT,
     RECLAIM_RESOURCE,
 };
 
@@ -72,12 +73,14 @@
 
     virtual void addResource(
             int pid,
+            int uid,
             int64_t clientId,
             const sp<IResourceManagerClient> client,
             const Vector<MediaResource> &resources) {
         Parcel data, reply;
         data.writeInterfaceToken(IResourceManagerService::getInterfaceDescriptor());
         data.writeInt32(pid);
+        data.writeInt32(uid);
         data.writeInt64(clientId);
         data.writeStrongBinder(IInterface::asBinder(client));
         writeToParcel(&data, resources);
@@ -85,13 +88,23 @@
         remote()->transact(ADD_RESOURCE, data, &reply);
     }
 
-    virtual void removeResource(int pid, int64_t clientId) {
+    virtual void removeResource(int pid, int64_t clientId, const Vector<MediaResource> &resources) {
+        Parcel data, reply;
+        data.writeInterfaceToken(IResourceManagerService::getInterfaceDescriptor());
+        data.writeInt32(pid);
+        data.writeInt64(clientId);
+        writeToParcel(&data, resources);
+
+        remote()->transact(REMOVE_RESOURCE, data, &reply);
+    }
+
+    virtual void removeClient(int pid, int64_t clientId) {
         Parcel data, reply;
         data.writeInterfaceToken(IResourceManagerService::getInterfaceDescriptor());
         data.writeInt32(pid);
         data.writeInt64(clientId);
 
-        remote()->transact(REMOVE_RESOURCE, data, &reply);
+        remote()->transact(REMOVE_CLIENT, data, &reply);
     }
 
     virtual bool reclaimResource(int callingPid, const Vector<MediaResource> &resources) {
@@ -129,6 +142,7 @@
         case ADD_RESOURCE: {
             CHECK_INTERFACE(IResourceManagerService, data, reply);
             int pid = data.readInt32();
+            int uid = data.readInt32();
             int64_t clientId = data.readInt64();
             sp<IResourceManagerClient> client(
                     interface_cast<IResourceManagerClient>(data.readStrongBinder()));
@@ -137,7 +151,7 @@
             }
             Vector<MediaResource> resources;
             readFromParcel(data, &resources);
-            addResource(pid, clientId, client, resources);
+            addResource(pid, uid, clientId, client, resources);
             return NO_ERROR;
         } break;
 
@@ -145,7 +159,17 @@
             CHECK_INTERFACE(IResourceManagerService, data, reply);
             int pid = data.readInt32();
             int64_t clientId = data.readInt64();
-            removeResource(pid, clientId);
+            Vector<MediaResource> resources;
+            readFromParcel(data, &resources);
+            removeResource(pid, clientId, resources);
+            return NO_ERROR;
+        } break;
+
+        case REMOVE_CLIENT: {
+            CHECK_INTERFACE(IResourceManagerService, data, reply);
+            int pid = data.readInt32();
+            int64_t clientId = data.readInt64();
+            removeClient(pid, clientId);
             return NO_ERROR;
         } break;
 
diff --git a/media/libmedia/Visualizer.cpp b/media/libmedia/Visualizer.cpp
index cb8d375..2bf0802 100644
--- a/media/libmedia/Visualizer.cpp
+++ b/media/libmedia/Visualizer.cpp
@@ -77,10 +77,13 @@
     if (t != 0) {
         if (enabled) {
             if (t->exitPending()) {
+                mCaptureLock.unlock();
                 if (t->requestExitAndWait() == WOULD_BLOCK) {
+                    mCaptureLock.lock();
                     ALOGE("Visualizer::enable() called from thread");
                     return INVALID_OPERATION;
                 }
+                mCaptureLock.lock();
             }
         }
         t->mLock.lock();
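
The Visualizer fix above releases mCaptureLock around requestExitAndWait() so the capture thread being joined can still take that lock and observe the exit request; holding a lock across a join of a thread that needs the same lock is a classic self-deadlock. A generic sketch of the pattern using std::mutex and std::thread (not the Android Thread class):

    #include <mutex>
    #include <thread>

    std::mutex gCaptureLock;

    // Worker that must take gCaptureLock before it can see the exit request.
    void captureLoop(const bool& exitRequested) {
        while (true) {
            std::lock_guard<std::mutex> lock(gCaptureLock);
            if (exitRequested) return;
            // ... produce one capture ...
        }
    }

    void stopCapture(std::thread& worker, bool& exitRequested) {
        std::unique_lock<std::mutex> lock(gCaptureLock);
        exitRequested = true;
        // Joining while still holding gCaptureLock would deadlock, because the
        // worker needs that lock to see exitRequested.  Mirror the patch: unlock,
        // wait for the thread, then re-acquire before touching shared state again.
        lock.unlock();
        worker.join();
        lock.lock();
    }

    int main() {
        bool exitRequested = false;
        std::thread worker(captureLoop, std::cref(exitRequested));
        stopCapture(worker, exitRequested);
        return 0;
    }
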
diff --git a/media/libmedia/include/media/IHDCP.h b/media/libmedia/include/media/IHDCP.h
new file mode 100644
index 0000000..352561e
--- /dev/null
+++ b/media/libmedia/include/media/IHDCP.h
@@ -0,0 +1,120 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <binder/IInterface.h>
+#include <media/hardware/HDCPAPI.h>
+#include <media/stagefright/foundation/ABase.h>
+#include <ui/GraphicBuffer.h>
+
+namespace android {
+
+struct IHDCPObserver : public IInterface {
+    DECLARE_META_INTERFACE(HDCPObserver);
+
+    virtual void notify(
+            int msg, int ext1, int ext2, const Parcel *obj) = 0;
+
+private:
+    DISALLOW_EVIL_CONSTRUCTORS(IHDCPObserver);
+};
+
+struct IHDCP : public IInterface {
+    DECLARE_META_INTERFACE(HDCP);
+
+    // Called to specify the observer that receives asynchronous notifications
+    // from the HDCP implementation to signal completion/failure of asynchronous
+    // operations (such as initialization) or out of band events.
+    virtual status_t setObserver(const sp<IHDCPObserver> &observer) = 0;
+
+    // Request to setup an HDCP session with the specified host listening
+    // on the specified port.
+    virtual status_t initAsync(const char *host, unsigned port) = 0;
+
+    // Request to shutdown the active HDCP session.
+    virtual status_t shutdownAsync() = 0;
+
+    // Returns the capability bitmask of this HDCP session.
+    //   Possible return values (please refer to HDCPAPI.h):
+    //   HDCP_CAPS_ENCRYPT: mandatory, meaning the HDCP module can encrypt
+    //   from an input byte-array buffer to an output byte-array buffer
+    //   HDCP_CAPS_ENCRYPT_NATIVE: the HDCP module supports encryption from
+    //   a native buffer to an output byte-array buffer. The format of the
+    //   input native buffer is specific to vendor's encoder implementation.
+    //   It is the same format as that used by the encoder when
+    //   "storeMetaDataInBuffers" extension is enabled on its output port.
+    virtual uint32_t getCaps() = 0;
+
+    // ENCRYPTION only:
+    // Encrypt data according to the HDCP spec. "size" bytes of data are
+    // available at "inData" (virtual address), "size" may not be a multiple
+    // of 128 bits (16 bytes). An equal number of encrypted bytes should be
+    // written to the buffer at "outData" (virtual address).
+    // This operation is to be synchronous, i.e. this call does not return
+    // until outData contains size bytes of encrypted data.
+    // streamCTR will be assigned by the caller (to 0 for the first PES stream,
+    // 1 for the second and so on)
+    // inputCTR _will_be_maintained_by_the_callee_ for each PES stream.
+    virtual status_t encrypt(
+            const void *inData, size_t size, uint32_t streamCTR,
+            uint64_t *outInputCTR, void *outData) = 0;
+
+    // Encrypt data according to the HDCP spec. "size" bytes of data starting
+    // at location "offset" are available in "buffer" (buffer handle). "size"
+    // may not be a multiple of 128 bits (16 bytes). An equal number of
+    // encrypted bytes should be written to the buffer at "outData" (virtual
+    // address). This operation is to be synchronous, i.e. this call does not
+    // return until outData contains size bytes of encrypted data.
+    // streamCTR will be assigned by the caller (to 0 for the first PES stream,
+    // 1 for the second and so on)
+    // inputCTR _will_be_maintained_by_the_callee_ for each PES stream.
+    virtual status_t encryptNative(
+            const sp<GraphicBuffer> &graphicBuffer,
+            size_t offset, size_t size, uint32_t streamCTR,
+            uint64_t *outInputCTR, void *outData) = 0;
+
+    // DECRYPTION only:
+    // Decrypt data according to the HDCP spec.
+    // "size" bytes of encrypted data are available at "inData"
+    // (virtual address), "size" may not be a multiple of 128 bits (16 bytes).
+    // An equal number of decrypted bytes should be written to the buffer
+    // at "outData" (virtual address).
+    // This operation is to be synchronous, i.e. this call does not return
+    // until outData contains size bytes of decrypted data.
+    // Both streamCTR and inputCTR will be provided by the caller.
+    virtual status_t decrypt(
+            const void *inData, size_t size,
+            uint32_t streamCTR, uint64_t inputCTR,
+            void *outData) = 0;
+
+private:
+    DISALLOW_EVIL_CONSTRUCTORS(IHDCP);
+};
+
+struct BnHDCPObserver : public BnInterface<IHDCPObserver> {
+    virtual status_t onTransact(
+            uint32_t code, const Parcel &data, Parcel *reply,
+            uint32_t flags = 0);
+};
+
+struct BnHDCP : public BnInterface<IHDCP> {
+    virtual status_t onTransact(
+            uint32_t code, const Parcel &data, Parcel *reply,
+            uint32_t flags = 0);
+};
+
+}  // namespace android
+
+
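
Taken together with the makeHDCP() transaction restored in IMediaPlayerService.cpp above, the expected client-side flow is: obtain an IHDCP instance from the media player service, register an IHDCPObserver for the asynchronous init result, call initAsync(), and only then drive encrypt()/encryptNative() per PES stream. A hedged sketch of that sequence; the "media.player" service name and the logging are assumptions for illustration, while the interface calls come from the declarations above:

    #define LOG_TAG "HdcpClientExample"
    #include <utils/Log.h>
    #include <binder/IServiceManager.h>
    #include <media/IHDCP.h>
    #include <media/IMediaPlayerService.h>

    using namespace android;

    struct MyObserver : public BnHDCPObserver {
        void notify(int msg, int ext1, int ext2, const Parcel* /*obj*/) override {
            // The HDCP module reports init completion/failure and other events here.
            ALOGI("HDCP notify msg=%d ext1=%d ext2=%d", msg, ext1, ext2);
        }
    };

    sp<IHDCP> setUpHdcpSource(const char* host, unsigned port) {
        sp<IMediaPlayerService> mps = interface_cast<IMediaPlayerService>(
                defaultServiceManager()->getService(String16("media.player")));
        if (mps == nullptr) return nullptr;

        sp<IHDCP> hdcp = mps->makeHDCP(true /* createEncryptionModule */);
        if (hdcp == nullptr) return nullptr;

        hdcp->setObserver(new MyObserver());
        if (hdcp->initAsync(host, port) != OK) return nullptr;
        // Wait for the observer callback before calling encrypt()/encryptNative();
        // getCaps() tells the caller whether native-buffer input is supported.
        return hdcp;
    }
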
diff --git a/media/libmedia/include/media/IMediaPlayerService.h b/media/libmedia/include/media/IMediaPlayerService.h
index f2e2060..0aa3944 100644
--- a/media/libmedia/include/media/IMediaPlayerService.h
+++ b/media/libmedia/include/media/IMediaPlayerService.h
@@ -31,6 +31,7 @@
 namespace android {
 
 class IMediaPlayer;
+struct IHDCP;
 class IMediaCodecList;
 struct IMediaHTTPService;
 class IMediaRecorder;
@@ -48,6 +49,7 @@
     virtual sp<IMediaMetadataRetriever> createMetadataRetriever() = 0;
     virtual sp<IMediaPlayer> create(const sp<IMediaPlayerClient>& client,
             audio_session_t audioSessionId = AUDIO_SESSION_ALLOCATE) = 0;
+    virtual sp<IHDCP>           makeHDCP(bool createEncryptionModule) = 0;
     virtual sp<IMediaCodecList> getCodecList() const = 0;
 
     // Connects to a remote display.
diff --git a/media/libmedia/include/media/IResourceManagerService.h b/media/libmedia/include/media/IResourceManagerService.h
index 1e4f6de..8992f8b 100644
--- a/media/libmedia/include/media/IResourceManagerService.h
+++ b/media/libmedia/include/media/IResourceManagerService.h
@@ -39,11 +39,15 @@
 
     virtual void addResource(
             int pid,
+            int uid,
             int64_t clientId,
             const sp<IResourceManagerClient> client,
             const Vector<MediaResource> &resources) = 0;
 
-    virtual void removeResource(int pid, int64_t clientId) = 0;
+    virtual void removeResource(int pid, int64_t clientId,
+            const Vector<MediaResource> &resources) = 0;
+
+    virtual void removeClient(int pid, int64_t clientId) = 0;
 
     virtual bool reclaimResource(
             int callingPid,
diff --git a/media/libmedia/include/media/MediaResource.h b/media/libmedia/include/media/MediaResource.h
index e1fdb9b..10a07bb 100644
--- a/media/libmedia/include/media/MediaResource.h
+++ b/media/libmedia/include/media/MediaResource.h
@@ -31,12 +31,13 @@
         kNonSecureCodec,
         kGraphicMemory,
         kCpuBoost,
+        kBattery,
     };
 
     enum SubType {
         kUnspecifiedSubType = 0,
         kAudioCodec,
-        kVideoCodec
+        kVideoCodec,
     };
 
     MediaResource();
@@ -62,6 +63,8 @@
         case MediaResource::kSecureCodec:    return "secure-codec";
         case MediaResource::kNonSecureCodec: return "non-secure-codec";
         case MediaResource::kGraphicMemory:  return "graphic-memory";
+        case MediaResource::kCpuBoost:       return "cpu-boost";
+        case MediaResource::kBattery:        return "battery";
         default:                             return def;
     }
 }
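
The resource-manager changes above split the old removeResource(pid, clientId) into removeResource(pid, clientId, resources), which releases a specific subset of resources, and removeClient(pid, clientId), which drops everything the client holds; addResource() additionally carries the client uid so usage such as the new kBattery resource can be attributed per app. A hedged sketch of how a codec-like client might drive this interface; the service-name lookup and the MediaResource constructor arguments are assumptions for illustration and have not been checked against this tree:

    #include <binder/IServiceManager.h>
    #include <media/IResourceManagerService.h>
    #include <media/MediaResource.h>

    using namespace android;

    void exampleResourceLifecycle(const sp<IResourceManagerClient>& client,
                                  int pid, int uid, int64_t clientId) {
        sp<IResourceManagerService> service = interface_cast<IResourceManagerService>(
                defaultServiceManager()->getService(String16("media.resource_manager")));
        if (service == nullptr) return;

        // Register what this client holds; the uid argument is new in this change,
        // letting the service attribute per-app usage such as MediaResource::kBattery.
        Vector<MediaResource> resources;
        resources.push_back(MediaResource(MediaResource::kNonSecureCodec,
                                          MediaResource::kVideoCodec, 1 /* count */));
        service->addResource(pid, uid, clientId, client, resources);

        // Give back just these resources while keeping the client registered ...
        service->removeResource(pid, clientId, resources);

        // ... or drop everything associated with this client in one call.
        service->removeClient(pid, clientId);
    }
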
diff --git a/media/libmedia/xsd/vts/Android.bp b/media/libmedia/xsd/vts/Android.bp
index b590a12..4739504 100644
--- a/media/libmedia/xsd/vts/Android.bp
+++ b/media/libmedia/xsd/vts/Android.bp
@@ -24,6 +24,7 @@
         "libxml2",
     ],
     shared_libs: [
+        "libbase",
         "liblog",
     ],
     cflags: [
diff --git a/media/libmedia/xsd/vts/ValidateMediaProfiles.cpp b/media/libmedia/xsd/vts/ValidateMediaProfiles.cpp
index ff9b060..7729d52 100644
--- a/media/libmedia/xsd/vts/ValidateMediaProfiles.cpp
+++ b/media/libmedia/xsd/vts/ValidateMediaProfiles.cpp
@@ -14,6 +14,10 @@
  * limitations under the License.
  */
 
+#include <string>
+
+#include <android-base/file.h>
+#include <android-base/properties.h>
 #include "utility/ValidateXml.h"
 
 TEST(CheckConfig, mediaProfilesValidation) {
@@ -21,8 +25,12 @@
                    "Verify that the media profiles file "
                    "is valid according to the schema");
 
-    const char* location = "/vendor/etc";
+    std::string mediaSettingsPath = android::base::GetProperty("media.settings.xml", "");
+    if (mediaSettingsPath.empty()) {
+        mediaSettingsPath.assign("/vendor/etc/media_profiles_V1_0.xml");
+    }
 
-    EXPECT_ONE_VALID_XML_MULTIPLE_LOCATIONS("media_profiles_V1_0.xml", {location},
+    EXPECT_ONE_VALID_XML_MULTIPLE_LOCATIONS(android::base::Basename(mediaSettingsPath).c_str(),
+                                            {android::base::Dirname(mediaSettingsPath).c_str()},
                                             "/data/local/tmp/media_profiles.xsd");
 }
diff --git a/media/libmediaplayer2/nuplayer2/GenericSource2.cpp b/media/libmediaplayer2/nuplayer2/GenericSource2.cpp
index 9552580..d09a392 100644
--- a/media/libmediaplayer2/nuplayer2/GenericSource2.cpp
+++ b/media/libmediaplayer2/nuplayer2/GenericSource2.cpp
@@ -1089,7 +1089,6 @@
         // If ref>0, there must be an observer, or it'll crash at release().
         // TODO: MediaBuffer might need to be revised to ease such need.
         mb->setObserver(this);
-        // setMediaBufferBase() interestingly doesn't increment the ref count on its own.
         // Extra increment (since we want to keep mb alive and attached to ab beyond this function
         // call). This is to counter the effect of mb->release() towards the end.
         mb->add_ref();
diff --git a/media/libmediaplayerservice/Android.bp b/media/libmediaplayerservice/Android.bp
index 6709585..838f980 100644
--- a/media/libmediaplayerservice/Android.bp
+++ b/media/libmediaplayerservice/Android.bp
@@ -3,10 +3,12 @@
     srcs: [
         "ActivityManager.cpp",
         "DeathNotifier.cpp",
+        "HDCP.cpp",
         "MediaPlayerFactory.cpp",
         "MediaPlayerService.cpp",
         "MediaRecorderClient.cpp",
         "MetadataRetrieverClient.cpp",
+        "RemoteDisplay.cpp",
         "StagefrightRecorder.cpp",
         "TestPlayerStub.cpp",
     ],
@@ -34,6 +36,7 @@
         "libnetd_client",
         "libpowermanager",
         "libstagefright",
+        "libstagefright_wfd",
         "libstagefright_foundation",
         "libstagefright_httplive",
         "libutils",
@@ -54,6 +57,7 @@
     include_dirs: [
         "frameworks/av/media/libstagefright/rtsp",
         "frameworks/av/media/libstagefright/webm",
+        "frameworks/av/media/libstagefright/wifi-display",
     ],
 
     local_include_dirs: ["include"],
diff --git a/media/libmediaplayerservice/HDCP.cpp b/media/libmediaplayerservice/HDCP.cpp
new file mode 100644
index 0000000..afe3936
--- /dev/null
+++ b/media/libmediaplayerservice/HDCP.cpp
@@ -0,0 +1,175 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "HDCP"
+#include <utils/Log.h>
+
+#include "HDCP.h"
+
+#include <media/stagefright/foundation/ADebug.h>
+
+#include <dlfcn.h>
+
+namespace android {
+
+HDCP::HDCP(bool createEncryptionModule)
+    : mIsEncryptionModule(createEncryptionModule),
+      mLibHandle(NULL),
+      mHDCPModule(NULL) {
+    mLibHandle = dlopen("libstagefright_hdcp.so", RTLD_NOW);
+
+    if (mLibHandle == NULL) {
+        ALOGE("Unable to locate libstagefright_hdcp.so");
+        return;
+    }
+
+    typedef HDCPModule *(*CreateHDCPModuleFunc)(
+            void *, HDCPModule::ObserverFunc);
+
+    CreateHDCPModuleFunc createHDCPModule =
+        mIsEncryptionModule
+            ? (CreateHDCPModuleFunc)dlsym(mLibHandle, "createHDCPModule")
+            : (CreateHDCPModuleFunc)dlsym(
+                    mLibHandle, "createHDCPModuleForDecryption");
+
+    if (createHDCPModule == NULL) {
+        ALOGE("Unable to find symbol 'createHDCPModule'.");
+    } else if ((mHDCPModule = createHDCPModule(
+                    this, &HDCP::ObserveWrapper)) == NULL) {
+        ALOGE("createHDCPModule failed.");
+    }
+}
+
+HDCP::~HDCP() {
+    Mutex::Autolock autoLock(mLock);
+
+    if (mHDCPModule != NULL) {
+        delete mHDCPModule;
+        mHDCPModule = NULL;
+    }
+
+    if (mLibHandle != NULL) {
+        dlclose(mLibHandle);
+        mLibHandle = NULL;
+    }
+}
+
+status_t HDCP::setObserver(const sp<IHDCPObserver> &observer) {
+    Mutex::Autolock autoLock(mLock);
+
+    if (mHDCPModule == NULL) {
+        return NO_INIT;
+    }
+
+    mObserver = observer;
+
+    return OK;
+}
+
+status_t HDCP::initAsync(const char *host, unsigned port) {
+    Mutex::Autolock autoLock(mLock);
+
+    if (mHDCPModule == NULL) {
+        return NO_INIT;
+    }
+
+    return mHDCPModule->initAsync(host, port);
+}
+
+status_t HDCP::shutdownAsync() {
+    Mutex::Autolock autoLock(mLock);
+
+    if (mHDCPModule == NULL) {
+        return NO_INIT;
+    }
+
+    return mHDCPModule->shutdownAsync();
+}
+
+uint32_t HDCP::getCaps() {
+    Mutex::Autolock autoLock(mLock);
+
+    if (mHDCPModule == NULL) {
+        return NO_INIT;
+    }
+
+    return mHDCPModule->getCaps();
+}
+
+status_t HDCP::encrypt(
+        const void *inData, size_t size, uint32_t streamCTR,
+        uint64_t *outInputCTR, void *outData) {
+    Mutex::Autolock autoLock(mLock);
+
+    CHECK(mIsEncryptionModule);
+
+    if (mHDCPModule == NULL) {
+        *outInputCTR = 0;
+
+        return NO_INIT;
+    }
+
+    return mHDCPModule->encrypt(inData, size, streamCTR, outInputCTR, outData);
+}
+
+status_t HDCP::encryptNative(
+        const sp<GraphicBuffer> &graphicBuffer,
+        size_t offset, size_t size, uint32_t streamCTR,
+        uint64_t *outInputCTR, void *outData) {
+    Mutex::Autolock autoLock(mLock);
+
+    CHECK(mIsEncryptionModule);
+
+    if (mHDCPModule == NULL) {
+        *outInputCTR = 0;
+
+        return NO_INIT;
+    }
+
+    return mHDCPModule->encryptNative(graphicBuffer->handle,
+                    offset, size, streamCTR, outInputCTR, outData);
+}
+
+status_t HDCP::decrypt(
+        const void *inData, size_t size,
+        uint32_t streamCTR, uint64_t outInputCTR, void *outData) {
+    Mutex::Autolock autoLock(mLock);
+
+    CHECK(!mIsEncryptionModule);
+
+    if (mHDCPModule == NULL) {
+        return NO_INIT;
+    }
+
+    return mHDCPModule->decrypt(inData, size, streamCTR, outInputCTR, outData);
+}
+
+// static
+void HDCP::ObserveWrapper(void *me, int msg, int ext1, int ext2) {
+    static_cast<HDCP *>(me)->observe(msg, ext1, ext2);
+}
+
+void HDCP::observe(int msg, int ext1, int ext2) {
+    Mutex::Autolock autoLock(mLock);
+
+    if (mObserver != NULL) {
+        mObserver->notify(msg, ext1, ext2, NULL /* obj */);
+    }
+}
+
+}  // namespace android
+
diff --git a/media/libmediaplayerservice/HDCP.h b/media/libmediaplayerservice/HDCP.h
new file mode 100644
index 0000000..83c61b5
--- /dev/null
+++ b/media/libmediaplayerservice/HDCP.h
@@ -0,0 +1,66 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef HDCP_H_
+
+#define HDCP_H_
+
+#include <media/IHDCP.h>
+#include <utils/Mutex.h>
+
+namespace android {
+
+struct HDCP : public BnHDCP {
+    explicit HDCP(bool createEncryptionModule);
+    virtual ~HDCP();
+
+    virtual status_t setObserver(const sp<IHDCPObserver> &observer);
+    virtual status_t initAsync(const char *host, unsigned port);
+    virtual status_t shutdownAsync();
+    virtual uint32_t getCaps();
+
+    virtual status_t encrypt(
+            const void *inData, size_t size, uint32_t streamCTR,
+            uint64_t *outInputCTR, void *outData);
+
+    virtual status_t encryptNative(
+            const sp<GraphicBuffer> &graphicBuffer,
+            size_t offset, size_t size, uint32_t streamCTR,
+            uint64_t *outInputCTR, void *outData);
+
+    virtual status_t decrypt(
+            const void *inData, size_t size,
+            uint32_t streamCTR, uint64_t outInputCTR, void *outData);
+
+private:
+    Mutex mLock;
+
+    bool mIsEncryptionModule;
+
+    void *mLibHandle;
+    HDCPModule *mHDCPModule;
+    sp<IHDCPObserver> mObserver;
+
+    static void ObserveWrapper(void *me, int msg, int ext1, int ext2);
+    void observe(int msg, int ext1, int ext2);
+
+    DISALLOW_EVIL_CONSTRUCTORS(HDCP);
+};
+
+}  // namespace android
+
+#endif  // HDCP_H_
+
diff --git a/media/libmediaplayerservice/MediaPlayerService.cpp b/media/libmediaplayerservice/MediaPlayerService.cpp
index dfd3933..7fbbc64 100644
--- a/media/libmediaplayerservice/MediaPlayerService.cpp
+++ b/media/libmediaplayerservice/MediaPlayerService.cpp
@@ -80,7 +80,9 @@
 #include "TestPlayerStub.h"
 #include "nuplayer/NuPlayerDriver.h"
 
+#include "HDCP.h"
 #include "HTTPBase.h"
+#include "RemoteDisplay.h"
 
 static const int kDumpLockRetries = 50;
 static const int kDumpLockSleepUs = 20000;
@@ -337,13 +339,18 @@
     return MediaCodecList::getLocalInstance();
 }
 
-sp<IRemoteDisplay> MediaPlayerService::listenForRemoteDisplay(
-        const String16 &/*opPackageName*/,
-        const sp<IRemoteDisplayClient>& /*client*/,
-        const String8& /*iface*/) {
-    ALOGE("listenForRemoteDisplay is no longer supported!");
+sp<IHDCP> MediaPlayerService::makeHDCP(bool createEncryptionModule) {
+    return new HDCP(createEncryptionModule);
+}
 
-    return NULL;
+sp<IRemoteDisplay> MediaPlayerService::listenForRemoteDisplay(
+        const String16 &opPackageName,
+        const sp<IRemoteDisplayClient>& client, const String8& iface) {
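+    // Only callers holding CONTROL_WIFI_DISPLAY may start a remote display.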
+    if (!checkPermission("android.permission.CONTROL_WIFI_DISPLAY")) {
+        return NULL;
+    }
+
+    return new RemoteDisplay(opPackageName, client, iface.string());
 }
 
 status_t MediaPlayerService::AudioOutput::dump(int fd, const Vector<String16>& args) const
diff --git a/media/libmediaplayerservice/MediaPlayerService.h b/media/libmediaplayerservice/MediaPlayerService.h
index 49688ce..9522a9e 100644
--- a/media/libmediaplayerservice/MediaPlayerService.h
+++ b/media/libmediaplayerservice/MediaPlayerService.h
@@ -236,6 +236,7 @@
                                        audio_session_t audioSessionId);
 
     virtual sp<IMediaCodecList> getCodecList() const;
+    virtual sp<IHDCP>           makeHDCP(bool createEncryptionModule);
 
     virtual sp<IRemoteDisplay> listenForRemoteDisplay(const String16 &opPackageName,
             const sp<IRemoteDisplayClient>& client, const String8& iface);
diff --git a/media/libmediaplayerservice/RemoteDisplay.cpp b/media/libmediaplayerservice/RemoteDisplay.cpp
new file mode 100644
index 0000000..0eb4b5d
--- /dev/null
+++ b/media/libmediaplayerservice/RemoteDisplay.cpp
@@ -0,0 +1,66 @@
+/*
+ * Copyright 2012, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "RemoteDisplay.h"
+
+#include "source/WifiDisplaySource.h"
+
+#include <media/IRemoteDisplayClient.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/foundation/ANetworkSession.h>
+
+namespace android {
+
+RemoteDisplay::RemoteDisplay(
+        const String16 &opPackageName,
+        const sp<IRemoteDisplayClient> &client,
+        const char *iface)
+    : mLooper(new ALooper),
+      mNetSession(new ANetworkSession) {
+    mLooper->setName("wfd_looper");
+
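+    // The WifiDisplaySource runs as a handler on the dedicated looper; both
+    // the network session and the looper are started before the source is
+    // asked to listen on the given interface.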
+    mSource = new WifiDisplaySource(opPackageName, mNetSession, client);
+    mLooper->registerHandler(mSource);
+
+    mNetSession->start();
+    mLooper->start();
+
+    mSource->start(iface);
+}
+
+RemoteDisplay::~RemoteDisplay() {
+}
+
+status_t RemoteDisplay::pause() {
+    return mSource->pause();
+}
+
+status_t RemoteDisplay::resume() {
+    return mSource->resume();
+}
+
+status_t RemoteDisplay::dispose() {
+    mSource->stop();
+    mSource.clear();
+
+    mLooper->stop();
+    mNetSession->stop();
+
+    return OK;
+}
+
+}  // namespace android
diff --git a/media/libmediaplayerservice/RemoteDisplay.h b/media/libmediaplayerservice/RemoteDisplay.h
new file mode 100644
index 0000000..d4573e9
--- /dev/null
+++ b/media/libmediaplayerservice/RemoteDisplay.h
@@ -0,0 +1,59 @@
+/*
+ * Copyright 2012, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef REMOTE_DISPLAY_H_
+
+#define REMOTE_DISPLAY_H_
+
+#include <media/IMediaPlayerService.h>
+#include <media/IRemoteDisplay.h>
+#include <media/stagefright/foundation/ABase.h>
+#include <utils/Errors.h>
+#include <utils/RefBase.h>
+
+namespace android {
+
+struct ALooper;
+struct ANetworkSession;
+class IRemoteDisplayClient;
+struct WifiDisplaySource;
+
+struct RemoteDisplay : public BnRemoteDisplay {
+    RemoteDisplay(
+            const String16 &opPackageName,
+            const sp<IRemoteDisplayClient> &client,
+            const char *iface);
+
+    virtual status_t pause();
+    virtual status_t resume();
+    virtual status_t dispose();
+
+protected:
+    virtual ~RemoteDisplay();
+
+private:
+    sp<ALooper> mNetLooper;
+    sp<ALooper> mLooper;
+    sp<ANetworkSession> mNetSession;
+    sp<WifiDisplaySource> mSource;
+
+    DISALLOW_EVIL_CONSTRUCTORS(RemoteDisplay);
+};
+
+}  // namespace android
+
+#endif  // REMOTE_DISPLAY_H_
+
diff --git a/media/libmediaplayerservice/nuplayer/StreamingSource.cpp b/media/libmediaplayerservice/nuplayer/StreamingSource.cpp
index ea64f98..3250a48 100644
--- a/media/libmediaplayerservice/nuplayer/StreamingSource.cpp
+++ b/media/libmediaplayerservice/nuplayer/StreamingSource.cpp
@@ -131,29 +131,32 @@
         } else if (n < 0) {
             break;
         } else {
-            if (buffer[0] == 0x00) {
+            if (buffer[0] == 0x00) { // OK to access buffer[0] since n must be > 0 here
                 // XXX legacy
 
                 if (extra == NULL) {
                     extra = new AMessage;
                 }
 
-                uint8_t type = buffer[1];
+                uint8_t type = 0;
+                if (n > 1) {
+                    type = buffer[1];
 
-                if (type & 2) {
-                    int64_t mediaTimeUs;
-                    memcpy(&mediaTimeUs, &buffer[2], sizeof(mediaTimeUs));
+                    if ((type & 2) && (n >= 2 + sizeof(int64_t))) {
+                        int64_t mediaTimeUs;
+                        memcpy(&mediaTimeUs, &buffer[2], sizeof(mediaTimeUs));
 
-                    extra->setInt64(kATSParserKeyMediaTimeUs, mediaTimeUs);
+                        extra->setInt64(kATSParserKeyMediaTimeUs, mediaTimeUs);
+                    }
                 }
 
                 mTSParser->signalDiscontinuity(
                         ((type & 1) == 0)
-                            ? ATSParser::DISCONTINUITY_TIME
-                            : ATSParser::DISCONTINUITY_FORMATCHANGE,
+                                ? ATSParser::DISCONTINUITY_TIME
+                                : ATSParser::DISCONTINUITY_FORMATCHANGE,
                         extra);
             } else {
-                status_t err = mTSParser->feedTSPacket(buffer, sizeof(buffer));
+                status_t err = mTSParser->feedTSPacket(buffer, n);
 
                 if (err != OK) {
                     ALOGE("TS Parser returned error %d", err);
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp
index 3d67c91..44f246d 100644
--- a/media/libstagefright/ACodec.cpp
+++ b/media/libstagefright/ACodec.cpp
@@ -1826,6 +1826,23 @@
             mRepeatFrameDelayUs = -1LL;
         }
 
+        if (!msg->findDouble("time-lapse-fps", &mCaptureFps)) {
+            float captureRate;
+            if (msg->findAsFloat(KEY_CAPTURE_RATE, &captureRate)) {
+                mCaptureFps = captureRate;
+            } else {
+                mCaptureFps = -1.0;
+            }
+        }
+
+        if (!msg->findInt32(
+                KEY_CREATE_INPUT_SURFACE_SUSPENDED,
+                (int32_t*)&mCreateInputBuffersSuspended)) {
+            mCreateInputBuffersSuspended = false;
+        }
+    }
+
+    if (encoder && (mIsVideo || mIsImage)) {
         // only allow 32-bit value, since we pass it as U32 to OMX.
         if (!msg->findInt64(KEY_MAX_PTS_GAP_TO_ENCODER, &mMaxPtsGapUs)) {
             mMaxPtsGapUs = 0LL;
@@ -1842,16 +1859,6 @@
         if (mMaxPtsGapUs < 0LL) {
             mMaxFps = -1;
         }
-
-        if (!msg->findDouble("time-lapse-fps", &mCaptureFps)) {
-            mCaptureFps = -1.0;
-        }
-
-        if (!msg->findInt32(
-                KEY_CREATE_INPUT_SURFACE_SUSPENDED,
-                (int32_t*)&mCreateInputBuffersSuspended)) {
-            mCreateInputBuffersSuspended = false;
-        }
     }
 
     // NOTE: we only use native window for video decoders
@@ -4505,22 +4512,38 @@
 status_t ACodec::configureImageGrid(
         const sp<AMessage> &msg, sp<AMessage> &outputFormat) {
     int32_t tileWidth, tileHeight, gridRows, gridCols;
-    if (!msg->findInt32("tile-width", &tileWidth) ||
-        !msg->findInt32("tile-height", &tileHeight) ||
-        !msg->findInt32("grid-rows", &gridRows) ||
-        !msg->findInt32("grid-cols", &gridCols)) {
+    OMX_BOOL useGrid = OMX_FALSE;
+    if (msg->findInt32("tile-width", &tileWidth) &&
+        msg->findInt32("tile-height", &tileHeight) &&
+        msg->findInt32("grid-rows", &gridRows) &&
+        msg->findInt32("grid-cols", &gridCols)) {
+        useGrid = OMX_TRUE;
+    } else {
+        // When bEnabled is false the tile info is not used,
+        // but clear these fields out anyway.
+        tileWidth = tileHeight = gridRows = gridCols = 0;
+    }
+
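+    // For non-image codecs the grid parameter is only sent when grid info is
+    // present; image (HEIF) codecs always sync the (possibly disabled) grid
+    // configuration with the component.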
+    if (!mIsImage && !useGrid) {
         return OK;
     }
 
     OMX_VIDEO_PARAM_ANDROID_IMAGEGRIDTYPE gridType;
     InitOMXParams(&gridType);
     gridType.nPortIndex = kPortIndexOutput;
-    gridType.bEnabled = OMX_TRUE;
+    gridType.bEnabled = useGrid;
     gridType.nTileWidth = tileWidth;
     gridType.nTileHeight = tileHeight;
     gridType.nGridRows = gridRows;
     gridType.nGridCols = gridCols;
 
+    ALOGV("sending image grid info to component: bEnabled %d, tile %dx%d, grid %dx%d",
+            gridType.bEnabled,
+            gridType.nTileWidth,
+            gridType.nTileHeight,
+            gridType.nGridRows,
+            gridType.nGridCols);
+
     status_t err = mOMXNode->setParameter(
             (OMX_INDEXTYPE)OMX_IndexParamVideoAndroidImageGrid,
             &gridType, sizeof(gridType));
@@ -4541,6 +4564,13 @@
             (OMX_INDEXTYPE)OMX_IndexParamVideoAndroidImageGrid,
             &gridType, sizeof(gridType));
 
+    ALOGV("received image grid info from component: bEnabled %d, tile %dx%d, grid %dx%d",
+            gridType.bEnabled,
+            gridType.nTileWidth,
+            gridType.nTileHeight,
+            gridType.nGridRows,
+            gridType.nGridCols);
+
     if (err == OK && gridType.bEnabled) {
         outputFormat->setInt32("tile-width", gridType.nTileWidth);
         outputFormat->setInt32("tile-height", gridType.nTileHeight);
@@ -6871,7 +6901,7 @@
         }
     }
 
-    if (mCodec->mMaxPtsGapUs != 0LL) {
+    if (mCodec->mIsVideo && mCodec->mMaxPtsGapUs != 0LL) {
         OMX_PARAM_U32TYPE maxPtsGapParams;
         InitOMXParams(&maxPtsGapParams);
         maxPtsGapParams.nPortIndex = kPortIndexInput;
diff --git a/media/libstagefright/Android.bp b/media/libstagefright/Android.bp
index 9170805..11af7fb 100644
--- a/media/libstagefright/Android.bp
+++ b/media/libstagefright/Android.bp
@@ -116,10 +116,9 @@
     name: "libstagefright",
 
     srcs: [
-        "AACWriter.cpp",
         "ACodec.cpp",
         "ACodecBufferChannel.cpp",
-        "AHierarchicalStateMachine.cpp",
+        "AACWriter.cpp",
         "AMRWriter.cpp",
         "AudioPlayer.cpp",
         "AudioSource.cpp",
@@ -129,7 +128,6 @@
         "CameraSource.cpp",
         "CameraSourceTimeLapse.cpp",
         "DataConverter.cpp",
-        "DataSourceBase.cpp",
         "DataSourceFactory.cpp",
         "DataURISource.cpp",
         "ClearFileSource.cpp",
@@ -167,6 +165,7 @@
         "StagefrightMediaScanner.cpp",
         "StagefrightMetadataRetriever.cpp",
         "StagefrightPluginLoader.cpp",
+        "SurfaceMediaSource.cpp",
         "SurfaceUtils.cpp",
         "Utils.cpp",
         "ThrottledSource.cpp",
@@ -267,7 +266,6 @@
     srcs: [
         "ClearFileSource.cpp",
         "DataURISource.cpp",
-        "DataSourceBase.cpp",
         "HTTPBase.cpp",
         "HevcUtils.cpp",
         "MediaClock.cpp",
diff --git a/media/libstagefright/DataSourceBase.cpp b/media/libstagefright/DataSourceBase.cpp
deleted file mode 100644
index 8f47ee5..0000000
--- a/media/libstagefright/DataSourceBase.cpp
+++ /dev/null
@@ -1,130 +0,0 @@
-/*
- * Copyright (C) 2009 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-//#define LOG_NDEBUG 0
-#define LOG_TAG "DataSourceBase"
-
-#include <media/DataSourceBase.h>
-#include <media/stagefright/foundation/ByteUtils.h>
-#include <media/stagefright/MediaErrors.h>
-#include <utils/String8.h>
-
-namespace android {
-
-bool DataSourceBase::getUInt16(off64_t offset, uint16_t *x) {
-    *x = 0;
-
-    uint8_t byte[2];
-    if (readAt(offset, byte, 2) != 2) {
-        return false;
-    }
-
-    *x = (byte[0] << 8) | byte[1];
-
-    return true;
-}
-
-bool DataSourceBase::getUInt24(off64_t offset, uint32_t *x) {
-    *x = 0;
-
-    uint8_t byte[3];
-    if (readAt(offset, byte, 3) != 3) {
-        return false;
-    }
-
-    *x = (byte[0] << 16) | (byte[1] << 8) | byte[2];
-
-    return true;
-}
-
-bool DataSourceBase::getUInt32(off64_t offset, uint32_t *x) {
-    *x = 0;
-
-    uint32_t tmp;
-    if (readAt(offset, &tmp, 4) != 4) {
-        return false;
-    }
-
-    *x = ntohl(tmp);
-
-    return true;
-}
-
-bool DataSourceBase::getUInt64(off64_t offset, uint64_t *x) {
-    *x = 0;
-
-    uint64_t tmp;
-    if (readAt(offset, &tmp, 8) != 8) {
-        return false;
-    }
-
-    *x = ntoh64(tmp);
-
-    return true;
-}
-
-bool DataSourceBase::getUInt16Var(off64_t offset, uint16_t *x, size_t size) {
-    if (size == 2) {
-        return getUInt16(offset, x);
-    }
-    if (size == 1) {
-        uint8_t tmp;
-        if (readAt(offset, &tmp, 1) == 1) {
-            *x = tmp;
-            return true;
-        }
-    }
-    return false;
-}
-
-bool DataSourceBase::getUInt32Var(off64_t offset, uint32_t *x, size_t size) {
-    if (size == 4) {
-        return getUInt32(offset, x);
-    }
-    if (size == 2) {
-        uint16_t tmp;
-        if (getUInt16(offset, &tmp)) {
-            *x = tmp;
-            return true;
-        }
-    }
-    return false;
-}
-
-bool DataSourceBase::getUInt64Var(off64_t offset, uint64_t *x, size_t size) {
-    if (size == 8) {
-        return getUInt64(offset, x);
-    }
-    if (size == 4) {
-        uint32_t tmp;
-        if (getUInt32(offset, &tmp)) {
-            *x = tmp;
-            return true;
-        }
-    }
-    return false;
-}
-
-status_t DataSourceBase::getSize(off64_t *size) {
-    *size = 0;
-
-    return ERROR_UNSUPPORTED;
-}
-
-bool DataSourceBase::getUri(char *uriString __unused, size_t bufferSize __unused) {
-    return false;
-}
-
-}  // namespace android
diff --git a/media/libstagefright/MPEG4Writer.cpp b/media/libstagefright/MPEG4Writer.cpp
index 2f13dc9..f130c9b 100644
--- a/media/libstagefright/MPEG4Writer.cpp
+++ b/media/libstagefright/MPEG4Writer.cpp
@@ -1635,8 +1635,13 @@
         return BAD_VALUE;
     }
 
+    // Increase moovExtraSize once only irrespective of how many times
+    // setCaptureRate is called.
+    bool containsCaptureFps = mMetaKeys->contains(kMetaKey_CaptureFps);
     mMetaKeys->setFloat(kMetaKey_CaptureFps, captureFps);
-    mMoovExtraSize += sizeof(kMetaKey_CaptureFps) + 4 + 32;
+    if (!containsCaptureFps) {
+        mMoovExtraSize += sizeof(kMetaKey_CaptureFps) + 4 + 32;
+    }
 
     return OK;
 }
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index c08df69..2437430 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -49,6 +49,7 @@
 #include <media/stagefright/foundation/avc_utils.h>
 #include <media/stagefright/foundation/hexdump.h>
 #include <media/stagefright/ACodec.h>
+#include <media/stagefright/BatteryChecker.h>
 #include <media/stagefright/BufferProducerWrapper.h>
 #include <media/stagefright/MediaCodec.h>
 #include <media/stagefright/MediaCodecList.h>
@@ -58,7 +59,6 @@
 #include <media/stagefright/OMXClient.h>
 #include <media/stagefright/PersistentSurface.h>
 #include <media/stagefright/SurfaceUtils.h>
-#include <mediautils/BatteryNotifier.h>
 #include <private/android_filesystem_config.h>
 #include <utils/Singleton.h>
 
@@ -167,8 +167,9 @@
     DISALLOW_EVIL_CONSTRUCTORS(ResourceManagerClient);
 };
 
-MediaCodec::ResourceManagerServiceProxy::ResourceManagerServiceProxy(pid_t pid)
-        : mPid(pid) {
+MediaCodec::ResourceManagerServiceProxy::ResourceManagerServiceProxy(
+        pid_t pid, uid_t uid)
+        : mPid(pid), mUid(uid) {
     if (mPid == MediaCodec::kNoPid) {
         mPid = IPCThreadState::self()->getCallingPid();
     }
@@ -205,15 +206,25 @@
     if (mService == NULL) {
         return;
     }
-    mService->addResource(mPid, clientId, client, resources);
+    mService->addResource(mPid, mUid, clientId, client, resources);
 }
 
-void MediaCodec::ResourceManagerServiceProxy::removeResource(int64_t clientId) {
+void MediaCodec::ResourceManagerServiceProxy::removeResource(
+        int64_t clientId,
+        const Vector<MediaResource> &resources) {
     Mutex::Autolock _l(mLock);
     if (mService == NULL) {
         return;
     }
-    mService->removeResource(mPid, clientId);
+    mService->removeResource(mPid, clientId, resources);
+}
+
+void MediaCodec::ResourceManagerServiceProxy::removeClient(int64_t clientId) {
+    Mutex::Autolock _l(mLock);
+    if (mService == NULL) {
+        return;
+    }
+    mService->removeClient(mPid, clientId);
 }
 
 bool MediaCodec::ResourceManagerServiceProxy::reclaimResource(
@@ -518,9 +529,6 @@
       mStickyError(OK),
       mSoftRenderer(NULL),
       mAnalyticsItem(NULL),
-      mResourceManagerClient(new ResourceManagerClient(this)),
-      mResourceManagerService(new ResourceManagerServiceProxy(pid)),
-      mBatteryStatNotified(false),
       mIsVideo(false),
       mVideoWidth(0),
       mVideoHeight(0),
@@ -538,13 +546,15 @@
     } else {
         mUid = uid;
     }
+    mResourceManagerClient = new ResourceManagerClient(this);
+    mResourceManagerService = new ResourceManagerServiceProxy(pid, mUid);
 
     initAnalyticsItem();
 }
 
 MediaCodec::~MediaCodec() {
     CHECK_EQ(mState, UNINITIALIZED);
-    mResourceManagerService->removeResource(getId(mResourceManagerClient));
+    mResourceManagerService->removeClient(getId(mResourceManagerClient));
 
     flushAnalyticsItem();
 
@@ -756,6 +766,12 @@
         return;
     }
 
+    if (mBatteryChecker != nullptr) {
+        mBatteryChecker->onCodecActivity([this] () {
+            addResource(MediaResource::kBattery, MediaResource::kVideoCodec, 1);
+        });
+    }
+
     const int64_t nowNs = systemTime(SYSTEM_TIME_MONOTONIC);
     BufferFlightTiming_t startdata = { presentationUs, nowNs };
 
@@ -790,6 +806,12 @@
         return;
     }
 
+    if (mBatteryChecker != nullptr) {
+        mBatteryChecker->onCodecActivity([this] () {
+            addResource(MediaResource::kBattery, MediaResource::kVideoCodec, 1);
+        });
+    }
+
     BufferFlightTiming_t startdata;
     bool valid = false;
     while (mBuffersInFlight.size() > 0) {
@@ -978,6 +1000,10 @@
         mAnalyticsItem->setCString(kCodecMode, mIsVideo ? kCodecModeVideo : kCodecModeAudio);
     }
 
+    if (mIsVideo) {
+        mBatteryChecker = new BatteryChecker(new AMessage(kWhatCheckBatteryStats, this));
+    }
+
     status_t err;
     Vector<MediaResource> resources;
     MediaResource::Type type =
@@ -1250,6 +1276,13 @@
             getId(mResourceManagerClient), mResourceManagerClient, resources);
 }
 
+void MediaCodec::removeResource(
+        MediaResource::Type type, MediaResource::SubType subtype, uint64_t value) {
+    Vector<MediaResource> resources;
+    resources.push_back(MediaResource(type, subtype, value));
+    mResourceManagerService->removeResource(getId(mResourceManagerClient), resources);
+}
+
 status_t MediaCodec::start() {
     sp<AMessage> msg = new AMessage(kWhatStart, this);
 
@@ -1727,6 +1760,59 @@
     }
 }
 
+BatteryChecker::BatteryChecker(const sp<AMessage> &msg, int64_t timeoutUs)
+    : mTimeoutUs(timeoutUs)
+    , mLastActivityTimeUs(-1ll)
+    , mBatteryStatNotified(false)
+    , mBatteryCheckerGeneration(0)
+    , mIsExecuting(false)
+    , mBatteryCheckerMsg(msg) {}
+
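+// The checker turns the battery stat on the first time codec activity is seen
+// and arms a timer; later activity only records a timestamp.  When the timer
+// fires and no activity has been recorded since the previous post, the
+// batteryOffCb is invoked; otherwise the timer is re-armed.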
+void BatteryChecker::onCodecActivity(std::function<void()> batteryOnCb) {
+    if (!isExecuting()) {
+        // ignore if not executing
+        return;
+    }
+    if (!mBatteryStatNotified) {
+        batteryOnCb();
+        mBatteryStatNotified = true;
+        sp<AMessage> msg = mBatteryCheckerMsg->dup();
+        msg->setInt32("generation", mBatteryCheckerGeneration);
+
+        // post checker and clear last activity time
+        msg->post(mTimeoutUs);
+        mLastActivityTimeUs = -1ll;
+    } else {
+        // update last activity time
+        mLastActivityTimeUs = ALooper::GetNowUs();
+    }
+}
+
+void BatteryChecker::onCheckBatteryTimer(
+        const sp<AMessage> &msg, std::function<void()> batteryOffCb) {
+    // ignore if this checker already expired because the client resource was removed
+    int32_t generation;
+    if (!msg->findInt32("generation", &generation)
+            || generation != mBatteryCheckerGeneration) {
+        return;
+    }
+
+    if (mLastActivityTimeUs < 0ll) {
+        // timed out inactive, do not repost checker
+        batteryOffCb();
+        mBatteryStatNotified = false;
+    } else {
+        // repost checker and clear last activity time
+        msg->post(mTimeoutUs + mLastActivityTimeUs - ALooper::GetNowUs());
+        mLastActivityTimeUs = -1ll;
+    }
+}
+
+void BatteryChecker::onClientRemoved() {
+    mBatteryStatNotified = false;
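+    // Bumping the generation invalidates any checker message still queued on
+    // the looper; it will be ignored when it eventually fires.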
+    mBatteryCheckerGeneration++;
+}
+
 ////////////////////////////////////////////////////////////////////////////////
 
 void MediaCodec::cancelPendingDequeueOperations() {
@@ -1809,6 +1895,18 @@
         int32_t flags;
         CHECK(buffer->meta()->findInt32("flags", &flags));
 
+        // Workaround for CtsMediaTestCases: android.media.cts.DecoderTest#testEOSBehaviorH264
+        // for the single-frame-case. The Qualcomm hardware decoder reports
+        // unexpected metadata for that case.
+        if (timeUs == -1 && index == 0 && flags == BUFFER_FLAG_EOS
+            && mVideoWidth == 480 && mVideoHeight == 360
+            && mComponentName == "OMX.qcom.video.decoder.avc") {
+            ALOGD(
+                "Replacing unexpected timeUs value of -1 with 0, working "
+                "around CTS failure: testEOSBehaviorH264.");
+            response->setInt64("timeUs", 0);
+        }
+
         response->setInt32("flags", flags);
         response->postReply(replyID);
     }
@@ -2363,7 +2461,12 @@
 
                     mFlags &= ~kFlagIsComponentAllocated;
 
-                    mResourceManagerService->removeResource(getId(mResourceManagerClient));
+                    // Mark the battery stat off since we're removing all
+                    // resources, including the battery-on resource.
+                    if (mBatteryChecker != nullptr) {
+                        mBatteryChecker->onClientRemoved();
+                    }
+
+                    mResourceManagerService->removeClient(getId(mResourceManagerClient));
 
                     (new AMessage)->postReply(mReplyID);
                     break;
@@ -3093,6 +3196,16 @@
             break;
         }
 
+        case kWhatCheckBatteryStats:
+        {
+            if (mBatteryChecker != nullptr) {
+                mBatteryChecker->onCheckBatteryTimer(msg, [this] () {
+                    removeResource(MediaResource::kBattery, MediaResource::kVideoCodec, 1);
+                });
+            }
+            break;
+        }
+
         default:
             TRESPASS();
     }
@@ -3189,9 +3302,11 @@
 
     mState = newState;
 
-    cancelPendingDequeueOperations();
+    if (mBatteryChecker != nullptr) {
+        mBatteryChecker->setExecuting(isExecuting());
+    }
 
-    updateBatteryStat();
+    cancelPendingDequeueOperations();
 }
 
 void MediaCodec::returnBuffersToCodec(bool isReclaim) {
@@ -3695,20 +3810,6 @@
     return OK;
 }
 
-void MediaCodec::updateBatteryStat() {
-    if (!mIsVideo) {
-        return;
-    }
-
-    if (mState == CONFIGURED && !mBatteryStatNotified) {
-        BatteryNotifier::getInstance().noteStartVideo(mUid);
-        mBatteryStatNotified = true;
-    } else if (mState == UNINITIALIZED && mBatteryStatNotified) {
-        BatteryNotifier::getInstance().noteStopVideo(mUid);
-        mBatteryStatNotified = false;
-    }
-}
-
 std::string MediaCodec::stateString(State state) {
     const char *rval = NULL;
     char rawbuffer[16]; // room for "%d"
diff --git a/media/libstagefright/MediaMuxer.cpp b/media/libstagefright/MediaMuxer.cpp
index 9ba2add..7ebdb1a 100644
--- a/media/libstagefright/MediaMuxer.cpp
+++ b/media/libstagefright/MediaMuxer.cpp
@@ -96,10 +96,18 @@
 
     sp<MediaAdapter> newTrack = new MediaAdapter(trackMeta);
     status_t result = mWriter->addSource(newTrack);
-    if (result == OK) {
-        return mTrackList.add(newTrack);
+    if (result != OK) {
+        return -1;
     }
-    return -1;
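+    // If the track advertises a time-lapse capture rate, forward it to the
+    // writer so it can be recorded in the output file's metadata.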
+    float captureFps = -1.0;
+    if (format->findAsFloat("time-lapse-fps", &captureFps)) {
+        ALOGV("addTrack() time-lapse-fps: %f", captureFps);
+        result = mWriter->setCaptureRate(captureFps);
+        if (result != OK) {
+            ALOGW("addTrack() setCaptureRate failed :%d", result);
+        }
+    }
+    return mTrackList.add(newTrack);
 }
 
 status_t MediaMuxer::setOrientationHint(int degrees) {
diff --git a/media/libstagefright/SurfaceMediaSource.cpp b/media/libstagefright/SurfaceMediaSource.cpp
new file mode 100644
index 0000000..d737055
--- /dev/null
+++ b/media/libstagefright/SurfaceMediaSource.cpp
@@ -0,0 +1,485 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+//#define LOG_NDEBUG 0
+#define LOG_TAG "SurfaceMediaSource"
+
+#include <inttypes.h>
+
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/SurfaceMediaSource.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MetaData.h>
+#include <OMX_IVCommon.h>
+#include <media/hardware/HardwareAPI.h>
+#include <media/hardware/MetadataBufferType.h>
+
+#include <ui/GraphicBuffer.h>
+#include <gui/BufferItem.h>
+#include <gui/ISurfaceComposer.h>
+#include <OMX_Component.h>
+
+#include <utils/Log.h>
+#include <utils/String8.h>
+
+#include <private/gui/ComposerService.h>
+
+namespace android {
+
+SurfaceMediaSource::SurfaceMediaSource(uint32_t bufferWidth, uint32_t bufferHeight) :
+    mWidth(bufferWidth),
+    mHeight(bufferHeight),
+    mCurrentSlot(BufferQueue::INVALID_BUFFER_SLOT),
+    mNumPendingBuffers(0),
+    mCurrentTimestamp(0),
+    mFrameRate(30),
+    mStarted(false),
+    mNumFramesReceived(0),
+    mNumFramesEncoded(0),
+    mFirstFrameTimestamp(0),
+    mMaxAcquiredBufferCount(4),  // XXX double-check the default
+    mUseAbsoluteTimestamps(false) {
+    ALOGV("SurfaceMediaSource");
+
+    if (bufferWidth == 0 || bufferHeight == 0) {
+        ALOGE("Invalid dimensions %dx%d", bufferWidth, bufferHeight);
+    }
+
+    BufferQueue::createBufferQueue(&mProducer, &mConsumer);
+    mConsumer->setDefaultBufferSize(bufferWidth, bufferHeight);
+    mConsumer->setConsumerUsageBits(GRALLOC_USAGE_HW_VIDEO_ENCODER |
+            GRALLOC_USAGE_HW_TEXTURE);
+
+    sp<ISurfaceComposer> composer(ComposerService::getComposerService());
+
+    // Note that we can't create an sp<...>(this) in a ctor that will not keep a
+    // reference once the ctor ends, as that would cause the refcount of 'this'
+    // to drop to 0 at the end of the ctor.  Since all we need is a wp<...>,
+    // that's what we create.
+    wp<ConsumerListener> listener = static_cast<ConsumerListener*>(this);
+    sp<BufferQueue::ProxyConsumerListener> proxy = new BufferQueue::ProxyConsumerListener(listener);
+
+    status_t err = mConsumer->consumerConnect(proxy, false);
+    if (err != NO_ERROR) {
+        ALOGE("SurfaceMediaSource: error connecting to BufferQueue: %s (%d)",
+                strerror(-err), err);
+    }
+}
+
+SurfaceMediaSource::~SurfaceMediaSource() {
+    ALOGV("~SurfaceMediaSource");
+    CHECK(!mStarted);
+}
+
+nsecs_t SurfaceMediaSource::getTimestamp() {
+    ALOGV("getTimestamp");
+    Mutex::Autolock lock(mMutex);
+    return mCurrentTimestamp;
+}
+
+void SurfaceMediaSource::setFrameAvailableListener(
+        const sp<FrameAvailableListener>& listener) {
+    ALOGV("setFrameAvailableListener");
+    Mutex::Autolock lock(mMutex);
+    mFrameAvailableListener = listener;
+}
+
+void SurfaceMediaSource::dumpState(String8& result) const
+{
+    char buffer[1024];
+    dumpState(result, "", buffer, 1024);
+}
+
+void SurfaceMediaSource::dumpState(
+        String8& result,
+        const char* /* prefix */,
+        char* buffer,
+        size_t /* SIZE */) const
+{
+    Mutex::Autolock lock(mMutex);
+
+    result.append(buffer);
+    mConsumer->dumpState(result, "");
+}
+
+status_t SurfaceMediaSource::setFrameRate(int32_t fps)
+{
+    ALOGV("setFrameRate");
+    Mutex::Autolock lock(mMutex);
+    const int MAX_FRAME_RATE = 60;
+    if (fps < 0 || fps > MAX_FRAME_RATE) {
+        return BAD_VALUE;
+    }
+    mFrameRate = fps;
+    return OK;
+}
+
+MetadataBufferType SurfaceMediaSource::metaDataStoredInVideoBuffers() const {
+    ALOGV("isMetaDataStoredInVideoBuffers");
+    return kMetadataBufferTypeANWBuffer;
+}
+
+int32_t SurfaceMediaSource::getFrameRate() const {
+    ALOGV("getFrameRate");
+    Mutex::Autolock lock(mMutex);
+    return mFrameRate;
+}
+
+status_t SurfaceMediaSource::start(MetaData *params)
+{
+    ALOGV("start");
+
+    Mutex::Autolock lock(mMutex);
+
+    CHECK(!mStarted);
+
+    mStartTimeNs = 0;
+    int64_t startTimeUs;
+    int32_t bufferCount = 0;
+    if (params) {
+        if (params->findInt64(kKeyTime, &startTimeUs)) {
+            mStartTimeNs = startTimeUs * 1000;
+        }
+
+        if (!params->findInt32(kKeyNumBuffers, &bufferCount)) {
+            ALOGE("Failed to find the advertised buffer count");
+            return UNKNOWN_ERROR;
+        }
+
+        if (bufferCount <= 1) {
+            ALOGE("bufferCount %d is too small", bufferCount);
+            return BAD_VALUE;
+        }
+
+        mMaxAcquiredBufferCount = bufferCount;
+    }
+
+    CHECK_GT(mMaxAcquiredBufferCount, 1u);
+
+    status_t err =
+        mConsumer->setMaxAcquiredBufferCount(mMaxAcquiredBufferCount);
+
+    if (err != OK) {
+        return err;
+    }
+
+    mNumPendingBuffers = 0;
+    mStarted = true;
+
+    return OK;
+}
+
+status_t SurfaceMediaSource::setMaxAcquiredBufferCount(size_t count) {
+    ALOGV("setMaxAcquiredBufferCount(%zu)", count);
+    Mutex::Autolock lock(mMutex);
+
+    CHECK_GT(count, 1u);
+    mMaxAcquiredBufferCount = count;
+
+    return OK;
+}
+
+status_t SurfaceMediaSource::setUseAbsoluteTimestamps() {
+    ALOGV("setUseAbsoluteTimestamps");
+    Mutex::Autolock lock(mMutex);
+    mUseAbsoluteTimestamps = true;
+
+    return OK;
+}
+
+status_t SurfaceMediaSource::stop()
+{
+    ALOGV("stop");
+    Mutex::Autolock lock(mMutex);
+
+    if (!mStarted) {
+        return OK;
+    }
+
+    mStarted = false;
+    mFrameAvailableCondition.signal();
+
+    while (mNumPendingBuffers > 0) {
+        ALOGI("Still waiting for %zu buffers to be returned.",
+                mNumPendingBuffers);
+
+#if DEBUG_PENDING_BUFFERS
+        for (size_t i = 0; i < mPendingBuffers.size(); ++i) {
+            ALOGI("%zu: %p", i, mPendingBuffers.itemAt(i));
+        }
+#endif
+
+        mMediaBuffersAvailableCondition.wait(mMutex);
+    }
+
+    mMediaBuffersAvailableCondition.signal();
+
+    return mConsumer->consumerDisconnect();
+}
+
+sp<MetaData> SurfaceMediaSource::getFormat()
+{
+    ALOGV("getFormat");
+
+    Mutex::Autolock lock(mMutex);
+    sp<MetaData> meta = new MetaData;
+
+    meta->setInt32(kKeyWidth, mWidth);
+    meta->setInt32(kKeyHeight, mHeight);
+    // The encoder format is set as an opaque colorformat
+    // The encoder will later find out the actual colorformat
+    // from the GL Frames itself.
+    meta->setInt32(kKeyColorFormat, OMX_COLOR_FormatAndroidOpaque);
+    meta->setInt32(kKeyStride, mWidth);
+    meta->setInt32(kKeySliceHeight, mHeight);
+    meta->setInt32(kKeyFrameRate, mFrameRate);
+    meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_RAW);
+    return meta;
+}
+
+// Pass the data to the MediaBuffer. Pass in only the metadata
+// Note: Call only when you have the lock
+void SurfaceMediaSource::passMetadataBuffer_l(MediaBufferBase **buffer,
+        ANativeWindowBuffer *bufferHandle) const {
+    *buffer = new MediaBuffer(sizeof(VideoNativeMetadata));
+    VideoNativeMetadata *data = (VideoNativeMetadata *)(*buffer)->data();
+    if (data == NULL) {
+        ALOGE("Cannot allocate memory for metadata buffer!");
+        return;
+    }
+    data->eType = metaDataStoredInVideoBuffers();
+    data->pBuffer = bufferHandle;
+    data->nFenceFd = -1;
+    ALOGV("handle = %p, offset = %zu, length = %zu",
+            bufferHandle, (*buffer)->range_length(), (*buffer)->range_offset());
+}
+
+status_t SurfaceMediaSource::read(
+        MediaBufferBase **buffer, const ReadOptions * /* options */) {
+    ALOGV("read");
+    Mutex::Autolock lock(mMutex);
+
+    *buffer = NULL;
+
+    while (mStarted && mNumPendingBuffers == mMaxAcquiredBufferCount) {
+        mMediaBuffersAvailableCondition.wait(mMutex);
+    }
+
+    // Update the current buffer info
+    // TODO: mCurrentSlot can be made a buffer state since there
+    // can be more than one "current" slot.
+
+    BufferItem item;
+    // If the recording has started and the queue is empty, then just
+    // wait here till the frames come in from the client side
+    while (mStarted) {
+
+        status_t err = mConsumer->acquireBuffer(&item, 0);
+        if (err == BufferQueue::NO_BUFFER_AVAILABLE) {
+            // wait for a buffer to be queued
+            mFrameAvailableCondition.wait(mMutex);
+        } else if (err == OK) {
+            err = item.mFence->waitForever("SurfaceMediaSource::read");
+            if (err) {
+                ALOGW("read: failed to wait for buffer fence: %d", err);
+            }
+
+            // First time seeing the buffer?  Add it to the SMS slot.
+            if (item.mGraphicBuffer != NULL) {
+                mSlots[item.mSlot].mGraphicBuffer = item.mGraphicBuffer;
+            }
+            mSlots[item.mSlot].mFrameNumber = item.mFrameNumber;
+
+            // check for the timing of this buffer
+            if (mNumFramesReceived == 0 && !mUseAbsoluteTimestamps) {
+                mFirstFrameTimestamp = item.mTimestamp;
+                // Initial delay
+                if (mStartTimeNs > 0) {
+                    if (item.mTimestamp < mStartTimeNs) {
+                        // This frame predates start of record, discard
+                        mConsumer->releaseBuffer(
+                                item.mSlot, item.mFrameNumber, EGL_NO_DISPLAY,
+                                EGL_NO_SYNC_KHR, Fence::NO_FENCE);
+                        continue;
+                    }
+                    mStartTimeNs = item.mTimestamp - mStartTimeNs;
+                }
+            }
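+            // Express the timestamp relative to mFirstFrameTimestamp (left at
+            // 0 when absolute timestamps are requested), shifted by mStartTimeNs.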
+            item.mTimestamp = mStartTimeNs + (item.mTimestamp - mFirstFrameTimestamp);
+
+            mNumFramesReceived++;
+
+            break;
+        } else {
+            ALOGE("read: acquire failed with error code %d", err);
+            return ERROR_END_OF_STREAM;
+        }
+
+    }
+
+    // If the loop was exited as a result of stopping the recording,
+    // it is OK
+    if (!mStarted) {
+        ALOGV("Read: SurfaceMediaSource is stopped. Returning ERROR_END_OF_STREAM.");
+        return ERROR_END_OF_STREAM;
+    }
+
+    mCurrentSlot = item.mSlot;
+
+    // First time seeing the buffer?  Add it to the SMS slot.
+    if (item.mGraphicBuffer != NULL) {
+        mSlots[item.mSlot].mGraphicBuffer = item.mGraphicBuffer;
+    }
+    mSlots[item.mSlot].mFrameNumber = item.mFrameNumber;
+
+    mCurrentBuffers.push_back(mSlots[mCurrentSlot].mGraphicBuffer);
+    int64_t prevTimeStamp = mCurrentTimestamp;
+    mCurrentTimestamp = item.mTimestamp;
+
+    mNumFramesEncoded++;
+    // Pass the data to the MediaBuffer. Pass in only the metadata
+
+    passMetadataBuffer_l(buffer, mSlots[mCurrentSlot].mGraphicBuffer->getNativeBuffer());
+
+    (*buffer)->setObserver(this);
+    (*buffer)->add_ref();
+    (*buffer)->meta_data().setInt64(kKeyTime, mCurrentTimestamp / 1000);
+    ALOGV("Frames encoded = %d, timestamp = %" PRId64 ", time diff = %" PRId64,
+            mNumFramesEncoded, mCurrentTimestamp / 1000,
+            mCurrentTimestamp / 1000 - prevTimeStamp / 1000);
+
+    ++mNumPendingBuffers;
+
+#if DEBUG_PENDING_BUFFERS
+    mPendingBuffers.push_back(*buffer);
+#endif
+
+    ALOGV("returning mbuf %p", *buffer);
+
+    return OK;
+}
+
+static buffer_handle_t getMediaBufferHandle(MediaBufferBase *buffer) {
+    // The MediaBuffer carries VideoNativeMetadata; pull the ANativeWindowBuffer
+    // out of it and return its gralloc handle.
+    buffer_handle_t bufferHandle;
+    VideoNativeMetadata *data = (VideoNativeMetadata *)buffer->data();
+    ANativeWindowBuffer *anwbuffer = (ANativeWindowBuffer *)data->pBuffer;
+    bufferHandle = anwbuffer->handle;
+    return bufferHandle;
+}
+
+void SurfaceMediaSource::signalBufferReturned(MediaBufferBase *buffer) {
+    ALOGV("signalBufferReturned");
+
+    bool foundBuffer = false;
+
+    Mutex::Autolock lock(mMutex);
+
+    buffer_handle_t bufferHandle = getMediaBufferHandle(buffer);
+    ANativeWindowBuffer* curNativeHandle = NULL;
+
+    for (size_t i = 0; i < mCurrentBuffers.size(); i++) {
+        curNativeHandle = mCurrentBuffers[i]->getNativeBuffer();
+        if ((mCurrentBuffers[i]->handle == bufferHandle) ||
+            ((buffer_handle_t)curNativeHandle == bufferHandle)) {
+            mCurrentBuffers.removeAt(i);
+            foundBuffer = true;
+            break;
+        }
+    }
+
+    if (!foundBuffer) {
+        ALOGW("returned buffer was not found in the current buffer list");
+    }
+
+    for (int id = 0; id < BufferQueue::NUM_BUFFER_SLOTS; id++) {
+        if (mSlots[id].mGraphicBuffer == NULL) {
+            continue;
+        }
+
+        curNativeHandle = mSlots[id].mGraphicBuffer->getNativeBuffer();
+
+        if ((bufferHandle == mSlots[id].mGraphicBuffer->handle) ||
+            (bufferHandle == (buffer_handle_t)curNativeHandle)) {
+            ALOGV("Slot %d returned, matches handle = %p", id,
+                    mSlots[id].mGraphicBuffer->handle);
+
+            mConsumer->releaseBuffer(id, mSlots[id].mFrameNumber,
+                                        EGL_NO_DISPLAY, EGL_NO_SYNC_KHR,
+                    Fence::NO_FENCE);
+
+            buffer->setObserver(0);
+            buffer->release();
+
+            foundBuffer = true;
+            break;
+        }
+    }
+
+    if (!foundBuffer) {
+        CHECK(!"signalBufferReturned: bogus buffer");
+    }
+
+#if DEBUG_PENDING_BUFFERS
+    for (size_t i = 0; i < mPendingBuffers.size(); ++i) {
+        if (mPendingBuffers.itemAt(i) == buffer) {
+            mPendingBuffers.removeAt(i);
+            break;
+        }
+    }
+#endif
+
+    --mNumPendingBuffers;
+    mMediaBuffersAvailableCondition.broadcast();
+}
+
+// Part of the BufferQueue::ConsumerListener
+void SurfaceMediaSource::onFrameAvailable(const BufferItem& /* item */) {
+    ALOGV("onFrameAvailable");
+
+    sp<FrameAvailableListener> listener;
+    { // scope for the lock
+        Mutex::Autolock lock(mMutex);
+        mFrameAvailableCondition.broadcast();
+        listener = mFrameAvailableListener;
+    }
+
+    if (listener != NULL) {
+        ALOGV("actually calling onFrameAvailable");
+        listener->onFrameAvailable();
+    }
+}
+
+// SurfaceMediaSource hijacks this event to mean that
+// the producer is disconnecting from the BufferQueue
+// and that it should stop the recording.
+void SurfaceMediaSource::onBuffersReleased() {
+    ALOGV("onBuffersReleased");
+
+    Mutex::Autolock lock(mMutex);
+
+    mFrameAvailableCondition.signal();
+
+    for (int i = 0; i < BufferQueue::NUM_BUFFER_SLOTS; i++) {
+       mSlots[i].mGraphicBuffer = 0;
+    }
+}
+
+void SurfaceMediaSource::onSidebandStreamChanged() {
+    ALOG_ASSERT(false, "SurfaceMediaSource can't consume sideband streams");
+}
+
+} // end of namespace android
diff --git a/media/libstagefright/TEST_MAPPING b/media/libstagefright/TEST_MAPPING
index 96818eb..c1b270c 100644
--- a/media/libstagefright/TEST_MAPPING
+++ b/media/libstagefright/TEST_MAPPING
@@ -7,6 +7,9 @@
           "include-annotation": "android.platform.test.annotations.RequiresDevice"
         }
       ]
+    },
+    {
+       "name": "BatteryChecker_test"
     }
   ]
 }
diff --git a/media/libstagefright/codecs/vorbis/dec/SoftVorbis.cpp b/media/libstagefright/codecs/vorbis/dec/SoftVorbis.cpp
index 76a400c..1293a74 100644
--- a/media/libstagefright/codecs/vorbis/dec/SoftVorbis.cpp
+++ b/media/libstagefright/codecs/vorbis/dec/SoftVorbis.cpp
@@ -290,7 +290,7 @@
 }
 
 bool SoftVorbis::isConfigured() const {
-    return mInputBufferCount >= 2;
+    return (mState != NULL && mVi != NULL);
 }
 
 static void makeBitReader(
diff --git a/media/libstagefright/data/media_codecs_sw.xml b/media/libstagefright/data/media_codecs_sw.xml
index 67d3f1a..f69990f 100644
--- a/media/libstagefright/data/media_codecs_sw.xml
+++ b/media/libstagefright/data/media_codecs_sw.xml
@@ -119,9 +119,9 @@
                 <Limit name="bitrate" range="1-48000000" />
             </Variant>
             <Variant name="slow-cpu">
-                <Limit name="size" min="2x2" max="2048x2048" />
+                <Limit name="size" min="2x2" max="1280x1280" />
                 <!-- profiles and levels:  ProfileHigh : Level51 -->
-                <Limit name="block-count" range="1-16384" />
+                <Limit name="block-count" range="1-3600" /> <!-- max 1280x720 -->
                 <Limit name="blocks-per-second" range="1-491520" />
                 <Limit name="bitrate" range="1-40000000" />
             </Variant>
diff --git a/media/libstagefright/AHierarchicalStateMachine.cpp b/media/libstagefright/foundation/AHierarchicalStateMachine.cpp
similarity index 97%
rename from media/libstagefright/AHierarchicalStateMachine.cpp
rename to media/libstagefright/foundation/AHierarchicalStateMachine.cpp
index f89b8b0..b837f66 100644
--- a/media/libstagefright/AHierarchicalStateMachine.cpp
+++ b/media/libstagefright/foundation/AHierarchicalStateMachine.cpp
@@ -18,7 +18,7 @@
 #define LOG_TAG "AHierarchicalStateMachine"
 #include <utils/Log.h>
 
-#include <media/stagefright/AHierarchicalStateMachine.h>
+#include <media/stagefright/foundation/AHierarchicalStateMachine.h>
 
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/AMessage.h>
diff --git a/media/libstagefright/foundation/ANetworkSession.cpp b/media/libstagefright/foundation/ANetworkSession.cpp
new file mode 100644
index 0000000..eafdc37
--- /dev/null
+++ b/media/libstagefright/foundation/ANetworkSession.cpp
@@ -0,0 +1,1401 @@
+/*
+ * Copyright 2012, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "NetworkSession"
+#include <utils/Log.h>
+
+#include "ANetworkSession.h"
+#include "ParsedMessage.h"
+
+#include <arpa/inet.h>
+#include <fcntl.h>
+#include <linux/tcp.h>
+#include <net/if.h>
+#include <netdb.h>
+#include <netinet/in.h>
+#include <sys/ioctl.h>
+#include <sys/socket.h>
+
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/foundation/ByteUtils.h>
+#include <media/stagefright/foundation/hexdump.h>
+
+namespace android {
+
+static const size_t kMaxUDPSize = 1500;
+static const int32_t kMaxUDPRetries = 200;
+
+struct ANetworkSession::NetworkThread : public Thread {
+    explicit NetworkThread(ANetworkSession *session);
+
+protected:
+    virtual ~NetworkThread();
+
+private:
+    ANetworkSession *mSession;
+
+    virtual bool threadLoop();
+
+    DISALLOW_EVIL_CONSTRUCTORS(NetworkThread);
+};
+
+struct ANetworkSession::Session : public RefBase {
+    enum Mode {
+        MODE_RTSP,
+        MODE_DATAGRAM,
+        MODE_WEBSOCKET,
+    };
+
+    enum State {
+        CONNECTING,
+        CONNECTED,
+        LISTENING_RTSP,
+        LISTENING_TCP_DGRAMS,
+        DATAGRAM,
+    };
+
+    Session(int32_t sessionID,
+            State state,
+            int s,
+            const sp<AMessage> &notify);
+
+    int32_t sessionID() const;
+    int socket() const;
+    sp<AMessage> getNotificationMessage() const;
+
+    bool isRTSPServer() const;
+    bool isTCPDatagramServer() const;
+
+    bool wantsToRead();
+    bool wantsToWrite();
+
+    status_t readMore();
+    status_t writeMore();
+
+    status_t sendRequest(
+            const void *data, ssize_t size, bool timeValid, int64_t timeUs);
+
+    void setMode(Mode mode);
+
+    status_t switchToWebSocketMode();
+
+protected:
+    virtual ~Session();
+
+private:
+    enum {
+        FRAGMENT_FLAG_TIME_VALID = 1,
+    };
+    struct Fragment {
+        uint32_t mFlags;
+        int64_t mTimeUs;
+        sp<ABuffer> mBuffer;
+    };
+
+    int32_t mSessionID;
+    State mState;
+    Mode mMode;
+    int mSocket;
+    sp<AMessage> mNotify;
+    bool mSawReceiveFailure, mSawSendFailure;
+    int32_t mUDPRetries;
+
+    List<Fragment> mOutFragments;
+
+    AString mInBuffer;
+
+    int64_t mLastStallReportUs;
+
+    void notifyError(bool send, status_t err, const char *detail);
+    void notify(NotificationReason reason);
+
+    void dumpFragmentStats(const Fragment &frag);
+
+    DISALLOW_EVIL_CONSTRUCTORS(Session);
+};
+////////////////////////////////////////////////////////////////////////////////
+
+ANetworkSession::NetworkThread::NetworkThread(ANetworkSession *session)
+    : mSession(session) {
+}
+
+ANetworkSession::NetworkThread::~NetworkThread() {
+}
+
+bool ANetworkSession::NetworkThread::threadLoop() {
+    mSession->threadLoop();
+
+    return true;
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+ANetworkSession::Session::Session(
+        int32_t sessionID,
+        State state,
+        int s,
+        const sp<AMessage> &notify)
+    : mSessionID(sessionID),
+      mState(state),
+      mMode(MODE_DATAGRAM),
+      mSocket(s),
+      mNotify(notify),
+      mSawReceiveFailure(false),
+      mSawSendFailure(false),
+      mUDPRetries(kMaxUDPRetries),
+      mLastStallReportUs(-1ll) {
+    if (mState == CONNECTED) {
+        struct sockaddr_in localAddr;
+        socklen_t localAddrLen = sizeof(localAddr);
+
+        int res = getsockname(
+                mSocket, (struct sockaddr *)&localAddr, &localAddrLen);
+        CHECK_GE(res, 0);
+
+        struct sockaddr_in remoteAddr;
+        socklen_t remoteAddrLen = sizeof(remoteAddr);
+
+        res = getpeername(
+                mSocket, (struct sockaddr *)&remoteAddr, &remoteAddrLen);
+        CHECK_GE(res, 0);
+
+        in_addr_t addr = ntohl(localAddr.sin_addr.s_addr);
+        AString localAddrString = AStringPrintf(
+                "%d.%d.%d.%d",
+                (addr >> 24),
+                (addr >> 16) & 0xff,
+                (addr >> 8) & 0xff,
+                addr & 0xff);
+
+        addr = ntohl(remoteAddr.sin_addr.s_addr);
+        AString remoteAddrString = AStringPrintf(
+                "%d.%d.%d.%d",
+                (addr >> 24),
+                (addr >> 16) & 0xff,
+                (addr >> 8) & 0xff,
+                addr & 0xff);
+
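+        // Tell the owner about the accepted connection, including both the
+        // local and the remote endpoint.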
+        sp<AMessage> msg = mNotify->dup();
+        msg->setInt32("sessionID", mSessionID);
+        msg->setInt32("reason", kWhatClientConnected);
+        msg->setString("server-ip", localAddrString.c_str());
+        msg->setInt32("server-port", ntohs(localAddr.sin_port));
+        msg->setString("client-ip", remoteAddrString.c_str());
+        msg->setInt32("client-port", ntohs(remoteAddr.sin_port));
+        msg->post();
+    }
+}
+
+ANetworkSession::Session::~Session() {
+    ALOGV("Session %d gone", mSessionID);
+
+    close(mSocket);
+    mSocket = -1;
+}
+
+int32_t ANetworkSession::Session::sessionID() const {
+    return mSessionID;
+}
+
+int ANetworkSession::Session::socket() const {
+    return mSocket;
+}
+
+void ANetworkSession::Session::setMode(Mode mode) {
+    mMode = mode;
+}
+
+status_t ANetworkSession::Session::switchToWebSocketMode() {
+    if (mState != CONNECTED || mMode != MODE_RTSP) {
+        return INVALID_OPERATION;
+    }
+
+    mMode = MODE_WEBSOCKET;
+
+    return OK;
+}
+
+sp<AMessage> ANetworkSession::Session::getNotificationMessage() const {
+    return mNotify;
+}
+
+bool ANetworkSession::Session::isRTSPServer() const {
+    return mState == LISTENING_RTSP;
+}
+
+bool ANetworkSession::Session::isTCPDatagramServer() const {
+    return mState == LISTENING_TCP_DGRAMS;
+}
+
+bool ANetworkSession::Session::wantsToRead() {
+    return !mSawReceiveFailure && mState != CONNECTING;
+}
+
+bool ANetworkSession::Session::wantsToWrite() {
+    return !mSawSendFailure
+        && (mState == CONNECTING
+            || (mState == CONNECTED && !mOutFragments.empty())
+            || (mState == DATAGRAM && !mOutFragments.empty()));
+}
+
+status_t ANetworkSession::Session::readMore() {
+    if (mState == DATAGRAM) {
+        CHECK_EQ(mMode, MODE_DATAGRAM);
+
+        status_t err;
+        do {
+            sp<ABuffer> buf = new ABuffer(kMaxUDPSize);
+
+            struct sockaddr_in remoteAddr;
+            socklen_t remoteAddrLen = sizeof(remoteAddr);
+
+            ssize_t n;
+            do {
+                n = recvfrom(
+                        mSocket, buf->data(), buf->capacity(), 0,
+                        (struct sockaddr *)&remoteAddr, &remoteAddrLen);
+            } while (n < 0 && errno == EINTR);
+
+            err = OK;
+            if (n < 0) {
+                err = -errno;
+            } else if (n == 0) {
+                err = -ECONNRESET;
+            } else {
+                buf->setRange(0, n);
+
+                int64_t nowUs = ALooper::GetNowUs();
+                buf->meta()->setInt64("arrivalTimeUs", nowUs);
+
+                sp<AMessage> notify = mNotify->dup();
+                notify->setInt32("sessionID", mSessionID);
+                notify->setInt32("reason", kWhatDatagram);
+
+                uint32_t ip = ntohl(remoteAddr.sin_addr.s_addr);
+                notify->setString(
+                        "fromAddr",
+                        AStringPrintf(
+                            "%u.%u.%u.%u",
+                            ip >> 24,
+                            (ip >> 16) & 0xff,
+                            (ip >> 8) & 0xff,
+                            ip & 0xff).c_str());
+
+                notify->setInt32("fromPort", ntohs(remoteAddr.sin_port));
+
+                notify->setBuffer("data", buf);
+                notify->post();
+            }
+        } while (err == OK);
+
+        if (err == -EAGAIN) {
+            err = OK;
+        }
+
+        if (err != OK) {
+            if (!mUDPRetries) {
+                notifyError(false /* send */, err, "Recvfrom failed.");
+                mSawReceiveFailure = true;
+            } else {
+                mUDPRetries--;
+                ALOGE("Recvfrom failed, %d/%d retries left",
+                        mUDPRetries, kMaxUDPRetries);
+                err = OK;
+            }
+        } else {
+            mUDPRetries = kMaxUDPRetries;
+        }
+
+        return err;
+    }
+
+    char tmp[512];
+    ssize_t n;
+    do {
+        n = recv(mSocket, tmp, sizeof(tmp), 0);
+    } while (n < 0 && errno == EINTR);
+
+    status_t err = OK;
+
+    if (n > 0) {
+        mInBuffer.append(tmp, n);
+
+#if 0
+        ALOGI("in:");
+        hexdump(tmp, n);
+#endif
+    } else if (n < 0) {
+        err = -errno;
+    } else {
+        err = -ECONNRESET;
+    }
+
+    if (mMode == MODE_DATAGRAM) {
+        // TCP stream carrying 16-bit length-prefixed datagrams.
+
+        while (mInBuffer.size() >= 2) {
+            size_t packetSize = U16_AT((const uint8_t *)mInBuffer.c_str());
+
+            if (mInBuffer.size() < packetSize + 2) {
+                break;
+            }
+
+            sp<ABuffer> packet = new ABuffer(packetSize);
+            memcpy(packet->data(), mInBuffer.c_str() + 2, packetSize);
+
+            int64_t nowUs = ALooper::GetNowUs();
+            packet->meta()->setInt64("arrivalTimeUs", nowUs);
+
+            sp<AMessage> notify = mNotify->dup();
+            notify->setInt32("sessionID", mSessionID);
+            notify->setInt32("reason", kWhatDatagram);
+            notify->setBuffer("data", packet);
+            notify->post();
+
+            mInBuffer.erase(0, packetSize + 2);
+        }
+    } else if (mMode == MODE_RTSP) {
+        for (;;) {
+            size_t length;
+
+            if (mInBuffer.size() > 0 && mInBuffer.c_str()[0] == '$') {
+                if (mInBuffer.size() < 4) {
+                    break;
+                }
+
+                length = U16_AT((const uint8_t *)mInBuffer.c_str() + 2);
+
+                if (mInBuffer.size() < 4 + length) {
+                    break;
+                }
+
+                sp<AMessage> notify = mNotify->dup();
+                notify->setInt32("sessionID", mSessionID);
+                notify->setInt32("reason", kWhatBinaryData);
+                notify->setInt32("channel", mInBuffer.c_str()[1]);
+
+                sp<ABuffer> data = new ABuffer(length);
+                memcpy(data->data(), mInBuffer.c_str() + 4, length);
+
+                int64_t nowUs = ALooper::GetNowUs();
+                data->meta()->setInt64("arrivalTimeUs", nowUs);
+
+                notify->setBuffer("data", data);
+                notify->post();
+
+                mInBuffer.erase(0, 4 + length);
+                continue;
+            }
+
+            sp<ParsedMessage> msg =
+                ParsedMessage::Parse(
+                        mInBuffer.c_str(), mInBuffer.size(), err != OK, &length);
+
+            if (msg == NULL) {
+                break;
+            }
+
+            sp<AMessage> notify = mNotify->dup();
+            notify->setInt32("sessionID", mSessionID);
+            notify->setInt32("reason", kWhatData);
+            notify->setObject("data", msg);
+            notify->post();
+
+#if 1
+            // XXX The (old) dongle sends the wrong content length header on a
+            // SET_PARAMETER request that signals a "wfd_idr_request".
+            // (17 instead of 19).
+            const char *content = msg->getContent();
+            if (content
+                    && !memcmp(content, "wfd_idr_request\r\n", 17)
+                    && length >= 19
+                    && mInBuffer.c_str()[length] == '\r'
+                    && mInBuffer.c_str()[length + 1] == '\n') {
+                length += 2;
+            }
+#endif
+
+            mInBuffer.erase(0, length);
+
+            if (err != OK) {
+                break;
+            }
+        }
+    } else {
+        CHECK_EQ(mMode, MODE_WEBSOCKET);
+
+        const uint8_t *data = (const uint8_t *)mInBuffer.c_str();
+        // hexdump(data, mInBuffer.size());
+
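+        // Each loop iteration parses one WebSocket frame: byte 0 carries FIN
+        // and the opcode, byte 1 carries the MASK bit plus a 7-bit payload
+        // length (values 126/127 select the 16-/64-bit extended lengths that
+        // follow the base header).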
+        while (mInBuffer.size() >= 2) {
+            size_t offset = 2;
+
+            uint64_t payloadLen = data[1] & 0x7f;
+            if (payloadLen == 126) {
+                if (offset + 2 > mInBuffer.size()) {
+                    break;
+                }
+
+                payloadLen = U16_AT(&data[offset]);
+                offset += 2;
+            } else if (payloadLen == 127) {
+                if (offset + 8 > mInBuffer.size()) {
+                    break;
+                }
+
+                payloadLen = U64_AT(&data[offset]);
+                offset += 8;
+            }
+
+            uint32_t mask = 0;
+            if (data[1] & 0x80) {
+                // MASK==1
+                if (offset + 4 > mInBuffer.size()) {
+                    break;
+                }
+
+                mask = U32_AT(&data[offset]);
+                offset += 4;
+            }
+
+            if (payloadLen > mInBuffer.size() || offset > mInBuffer.size() - payloadLen) {
+                break;
+            }
+
+            // We have the full message.
+
+            sp<ABuffer> packet = new ABuffer(payloadLen);
+            memcpy(packet->data(), &data[offset], payloadLen);
+
+            if (mask != 0) {
+                for (size_t i = 0; i < payloadLen; ++i) {
+                    packet->data()[i] =
+                        data[offset + i]
+                            ^ ((mask >> (8 * (3 - (i % 4)))) & 0xff);
+                }
+            }
+
+            sp<AMessage> notify = mNotify->dup();
+            notify->setInt32("sessionID", mSessionID);
+            notify->setInt32("reason", kWhatWebSocketMessage);
+            notify->setBuffer("data", packet);
+            notify->setInt32("headerByte", data[0]);
+            notify->post();
+
+            mInBuffer.erase(0, offset + payloadLen);
+        }
+    }
+
+    if (err != OK) {
+        notifyError(false /* send */, err, "Recv failed.");
+        mSawReceiveFailure = true;
+    }
+
+    return err;
+}
+
+void ANetworkSession::Session::dumpFragmentStats(const Fragment & /* frag */) {
+#if 0
+    int64_t nowUs = ALooper::GetNowUs();
+    int64_t delayMs = (nowUs - frag.mTimeUs) / 1000ll;
+
+    static const int64_t kMinDelayMs = 0;
+    static const int64_t kMaxDelayMs = 300;
+
+    const char *kPattern = "########################################";
+    size_t kPatternSize = strlen(kPattern);
+
+    int n = (kPatternSize * (delayMs - kMinDelayMs))
+                / (kMaxDelayMs - kMinDelayMs);
+
+    if (n < 0) {
+        n = 0;
+    } else if ((size_t)n > kPatternSize) {
+        n = kPatternSize;
+    }
+
+    ALOGI("[%lld]: (%4lld ms) %s\n",
+          frag.mTimeUs / 1000,
+          delayMs,
+          kPattern + kPatternSize - n);
+#endif
+}
+
+status_t ANetworkSession::Session::writeMore() {
+    if (mState == DATAGRAM) {
+        CHECK(!mOutFragments.empty());
+
+        status_t err;
+        do {
+            const Fragment &frag = *mOutFragments.begin();
+            const sp<ABuffer> &datagram = frag.mBuffer;
+
+            int n;
+            do {
+                n = send(mSocket, datagram->data(), datagram->size(), 0);
+            } while (n < 0 && errno == EINTR);
+
+            err = OK;
+
+            if (n > 0) {
+                if (frag.mFlags & FRAGMENT_FLAG_TIME_VALID) {
+                    dumpFragmentStats(frag);
+                }
+
+                mOutFragments.erase(mOutFragments.begin());
+            } else if (n < 0) {
+                err = -errno;
+            } else if (n == 0) {
+                err = -ECONNRESET;
+            }
+        } while (err == OK && !mOutFragments.empty());
+
+        if (err == -EAGAIN) {
+            if (!mOutFragments.empty()) {
+                ALOGI("%zu datagrams remain queued.", mOutFragments.size());
+            }
+            err = OK;
+        }
+
+        if (err != OK) {
+            if (!mUDPRetries) {
+                notifyError(true /* send */, err, "Send datagram failed.");
+                mSawSendFailure = true;
+            } else {
+                mUDPRetries--;
+                ALOGE("Send datagram failed, %d/%d retries left",
+                        mUDPRetries, kMaxUDPRetries);
+                err = OK;
+            }
+        } else {
+            mUDPRetries = kMaxUDPRetries;
+        }
+
+        return err;
+    }
+
+    if (mState == CONNECTING) {
+        int err;
+        socklen_t optionLen = sizeof(err);
+        CHECK_EQ(getsockopt(mSocket, SOL_SOCKET, SO_ERROR, &err, &optionLen), 0);
+        CHECK_EQ(optionLen, (socklen_t)sizeof(err));
+
+        if (err != 0) {
+            notifyError(kWhatError, -err, "Connection failed");
+            mSawSendFailure = true;
+
+            return -err;
+        }
+
+        mState = CONNECTED;
+        notify(kWhatConnected);
+
+        return OK;
+    }
+
+    CHECK_EQ(mState, CONNECTED);
+    CHECK(!mOutFragments.empty());
+
+    ssize_t n = -1;
+    while (!mOutFragments.empty()) {
+        const Fragment &frag = *mOutFragments.begin();
+
+        do {
+            n = send(mSocket, frag.mBuffer->data(), frag.mBuffer->size(), 0);
+        } while (n < 0 && errno == EINTR);
+
+        if (n <= 0) {
+            break;
+        }
+
+        frag.mBuffer->setRange(
+                frag.mBuffer->offset() + n, frag.mBuffer->size() - n);
+
+        if (frag.mBuffer->size() > 0) {
+            break;
+        }
+
+        if (frag.mFlags & FRAGMENT_FLAG_TIME_VALID) {
+            dumpFragmentStats(frag);
+        }
+
+        mOutFragments.erase(mOutFragments.begin());
+    }
+
+    status_t err = OK;
+
+    if (n < 0) {
+        err = -errno;
+    } else if (n == 0) {
+        err = -ECONNRESET;
+    }
+
+    if (err != OK) {
+        notifyError(true /* send */, err, "Send failed.");
+        mSawSendFailure = true;
+    }
+
+#if 0
+    int numBytesQueued;
+    int res = ioctl(mSocket, SIOCOUTQ, &numBytesQueued);
+    if (res == 0 && numBytesQueued > 50 * 1024) {
+        if (numBytesQueued > 409600) {
+            ALOGW("!!! numBytesQueued = %d", numBytesQueued);
+        }
+
+        int64_t nowUs = ALooper::GetNowUs();
+
+        if (mLastStallReportUs < 0ll
+                || nowUs > mLastStallReportUs + 100000ll) {
+            sp<AMessage> msg = mNotify->dup();
+            msg->setInt32("sessionID", mSessionID);
+            msg->setInt32("reason", kWhatNetworkStall);
+            msg->setSize("numBytesQueued", numBytesQueued);
+            msg->post();
+
+            mLastStallReportUs = nowUs;
+        }
+    }
+#endif
+
+    return err;
+}
+
+status_t ANetworkSession::Session::sendRequest(
+        const void *data, ssize_t size, bool timeValid, int64_t timeUs) {
+    CHECK(mState == CONNECTED || mState == DATAGRAM);
+
+    if (size < 0) {
+        size = strlen((const char *)data);
+    }
+
+    if (size == 0) {
+        return OK;
+    }
+
+    sp<ABuffer> buffer;
+
+    if (mState == CONNECTED && mMode == MODE_DATAGRAM) {
+        CHECK_LE(size, 65535);
+
+        buffer = new ABuffer(size + 2);
+        buffer->data()[0] = size >> 8;
+        buffer->data()[1] = size & 0xff;
+        memcpy(buffer->data() + 2, data, size);
+    } else if (mState == CONNECTED && mMode == MODE_WEBSOCKET) {
+        static const bool kUseMask = false;  // Chromium doesn't like it.
+
+        size_t numHeaderBytes = 2 + (kUseMask ? 4 : 0);
+        if (size > 65535) {
+            numHeaderBytes += 8;
+        } else if (size > 125) {
+            numHeaderBytes += 2;
+        }
+
+        buffer = new ABuffer(numHeaderBytes + size);
+        buffer->data()[0] = 0x81;  // FIN==1 | opcode=1 (text)
+        buffer->data()[1] = kUseMask ? 0x80 : 0x00;
+
+        if (size > 65535) {
+            buffer->data()[1] |= 127;
+            buffer->data()[2] = 0x00;
+            buffer->data()[3] = 0x00;
+            buffer->data()[4] = 0x00;
+            buffer->data()[5] = 0x00;
+            buffer->data()[6] = (size >> 24) & 0xff;
+            buffer->data()[7] = (size >> 16) & 0xff;
+            buffer->data()[8] = (size >> 8) & 0xff;
+            buffer->data()[9] = size & 0xff;
+        } else if (size > 125) {
+            buffer->data()[1] |= 126;
+            buffer->data()[2] = (size >> 8) & 0xff;
+            buffer->data()[3] = size & 0xff;
+        } else {
+            buffer->data()[1] |= size;
+        }
+
+        if (kUseMask) {
+            uint32_t mask = rand();
+
+            buffer->data()[numHeaderBytes - 4] = (mask >> 24) & 0xff;
+            buffer->data()[numHeaderBytes - 3] = (mask >> 16) & 0xff;
+            buffer->data()[numHeaderBytes - 2] = (mask >> 8) & 0xff;
+            buffer->data()[numHeaderBytes - 1] = mask & 0xff;
+
+            for (size_t i = 0; i < (size_t)size; ++i) {
+                buffer->data()[numHeaderBytes + i] =
+                    ((const uint8_t *)data)[i]
+                        ^ ((mask >> (8 * (3 - (i % 4)))) & 0xff);
+            }
+        } else {
+            memcpy(buffer->data() + numHeaderBytes, data, size);
+        }
+    } else {
+        buffer = new ABuffer(size);
+        memcpy(buffer->data(), data, size);
+    }
+
+    Fragment frag;
+
+    frag.mFlags = 0;
+    if (timeValid) {
+        frag.mFlags = FRAGMENT_FLAG_TIME_VALID;
+        frag.mTimeUs = timeUs;
+    }
+
+    frag.mBuffer = buffer;
+
+    mOutFragments.push_back(frag);
+
+    return OK;
+}
+
+void ANetworkSession::Session::notifyError(
+        bool send, status_t err, const char *detail) {
+    sp<AMessage> msg = mNotify->dup();
+    msg->setInt32("sessionID", mSessionID);
+    msg->setInt32("reason", kWhatError);
+    msg->setInt32("send", send);
+    msg->setInt32("err", err);
+    msg->setString("detail", detail);
+    msg->post();
+}
+
+void ANetworkSession::Session::notify(NotificationReason reason) {
+    sp<AMessage> msg = mNotify->dup();
+    msg->setInt32("sessionID", mSessionID);
+    msg->setInt32("reason", reason);
+    msg->post();
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+ANetworkSession::ANetworkSession()
+    : mNextSessionID(1) {
+    mPipeFd[0] = mPipeFd[1] = -1;
+}
+
+ANetworkSession::~ANetworkSession() {
+    stop();
+}
+
+status_t ANetworkSession::start() {
+    if (mThread != NULL) {
+        return INVALID_OPERATION;
+    }
+
+    int res = pipe(mPipeFd);
+    if (res != 0) {
+        mPipeFd[0] = mPipeFd[1] = -1;
+        return -errno;
+    }
+
+    mThread = new NetworkThread(this);
+
+    status_t err = mThread->run("ANetworkSession", ANDROID_PRIORITY_AUDIO);
+
+    if (err != OK) {
+        mThread.clear();
+
+        close(mPipeFd[0]);
+        close(mPipeFd[1]);
+        mPipeFd[0] = mPipeFd[1] = -1;
+
+        return err;
+    }
+
+    return OK;
+}
+
+status_t ANetworkSession::stop() {
+    if (mThread == NULL) {
+        return INVALID_OPERATION;
+    }
+
+    mThread->requestExit();
+    interrupt();
+    mThread->requestExitAndWait();
+
+    mThread.clear();
+
+    close(mPipeFd[0]);
+    close(mPipeFd[1]);
+    mPipeFd[0] = mPipeFd[1] = -1;
+
+    return OK;
+}
+
+status_t ANetworkSession::createRTSPClient(
+        const char *host, unsigned port, const sp<AMessage> &notify,
+        int32_t *sessionID) {
+    return createClientOrServer(
+            kModeCreateRTSPClient,
+            NULL /* addr */,
+            0 /* port */,
+            host,
+            port,
+            notify,
+            sessionID);
+}
+
+status_t ANetworkSession::createRTSPServer(
+        const struct in_addr &addr, unsigned port,
+        const sp<AMessage> &notify, int32_t *sessionID) {
+    return createClientOrServer(
+            kModeCreateRTSPServer,
+            &addr,
+            port,
+            NULL /* remoteHost */,
+            0 /* remotePort */,
+            notify,
+            sessionID);
+}
+
+status_t ANetworkSession::createUDPSession(
+        unsigned localPort, const sp<AMessage> &notify, int32_t *sessionID) {
+    return createUDPSession(localPort, NULL, 0, notify, sessionID);
+}
+
+status_t ANetworkSession::createUDPSession(
+        unsigned localPort,
+        const char *remoteHost,
+        unsigned remotePort,
+        const sp<AMessage> &notify,
+        int32_t *sessionID) {
+    return createClientOrServer(
+            kModeCreateUDPSession,
+            NULL /* addr */,
+            localPort,
+            remoteHost,
+            remotePort,
+            notify,
+            sessionID);
+}
+
+status_t ANetworkSession::createTCPDatagramSession(
+        const struct in_addr &addr, unsigned port,
+        const sp<AMessage> &notify, int32_t *sessionID) {
+    return createClientOrServer(
+            kModeCreateTCPDatagramSessionPassive,
+            &addr,
+            port,
+            NULL /* remoteHost */,
+            0 /* remotePort */,
+            notify,
+            sessionID);
+}
+
+status_t ANetworkSession::createTCPDatagramSession(
+        unsigned localPort,
+        const char *remoteHost,
+        unsigned remotePort,
+        const sp<AMessage> &notify,
+        int32_t *sessionID) {
+    return createClientOrServer(
+            kModeCreateTCPDatagramSessionActive,
+            NULL /* addr */,
+            localPort,
+            remoteHost,
+            remotePort,
+            notify,
+            sessionID);
+}
+
+status_t ANetworkSession::destroySession(int32_t sessionID) {
+    Mutex::Autolock autoLock(mLock);
+
+    ssize_t index = mSessions.indexOfKey(sessionID);
+
+    if (index < 0) {
+        return -ENOENT;
+    }
+
+    mSessions.removeItemsAt(index);
+
+    interrupt();
+
+    return OK;
+}
+
+// static
+status_t ANetworkSession::MakeSocketNonBlocking(int s) {
+    int flags = fcntl(s, F_GETFL, 0);
+    if (flags < 0) {
+        flags = 0;
+    }
+
+    int res = fcntl(s, F_SETFL, flags | O_NONBLOCK);
+    if (res < 0) {
+        return -errno;
+    }
+
+    return OK;
+}
+
+status_t ANetworkSession::createClientOrServer(
+        Mode mode,
+        const struct in_addr *localAddr,
+        unsigned port,
+        const char *remoteHost,
+        unsigned remotePort,
+        const sp<AMessage> &notify,
+        int32_t *sessionID) {
+    Mutex::Autolock autoLock(mLock);
+
+    *sessionID = 0;
+    status_t err = OK;
+    int s, res;
+    sp<Session> session;
+
+    s = socket(
+            AF_INET,
+            (mode == kModeCreateUDPSession) ? SOCK_DGRAM : SOCK_STREAM,
+            0);
+
+    if (s < 0) {
+        err = -errno;
+        goto bail;
+    }
+
+    if (mode == kModeCreateRTSPServer
+            || mode == kModeCreateTCPDatagramSessionPassive) {
+        const int yes = 1;
+        res = setsockopt(s, SOL_SOCKET, SO_REUSEADDR, &yes, sizeof(yes));
+
+        if (res < 0) {
+            err = -errno;
+            goto bail2;
+        }
+    }
+
+    if (mode == kModeCreateUDPSession) {
+        int size = 256 * 1024;
+
+        res = setsockopt(s, SOL_SOCKET, SO_RCVBUF, &size, sizeof(size));
+
+        if (res < 0) {
+            err = -errno;
+            goto bail2;
+        }
+
+        res = setsockopt(s, SOL_SOCKET, SO_SNDBUF, &size, sizeof(size));
+
+        if (res < 0) {
+            err = -errno;
+            goto bail2;
+        }
+    } else if (mode == kModeCreateTCPDatagramSessionActive) {
+        int flag = 1;
+        res = setsockopt(s, IPPROTO_TCP, TCP_NODELAY, &flag, sizeof(flag));
+
+        if (res < 0) {
+            err = -errno;
+            goto bail2;
+        }
+
+        int tos = 224;  // VOICE
+        res = setsockopt(s, IPPROTO_IP, IP_TOS, &tos, sizeof(tos));
+
+        if (res < 0) {
+            err = -errno;
+            goto bail2;
+        }
+    }
+
+    err = MakeSocketNonBlocking(s);
+
+    if (err != OK) {
+        goto bail2;
+    }
+
+    struct sockaddr_in addr;
+    memset(addr.sin_zero, 0, sizeof(addr.sin_zero));
+    addr.sin_family = AF_INET;
+
+    if (mode == kModeCreateRTSPClient
+            || mode == kModeCreateTCPDatagramSessionActive) {
+        struct hostent *ent = gethostbyname(remoteHost);
+        if (ent == NULL) {
+            err = -h_errno;
+            goto bail2;
+        }
+
+        addr.sin_addr.s_addr = *(in_addr_t *)ent->h_addr;
+        addr.sin_port = htons(remotePort);
+    } else if (localAddr != NULL) {
+        addr.sin_addr = *localAddr;
+        addr.sin_port = htons(port);
+    } else {
+        addr.sin_addr.s_addr = htonl(INADDR_ANY);
+        addr.sin_port = htons(port);
+    }
+
+    if (mode == kModeCreateRTSPClient
+            || mode == kModeCreateTCPDatagramSessionActive) {
+        in_addr_t x = ntohl(addr.sin_addr.s_addr);
+        ALOGI("connecting socket %d to %d.%d.%d.%d:%d",
+              s,
+              (x >> 24),
+              (x >> 16) & 0xff,
+              (x >> 8) & 0xff,
+              x & 0xff,
+              ntohs(addr.sin_port));
+
+        res = connect(s, (const struct sockaddr *)&addr, sizeof(addr));
+
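+        // The socket was made non-blocking above, so connect(2) is expected
+        // to return -1 with errno set to EINPROGRESS; completion or failure
+        // is detected later in writeMore() via getsockopt(SO_ERROR).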
+        CHECK_LT(res, 0);
+        if (errno == EINPROGRESS) {
+            res = 0;
+        }
+    } else {
+        res = bind(s, (const struct sockaddr *)&addr, sizeof(addr));
+
+        if (res == 0) {
+            if (mode == kModeCreateRTSPServer
+                    || mode == kModeCreateTCPDatagramSessionPassive) {
+                res = listen(s, 4);
+            } else {
+                CHECK_EQ(mode, kModeCreateUDPSession);
+
+                if (remoteHost != NULL) {
+                    struct sockaddr_in remoteAddr;
+                    memset(remoteAddr.sin_zero, 0, sizeof(remoteAddr.sin_zero));
+                    remoteAddr.sin_family = AF_INET;
+                    remoteAddr.sin_port = htons(remotePort);
+
+                    struct hostent *ent = gethostbyname(remoteHost);
+                    if (ent == NULL) {
+                        err = -h_errno;
+                        goto bail2;
+                    }
+
+                    remoteAddr.sin_addr.s_addr = *(in_addr_t *)ent->h_addr;
+
+                    res = connect(
+                            s,
+                            (const struct sockaddr *)&remoteAddr,
+                            sizeof(remoteAddr));
+                }
+            }
+        }
+    }
+
+    if (res < 0) {
+        err = -errno;
+        goto bail2;
+    }
+
+    Session::State state;
+    switch (mode) {
+        case kModeCreateRTSPClient:
+            state = Session::CONNECTING;
+            break;
+
+        case kModeCreateTCPDatagramSessionActive:
+            state = Session::CONNECTING;
+            break;
+
+        case kModeCreateTCPDatagramSessionPassive:
+            state = Session::LISTENING_TCP_DGRAMS;
+            break;
+
+        case kModeCreateRTSPServer:
+            state = Session::LISTENING_RTSP;
+            break;
+
+        default:
+            CHECK_EQ(mode, kModeCreateUDPSession);
+            state = Session::DATAGRAM;
+            break;
+    }
+
+    session = new Session(
+            mNextSessionID++,
+            state,
+            s,
+            notify);
+
+    if (mode == kModeCreateTCPDatagramSessionActive) {
+        session->setMode(Session::MODE_DATAGRAM);
+    } else if (mode == kModeCreateRTSPClient) {
+        session->setMode(Session::MODE_RTSP);
+    }
+
+    mSessions.add(session->sessionID(), session);
+
+    interrupt();
+
+    *sessionID = session->sessionID();
+
+    goto bail;
+
+bail2:
+    close(s);
+    s = -1;
+
+bail:
+    return err;
+}
+
+status_t ANetworkSession::connectUDPSession(
+        int32_t sessionID, const char *remoteHost, unsigned remotePort) {
+    Mutex::Autolock autoLock(mLock);
+
+    ssize_t index = mSessions.indexOfKey(sessionID);
+
+    if (index < 0) {
+        return -ENOENT;
+    }
+
+    const sp<Session> session = mSessions.valueAt(index);
+    int s = session->socket();
+
+    struct sockaddr_in remoteAddr;
+    memset(remoteAddr.sin_zero, 0, sizeof(remoteAddr.sin_zero));
+    remoteAddr.sin_family = AF_INET;
+    remoteAddr.sin_port = htons(remotePort);
+
+    status_t err = OK;
+    struct hostent *ent = gethostbyname(remoteHost);
+    if (ent == NULL) {
+        err = -h_errno;
+    } else {
+        remoteAddr.sin_addr.s_addr = *(in_addr_t *)ent->h_addr;
+
+        int res = connect(
+                s,
+                (const struct sockaddr *)&remoteAddr,
+                sizeof(remoteAddr));
+
+        if (res < 0) {
+            err = -errno;
+        }
+    }
+
+    return err;
+}
+
+status_t ANetworkSession::sendRequest(
+        int32_t sessionID, const void *data, ssize_t size,
+        bool timeValid, int64_t timeUs) {
+    Mutex::Autolock autoLock(mLock);
+
+    ssize_t index = mSessions.indexOfKey(sessionID);
+
+    if (index < 0) {
+        return -ENOENT;
+    }
+
+    const sp<Session> session = mSessions.valueAt(index);
+
+    status_t err = session->sendRequest(data, size, timeValid, timeUs);
+
+    interrupt();
+
+    return err;
+}
+
+status_t ANetworkSession::switchToWebSocketMode(int32_t sessionID) {
+    Mutex::Autolock autoLock(mLock);
+
+    ssize_t index = mSessions.indexOfKey(sessionID);
+
+    if (index < 0) {
+        return -ENOENT;
+    }
+
+    const sp<Session> session = mSessions.valueAt(index);
+    return session->switchToWebSocketMode();
+}
+
+void ANetworkSession::interrupt() {
+    static const char dummy = 0;
+
+    ssize_t n;
+    do {
+        n = write(mPipeFd[1], &dummy, 1);
+    } while (n < 0 && errno == EINTR);
+
+    if (n < 0) {
+        ALOGW("Error writing to pipe (%s)", strerror(errno));
+    }
+}
+
+void ANetworkSession::threadLoop() {
+    fd_set rs, ws;
+    FD_ZERO(&rs);
+    FD_ZERO(&ws);
+
+    FD_SET(mPipeFd[0], &rs);
+    int maxFd = mPipeFd[0];
+
+    {
+        Mutex::Autolock autoLock(mLock);
+
+        for (size_t i = 0; i < mSessions.size(); ++i) {
+            const sp<Session> &session = mSessions.valueAt(i);
+
+            int s = session->socket();
+
+            if (s < 0) {
+                continue;
+            }
+
+            if (session->wantsToRead()) {
+                FD_SET(s, &rs);
+                if (s > maxFd) {
+                    maxFd = s;
+                }
+            }
+
+            if (session->wantsToWrite()) {
+                FD_SET(s, &ws);
+                if (s > maxFd) {
+                    maxFd = s;
+                }
+            }
+        }
+    }
+
+    int res = select(maxFd + 1, &rs, &ws, NULL, NULL /* tv */);
+
+    if (res == 0) {
+        return;
+    }
+
+    if (res < 0) {
+        if (errno == EINTR) {
+            return;
+        }
+
+        ALOGE("select failed w/ error %d (%s)", errno, strerror(errno));
+        return;
+    }
+
+    if (FD_ISSET(mPipeFd[0], &rs)) {
+        char c;
+        ssize_t n;
+        do {
+            n = read(mPipeFd[0], &c, 1);
+        } while (n < 0 && errno == EINTR);
+
+        if (n < 0) {
+            ALOGW("Error reading from pipe (%s)", strerror(errno));
+        }
+
+        --res;
+    }
+
+    {
+        Mutex::Autolock autoLock(mLock);
+
+        List<sp<Session> > sessionsToAdd;
+
+        for (size_t i = mSessions.size(); res > 0 && i > 0;) {
+            i--;
+            const sp<Session> &session = mSessions.valueAt(i);
+
+            int s = session->socket();
+
+            if (s < 0) {
+                continue;
+            }
+
+            if (FD_ISSET(s, &rs) || FD_ISSET(s, &ws)) {
+                --res;
+            }
+
+            if (FD_ISSET(s, &rs)) {
+                if (session->isRTSPServer() || session->isTCPDatagramServer()) {
+                    struct sockaddr_in remoteAddr;
+                    socklen_t remoteAddrLen = sizeof(remoteAddr);
+
+                    int clientSocket = accept(
+                            s, (struct sockaddr *)&remoteAddr, &remoteAddrLen);
+
+                    if (clientSocket >= 0) {
+                        status_t err = MakeSocketNonBlocking(clientSocket);
+
+                        if (err != OK) {
+                            ALOGE("Unable to make client socket non blocking, "
+                                  "failed w/ error %d (%s)",
+                                  err, strerror(-err));
+
+                            close(clientSocket);
+                            clientSocket = -1;
+                        } else {
+                            in_addr_t addr = ntohl(remoteAddr.sin_addr.s_addr);
+
+                            ALOGI("incoming connection from %d.%d.%d.%d:%d "
+                                  "(socket %d)",
+                                  (addr >> 24),
+                                  (addr >> 16) & 0xff,
+                                  (addr >> 8) & 0xff,
+                                  addr & 0xff,
+                                  ntohs(remoteAddr.sin_port),
+                                  clientSocket);
+
+                            sp<Session> clientSession =
+                                new Session(
+                                        mNextSessionID++,
+                                        Session::CONNECTED,
+                                        clientSocket,
+                                        session->getNotificationMessage());
+
+                            clientSession->setMode(
+                                    session->isRTSPServer()
+                                        ? Session::MODE_RTSP
+                                        : Session::MODE_DATAGRAM);
+
+                            sessionsToAdd.push_back(clientSession);
+                        }
+                    } else {
+                        ALOGE("accept returned error %d (%s)",
+                              errno, strerror(errno));
+                    }
+                } else {
+                    status_t err = session->readMore();
+                    if (err != OK) {
+                        ALOGE("readMore on socket %d failed w/ error %d (%s)",
+                              s, err, strerror(-err));
+                    }
+                }
+            }
+
+            if (FD_ISSET(s, &ws)) {
+                status_t err = session->writeMore();
+                if (err != OK) {
+                    ALOGE("writeMore on socket %d failed w/ error %d (%s)",
+                          s, err, strerror(-err));
+                }
+            }
+        }
+
+        while (!sessionsToAdd.empty()) {
+            sp<Session> session = *sessionsToAdd.begin();
+            sessionsToAdd.erase(sessionsToAdd.begin());
+
+            mSessions.add(session->sessionID(), session);
+
+            ALOGI("added clientSession %d", session->sessionID());
+        }
+    }
+}
+
+}  // namespace android
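Session reports all of its activity through the AMessage supplied at creation
time, tagging each notification with a "sessionID" and a "reason". A minimal
sketch of a receiver follows; it assumes a handler already registered on an
ALooper, and the handler name is illustrative rather than part of this change:

    #define LOG_TAG "NetworkNotifyHandler"
    #include <media/stagefright/foundation/ABuffer.h>
    #include <media/stagefright/foundation/ADebug.h>
    #include <media/stagefright/foundation/AHandler.h>
    #include <media/stagefright/foundation/AMessage.h>
    #include <media/stagefright/foundation/ANetworkSession.h>
    #include <media/stagefright/foundation/AString.h>
    #include <utils/Log.h>

    using namespace android;

    // Illustrative receiver for ANetworkSession notifications.
    struct NetworkNotifyHandler : public AHandler {
        void onMessageReceived(const sp<AMessage> &msg) override {
            int32_t sessionID, reason;
            CHECK(msg->findInt32("sessionID", &sessionID));
            CHECK(msg->findInt32("reason", &reason));

            switch (reason) {
                case ANetworkSession::kWhatClientConnected:
                {
                    AString clientIP;
                    int32_t clientPort;
                    CHECK(msg->findString("client-ip", &clientIP));
                    CHECK(msg->findInt32("client-port", &clientPort));
                    ALOGI("client %s:%d connected, session %d",
                          clientIP.c_str(), clientPort, sessionID);
                    break;
                }

                case ANetworkSession::kWhatDatagram:
                {
                    sp<ABuffer> data;
                    CHECK(msg->findBuffer("data", &data));
                    ALOGI("session %d received a %zu byte datagram",
                          sessionID, data->size());
                    break;
                }

                case ANetworkSession::kWhatError:
                {
                    int32_t err;
                    AString detail;
                    CHECK(msg->findInt32("err", &err));
                    CHECK(msg->findString("detail", &detail));
                    ALOGE("session %d failed: %d (%s)",
                          sessionID, err, detail.c_str());
                    break;
                }

                default:
                    break;
            }
        }
    };

kWhatData carries a ParsedMessage object rather than an ABuffer, and
kWhatBinaryData/kWhatWebSocketMessage carry the interleaved and WebSocket
payloads produced by readMore() above.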
diff --git a/media/libstagefright/foundation/Android.bp b/media/libstagefright/foundation/Android.bp
index 533cd72..f55dd2f 100644
--- a/media/libstagefright/foundation/Android.bp
+++ b/media/libstagefright/foundation/Android.bp
@@ -57,9 +57,11 @@
         "ABuffer.cpp",
         "ADebug.cpp",
         "AHandler.cpp",
+        "AHierarchicalStateMachine.cpp",
         "ALooper.cpp",
         "ALooperRoster.cpp",
         "AMessage.cpp",
+        "ANetworkSession.cpp",
         "AString.cpp",
         "AStringUtils.cpp",
         "AudioPresentationInfo.cpp",
@@ -73,6 +75,7 @@
         "MetaData.cpp",
         "MetaDataBase.cpp",
         "OpusHeader.cpp",
+        "ParsedMessage.cpp",
         "avc_utils.cpp",
         "base64.cpp",
         "hexdump.cpp",
diff --git a/media/libstagefright/foundation/ColorUtils.cpp b/media/libstagefright/foundation/ColorUtils.cpp
index 070e325..e654c68 100644
--- a/media/libstagefright/foundation/ColorUtils.cpp
+++ b/media/libstagefright/foundation/ColorUtils.cpp
@@ -93,12 +93,19 @@
     return c <= ColorAspects::MatrixBT2020Constant;
 }
 
+// Allow access to sStandardFallbacks defined further down without changing the
+// code structure compared to AOSP.
+static const ALookup<CU::ColorStandard, std::pair<CA::Primaries, CA::MatrixCoeffs>>&
+getStandardFallbacks();
+
 //static
 int32_t ColorUtils::wrapColorAspectsIntoColorStandard(
         ColorAspects::Primaries primaries, ColorAspects::MatrixCoeffs coeffs) {
     ColorStandard res;
     if (sStandards.map(std::make_pair(primaries, coeffs), &res)) {
         return res;
+    } else if (getStandardFallbacks().map(std::make_pair(primaries, coeffs), &res)) {
+        return res;
     } else if (!isValid(primaries) || !isValid(coeffs)) {
         return kColorStandardUnspecified;
     }
@@ -430,6 +437,10 @@
     }
 };
 
+static const ALookup<CU::ColorStandard, std::pair<CA::Primaries, CA::MatrixCoeffs>>& getStandardFallbacks() {
+    return sStandardFallbacks;
+}
+
 const static
 ALookup<CU::ColorStandard, CA::Primaries> sStandardPrimariesFallbacks {
     {
diff --git a/media/libstagefright/foundation/OpusHeader.cpp b/media/libstagefright/foundation/OpusHeader.cpp
index 513e41f..f5687e0 100644
--- a/media/libstagefright/foundation/OpusHeader.cpp
+++ b/media/libstagefright/foundation/OpusHeader.cpp
@@ -292,6 +292,10 @@
         *opusHeadSize = data_size;
         return true;
     } else if (memcmp(AOPUS_CSD_MARKER_PREFIX, data, AOPUS_CSD_MARKER_PREFIX_SIZE) == 0) {
+        if (data_size < AOPUS_UNIFIED_CSD_MINSIZE || data_size > AOPUS_UNIFIED_CSD_MAXSIZE) {
+            ALOGD("Unexpected size for unified opus csd %zu", data_size);
+            return false;
+        }
         size_t i = 0;
         bool found = false;
         while (i <= data_size - AOPUS_MARKER_SIZE - AOPUS_LENGTH_SIZE) {
diff --git a/media/libstagefright/foundation/ParsedMessage.cpp b/media/libstagefright/foundation/ParsedMessage.cpp
new file mode 100644
index 0000000..049c9ad
--- /dev/null
+++ b/media/libstagefright/foundation/ParsedMessage.cpp
@@ -0,0 +1,302 @@
+/*
+ * Copyright 2012, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "ParsedMessage.h"
+
+#include <ctype.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/hexdump.h>
+
+namespace android {
+
+// static
+sp<ParsedMessage> ParsedMessage::Parse(
+        const char *data, size_t size, bool noMoreData, size_t *length) {
+    sp<ParsedMessage> msg = new ParsedMessage;
+    ssize_t res = msg->parse(data, size, noMoreData);
+
+    if (res < 0) {
+        *length = 0;
+        return NULL;
+    }
+
+    *length = res;
+    return msg;
+}
+
+ParsedMessage::ParsedMessage() {
+}
+
+ParsedMessage::~ParsedMessage() {
+}
+
+bool ParsedMessage::findString(const char *name, AString *value) const {
+    AString key = name;
+    key.tolower();
+
+    ssize_t index = mDict.indexOfKey(key);
+
+    if (index < 0) {
+        value->clear();
+
+        return false;
+    }
+
+    *value = mDict.valueAt(index);
+    return true;
+}
+
+bool ParsedMessage::findInt32(const char *name, int32_t *value) const {
+    AString stringValue;
+
+    if (!findString(name, &stringValue)) {
+        return false;
+    }
+
+    char *end;
+    *value = strtol(stringValue.c_str(), &end, 10);
+
+    if (end == stringValue.c_str() || *end != '\0') {
+        *value = 0;
+        return false;
+    }
+
+    return true;
+}
+
+const char *ParsedMessage::getContent() const {
+    return mContent.c_str();
+}
+
+ssize_t ParsedMessage::parse(const char *data, size_t size, bool noMoreData) {
+    if (size == 0) {
+        return -1;
+    }
+
+    ssize_t lastDictIndex = -1;
+
+    size_t offset = 0;
+    bool headersComplete = false;
+    while (offset < size) {
+        size_t lineEndOffset = offset;
+        while (lineEndOffset + 1 < size
+                && (data[lineEndOffset] != '\r'
+                        || data[lineEndOffset + 1] != '\n')) {
+            ++lineEndOffset;
+        }
+
+        if (lineEndOffset + 1 >= size) {
+            return -1;
+        }
+
+        AString line(&data[offset], lineEndOffset - offset);
+
+        if (offset == 0) {
+            // Special handling for the request/status line.
+
+            mDict.add(AString("_"), line);
+            offset = lineEndOffset + 2;
+
+            continue;
+        }
+
+        if (lineEndOffset == offset) {
+            // An empty line separates headers from body.
+            headersComplete = true;
+            offset += 2;
+            break;
+        }
+
+        if (line.c_str()[0] == ' ' || line.c_str()[0] == '\t') {
+            // Support for folded header values.
+
+            if (lastDictIndex >= 0) {
+                // Otherwise it's malformed since the first header line
+                // cannot continue anything...
+
+                AString &value = mDict.editValueAt(lastDictIndex);
+                value.append(line);
+            }
+
+            offset = lineEndOffset + 2;
+            continue;
+        }
+
+        ssize_t colonPos = line.find(":");
+        if (colonPos >= 0) {
+            AString key(line, 0, colonPos);
+            key.trim();
+            key.tolower();
+
+            line.erase(0, colonPos + 1);
+
+            lastDictIndex = mDict.add(key, line);
+        }
+
+        offset = lineEndOffset + 2;
+    }
+
+    if (!headersComplete && (!noMoreData || offset == 0)) {
+        // Headers are not complete: we have not seen the empty line that
+        // separates headers from body, and either more data may still arrive
+        // or we could not even parse a complete first line.
+        return -1;
+    }
+
+    for (size_t i = 0; i < mDict.size(); ++i) {
+        mDict.editValueAt(i).trim();
+    }
+
+    int32_t contentLength;
+    if (!findInt32("content-length", &contentLength) || contentLength < 0) {
+        contentLength = 0;
+    }
+
+    size_t totalLength = offset + contentLength;
+
+    if (size < totalLength) {
+        return -1;
+    }
+
+    mContent.setTo(&data[offset], contentLength);
+
+    return totalLength;
+}
+
+bool ParsedMessage::getRequestField(size_t index, AString *field) const {
+    AString line;
+    CHECK(findString("_", &line));
+
+    size_t prevOffset = 0;
+    size_t offset = 0;
+    for (size_t i = 0; i <= index; ++i) {
+        if (offset >= line.size()) {
+            return false;
+        }
+
+        ssize_t spacePos = line.find(" ", offset);
+
+        if (spacePos < 0) {
+            spacePos = line.size();
+        }
+
+        prevOffset = offset;
+        offset = spacePos + 1;
+    }
+
+    field->setTo(line, prevOffset, offset - prevOffset - 1);
+
+    return true;
+}
+
+bool ParsedMessage::getStatusCode(int32_t *statusCode) const {
+    AString statusCodeString;
+    if (!getRequestField(1, &statusCodeString)) {
+        *statusCode = 0;
+        return false;
+    }
+
+    char *end;
+    *statusCode = strtol(statusCodeString.c_str(), &end, 10);
+
+    if (*end != '\0' || end == statusCodeString.c_str()
+            || (*statusCode) < 100 || (*statusCode) > 999) {
+        *statusCode = 0;
+        return false;
+    }
+
+    return true;
+}
+
+AString ParsedMessage::debugString() const {
+    AString line;
+    CHECK(findString("_", &line));
+
+    line.append("\n");
+
+    for (size_t i = 0; i < mDict.size(); ++i) {
+        const AString &key = mDict.keyAt(i);
+        const AString &value = mDict.valueAt(i);
+
+        if (key == AString("_")) {
+            continue;
+        }
+
+        line.append(key);
+        line.append(": ");
+        line.append(value);
+        line.append("\n");
+    }
+
+    line.append("\n");
+    line.append(mContent);
+
+    return line;
+}
+
+// static
+bool ParsedMessage::GetAttribute(
+        const char *s, const char *key, AString *value) {
+    value->clear();
+
+    size_t keyLen = strlen(key);
+
+    for (;;) {
+        while (isspace(*s)) {
+            ++s;
+        }
+
+        const char *colonPos = strchr(s, ';');
+
+        size_t len =
+            (colonPos == NULL) ? strlen(s) : colonPos - s;
+
+        if (len >= keyLen + 1 && s[keyLen] == '=' && !strncmp(s, key, keyLen)) {
+            value->setTo(&s[keyLen + 1], len - keyLen - 1);
+            return true;
+        }
+
+        if (colonPos == NULL) {
+            return false;
+        }
+
+        s = colonPos + 1;
+    }
+}
+
+// static
+bool ParsedMessage::GetInt32Attribute(
+        const char *s, const char *key, int32_t *value) {
+    AString stringValue;
+    if (!GetAttribute(s, key, &stringValue)) {
+        *value = 0;
+        return false;
+    }
+
+    char *end;
+    *value = strtol(stringValue.c_str(), &end, 10);
+
+    if (end == stringValue.c_str() || *end != '\0') {
+        *value = 0;
+        return false;
+    }
+
+    return true;
+}
+
+}  // namespace android
+
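For reference, a short sketch of how ParsedMessage is meant to be driven; the
response text is made up for illustration and is not part of this change:

    #include <media/stagefright/foundation/ParsedMessage.h>

    using namespace android;

    // Hypothetical RTSP-style response, headers terminated by an empty line.
    static const char kResponse[] =
        "RTSP/1.0 200 OK\r\n"
        "CSeq: 2\r\n"
        "Content-Length: 5\r\n"
        "\r\n"
        "hello";

    void parseExample() {
        size_t length;
        sp<ParsedMessage> msg = ParsedMessage::Parse(
                kResponse, sizeof(kResponse) - 1, true /* noMoreData */, &length);

        if (msg == NULL) {
            return;  // incomplete or malformed message
        }

        int32_t statusCode;
        if (msg->getStatusCode(&statusCode)) {
            // statusCode == 200 for the response above
        }

        AString cseq;
        if (msg->findString("cseq", &cseq)) {
            // header names are lower-cased by parse(); cseq == "2"
        }

        // msg->getContent() points at the 5-byte body ("hello") and
        // length covers the headers plus the body.
    }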
diff --git a/media/libstagefright/include/media/stagefright/AHierarchicalStateMachine.h b/media/libstagefright/foundation/include/media/stagefright/foundation/AHierarchicalStateMachine.h
similarity index 100%
rename from media/libstagefright/include/media/stagefright/AHierarchicalStateMachine.h
rename to media/libstagefright/foundation/include/media/stagefright/foundation/AHierarchicalStateMachine.h
diff --git a/media/libstagefright/foundation/include/media/stagefright/foundation/ANetworkSession.h b/media/libstagefright/foundation/include/media/stagefright/foundation/ANetworkSession.h
new file mode 100644
index 0000000..fd3ebaa
--- /dev/null
+++ b/media/libstagefright/foundation/include/media/stagefright/foundation/ANetworkSession.h
@@ -0,0 +1,135 @@
+/*
+ * Copyright 2012, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef A_NETWORK_SESSION_H_
+
+#define A_NETWORK_SESSION_H_
+
+#include <media/stagefright/foundation/ABase.h>
+#include <utils/KeyedVector.h>
+#include <utils/RefBase.h>
+#include <utils/Thread.h>
+
+#include <netinet/in.h>
+
+namespace android {
+
+struct AMessage;
+
+// Helper class to manage a number of live sockets (datagram and stream-based)
+// on a single thread. Clients are notified about activity through AMessages.
+struct ANetworkSession : public RefBase {
+    ANetworkSession();
+
+    status_t start();
+    status_t stop();
+
+    status_t createRTSPClient(
+            const char *host, unsigned port, const sp<AMessage> &notify,
+            int32_t *sessionID);
+
+    status_t createRTSPServer(
+            const struct in_addr &addr, unsigned port,
+            const sp<AMessage> &notify, int32_t *sessionID);
+
+    status_t createUDPSession(
+            unsigned localPort, const sp<AMessage> &notify, int32_t *sessionID);
+
+    status_t createUDPSession(
+            unsigned localPort,
+            const char *remoteHost,
+            unsigned remotePort,
+            const sp<AMessage> &notify,
+            int32_t *sessionID);
+
+    status_t connectUDPSession(
+            int32_t sessionID, const char *remoteHost, unsigned remotePort);
+
+    // passive
+    status_t createTCPDatagramSession(
+            const struct in_addr &addr, unsigned port,
+            const sp<AMessage> &notify, int32_t *sessionID);
+
+    // active
+    status_t createTCPDatagramSession(
+            unsigned localPort,
+            const char *remoteHost,
+            unsigned remotePort,
+            const sp<AMessage> &notify,
+            int32_t *sessionID);
+
+    status_t destroySession(int32_t sessionID);
+
+    status_t sendRequest(
+            int32_t sessionID, const void *data, ssize_t size = -1,
+            bool timeValid = false, int64_t timeUs = -1ll);
+
+    status_t switchToWebSocketMode(int32_t sessionID);
+
+    enum NotificationReason {
+        kWhatError,
+        kWhatConnected,
+        kWhatClientConnected,
+        kWhatData,
+        kWhatDatagram,
+        kWhatBinaryData,
+        kWhatWebSocketMessage,
+        kWhatNetworkStall,
+    };
+
+protected:
+    virtual ~ANetworkSession();
+
+private:
+    struct NetworkThread;
+    struct Session;
+
+    Mutex mLock;
+    sp<Thread> mThread;
+
+    int32_t mNextSessionID;
+
+    int mPipeFd[2];
+
+    KeyedVector<int32_t, sp<Session> > mSessions;
+
+    enum Mode {
+        kModeCreateUDPSession,
+        kModeCreateTCPDatagramSessionPassive,
+        kModeCreateTCPDatagramSessionActive,
+        kModeCreateRTSPServer,
+        kModeCreateRTSPClient,
+    };
+    status_t createClientOrServer(
+            Mode mode,
+            const struct in_addr *addr,
+            unsigned port,
+            const char *remoteHost,
+            unsigned remotePort,
+            const sp<AMessage> &notify,
+            int32_t *sessionID);
+
+    void threadLoop();
+    void interrupt();
+
+    static status_t MakeSocketNonBlocking(int s);
+
+    DISALLOW_EVIL_CONSTRUCTORS(ANetworkSession);
+};
+
+}  // namespace android
+
+#endif  // A_NETWORK_SESSION_H_
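A minimal sketch of typical setup, assuming the caller already runs a handler
on its own ALooper; the port numbers and peer address are placeholders:

    #include <media/stagefright/foundation/AHandler.h>
    #include <media/stagefright/foundation/AMessage.h>
    #include <media/stagefright/foundation/ANetworkSession.h>
    #include <utils/Errors.h>

    using namespace android;

    status_t startUdpLink(const sp<AHandler> &handler, uint32_t notifyWhat) {
        // In real use the caller keeps netSession referenced; it is local
        // here only to keep the sketch short.
        sp<ANetworkSession> netSession = new ANetworkSession;
        status_t err = netSession->start();  // spawns the worker thread
        if (err != OK) {
            return err;
        }

        // All activity on this session is reported to 'handler' via this message.
        sp<AMessage> notify = new AMessage(notifyWhat, handler);

        int32_t sessionID;
        err = netSession->createUDPSession(
                12345 /* localPort, placeholder */, notify, &sessionID);
        if (err != OK) {
            return err;
        }

        // The remote end can be bound later, once it is known.
        err = netSession->connectUDPSession(sessionID, "192.168.1.10", 12346);
        if (err != OK) {
            return err;
        }

        // Queue a datagram; interrupt() wakes the worker thread to send it.
        return netSession->sendRequest(sessionID, "ping", 4);
    }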
diff --git a/media/libstagefright/foundation/include/media/stagefright/foundation/ParsedMessage.h b/media/libstagefright/foundation/include/media/stagefright/foundation/ParsedMessage.h
new file mode 100644
index 0000000..9d43a93
--- /dev/null
+++ b/media/libstagefright/foundation/include/media/stagefright/foundation/ParsedMessage.h
@@ -0,0 +1,60 @@
+/*
+ * Copyright 2012, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <media/stagefright/foundation/ABase.h>
+#include <media/stagefright/foundation/AString.h>
+#include <utils/KeyedVector.h>
+#include <utils/RefBase.h>
+
+namespace android {
+
+// Encapsulates an "HTTP/RTSP style" response, i.e. a status line,
+// key/value pairs making up the headers and an optional body/content.
+struct ParsedMessage : public RefBase {
+    static sp<ParsedMessage> Parse(
+            const char *data, size_t size, bool noMoreData, size_t *length);
+
+    bool findString(const char *name, AString *value) const;
+    bool findInt32(const char *name, int32_t *value) const;
+
+    const char *getContent() const;
+
+    bool getRequestField(size_t index, AString *field) const;
+    bool getStatusCode(int32_t *statusCode) const;
+
+    AString debugString() const;
+
+    static bool GetAttribute(const char *s, const char *key, AString *value);
+
+    static bool GetInt32Attribute(
+            const char *s, const char *key, int32_t *value);
+
+
+protected:
+    virtual ~ParsedMessage();
+
+private:
+    KeyedVector<AString, AString> mDict;
+    AString mContent;
+
+    ParsedMessage();
+
+    ssize_t parse(const char *data, size_t size, bool noMoreData);
+
+    DISALLOW_EVIL_CONSTRUCTORS(ParsedMessage);
+};
+
+}  // namespace android
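The two static helpers scan ';'-separated "key=value" attribute lists, as used
for example in Wifi Display parameter strings. A small illustration with a
made-up parameter string:

    #include <media/stagefright/foundation/ParsedMessage.h>

    using namespace android;

    void attributeExample() {
        const char *params = "streams=2;bitrate=512000;mode=play";  // made up

        AString mode;
        if (ParsedMessage::GetAttribute(params, "mode", &mode)) {
            // mode == "play"
        }

        int32_t bitrate;
        if (ParsedMessage::GetInt32Attribute(params, "bitrate", &bitrate)) {
            // bitrate == 512000
        }
    }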
diff --git a/media/libstagefright/include/media/stagefright/ACodec.h b/media/libstagefright/include/media/stagefright/ACodec.h
index 784fd36..d564c5a 100644
--- a/media/libstagefright/include/media/stagefright/ACodec.h
+++ b/media/libstagefright/include/media/stagefright/ACodec.h
@@ -22,7 +22,7 @@
 #include <media/hardware/MetadataBufferType.h>
 #include <media/MediaCodecInfo.h>
 #include <media/IOMX.h>
-#include <media/stagefright/AHierarchicalStateMachine.h>
+#include <media/stagefright/foundation/AHierarchicalStateMachine.h>
 #include <media/stagefright/CodecBase.h>
 #include <media/stagefright/FrameRenderTracker.h>
 #include <media/stagefright/MediaDefs.h>
diff --git a/media/libstagefright/include/media/stagefright/BatteryChecker.h b/media/libstagefright/include/media/stagefright/BatteryChecker.h
new file mode 100644
index 0000000..2ec4ac0
--- /dev/null
+++ b/media/libstagefright/include/media/stagefright/BatteryChecker.h
@@ -0,0 +1,47 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef BATTERY_CHECKER_H_
+#define BATTERY_CHECKER_H_
+
+#include <media/stagefright/foundation/AMessage.h>
+
+namespace android {
+
+struct BatteryChecker : public RefBase {
+    BatteryChecker(const sp<AMessage> &msg, int64_t timeout = 3000000ll);
+
+    void setExecuting(bool executing) { mIsExecuting = executing; }
+    void onCodecActivity(std::function<void()> batteryOnCb);
+    void onCheckBatteryTimer(const sp<AMessage>& msg, std::function<void()> batteryOffCb);
+    void onClientRemoved();
+
+private:
+    const int64_t mTimeoutUs;
+    int64_t mLastActivityTimeUs;
+    bool mBatteryStatNotified;
+    int32_t mBatteryCheckerGeneration;
+    bool mIsExecuting;
+    sp<AMessage> mBatteryCheckerMsg;
+
+    bool isExecuting() { return mIsExecuting; }
+
+    DISALLOW_EVIL_CONSTRUCTORS(BatteryChecker);
+};
+
+}  // namespace android
+
+#endif // BATTERY_CHECKER_H_
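BatteryChecker is owned and driven by MediaCodec (see the MediaCodec.h hunk
below): the owner passes a kWhatCheckBatteryStats message at construction,
reports buffer activity via onCodecActivity(), and is presumably expected to
route that message back into onCheckBatteryTimer() when it fires. A rough,
hypothetical sketch under those assumptions; none of the surrounding wiring is
taken from this change:

    // Inside a hypothetical codec-like AHandler ("this"):
    sp<AMessage> timerMsg = new AMessage(kWhatCheckBatteryStats, this);
    sp<BatteryChecker> checker = new BatteryChecker(timerMsg);  // 3 s default timeout

    checker->setExecuting(true);
    checker->onCodecActivity([] {
        // assumed: report "codec is now drawing battery" to the stats service
    });

    // Later, when kWhatCheckBatteryStats arrives in onMessageReceived():
    //     checker->onCheckBatteryTimer(msg, [] {
    //         // assumed: report "codec went idle" to the stats service
    //     });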
diff --git a/media/libstagefright/include/media/stagefright/DataSourceBase.h b/media/libstagefright/include/media/stagefright/DataSourceBase.h
index af5b83d..c607c91 100644
--- a/media/libstagefright/include/media/stagefright/DataSourceBase.h
+++ b/media/libstagefright/include/media/stagefright/DataSourceBase.h
@@ -18,6 +18,8 @@
 
 #define DATA_SOURCE_BASE_H_
 
+#include <media/stagefright/foundation/ByteUtils.h>
+#include <media/stagefright/MediaErrors.h>
 #include <sys/types.h>
 #include <utils/Errors.h>
 
@@ -45,20 +47,106 @@
     virtual ssize_t readAt(off64_t offset, void *data, size_t size) = 0;
 
     // Convenience methods:
-    bool getUInt16(off64_t offset, uint16_t *x);
-    bool getUInt24(off64_t offset, uint32_t *x); // 3 byte int, returned as a 32-bit int
-    bool getUInt32(off64_t offset, uint32_t *x);
-    bool getUInt64(off64_t offset, uint64_t *x);
+    bool getUInt16(off64_t offset, uint16_t *x) {
+        *x = 0;
+
+        uint8_t byte[2];
+        if (readAt(offset, byte, 2) != 2) {
+            return false;
+        }
+
+        *x = (byte[0] << 8) | byte[1];
+
+        return true;
+    }
+    // 3 byte int, returned as a 32-bit int
+    bool getUInt24(off64_t offset, uint32_t *x) {
+        *x = 0;
+
+        uint8_t byte[3];
+        if (readAt(offset, byte, 3) != 3) {
+            return false;
+        }
+
+        *x = (byte[0] << 16) | (byte[1] << 8) | byte[2];
+
+        return true;
+    }
+    bool getUInt32(off64_t offset, uint32_t *x) {
+        *x = 0;
+
+        uint32_t tmp;
+        if (readAt(offset, &tmp, 4) != 4) {
+            return false;
+        }
+
+        *x = ntohl(tmp);
+
+        return true;
+    }
+    bool getUInt64(off64_t offset, uint64_t *x) {
+        *x = 0;
+
+        uint64_t tmp;
+        if (readAt(offset, &tmp, 8) != 8) {
+            return false;
+        }
+
+        *x = ntoh64(tmp);
+
+        return true;
+    }
 
     // read either int<N> or int<2N> into a uint<2N>_t, size is the int size in bytes.
-    bool getUInt16Var(off64_t offset, uint16_t *x, size_t size);
-    bool getUInt32Var(off64_t offset, uint32_t *x, size_t size);
-    bool getUInt64Var(off64_t offset, uint64_t *x, size_t size);
+    bool getUInt16Var(off64_t offset, uint16_t *x, size_t size) {
+        if (size == 2) {
+            return getUInt16(offset, x);
+        }
+        if (size == 1) {
+            uint8_t tmp;
+            if (readAt(offset, &tmp, 1) == 1) {
+                *x = tmp;
+                return true;
+            }
+        }
+        return false;
+    }
+    bool getUInt32Var(off64_t offset, uint32_t *x, size_t size) {
+        if (size == 4) {
+            return getUInt32(offset, x);
+        }
+        if (size == 2) {
+            uint16_t tmp;
+            if (getUInt16(offset, &tmp)) {
+                *x = tmp;
+                return true;
+            }
+        }
+        return false;
+    }
+    bool getUInt64Var(off64_t offset, uint64_t *x, size_t size) {
+        if (size == 8) {
+            return getUInt64(offset, x);
+        }
+        if (size == 4) {
+            uint32_t tmp;
+            if (getUInt32(offset, &tmp)) {
+                *x = tmp;
+                return true;
+            }
+        }
+        return false;
+    }
 
     // May return ERROR_UNSUPPORTED.
-    virtual status_t getSize(off64_t *size);
+    virtual status_t getSize(off64_t *size) {
+        *size = 0;
+        return ERROR_UNSUPPORTED;
+    }
 
-    virtual bool getUri(char *uriString, size_t bufferSize);
+    virtual bool getUri(char * /*uriString*/, size_t /*bufferSize*/) {
+        return false;
+    }
 
     virtual uint32_t flags() {
         return 0;
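Because the convenience helpers are now defined inline in the header, every
DataSourceBase subclass picks them up without a separate library dependency.
A small, hypothetical in-memory source to illustrate (not part of this change):

    #include <cstring>
    #include <media/stagefright/DataSourceBase.h>

    using namespace android;

    struct MemorySource : public DataSourceBase {
        MemorySource(const void *data, size_t size) : mData(data), mSize(size) {}

        status_t initCheck() const override { return OK; }

        ssize_t readAt(off64_t offset, void *data, size_t size) override {
            if (offset < 0 || (size_t)offset >= mSize) {
                return 0;
            }
            size_t avail = mSize - (size_t)offset;
            size_t n = size < avail ? size : avail;
            memcpy(data, (const uint8_t *)mData + offset, n);
            return n;
        }

        status_t getSize(off64_t *size) override {
            *size = mSize;
            return OK;
        }

    private:
        const void *mData;
        size_t mSize;
    };

    // The helpers read big-endian values (note the ntohl()/ntoh64() above):
    //     uint8_t bytes[] = { 0x00, 0x00, 0x01, 0x00 };
    //     MemorySource src(bytes, sizeof(bytes));
    //     uint32_t x;
    //     src.getUInt32(0, &x);   // x == 256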
diff --git a/media/libstagefright/include/media/stagefright/MediaCodec.h b/media/libstagefright/include/media/stagefright/MediaCodec.h
index f8567ad..8e5f8b9 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodec.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodec.h
@@ -36,6 +36,7 @@
 struct AMessage;
 struct AReplyToken;
 struct AString;
+struct BatteryChecker;
 class BufferChannelBase;
 struct CodecBase;
 class IBatteryStats;
@@ -257,6 +258,7 @@
         kWhatSetCallback                    = 'setC',
         kWhatSetNotification                = 'setN',
         kWhatDrmReleaseCrypto               = 'rDrm',
+        kWhatCheckBatteryStats              = 'chkB',
         kWhatGetMetrics                     = 'getM',
     };
 
@@ -284,7 +286,7 @@
     };
 
     struct ResourceManagerServiceProxy : public IBinder::DeathRecipient {
-        ResourceManagerServiceProxy(pid_t pid);
+        ResourceManagerServiceProxy(pid_t pid, uid_t uid);
         ~ResourceManagerServiceProxy();
 
         void init();
@@ -297,7 +299,11 @@
                 const sp<IResourceManagerClient> &client,
                 const Vector<MediaResource> &resources);
 
-        void removeResource(int64_t clientId);
+        void removeResource(
+                int64_t clientId,
+                const Vector<MediaResource> &resources);
+
+        void removeClient(int64_t clientId);
 
         bool reclaimResource(const Vector<MediaResource> &resources);
 
@@ -305,6 +311,7 @@
         Mutex mLock;
         sp<IResourceManagerService> mService;
         pid_t mPid;
+        uid_t mUid;
     };
 
     State mState;
@@ -338,7 +345,6 @@
     sp<IResourceManagerClient> mResourceManagerClient;
     sp<ResourceManagerServiceProxy> mResourceManagerService;
 
-    bool mBatteryStatNotified;
     bool mIsVideo;
     int32_t mVideoWidth;
     int32_t mVideoHeight;
@@ -428,11 +434,11 @@
     status_t onSetParameters(const sp<AMessage> &params);
 
     status_t amendOutputFormatWithCodecSpecificData(const sp<MediaCodecBuffer> &buffer);
-    void updateBatteryStat();
     bool isExecuting() const;
 
     uint64_t getGraphicBufferSize();
     void addResource(MediaResource::Type type, MediaResource::SubType subtype, uint64_t value);
+    void removeResource(MediaResource::Type type, MediaResource::SubType subtype, uint64_t value);
     void requestCpuBoostIfNeeded();
 
     bool hasPendingBuffer(int portIndex);
@@ -461,6 +467,8 @@
     Mutex mLatencyLock;
     int64_t mLatencyUnknown;    // buffers for which we couldn't calculate latency
 
+    sp<BatteryChecker> mBatteryChecker;
+
     void statsBufferSent(int64_t presentationUs);
     void statsBufferReceived(int64_t presentationUs);
 
diff --git a/media/libstagefright/include/media/stagefright/MediaWriter.h b/media/libstagefright/include/media/stagefright/MediaWriter.h
index 2c12a87..972ae1d 100644
--- a/media/libstagefright/include/media/stagefright/MediaWriter.h
+++ b/media/libstagefright/include/media/stagefright/MediaWriter.h
@@ -35,6 +35,10 @@
     virtual status_t start(MetaData *params = NULL) = 0;
     virtual status_t stop() = 0;
     virtual status_t pause() = 0;
+    virtual status_t setCaptureRate(float /* captureFps */) {
+        ALOGW("setCaptureRate unsupported");
+        return ERROR_UNSUPPORTED;
+    }
 
     virtual void setMaxFileSize(int64_t bytes) { mMaxFileSizeLimitBytes = bytes; }
     virtual void setMaxFileDuration(int64_t durationUs) { mMaxFileDurationLimitUs = durationUs; }
diff --git a/media/libstagefright/include/media/stagefright/SurfaceMediaSource.h b/media/libstagefright/include/media/stagefright/SurfaceMediaSource.h
new file mode 100644
index 0000000..e772fbe
--- /dev/null
+++ b/media/libstagefright/include/media/stagefright/SurfaceMediaSource.h
@@ -0,0 +1,248 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_GUI_SURFACEMEDIASOURCE_H
+#define ANDROID_GUI_SURFACEMEDIASOURCE_H
+
+#include <gui/IGraphicBufferProducer.h>
+#include <gui/BufferQueue.h>
+
+#include <utils/threads.h>
+#include <utils/Vector.h>
+#include <media/MediaSource.h>
+#include <media/stagefright/MediaBuffer.h>
+
+#include <media/hardware/MetadataBufferType.h>
+
+#include "foundation/ABase.h"
+
+namespace android {
+// ----------------------------------------------------------------------------
+
+class String8;
+class GraphicBuffer;
+
+// ASSUMPTIONS
+// 1. SurfaceMediaSource is initialized with width*height which
+// can never change.  However, dequeueBuffer does not currently
+// enforce this since, as in BufferQueue, dequeue can be used by a Surface
+// which can modify the default width and height.  Also neither the width
+// nor height can be 0.
+// 2. setSynchronousMode is never used (basically no one should call
+// setSynchronousMode(false))
+// 3. setCrop, setTransform, setScalingMode should never be used
+// 4. queueBuffer returns a filled buffer to the SurfaceMediaSource. In addition, a
+// timestamp must be provided for the buffer. The timestamp is in
+// nanoseconds, and must be monotonically increasing. Its other semantics
+// (zero point, etc) are client-dependent and should be documented by the
+// client.
+// 5. Once disconnected, SurfaceMediaSource can not be reused (can not
+// connect again)
+// 6. Stop is a hard stop, the last few frames held by the encoder
+// may be dropped.  It is possible to wait for the buffers to be
+// returned (but not implemented)
+
+#define DEBUG_PENDING_BUFFERS   0
+
+class SurfaceMediaSource : public MediaSource,
+                                public MediaBufferObserver,
+                                protected ConsumerListener {
+public:
+    enum { MIN_UNDEQUEUED_BUFFERS = 4};
+
+    struct FrameAvailableListener : public virtual RefBase {
+        // onFrameAvailable() is called from queueBuffer() if the FIFO is
+        // empty. You can use SurfaceMediaSource::getQueuedCount() to
+        // figure out if there are more frames waiting.
+        // This is called without any lock held and can be called concurrently
+        // by multiple threads.
+        virtual void onFrameAvailable() = 0;
+    };
+
+    SurfaceMediaSource(uint32_t bufferWidth, uint32_t bufferHeight);
+
+    virtual ~SurfaceMediaSource();
+
+    // For the MediaSource interface for use by StageFrightRecorder:
+    virtual status_t start(MetaData *params = NULL);
+    virtual status_t stop();
+    virtual status_t read(MediaBufferBase **buffer,
+            const ReadOptions *options = NULL);
+    virtual sp<MetaData> getFormat();
+
+    // Get / Set the frame rate used for encoding. Default fps = 30
+    status_t setFrameRate(int32_t fps);
+    int32_t getFrameRate() const;
+
+    // The call for the StageFrightRecorder to tell us that
+    // it is done using the MediaBuffer data so that its state
+    // can be set to FREE for dequeuing
+    virtual void signalBufferReturned(MediaBufferBase* buffer);
+    // end of MediaSource interface
+
+    // getTimestamp retrieves the timestamp associated with the image
+    // set by the most recent call to read()
+    //
+    // The timestamp is in nanoseconds, and is monotonically increasing. Its
+    // other semantics (zero point, etc) are source-dependent and should be
+    // documented by the source.
+    int64_t getTimestamp();
+
+    // setFrameAvailableListener sets the listener object that will be notified
+    // when a new frame becomes available.
+    void setFrameAvailableListener(const sp<FrameAvailableListener>& listener);
+
+    // dump our state in a String
+    void dumpState(String8& result) const;
+    void dumpState(String8& result, const char* prefix, char* buffer,
+                                                    size_t SIZE) const;
+
+    // metaDataStoredInVideoBuffers tells the encoder what kind of metadata
+    // is passed through the buffers. Currently, it is set to ANWBuffer
+    MetadataBufferType metaDataStoredInVideoBuffers() const;
+
+    sp<IGraphicBufferProducer> getProducer() const { return mProducer; }
+
+    // To be called before start()
+    status_t setMaxAcquiredBufferCount(size_t count);
+
+    // To be called before start()
+    status_t setUseAbsoluteTimestamps();
+
+protected:
+
+    // Implementation of the BufferQueue::ConsumerListener interface.  These
+    // calls are used to notify the Surface of asynchronous events in the
+    // BufferQueue.
+    virtual void onFrameAvailable(const BufferItem& item);
+
+    // Used as a hook to BufferQueue::disconnect()
+    // This is called by the client side when it is done
+    // TODO: Currently, this also sets mStopped to true which
+    // is needed for unblocking the encoder which might be
+    // waiting to read more frames. So if on the client side,
+    // the same thread supplies the frames and also calls stop
+    // on the encoder, the client has to call disconnect before
+    // it calls stop.
+    // In the case of the camera,
+    // that need not be required since the thread supplying the
+    // frames is separate than the one calling stop.
+    virtual void onBuffersReleased();
+
+    // SurfaceMediaSource can't handle sideband streams, so this is not expected
+    // to ever be called. Does nothing.
+    virtual void onSidebandStreamChanged();
+
+    static bool isExternalFormat(uint32_t format);
+
+private:
+    // A BufferQueue, represented by these interfaces, is the exchange point
+    // between the producer and this consumer
+    sp<IGraphicBufferProducer> mProducer;
+    sp<IGraphicBufferConsumer> mConsumer;
+
+    struct SlotData {
+        sp<GraphicBuffer> mGraphicBuffer;
+        uint64_t mFrameNumber;
+    };
+
+    // mSlots caches GraphicBuffers and frameNumbers from the buffer queue
+    SlotData mSlots[BufferQueue::NUM_BUFFER_SLOTS];
+
+    // The permanent width and height of SMS buffers
+    int mWidth;
+    int mHeight;
+
+    // mCurrentSlot is the buffer slot index of the buffer that is currently
+    // being used by buffer consumer
+    // (e.g. StageFrightRecorder in the case of SurfaceMediaSource or GLTexture
+    // in the case of Surface).
+    // It is initialized to INVALID_BUFFER_SLOT,
+    // indicating that no buffer slot is currently bound to the consumer. Note,
+    // however, that a value of INVALID_BUFFER_SLOT does not necessarily mean
+    // that no buffer is bound to the consumer. A call to setBufferCount will
+    // reset mCurrentSlot to INVALID_BUFFER_SLOT.
+    int mCurrentSlot;
+
+    // mCurrentBuffers is a list of the graphic buffers that are being used by
+    // buffer consumer (i.e. the video encoder). It's possible that these
+    // buffers are not associated with any buffer slots, so we must track them
+    // separately.  Buffers are added to this list in read, and removed from
+    // this list in signalBufferReturned
+    Vector<sp<GraphicBuffer> > mCurrentBuffers;
+
+    size_t mNumPendingBuffers;
+
+#if DEBUG_PENDING_BUFFERS
+    Vector<MediaBufferBase *> mPendingBuffers;
+#endif
+
+    // mCurrentTimestamp is the timestamp for the current texture. It
+    // gets set to mLastQueuedTimestamp each time updateTexImage is called.
+    int64_t mCurrentTimestamp;
+
+    // mFrameAvailableListener is the listener object that will be called when a
+    // new frame becomes available. If it is not NULL it will be called from
+    // queueBuffer.
+    sp<FrameAvailableListener> mFrameAvailableListener;
+
+    // mMutex is the mutex used to prevent concurrent access to the member
+    // variables of SurfaceMediaSource objects. It must be locked whenever the
+    // member variables are accessed.
+    mutable Mutex mMutex;
+
+    ////////////////////////// For MediaSource
+    // Set to a default of 30 fps if not specified by the client side
+    int32_t mFrameRate;
+
+    // mStarted is a flag to check if the recording is going on
+    bool mStarted;
+
+    // mNumFramesReceived indicates the number of frames received from
+    // the client side
+    int mNumFramesReceived;
+    // mNumFramesEncoded indicates the number of frames passed on to the
+    // encoder
+    int mNumFramesEncoded;
+
+    // mFirstFrameTimestamp is the timestamp of the first received frame.
+    // It is used to offset the output timestamps so recording starts at time 0.
+    int64_t mFirstFrameTimestamp;
+    // mStartTimeNs is the start time passed into the source at start, used to
+    // offset timestamps.
+    int64_t mStartTimeNs;
+
+    size_t mMaxAcquiredBufferCount;
+
+    bool mUseAbsoluteTimestamps;
+
+    // mFrameAvailableCondition is the condition used to indicate whether
+    // there is a frame available for dequeuing
+    Condition mFrameAvailableCondition;
+
+    Condition mMediaBuffersAvailableCondition;
+
+    // Allocate and return a new MediaBuffer and pass the ANW buffer as metadata into it.
+    void passMetadataBuffer_l(MediaBufferBase **buffer, ANativeWindowBuffer *bufferHandle) const;
+
+    // Disallow copying, assignment, and default construction
+    DISALLOW_EVIL_CONSTRUCTORS(SurfaceMediaSource);
+};
+
+// ----------------------------------------------------------------------------
+}; // namespace android
+
+#endif // ANDROID_GUI_SURFACEMEDIASOURCE_H
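For orientation only (not part of the patch), a minimal sketch of the producer/consumer wiring this header describes, using only the interfaces declared above; error handling is omitted and the frame size is arbitrary:

    // Hypothetical usage: frames queued on the producer Surface come back out
    // of SurfaceMediaSource::read() as MediaBuffers for an encoder/writer.
    sp<SurfaceMediaSource> source = new SurfaceMediaSource(176, 144);
    sp<Surface> surface = new Surface(source->getProducer());  // producer side
    source->start();                                           // consumer side
    MediaBufferBase *buffer = nullptr;
    if (source->read(&buffer) == OK && buffer != nullptr) {
        // ... hand the buffer to an encoder here ...
        buffer->release();  // lets SurfaceMediaSource recycle the slot
    }
    source->stop();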
diff --git a/media/libstagefright/rtsp/AMPEG4ElementaryAssembler.cpp b/media/libstagefright/rtsp/AMPEG4ElementaryAssembler.cpp
index 1e434cb..9df3508 100644
--- a/media/libstagefright/rtsp/AMPEG4ElementaryAssembler.cpp
+++ b/media/libstagefright/rtsp/AMPEG4ElementaryAssembler.cpp
@@ -338,6 +338,12 @@
             ABitReader bits(buffer->data() + offset, buffer->size() - offset);
 
             unsigned auxSize = bits.getBits(mAuxiliaryDataSizeLength);
+            if (buffer->size() < auxSize) {
+                ALOGE("b/123940919 auxSize %u", auxSize);
+                android_errorWriteLog(0x534e4554, "123940919");
+                queue->erase(queue->begin());
+                return MALFORMED_PACKET;
+            }
 
             offset += (mAuxiliaryDataSizeLength + auxSize + 7) / 8;
         }
@@ -346,6 +352,12 @@
              it != headers.end(); ++it) {
             const AUHeader &header = *it;
 
+            if (buffer->size() < header.mSize) {
+                ALOGE("b/123940919 AU_size %u", header.mSize);
+                android_errorWriteLog(0x534e4554, "123940919");
+                queue->erase(queue->begin());
+                return MALFORMED_PACKET;
+            }
             if (buffer->size() < offset + header.mSize) {
                 return MALFORMED_PACKET;
             }
diff --git a/media/libstagefright/tests/Android.bp b/media/libstagefright/tests/Android.bp
index be10fdc..f73367f 100644
--- a/media/libstagefright/tests/Android.bp
+++ b/media/libstagefright/tests/Android.bp
@@ -1,6 +1,45 @@
 // Build the unit tests.
 
 cc_test {
+    name: "SurfaceMediaSource_test",
+
+    srcs: [
+        "SurfaceMediaSource_test.cpp",
+        "DummyRecorder.cpp",
+    ],
+
+    shared_libs: [
+        "libEGL",
+        "libGLESv2",
+        "libbinder",
+        "libcutils",
+        "libgui",
+        "libmedia",
+        "libstagefright",
+        "libstagefright_foundation",
+        "libstagefright_omx",
+        "libsync",
+        "libui",
+        "libutils",
+        "liblog",
+    ],
+
+    include_dirs: [
+        "frameworks/av/media/libstagefright",
+        "frameworks/av/media/libstagefright/include",
+        "frameworks/native/include/media/openmax",
+        "frameworks/native/include/media/hardware",
+    ],
+
+    cflags: [
+        "-Werror",
+        "-Wall",
+    ],
+
+    compile_multilib: "32",
+}
+
+cc_test {
     name: "MediaCodecListOverrides_test",
 
     srcs: ["MediaCodecListOverrides_test.cpp"],
@@ -27,3 +66,21 @@
         "-Wall",
     ],
 }
+
+cc_test {
+    name: "BatteryChecker_test",
+    srcs: ["BatteryChecker_test.cpp"],
+    test_suites: ["device-tests"],
+
+    shared_libs: [
+        "libstagefright",
+        "libstagefright_foundation",
+        "libutils",
+        "liblog",
+    ],
+
+    cflags: [
+        "-Werror",
+        "-Wall",
+    ],
+}
diff --git a/media/libstagefright/tests/BatteryChecker_test.cpp b/media/libstagefright/tests/BatteryChecker_test.cpp
new file mode 100644
index 0000000..0c5ee9b
--- /dev/null
+++ b/media/libstagefright/tests/BatteryChecker_test.cpp
@@ -0,0 +1,242 @@
+/*
+ * Copyright 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "BatteryChecker_test"
+#include <utils/Log.h>
+
+#include <gtest/gtest.h>
+
+#include <media/stagefright/BatteryChecker.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AHandler.h>
+
+#include <vector>
+
+namespace android {
+
+static const int kBatteryTimeoutUs = 1000000ll; // 1 second
+static const int kTestMarginUs = 50000ll; // 50ms
+static const int kWaitStatusChangeUs = kBatteryTimeoutUs + kTestMarginUs;
+static const int kSparseFrameIntervalUs = kBatteryTimeoutUs - kTestMarginUs;
+
+class BatteryCheckerTestHandler : public AHandler {
+    enum EventType {
+        // Events simulating MediaCodec
+        kWhatStart = 0,             // codec entering executing state
+        kWhatStop,                  // codec exiting executing state
+        kWhatActivity,              // codec queues input or dequeues output
+        kWhatReleased,              // codec released
+        kWhatCheckpoint,            // test checkpoint with expected ON/OFF counter values
+
+        // Message for battery checker monitor (not for testing through runTest())
+        kWhatBatteryChecker,
+    };
+
+    struct Operation {
+        int32_t event;
+        int64_t delay = 0;
+        uint32_t repeatCount = 0;
+        int32_t expectedOnCounter = 0;
+        int32_t expectedOffCounter = 0;
+    };
+
+    std::vector<Operation> mOps;
+    sp<BatteryChecker> mBatteryChecker;
+    int32_t mOnCounter;
+    int32_t mOffCounter;
+    Condition mDone;
+    Mutex mLock;
+
+    BatteryCheckerTestHandler() : mOnCounter(0), mOffCounter(0) {}
+
+    void runTest(const std::vector<Operation> &ops, int64_t timeoutUs) {
+        mOps = ops;
+
+        mBatteryChecker = new BatteryChecker(
+                new AMessage(kWhatBatteryChecker, this), kBatteryTimeoutUs);
+
+        (new AMessage(ops[0].event, this))->post();
+
+        // wait for done
+        AutoMutex lock(mLock);
+        EXPECT_NE(TIMED_OUT, mDone.waitRelative(mLock, timeoutUs * 1000ll));
+    }
+
+    virtual void onMessageReceived(const sp<AMessage> &msg);
+
+    friend class BatteryCheckerTest;
+};
+
+class BatteryCheckerTest : public ::testing::Test {
+public:
+    BatteryCheckerTest()
+        : mLooper(new ALooper)
+        , mHandler(new BatteryCheckerTestHandler()) {
+        mLooper->setName("BatterCheckerLooper");
+        mLooper->start(false, false, ANDROID_PRIORITY_AUDIO);
+        mLooper->registerHandler(mHandler);
+    }
+
+protected:
+    using EventType = BatteryCheckerTestHandler::EventType;
+    using Operation = BatteryCheckerTestHandler::Operation;
+
+    virtual ~BatteryCheckerTest() {
+        mLooper->stop();
+        mLooper->unregisterHandler(mHandler->id());
+    }
+
+    void runTest(const std::vector<Operation> &ops, int64_t timeoutUs) {
+        mHandler->runTest(ops, timeoutUs);
+    }
+
+    sp<ALooper> mLooper;
+    sp<BatteryCheckerTestHandler> mHandler;
+};
+
+void BatteryCheckerTestHandler::onMessageReceived(const sp<AMessage> &msg) {
+    switch(msg->what()) {
+    case kWhatStart:
+        mBatteryChecker->setExecuting(true);
+        break;
+    case kWhatStop:
+        mBatteryChecker->setExecuting(false);
+        break;
+    case kWhatActivity:
+        mBatteryChecker->onCodecActivity([this] () { mOnCounter++; });
+        break;
+    case kWhatReleased:
+        mBatteryChecker->onClientRemoved();
+        break;
+    case kWhatBatteryChecker:
+        mBatteryChecker->onCheckBatteryTimer(msg, [this] () { mOffCounter++;  });
+        break;
+    case kWhatCheckpoint:
+        // verify ON/OFF state and total events
+        EXPECT_EQ(mOnCounter, mOps[0].expectedOnCounter);
+        EXPECT_EQ(mOffCounter, mOps[0].expectedOffCounter);
+        break;
+    default:
+        TRESPASS();
+    }
+    if (msg->what() != kWhatBatteryChecker) {
+        EXPECT_EQ(msg->what(), mOps[0].event);
+        // post next message
+        if (!mOps[0].repeatCount) {
+            mOps.erase(mOps.begin());
+        } else {
+            mOps[0].repeatCount--;
+        }
+        if (!mOps.empty()) {
+            // Read the next delay only after confirming the list is non-empty.
+            (new AMessage(mOps[0].event, this))->post(mOps[0].delay);
+        } else {
+            AutoMutex lock(mLock);
+            mDone.signal();
+        }
+    }
+}
+
+TEST_F(BatteryCheckerTest, testNormalOperations) {
+    runTest({
+        {EventType::kWhatStart,        0ll},
+        {EventType::kWhatCheckpoint,   0ll, 0, 0, 0},
+        {EventType::kWhatActivity,     33333ll},
+        {EventType::kWhatCheckpoint,   0ll, 0, 1, 0},                 // ON
+        {EventType::kWhatActivity,     33333ll, 2*kWaitStatusChangeUs/33333ll},
+        {EventType::kWhatCheckpoint,   0ll, 0, 1, 0},
+        {EventType::kWhatCheckpoint,   kWaitStatusChangeUs, 0, 1, 1}, // OFF
+    }, 10000000ll);
+}
+
+TEST_F(BatteryCheckerTest, testPauseResume) {
+    runTest({
+        {EventType::kWhatStart,        0ll},
+        {EventType::kWhatCheckpoint,   0ll, 0, 0, 0},
+        {EventType::kWhatActivity,     33333ll},
+        {EventType::kWhatCheckpoint,   0ll, 0, 1, 0},                 // ON
+        {EventType::kWhatCheckpoint,   kWaitStatusChangeUs, 0, 1, 1}, // OFF
+        {EventType::kWhatActivity,     33333ll},
+        {EventType::kWhatCheckpoint,   0ll, 0, 2, 1},                 // ON
+        {EventType::kWhatCheckpoint,   kWaitStatusChangeUs, 0, 2, 2}, // OFF
+    }, 10000000ll);
+}
+
+TEST_F(BatteryCheckerTest, testClientRemovedAndRestart) {
+    runTest({
+        {EventType::kWhatStart,        0ll},
+        {EventType::kWhatActivity,     33333ll, kWaitStatusChangeUs/33333ll},
+        {EventType::kWhatCheckpoint,   0ll, 0, 1, 0},
+
+        // exiting the executing state itself shouldn't trigger any calls
+        {EventType::kWhatStop,         0ll},
+        {EventType::kWhatCheckpoint,   0ll, 0, 1, 0},
+
+        // release shouldn't trigger any calls either,
+        // client resource will be removed entirely
+        {EventType::kWhatReleased,     0ll},
+        {EventType::kWhatCheckpoint,   0ll, 0, 1, 0},
+        {EventType::kWhatCheckpoint,   kWaitStatusChangeUs, 0, 1, 0},
+
+        // start pushing buffers again, On should be received without any Off
+        {EventType::kWhatStart,        0ll},
+        {EventType::kWhatActivity,     33333ll},
+        {EventType::kWhatCheckpoint,   0ll, 0, 2, 0},
+
+        // double check that only the new checker msg triggers OFF;
+        // left-over checker msgs from the stale generation are discarded
+        {EventType::kWhatCheckpoint,   kWaitStatusChangeUs, 0, 2, 1},
+    }, 10000000ll);
+}
+
+TEST_F(BatteryCheckerTest, testActivityWhileNotExecuting) {
+    runTest({
+        // activity before start shouldn't trigger
+        {EventType::kWhatActivity,     0ll},
+        {EventType::kWhatCheckpoint,   0ll, 0, 0, 0},
+
+        {EventType::kWhatStart,        0ll},
+        {EventType::kWhatCheckpoint,   0ll, 0, 0, 0},
+
+        // activity after start before stop should trigger
+        {EventType::kWhatActivity,     33333ll},
+        {EventType::kWhatCheckpoint,   0ll, 0, 1, 0},
+
+        // exiting the executing state itself shouldn't trigger any calls
+        {EventType::kWhatStop,         0ll},
+        {EventType::kWhatCheckpoint,   0ll, 0, 1, 0},
+
+        // keep pushing past the battery timeout after stop; expected to turn OFF
+        {EventType::kWhatActivity,     33333ll, kWaitStatusChangeUs/33333ll},
+        {EventType::kWhatCheckpoint,   0ll, 0, 1, 1},
+    }, 10000000ll);
+}
+
+TEST_F(BatteryCheckerTest, testSparseActivity) {
+    runTest({
+        {EventType::kWhatStart,        0ll},
+        {EventType::kWhatCheckpoint,   0ll, 0, 0, 0},
+
+        // activity arrives sparsely with an interval only slightly smaller than
+        // the timeout; this should only trigger 1 ON
+        {EventType::kWhatActivity,     kSparseFrameIntervalUs, 2},
+        {EventType::kWhatCheckpoint,   0ll, 0, 1, 0},
+        {EventType::kWhatCheckpoint,   kSparseFrameIntervalUs, 0, 1, 0},
+        {EventType::kWhatCheckpoint,   kTestMarginUs, 0, 1, 1}, // OFF
+    }, 10000000ll);
+}
+} // namespace android
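For context (not part of the patch), a sketch of the call pattern these tests exercise, i.e. how a codec-side owner is presumably expected to drive BatteryChecker from its looper thread. The kWhatCheckBatteryStats name comes from the MediaCodec.h hunk above; the timeout value and the callback bodies are placeholders:

    // Hypothetical owner code, mirroring BatteryCheckerTestHandler above.
    sp<BatteryChecker> checker = new BatteryChecker(
            new AMessage(kWhatCheckBatteryStats, this), 3000000ll /* illustrative timeout, us */);
    checker->setExecuting(true);                               // codec entered executing state
    checker->onCodecActivity([] { /* report battery ON */ });  // on queue/dequeue activity
    // When the posted timer message arrives on the looper, forward it:
    //   case kWhatCheckBatteryStats:
    //       mBatteryChecker->onCheckBatteryTimer(msg, [] { /* report battery OFF */ });
    checker->onClientRemoved();                                // codec released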
diff --git a/media/libstagefright/tests/DummyRecorder.cpp b/media/libstagefright/tests/DummyRecorder.cpp
new file mode 100644
index 0000000..c79e6b1
--- /dev/null
+++ b/media/libstagefright/tests/DummyRecorder.cpp
@@ -0,0 +1,91 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "DummyRecorder"
+// #define LOG_NDEBUG 0
+
+#include <media/MediaSource.h>
+#include <media/stagefright/MediaBuffer.h>
+#include "DummyRecorder.h"
+
+#include <utils/Log.h>
+
+namespace android {
+
+// static
+void *DummyRecorder::threadWrapper(void *pthis) {
+    ALOGV("ThreadWrapper: %p", pthis);
+    DummyRecorder *writer = static_cast<DummyRecorder *>(pthis);
+    writer->readFromSource();
+    return NULL;
+}
+
+
+status_t DummyRecorder::start() {
+    ALOGV("Start");
+    mStarted = true;
+
+    mSource->start();
+
+    pthread_attr_t attr;
+    pthread_attr_init(&attr);
+    pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_JOINABLE);
+    int err = pthread_create(&mThread, &attr, threadWrapper, this);
+    pthread_attr_destroy(&attr);
+
+    if (err) {
+        ALOGE("Error creating thread!");
+        return -ENODEV;
+    }
+    return OK;
+}
+
+
+status_t DummyRecorder::stop() {
+    ALOGV("Stop");
+    mStarted = false;
+
+    mSource->stop();
+    void *dummy;
+    pthread_join(mThread, &dummy);
+    status_t err = static_cast<status_t>(reinterpret_cast<uintptr_t>(dummy));
+
+    ALOGV("Ending the reading thread");
+    return err;
+}
+
+// pretend to read the source buffers
+void DummyRecorder::readFromSource() {
+    ALOGV("ReadFromSource");
+    if (!mStarted) {
+        return;
+    }
+
+    status_t err = OK;
+    MediaBufferBase *buffer;
+    ALOGV("A fake writer accessing the frames");
+    while (mStarted && (err = mSource->read(&buffer)) == OK){
+        // if not getting a valid buffer from source, then exit
+        if (buffer == NULL) {
+            return;
+        }
+        buffer->release();
+        buffer = NULL;
+    }
+}
+
+
+} // end of namespace android
diff --git a/media/libstagefright/tests/DummyRecorder.h b/media/libstagefright/tests/DummyRecorder.h
new file mode 100644
index 0000000..0759777
--- /dev/null
+++ b/media/libstagefright/tests/DummyRecorder.h
@@ -0,0 +1,58 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef DUMMY_RECORDER_H_
+#define DUMMY_RECORDER_H_
+
+#include <pthread.h>
+#include <utils/String8.h>
+#include <media/stagefright/foundation/ABase.h>
+
+
+namespace android {
+
+struct MediaSource;
+class MediaBuffer;
+
+class DummyRecorder {
+    public:
+    // The media source from which this will receive frames
+    sp<MediaSource> mSource;
+    bool mStarted;
+    pthread_t mThread;
+
+    status_t start();
+    status_t stop();
+
+    // actual entry point for the thread
+    void readFromSource();
+
+    // static function to wrap the actual thread entry point
+    static void *threadWrapper(void *pthis);
+
+    explicit DummyRecorder(const sp<MediaSource> &source) : mSource(source)
+                                                    , mStarted(false) {}
+    ~DummyRecorder( ) {}
+
+    private:
+
+    DISALLOW_EVIL_CONSTRUCTORS(DummyRecorder);
+};
+
+} // end of namespace android
+#endif
+
+
diff --git a/media/libstagefright/tests/SurfaceMediaSource_test.cpp b/media/libstagefright/tests/SurfaceMediaSource_test.cpp
new file mode 100644
index 0000000..1b1c3b8
--- /dev/null
+++ b/media/libstagefright/tests/SurfaceMediaSource_test.cpp
@@ -0,0 +1,944 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "SurfaceMediaSource_test"
+
+#include <gtest/gtest.h>
+#include <utils/String8.h>
+#include <utils/String16.h>
+#include <utils/Errors.h>
+#include <fcntl.h>
+#include <unistd.h>
+
+#include <GLES2/gl2.h>
+
+#include <media/stagefright/SurfaceMediaSource.h>
+#include <media/mediarecorder.h>
+
+#include <ui/GraphicBuffer.h>
+#include <gui/Surface.h>
+#include <gui/ISurfaceComposer.h>
+#include <gui/Surface.h>
+#include <gui/SurfaceComposerClient.h>
+
+#include <binder/ProcessState.h>
+
+#include <media/stagefright/foundation/ADebug.h>
+#include <OMX_Component.h>
+
+#include "DummyRecorder.h"
+
+
+namespace android {
+
+class GLTest : public ::testing::Test {
+protected:
+
+    GLTest():
+            mEglDisplay(EGL_NO_DISPLAY),
+            mEglSurface(EGL_NO_SURFACE),
+            mEglContext(EGL_NO_CONTEXT) {
+    }
+
+    virtual void SetUp() {
+        ALOGV("GLTest::SetUp()");
+        mEglDisplay = eglGetDisplay(EGL_DEFAULT_DISPLAY);
+        ASSERT_EQ(EGL_SUCCESS, eglGetError());
+        ASSERT_NE(EGL_NO_DISPLAY, mEglDisplay);
+
+        EGLint majorVersion;
+        EGLint minorVersion;
+        EXPECT_TRUE(eglInitialize(mEglDisplay, &majorVersion, &minorVersion));
+        ASSERT_EQ(EGL_SUCCESS, eglGetError());
+        RecordProperty("EglVersionMajor", majorVersion);
+        RecordProperty("EglVersionMajor", minorVersion);
+
+        EGLint numConfigs = 0;
+        EXPECT_TRUE(eglChooseConfig(mEglDisplay, getConfigAttribs(), &mGlConfig,
+                1, &numConfigs));
+        ASSERT_EQ(EGL_SUCCESS, eglGetError());
+
+        char* displaySecsEnv = getenv("GLTEST_DISPLAY_SECS");
+        if (displaySecsEnv != NULL) {
+            mDisplaySecs = atoi(displaySecsEnv);
+            if (mDisplaySecs < 0) {
+                mDisplaySecs = 0;
+            }
+        } else {
+            mDisplaySecs = 0;
+        }
+
+        if (mDisplaySecs > 0) {
+            mComposerClient = new SurfaceComposerClient;
+            ASSERT_EQ(NO_ERROR, mComposerClient->initCheck());
+
+            mSurfaceControl = mComposerClient->createSurface(
+                    String8("Test Surface"),
+                    getSurfaceWidth(), getSurfaceHeight(),
+                    PIXEL_FORMAT_RGB_888, 0);
+
+            ASSERT_TRUE(mSurfaceControl != NULL);
+            ASSERT_TRUE(mSurfaceControl->isValid());
+
+            SurfaceComposerClient::Transaction{}
+                    .setLayer(mSurfaceControl, 0x7FFFFFFF)
+                    .show(mSurfaceControl)
+                    .apply();
+
+            sp<ANativeWindow> window = mSurfaceControl->getSurface();
+            mEglSurface = eglCreateWindowSurface(mEglDisplay, mGlConfig,
+                    window.get(), NULL);
+        } else {
+            ALOGV("No actual display. Choosing EGLSurface based on SurfaceMediaSource");
+            sp<IGraphicBufferProducer> sms = (new SurfaceMediaSource(
+                    getSurfaceWidth(), getSurfaceHeight()))->getProducer();
+            sp<Surface> stc = new Surface(sms);
+            sp<ANativeWindow> window = stc;
+
+            mEglSurface = eglCreateWindowSurface(mEglDisplay, mGlConfig,
+                    window.get(), NULL);
+        }
+        ASSERT_EQ(EGL_SUCCESS, eglGetError());
+        ASSERT_NE(EGL_NO_SURFACE, mEglSurface);
+
+        mEglContext = eglCreateContext(mEglDisplay, mGlConfig, EGL_NO_CONTEXT,
+                getContextAttribs());
+        ASSERT_EQ(EGL_SUCCESS, eglGetError());
+        ASSERT_NE(EGL_NO_CONTEXT, mEglContext);
+
+        EXPECT_TRUE(eglMakeCurrent(mEglDisplay, mEglSurface, mEglSurface,
+                mEglContext));
+        ASSERT_EQ(EGL_SUCCESS, eglGetError());
+
+        EGLint w, h;
+        EXPECT_TRUE(eglQuerySurface(mEglDisplay, mEglSurface, EGL_WIDTH, &w));
+        ASSERT_EQ(EGL_SUCCESS, eglGetError());
+        EXPECT_TRUE(eglQuerySurface(mEglDisplay, mEglSurface, EGL_HEIGHT, &h));
+        ASSERT_EQ(EGL_SUCCESS, eglGetError());
+        RecordProperty("EglSurfaceWidth", w);
+        RecordProperty("EglSurfaceHeight", h);
+
+        glViewport(0, 0, w, h);
+        ASSERT_EQ(GLenum(GL_NO_ERROR), glGetError());
+    }
+
+    virtual void TearDown() {
+        // Display the result
+        if (mDisplaySecs > 0 && mEglSurface != EGL_NO_SURFACE) {
+            eglSwapBuffers(mEglDisplay, mEglSurface);
+            sleep(mDisplaySecs);
+        }
+
+        if (mComposerClient != NULL) {
+            mComposerClient->dispose();
+        }
+        if (mEglContext != EGL_NO_CONTEXT) {
+            eglDestroyContext(mEglDisplay, mEglContext);
+        }
+        if (mEglSurface != EGL_NO_SURFACE) {
+            eglDestroySurface(mEglDisplay, mEglSurface);
+        }
+        if (mEglDisplay != EGL_NO_DISPLAY) {
+            eglTerminate(mEglDisplay);
+        }
+        ASSERT_EQ(EGL_SUCCESS, eglGetError());
+    }
+
+    virtual EGLint const* getConfigAttribs() {
+        ALOGV("GLTest getConfigAttribs");
+        static EGLint sDefaultConfigAttribs[] = {
+            EGL_SURFACE_TYPE, EGL_PBUFFER_BIT,
+            EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
+            EGL_RED_SIZE, 8,
+            EGL_GREEN_SIZE, 8,
+            EGL_BLUE_SIZE, 8,
+            EGL_ALPHA_SIZE, 8,
+            EGL_DEPTH_SIZE, 16,
+            EGL_STENCIL_SIZE, 8,
+            EGL_NONE };
+
+        return sDefaultConfigAttribs;
+    }
+
+    virtual EGLint const* getContextAttribs() {
+        static EGLint sDefaultContextAttribs[] = {
+            EGL_CONTEXT_CLIENT_VERSION, 2,
+            EGL_NONE };
+
+        return sDefaultContextAttribs;
+    }
+
+    virtual EGLint getSurfaceWidth() {
+        return 512;
+    }
+
+    virtual EGLint getSurfaceHeight() {
+        return 512;
+    }
+
+    void loadShader(GLenum shaderType, const char* pSource, GLuint* outShader) {
+        GLuint shader = glCreateShader(shaderType);
+        ASSERT_EQ(GLenum(GL_NO_ERROR), glGetError());
+        if (shader) {
+            glShaderSource(shader, 1, &pSource, NULL);
+            ASSERT_EQ(GLenum(GL_NO_ERROR), glGetError());
+            glCompileShader(shader);
+            ASSERT_EQ(GLenum(GL_NO_ERROR), glGetError());
+            GLint compiled = 0;
+            glGetShaderiv(shader, GL_COMPILE_STATUS, &compiled);
+            ASSERT_EQ(GLenum(GL_NO_ERROR), glGetError());
+            if (!compiled) {
+                GLint infoLen = 0;
+                glGetShaderiv(shader, GL_INFO_LOG_LENGTH, &infoLen);
+                ASSERT_EQ(GLenum(GL_NO_ERROR), glGetError());
+                if (infoLen) {
+                    char* buf = (char*) malloc(infoLen);
+                    if (buf) {
+                        glGetShaderInfoLog(shader, infoLen, NULL, buf);
+                        printf("Shader compile log:\n%s\n", buf);
+                        free(buf);
+                        FAIL();
+                    }
+                } else {
+                    char* buf = (char*) malloc(0x1000);
+                    if (buf) {
+                        glGetShaderInfoLog(shader, 0x1000, NULL, buf);
+                        printf("Shader compile log:\n%s\n", buf);
+                        free(buf);
+                        FAIL();
+                    }
+                }
+                glDeleteShader(shader);
+                shader = 0;
+            }
+        }
+        ASSERT_TRUE(shader != 0);
+        *outShader = shader;
+    }
+
+    void createProgram(const char* pVertexSource, const char* pFragmentSource,
+            GLuint* outPgm) {
+        GLuint vertexShader, fragmentShader;
+        {
+            SCOPED_TRACE("compiling vertex shader");
+            loadShader(GL_VERTEX_SHADER, pVertexSource, &vertexShader);
+            if (HasFatalFailure()) {
+                return;
+            }
+        }
+        {
+            SCOPED_TRACE("compiling fragment shader");
+            loadShader(GL_FRAGMENT_SHADER, pFragmentSource, &fragmentShader);
+            if (HasFatalFailure()) {
+                return;
+            }
+        }
+
+        GLuint program = glCreateProgram();
+        ASSERT_EQ(GLenum(GL_NO_ERROR), glGetError());
+        if (program) {
+            glAttachShader(program, vertexShader);
+            ASSERT_EQ(GLenum(GL_NO_ERROR), glGetError());
+            glAttachShader(program, fragmentShader);
+            ASSERT_EQ(GLenum(GL_NO_ERROR), glGetError());
+            glLinkProgram(program);
+            GLint linkStatus = GL_FALSE;
+            glGetProgramiv(program, GL_LINK_STATUS, &linkStatus);
+            if (linkStatus != GL_TRUE) {
+                GLint bufLength = 0;
+                glGetProgramiv(program, GL_INFO_LOG_LENGTH, &bufLength);
+                if (bufLength) {
+                    char* buf = (char*) malloc(bufLength);
+                    if (buf) {
+                        glGetProgramInfoLog(program, bufLength, NULL, buf);
+                        printf("Program link log:\n%s\n", buf);
+                        free(buf);
+                        FAIL();
+                    }
+                }
+                glDeleteProgram(program);
+                program = 0;
+            }
+        }
+        glDeleteShader(vertexShader);
+        glDeleteShader(fragmentShader);
+        ASSERT_TRUE(program != 0);
+        *outPgm = program;
+    }
+
+    static int abs(int value) {
+        return value > 0 ? value : -value;
+    }
+
+    ::testing::AssertionResult checkPixel(int x, int y, int r,
+            int g, int b, int a, int tolerance=2) {
+        GLubyte pixel[4];
+        String8 msg;
+        glReadPixels(x, y, 1, 1, GL_RGBA, GL_UNSIGNED_BYTE, pixel);
+        GLenum err = glGetError();
+        if (err != GL_NO_ERROR) {
+            msg += String8::format("error reading pixel: %#x", err);
+            while ((err = glGetError()) != GL_NO_ERROR) {
+                msg += String8::format(", %#x", err);
+            }
+            fprintf(stderr, "pixel check failure: %s\n", msg.string());
+            return ::testing::AssertionFailure(
+                    ::testing::Message(msg.string()));
+        }
+        if (r >= 0 && abs(r - int(pixel[0])) > tolerance) {
+            msg += String8::format("r(%d isn't %d)", pixel[0], r);
+        }
+        if (g >= 0 && abs(g - int(pixel[1])) > tolerance) {
+            if (!msg.isEmpty()) {
+                msg += " ";
+            }
+            msg += String8::format("g(%d isn't %d)", pixel[1], g);
+        }
+        if (b >= 0 && abs(b - int(pixel[2])) > tolerance) {
+            if (!msg.isEmpty()) {
+                msg += " ";
+            }
+            msg += String8::format("b(%d isn't %d)", pixel[2], b);
+        }
+        if (a >= 0 && abs(a - int(pixel[3])) > tolerance) {
+            if (!msg.isEmpty()) {
+                msg += " ";
+            }
+            msg += String8::format("a(%d isn't %d)", pixel[3], a);
+        }
+        if (!msg.isEmpty()) {
+            fprintf(stderr, "pixel check failure: %s\n", msg.string());
+            return ::testing::AssertionFailure(
+                    ::testing::Message(msg.string()));
+        } else {
+            return ::testing::AssertionSuccess();
+        }
+    }
+
+    int mDisplaySecs;
+    sp<SurfaceComposerClient> mComposerClient;
+    sp<SurfaceControl> mSurfaceControl;
+
+    EGLDisplay mEglDisplay;
+    EGLSurface mEglSurface;
+    EGLContext mEglContext;
+    EGLConfig  mGlConfig;
+};
+
+///////////////////////////////////////////////////////////////////////
+//    Class for  the NON-GL tests
+///////////////////////////////////////////////////////////////////////
+class SurfaceMediaSourceTest : public ::testing::Test {
+public:
+
+    SurfaceMediaSourceTest( ): mYuvTexWidth(176), mYuvTexHeight(144) { }
+    void oneBufferPass(int width, int height );
+    void oneBufferPassNoFill(int width, int height );
+    static void fillYV12Buffer(uint8_t* buf, int w, int h, int stride) ;
+    static void fillYV12BufferRect(uint8_t* buf, int w, int h,
+                        int stride, const android_native_rect_t& rect) ;
+protected:
+
+    virtual void SetUp() {
+        android::ProcessState::self()->startThreadPool();
+        mSMS = new SurfaceMediaSource(mYuvTexWidth, mYuvTexHeight);
+        mSTC = new Surface(mSMS->getProducer());
+        mANW = mSTC;
+    }
+
+    virtual void TearDown() {
+        mSMS.clear();
+        mSTC.clear();
+        mANW.clear();
+    }
+
+    const int mYuvTexWidth;
+    const int mYuvTexHeight;
+
+    sp<SurfaceMediaSource> mSMS;
+    sp<Surface> mSTC;
+    sp<ANativeWindow> mANW;
+};
+
+///////////////////////////////////////////////////////////////////////
+//    Class for  the GL tests
+///////////////////////////////////////////////////////////////////////
+class SurfaceMediaSourceGLTest : public GLTest {
+public:
+
+    SurfaceMediaSourceGLTest( ): mYuvTexWidth(176), mYuvTexHeight(144) { }
+    virtual EGLint const* getConfigAttribs();
+    void oneBufferPassGL(int num = 0);
+    static sp<MediaRecorder> setUpMediaRecorder(int fileDescriptor, int videoSource,
+        int outputFormat, int videoEncoder, int width, int height, int fps);
+protected:
+
+    virtual void SetUp() {
+        ALOGV("SMS-GLTest::SetUp()");
+        android::ProcessState::self()->startThreadPool();
+        mSMS = new SurfaceMediaSource(mYuvTexWidth, mYuvTexHeight);
+        mSTC = new Surface(mSMS->getProducer());
+        mANW = mSTC;
+
+        // Doing the setup related to the GL Side
+        GLTest::SetUp();
+    }
+
+    virtual void TearDown() {
+        mSMS.clear();
+        mSTC.clear();
+        mANW.clear();
+        GLTest::TearDown();
+    }
+
+    void setUpEGLSurfaceFromMediaRecorder(sp<MediaRecorder>& mr);
+
+    const int mYuvTexWidth;
+    const int mYuvTexHeight;
+
+    sp<SurfaceMediaSource> mSMS;
+    sp<Surface> mSTC;
+    sp<ANativeWindow> mANW;
+};
+
+/////////////////////////////////////////////////////////////////////
+// Methods in SurfaceMediaSourceGLTest
+/////////////////////////////////////////////////////////////////////
+EGLint const* SurfaceMediaSourceGLTest::getConfigAttribs() {
+        ALOGV("SurfaceMediaSourceGLTest getConfigAttribs");
+    static EGLint sDefaultConfigAttribs[] = {
+        EGL_SURFACE_TYPE, EGL_WINDOW_BIT,
+        EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
+        EGL_RED_SIZE, 8,
+        EGL_GREEN_SIZE, 8,
+        EGL_BLUE_SIZE, 8,
+        EGL_RECORDABLE_ANDROID, EGL_TRUE,
+        EGL_NONE };
+
+    return sDefaultConfigAttribs;
+}
+
+// One pass of dequeuing and queuing a GLBuffer
+void SurfaceMediaSourceGLTest::oneBufferPassGL(int num) {
+    int d = num % 50;
+    float f = 0.2f; // 0.1f * d;
+
+    glClearColor(0, 0.3, 0, 0.6);
+    glClear(GL_COLOR_BUFFER_BIT);
+
+    glEnable(GL_SCISSOR_TEST);
+    glScissor(4 + d, 4 + d, 4, 4);
+    glClearColor(1.0 - f, f, f, 1.0);
+    glClear(GL_COLOR_BUFFER_BIT);
+
+    glScissor(24 + d, 48 + d, 4, 4);
+    glClearColor(f, 1.0 - f, f, 1.0);
+    glClear(GL_COLOR_BUFFER_BIT);
+
+    glScissor(37 + d, 17 + d, 4, 4);
+    glClearColor(f, f, 1.0 - f, 1.0);
+    glClear(GL_COLOR_BUFFER_BIT);
+
+    // The following call dequeues and queues the buffer
+    eglSwapBuffers(mEglDisplay, mEglSurface);
+    ASSERT_EQ(EGL_SUCCESS, eglGetError());
+    glDisable(GL_SCISSOR_TEST);
+}
+
+// Set up the MediaRecorder which runs in the same process as mediaserver
+sp<MediaRecorder> SurfaceMediaSourceGLTest::setUpMediaRecorder(int fd, int videoSource,
+        int outputFormat, int videoEncoder, int width, int height, int fps) {
+    sp<MediaRecorder> mr = new MediaRecorder(String16());
+    mr->setVideoSource(videoSource);
+    mr->setOutputFormat(outputFormat);
+    mr->setVideoEncoder(videoEncoder);
+    mr->setOutputFile(fd);
+    mr->setVideoSize(width, height);
+    mr->setVideoFrameRate(fps);
+    mr->prepare();
+    ALOGV("Starting MediaRecorder...");
+    CHECK_EQ((status_t)OK, mr->start());
+    return mr;
+}
+
+// query the mediarecorder for a surfacemediasource and create an egl surface with that
+void SurfaceMediaSourceGLTest::setUpEGLSurfaceFromMediaRecorder(sp<MediaRecorder>& mr) {
+    sp<IGraphicBufferProducer> iST = mr->querySurfaceMediaSourceFromMediaServer();
+    mSTC = new Surface(iST);
+    mANW = mSTC;
+
+    if (mEglSurface != EGL_NO_SURFACE) {
+        EXPECT_TRUE(eglDestroySurface(mEglDisplay, mEglSurface));
+        mEglSurface = EGL_NO_SURFACE;
+    }
+    mEglSurface = eglCreateWindowSurface(mEglDisplay, mGlConfig,
+                                mANW.get(), NULL);
+    ASSERT_EQ(EGL_SUCCESS, eglGetError());
+    ASSERT_NE(EGL_NO_SURFACE, mEglSurface) ;
+
+    EXPECT_TRUE(eglMakeCurrent(mEglDisplay, mEglSurface, mEglSurface,
+            mEglContext));
+    ASSERT_EQ(EGL_SUCCESS, eglGetError());
+}
+
+
+/////////////////////////////////////////////////////////////////////
+// Methods in SurfaceMediaSourceTest
+/////////////////////////////////////////////////////////////////////
+
+// One pass of dequeuing and queuing the buffer. Fill it in with
+// cpu YV12 buffer
+void SurfaceMediaSourceTest::oneBufferPass(int width, int height ) {
+    ANativeWindowBuffer* anb;
+    ASSERT_EQ(NO_ERROR, native_window_dequeue_buffer_and_wait(mANW.get(), &anb));
+    ASSERT_TRUE(anb != NULL);
+
+
+    // Fill the buffer with a checkerboard pattern
+    uint8_t* img = NULL;
+    sp<GraphicBuffer> buf(GraphicBuffer::from(anb));
+    buf->lock(GRALLOC_USAGE_SW_WRITE_OFTEN, (void**)(&img));
+    SurfaceMediaSourceTest::fillYV12Buffer(img, width, height, buf->getStride());
+    buf->unlock();
+
+    ASSERT_EQ(NO_ERROR, mANW->queueBuffer(mANW.get(), buf->getNativeBuffer(),
+            -1));
+}
+
+// Dequeuing and queuing the buffer without really filling it in.
+void SurfaceMediaSourceTest::oneBufferPassNoFill(
+        int /* width */, int /* height  */) {
+    ANativeWindowBuffer* anb;
+    ASSERT_EQ(NO_ERROR, native_window_dequeue_buffer_and_wait(mANW.get(), &anb));
+    ASSERT_TRUE(anb != NULL);
+
+    // We do not fill the buffer in. Just queue it back.
+    sp<GraphicBuffer> buf(GraphicBuffer::from(anb));
+    ASSERT_EQ(NO_ERROR, mANW->queueBuffer(mANW.get(), buf->getNativeBuffer(),
+            -1));
+}
+
+// Fill a YV12 buffer with a multi-colored checkerboard pattern
+void SurfaceMediaSourceTest::fillYV12Buffer(uint8_t* buf, int w, int h, int stride) {
+    const int blockWidth = w > 16 ? w / 16 : 1;
+    const int blockHeight = h > 16 ? h / 16 : 1;
+    const int yuvTexOffsetY = 0;
+    int yuvTexStrideY = stride;
+    int yuvTexOffsetV = yuvTexStrideY * h;
+    int yuvTexStrideV = (yuvTexStrideY/2 + 0xf) & ~0xf;
+    int yuvTexOffsetU = yuvTexOffsetV + yuvTexStrideV * h/2;
+    int yuvTexStrideU = yuvTexStrideV;
+    for (int x = 0; x < w; x++) {
+        for (int y = 0; y < h; y++) {
+            int parityX = (x / blockWidth) & 1;
+            int parityY = (y / blockHeight) & 1;
+            unsigned char intensity = (parityX ^ parityY) ? 63 : 191;
+            buf[yuvTexOffsetY + (y * yuvTexStrideY) + x] = intensity;
+            if (x < w / 2 && y < h / 2) {
+                buf[yuvTexOffsetU + (y * yuvTexStrideU) + x] = intensity;
+                if (x * 2 < w / 2 && y * 2 < h / 2) {
+                    buf[yuvTexOffsetV + (y*2 * yuvTexStrideV) + x*2 + 0] =
+                    buf[yuvTexOffsetV + (y*2 * yuvTexStrideV) + x*2 + 1] =
+                    buf[yuvTexOffsetV + ((y*2+1) * yuvTexStrideV) + x*2 + 0] =
+                    buf[yuvTexOffsetV + ((y*2+1) * yuvTexStrideV) + x*2 + 1] =
+                        intensity;
+                }
+            }
+        }
+    }
+}
+
+// Fill a YV12 buffer with red outside a given rectangle and green inside it.
+void SurfaceMediaSourceTest::fillYV12BufferRect(uint8_t* buf, int w,
+                  int h, int stride, const android_native_rect_t& rect) {
+    const int yuvTexOffsetY = 0;
+    int yuvTexStrideY = stride;
+    int yuvTexOffsetV = yuvTexStrideY * h;
+    int yuvTexStrideV = (yuvTexStrideY/2 + 0xf) & ~0xf;
+    int yuvTexOffsetU = yuvTexOffsetV + yuvTexStrideV * h/2;
+    int yuvTexStrideU = yuvTexStrideV;
+    for (int x = 0; x < w; x++) {
+        for (int y = 0; y < h; y++) {
+            bool inside = rect.left <= x && x < rect.right &&
+                    rect.top <= y && y < rect.bottom;
+            buf[yuvTexOffsetY + (y * yuvTexStrideY) + x] = inside ? 240 : 64;
+            if (x < w / 2 && y < h / 2) {
+                bool inside = rect.left <= 2*x && 2*x < rect.right &&
+                        rect.top <= 2*y && 2*y < rect.bottom;
+                buf[yuvTexOffsetU + (y * yuvTexStrideU) + x] = 16;
+                buf[yuvTexOffsetV + (y * yuvTexStrideV) + x] =
+                                                inside ? 16 : 255;
+            }
+        }
+    }
+}  ///////// End of class SurfaceMediaSourceTest
+
+///////////////////////////////////////////////////////////////////
+// Class to imitate the recording     /////////////////////////////
+// ////////////////////////////////////////////////////////////////
+struct SimpleDummyRecorder {
+        sp<MediaSource> mSource;
+
+        explicit SimpleDummyRecorder
+                (const sp<MediaSource> &source): mSource(source) {}
+
+        status_t start() { return mSource->start();}
+        status_t stop()  { return mSource->stop();}
+
+        // fakes reading from a media source
+        status_t readFromSource() {
+            MediaBufferBase *buffer;
+            status_t err = mSource->read(&buffer);
+            if (err != OK) {
+                return err;
+            }
+            buffer->release();
+            buffer = NULL;
+            return OK;
+        }
+};
+///////////////////////////////////////////////////////////////////
+//           TESTS
+// SurfaceMediaSourceTest class contains tests that fill the buffers
+// using the cpu calls
+// SurfaceMediaSourceGLTest class contains tests that fill the buffers
+// using the GL calls.
+// TODO: None of the tests actually verify the encoded images, so at this point
+// these are mostly functionality tests + visual inspection
+//////////////////////////////////////////////////////////////////////
+
+// Just pass one buffer from the native_window to the SurfaceMediaSource
+// Dummy Encoder
+static int testId = 1;
+TEST_F(SurfaceMediaSourceTest, DISABLED_DummyEncodingFromCpuFilledYV12BufferNpotOneBufferPass) {
+    ALOGV("Test # %d", testId++);
+    ALOGV("Testing OneBufferPass ******************************");
+
+    ASSERT_EQ(NO_ERROR, native_window_set_buffers_format(mANW.get(),
+            HAL_PIXEL_FORMAT_YV12));
+    oneBufferPass(mYuvTexWidth, mYuvTexHeight);
+}
+
+// Pass a buffer with the wrong width and height; it should not be accepted
+// Dummy Encoder
+TEST_F(SurfaceMediaSourceTest, DISABLED_DummyEncodingFromCpuFilledYV12BufferNpotWrongSizeBufferPass) {
+    ALOGV("Test # %d", testId++);
+    ALOGV("Testing Wrong size BufferPass ******************************");
+
+    // setting the client side buffer size different from the server size
+    ASSERT_EQ(NO_ERROR, native_window_set_buffers_dimensions(mANW.get(),
+             10, 10));
+    ASSERT_EQ(NO_ERROR, native_window_set_buffers_format(mANW.get(),
+            HAL_PIXEL_FORMAT_YV12));
+
+    ANativeWindowBuffer* anb;
+
+    // Note: make sure we get an ERROR back when dequeuing!
+    ASSERT_NE(NO_ERROR, native_window_dequeue_buffer_and_wait(mANW.get(), &anb));
+}
+
+// pass multiple buffers from the native_window to the SurfaceMediaSource
+// Dummy Encoder
+TEST_F(SurfaceMediaSourceTest,  DISABLED_DummyEncodingFromCpuFilledYV12BufferNpotMultiBufferPass) {
+    ALOGV("Test # %d", testId++);
+    ALOGV("Testing MultiBufferPass, Dummy Recorder *********************");
+    ASSERT_EQ(NO_ERROR, native_window_set_buffers_format(mANW.get(),
+            HAL_PIXEL_FORMAT_YV12));
+
+    SimpleDummyRecorder writer(mSMS);
+    writer.start();
+
+    int32_t nFramesCount = 0;
+    while (nFramesCount < 300) {
+        oneBufferPass(mYuvTexWidth, mYuvTexHeight);
+
+        ASSERT_EQ(NO_ERROR, writer.readFromSource());
+
+        nFramesCount++;
+    }
+    writer.stop();
+}
+
+// Delayed pass of multiple buffers from the native_window to the SurfaceMediaSource
+// Dummy Encoder
+TEST_F(SurfaceMediaSourceTest,  DummyLagEncodingFromCpuFilledYV12BufferNpotMultiBufferPass) {
+    ALOGV("Test # %d", testId++);
+    ALOGV("Testing MultiBufferPass, Dummy Recorder Lagging **************");
+
+    ASSERT_EQ(NO_ERROR, native_window_set_buffers_format(mANW.get(),
+            HAL_PIXEL_FORMAT_YV12));
+
+    SimpleDummyRecorder writer(mSMS);
+    writer.start();
+
+    int32_t nFramesCount = 1;
+    const int FRAMES_LAG = SurfaceMediaSource::MIN_UNDEQUEUED_BUFFERS;
+
+    while (nFramesCount <= 300) {
+        ALOGV("Frame: %d", nFramesCount);
+        oneBufferPass(mYuvTexWidth, mYuvTexHeight);
+        // Forcing the writer to lag behind a few frames
+        if (nFramesCount > FRAMES_LAG) {
+            ASSERT_EQ(NO_ERROR, writer.readFromSource());
+        }
+        nFramesCount++;
+    }
+    writer.stop();
+}
+
+// pass multiple buffers from the native_window to the SurfaceMediaSource
+// A dummy writer (MULTITHREADED) is used to simulate actual MPEG4Writer
+TEST_F(SurfaceMediaSourceTest, DummyThreadedEncodingFromCpuFilledYV12BufferNpotMultiBufferPass) {
+    ALOGV("Test # %d", testId++);
+    ALOGV("Testing MultiBufferPass, Dummy Recorder Multi-Threaded **********");
+    ASSERT_EQ(NO_ERROR, native_window_set_buffers_format(mANW.get(),
+            HAL_PIXEL_FORMAT_YV12));
+
+    DummyRecorder writer(mSMS);
+    writer.start();
+
+    int32_t nFramesCount = 0;
+    while (nFramesCount <= 300) {
+        ALOGV("Frame: %d", nFramesCount);
+        oneBufferPass(mYuvTexWidth, mYuvTexHeight);
+
+        nFramesCount++;
+    }
+    writer.stop();
+}
+
+// Test to examine actual encoding using mediarecorder
+// We use the mediaserver to create a mediarecorder and send
+// it back to us. So SurfaceMediaSource lives in the same process
+// as the mediaserver.
+// Very close to the actual camera, except that the
+// buffers are filled and queued by the CPU instead of GL.
+TEST_F(SurfaceMediaSourceTest, DISABLED_EncodingFromCpuYV12BufferNpotWriteMediaServer) {
+    ALOGV("Test # %d", testId++);
+    ALOGV("************** Testing the whole pipeline with actual MediaRecorder ***********");
+    ALOGV("************** SurfaceMediaSource is same process as mediaserver    ***********");
+
+    const char *fileName = "/sdcard/outputSurfEncMSource.mp4";
+    int fd = open(fileName, O_RDWR | O_CREAT, 0744);
+    if (fd < 0) {
+        ALOGE("ERROR: Could not open the the file %s, fd = %d !!", fileName, fd);
+    }
+    CHECK(fd >= 0);
+
+    sp<MediaRecorder> mr = SurfaceMediaSourceGLTest::setUpMediaRecorder(fd,
+            VIDEO_SOURCE_SURFACE, OUTPUT_FORMAT_MPEG_4, VIDEO_ENCODER_H264,
+            mYuvTexWidth, mYuvTexHeight, 30);
+    // get the reference to the surfacemediasource living in
+    // mediaserver that is created by stagefrightrecorder
+    sp<IGraphicBufferProducer> iST = mr->querySurfaceMediaSourceFromMediaServer();
+    mSTC = new Surface(iST);
+    mANW = mSTC;
+    ASSERT_EQ(NO_ERROR, native_window_api_connect(mANW.get(), NATIVE_WINDOW_API_CPU));
+    ASSERT_EQ(NO_ERROR, native_window_set_buffers_format(mANW.get(),
+                                                HAL_PIXEL_FORMAT_YV12));
+
+    int32_t nFramesCount = 0;
+    while (nFramesCount <= 300) {
+        oneBufferPassNoFill(mYuvTexWidth, mYuvTexHeight);
+        nFramesCount++;
+        ALOGV("framesCount = %d", nFramesCount);
+    }
+
+    ASSERT_EQ(NO_ERROR, native_window_api_disconnect(mANW.get(), NATIVE_WINDOW_API_CPU));
+    ALOGV("Stopping MediaRecorder...");
+    CHECK_EQ((status_t)OK, mr->stop());
+    mr.clear();
+    close(fd);
+}
+
+//////////////////////////////////////////////////////////////////////
+// GL tests
+/////////////////////////////////////////////////////////////////////
+
+// Test to examine whether we can choose the Recordable Android GLConfig
+// DummyRecorder used- no real encoding here
+TEST_F(SurfaceMediaSourceGLTest, ChooseAndroidRecordableEGLConfigDummyWriter) {
+    ALOGV("Test # %d", testId++);
+    ALOGV("Verify creating a surface w/ right config + dummy writer*********");
+
+    mSMS = new SurfaceMediaSource(mYuvTexWidth, mYuvTexHeight);
+    mSTC = new Surface(mSMS->getProducer());
+    mANW = mSTC;
+
+    DummyRecorder writer(mSMS);
+    writer.start();
+
+    if (mEglSurface != EGL_NO_SURFACE) {
+        EXPECT_TRUE(eglDestroySurface(mEglDisplay, mEglSurface));
+        mEglSurface = EGL_NO_SURFACE;
+    }
+
+    mEglSurface = eglCreateWindowSurface(mEglDisplay, mGlConfig,
+                                mANW.get(), NULL);
+    ASSERT_EQ(EGL_SUCCESS, eglGetError());
+    ASSERT_NE(EGL_NO_SURFACE, mEglSurface);
+
+    EXPECT_TRUE(eglMakeCurrent(mEglDisplay, mEglSurface, mEglSurface,
+            mEglContext));
+    ASSERT_EQ(EGL_SUCCESS, eglGetError());
+
+    int32_t nFramesCount = 0;
+    while (nFramesCount <= 300) {
+        oneBufferPassGL();
+        nFramesCount++;
+        ALOGV("framesCount = %d", nFramesCount);
+    }
+
+    EXPECT_TRUE(eglMakeCurrent(mEglDisplay, EGL_NO_SURFACE, EGL_NO_SURFACE,
+            EGL_NO_CONTEXT));
+    ASSERT_EQ(EGL_SUCCESS, eglGetError());
+    eglDestroySurface(mEglDisplay, mEglSurface);
+    mEglSurface = EGL_NO_SURFACE;
+
+    writer.stop();
+}
+// Test to examine whether we can render GL buffers into the surface
+// created with the native window handle
+TEST_F(SurfaceMediaSourceGLTest, RenderingToRecordableEGLSurfaceWorks) {
+    ALOGV("Test # %d", testId++);
+    ALOGV("RenderingToRecordableEGLSurfaceWorks *********************");
+    // Do the producer side of things
+    glClearColor(0.6, 0.6, 0.6, 0.6);
+    glClear(GL_COLOR_BUFFER_BIT);
+
+    glEnable(GL_SCISSOR_TEST);
+    glScissor(4, 4, 4, 4);
+    glClearColor(1.0, 0.0, 0.0, 1.0);
+    glClear(GL_COLOR_BUFFER_BIT);
+
+    glScissor(24, 48, 4, 4);
+    glClearColor(0.0, 1.0, 0.0, 1.0);
+    glClear(GL_COLOR_BUFFER_BIT);
+
+    glScissor(37, 17, 4, 4);
+    glClearColor(0.0, 0.0, 1.0, 1.0);
+    glClear(GL_COLOR_BUFFER_BIT);
+
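+    // Expected values: the full-surface clear color of 0.6 maps to
+    // 0.6 * 255 = 153, and the three scissored 4x4 squares were cleared
+    // to pure red, green and blue respectively.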
+    EXPECT_TRUE(checkPixel( 0,  0, 153, 153, 153, 153));
+    EXPECT_TRUE(checkPixel(63,  0, 153, 153, 153, 153));
+    EXPECT_TRUE(checkPixel(63, 63, 153, 153, 153, 153));
+    EXPECT_TRUE(checkPixel( 0, 63, 153, 153, 153, 153));
+
+    EXPECT_TRUE(checkPixel( 4,  7, 255,   0,   0, 255));
+    EXPECT_TRUE(checkPixel(25, 51,   0, 255,   0, 255));
+    EXPECT_TRUE(checkPixel(40, 19,   0,   0, 255, 255));
+    EXPECT_TRUE(checkPixel(29, 51, 153, 153, 153, 153));
+    EXPECT_TRUE(checkPixel( 5, 32, 153, 153, 153, 153));
+    EXPECT_TRUE(checkPixel(13,  8, 153, 153, 153, 153));
+    EXPECT_TRUE(checkPixel(46,  3, 153, 153, 153, 153));
+    EXPECT_TRUE(checkPixel(30, 33, 153, 153, 153, 153));
+    EXPECT_TRUE(checkPixel( 6, 52, 153, 153, 153, 153));
+    EXPECT_TRUE(checkPixel(55, 33, 153, 153, 153, 153));
+    EXPECT_TRUE(checkPixel(16, 29, 153, 153, 153, 153));
+    EXPECT_TRUE(checkPixel( 1, 30, 153, 153, 153, 153));
+    EXPECT_TRUE(checkPixel(41, 37, 153, 153, 153, 153));
+    EXPECT_TRUE(checkPixel(46, 29, 153, 153, 153, 153));
+    EXPECT_TRUE(checkPixel(15, 25, 153, 153, 153, 153));
+    EXPECT_TRUE(checkPixel( 3, 52, 153, 153, 153, 153));
+}
+
+// Test to examine the actual encoding with GL buffers
+// Actual encoder, Actual GL Buffers Filled SurfaceMediaSource
+// The same pattern is rendered every frame
+TEST_F(SurfaceMediaSourceGLTest, EncodingFromGLRgbaSameImageEachBufNpotWrite) {
+    ALOGV("Test # %d", testId++);
+    ALOGV("************** Testing the whole pipeline with actual Recorder ***********");
+    ALOGV("************** GL Filling the buffers ***********");
+    // Note: No need to set the color format for the buffers. The color format
+    // is in the gralloc buffers themselves.
+
+    const char *fileName = "/sdcard/outputSurfEncMSourceGL.mp4";
+    int fd = open(fileName, O_RDWR | O_CREAT, 0744);
+    if (fd < 0) {
+        ALOGE("ERROR: Could not open the file %s, fd = %d !!", fileName, fd);
+    }
+    CHECK(fd >= 0);
+
+    sp<MediaRecorder> mr = setUpMediaRecorder(fd, VIDEO_SOURCE_SURFACE,
+            OUTPUT_FORMAT_MPEG_4, VIDEO_ENCODER_H264, mYuvTexWidth, mYuvTexHeight, 30);
+
+    // Get the reference to the SurfaceMediaSource living in the
+    // mediaserver that is created by StagefrightRecorder.
+    setUpEGLSurfaceFromMediaRecorder(mr);
+
+    int32_t nFramesCount = 0;
+    while (nFramesCount <= 300) {
+        oneBufferPassGL();
+        nFramesCount++;
+        ALOGV("framesCount = %d", nFramesCount);
+    }
+
+    EXPECT_TRUE(eglMakeCurrent(mEglDisplay, EGL_NO_SURFACE, EGL_NO_SURFACE,
+            EGL_NO_CONTEXT));
+    ASSERT_EQ(EGL_SUCCESS, eglGetError());
+    eglDestroySurface(mEglDisplay, mEglSurface);
+    mEglSurface = EGL_NO_SURFACE;
+
+    ALOGV("Stopping MediaRecorder...");
+    CHECK_EQ((status_t)OK, mr->stop());
+    mr.clear();
+    close(fd);
+}
+
+// Test to examine the actual encoding from the GL Buffers
+// Actual encoder, Actual GL Buffers Filled SurfaceMediaSource
+// A different pattern is rendered every frame
+TEST_F(SurfaceMediaSourceGLTest, EncodingFromGLRgbaDiffImageEachBufNpotWrite) {
+    ALOGV("Test # %d", testId++);
+    ALOGV("************** Testing the whole pipeline with actual Recorder ***********");
+    ALOGV("************** Diff GL Filling the buffers ***********");
+    // Note: No need to set the color format for the buffers. The color format
+    // is in the gralloc buffers themselves.
+
+    const char *fileName = "/sdcard/outputSurfEncMSourceGLDiff.mp4";
+    int fd = open(fileName, O_RDWR | O_CREAT, 0744);
+    if (fd < 0) {
+        ALOGE("ERROR: Could not open the file %s, fd = %d !!", fileName, fd);
+    }
+    CHECK(fd >= 0);
+
+    sp<MediaRecorder> mr = setUpMediaRecorder(fd, VIDEO_SOURCE_SURFACE,
+            OUTPUT_FORMAT_MPEG_4, VIDEO_ENCODER_H264, mYuvTexWidth, mYuvTexHeight, 30);
+
+    // Get the reference to the SurfaceMediaSource living in the
+    // mediaserver that is created by StagefrightRecorder.
+    setUpEGLSurfaceFromMediaRecorder(mr);
+
+    int32_t nFramesCount = 0;
+    while (nFramesCount <= 300) {
+        oneBufferPassGL(nFramesCount);
+        nFramesCount++;
+        ALOGV("framesCount = %d", nFramesCount);
+    }
+
+    EXPECT_TRUE(eglMakeCurrent(mEglDisplay, EGL_NO_SURFACE, EGL_NO_SURFACE,
+            EGL_NO_CONTEXT));
+    ASSERT_EQ(EGL_SUCCESS, eglGetError());
+    eglDestroySurface(mEglDisplay, mEglSurface);
+    mEglSurface = EGL_NO_SURFACE;
+
+    ALOGV("Stopping MediaRecorder...");
+    CHECK_EQ((status_t)OK, mr->stop());
+    mr.clear();
+    close(fd);
+}
+} // namespace android
diff --git a/media/libstagefright/timedtext/TextDescriptions2.cpp b/media/libstagefright/timedtext/TextDescriptions2.cpp
index fd42d3a..f48eacc 100644
--- a/media/libstagefright/timedtext/TextDescriptions2.cpp
+++ b/media/libstagefright/timedtext/TextDescriptions2.cpp
@@ -145,7 +145,7 @@
         tmpData += 8;
         size_t remaining = size - 8;
 
-        if (chunkSize <= 8 || size < chunkSize) {
+        if (size < chunkSize) {
             return OK;
         }
         switch(chunkType) {
diff --git a/media/libstagefright/wifi-display/Android.bp b/media/libstagefright/wifi-display/Android.bp
new file mode 100644
index 0000000..fd00a3a
--- /dev/null
+++ b/media/libstagefright/wifi-display/Android.bp
@@ -0,0 +1,52 @@
+cc_library_shared {
+    name: "libstagefright_wfd",
+
+    srcs: [
+        "MediaSender.cpp",
+        "Parameters.cpp",
+        "rtp/RTPSender.cpp",
+        "source/Converter.cpp",
+        "source/MediaPuller.cpp",
+        "source/PlaybackSession.cpp",
+        "source/RepeaterSource.cpp",
+        "source/TSPacketizer.cpp",
+        "source/WifiDisplaySource.cpp",
+        "VideoFormats.cpp",
+    ],
+
+    include_dirs: [
+        "frameworks/av/media/libstagefright",
+        "frameworks/native/include/media/openmax",
+        "frameworks/native/include/media/hardware",
+        "frameworks/av/media/libstagefright/mpeg2ts",
+    ],
+
+    shared_libs: [
+        "libbinder",
+        "libcutils",
+        "liblog",
+        "libmedia",
+        "libmedia_omx",
+        "libstagefright",
+        "libstagefright_foundation",
+        "libui",
+        "libgui",
+        "libutils",
+    ],
+
+    cflags: [
+        "-Wno-multichar",
+        "-Werror",
+        "-Wall",
+    ],
+
+    sanitize: {
+        misc_undefined: [
+            "signed-integer-overflow",
+        ],
+        cfi: true,
+        diag: {
+            cfi: true,
+        },
+    },
+}
diff --git a/media/libstagefright/wifi-display/MediaSender.cpp b/media/libstagefright/wifi-display/MediaSender.cpp
new file mode 100644
index 0000000..9c30556
--- /dev/null
+++ b/media/libstagefright/wifi-display/MediaSender.cpp
@@ -0,0 +1,519 @@
+/*
+ * Copyright 2013, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "MediaSender"
+#include <utils/Log.h>
+
+#include "MediaSender.h"
+
+#include "rtp/RTPSender.h"
+#include "source/TSPacketizer.h"
+
+#include <media/stagefright/foundation/avc_utils.h>
+
+#include <media/IHDCP.h>
+#include <media/stagefright/MediaBufferBase.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/foundation/ANetworkSession.h>
+#include <ui/GraphicBuffer.h>
+
+namespace android {
+
+MediaSender::MediaSender(
+        const sp<ANetworkSession> &netSession,
+        const sp<AMessage> &notify)
+    : mNetSession(netSession),
+      mNotify(notify),
+      mMode(MODE_UNDEFINED),
+      mGeneration(0),
+      mPrevTimeUs(-1ll),
+      mInitDoneCount(0),
+      mLogFile(NULL) {
+    // mLogFile = fopen("/data/misc/log.ts", "wb");
+}
+
+MediaSender::~MediaSender() {
+    if (mLogFile != NULL) {
+        fclose(mLogFile);
+        mLogFile = NULL;
+    }
+}
+
+status_t MediaSender::setHDCP(const sp<IHDCP> &hdcp) {
+    if (mMode != MODE_UNDEFINED) {
+        return INVALID_OPERATION;
+    }
+
+    mHDCP = hdcp;
+
+    return OK;
+}
+
+ssize_t MediaSender::addTrack(const sp<AMessage> &format, uint32_t flags) {
+    if (mMode != MODE_UNDEFINED) {
+        return INVALID_OPERATION;
+    }
+
+    TrackInfo info;
+    info.mFormat = format;
+    info.mFlags = flags;
+    info.mPacketizerTrackIndex = -1;
+
+    AString mime;
+    CHECK(format->findString("mime", &mime));
+    info.mIsAudio = !strncasecmp("audio/", mime.c_str(), 6);
+
+    size_t index = mTrackInfos.size();
+    mTrackInfos.push_back(info);
+
+    return index;
+}
+
+status_t MediaSender::initAsync(
+        ssize_t trackIndex,
+        const char *remoteHost,
+        int32_t remoteRTPPort,
+        RTPSender::TransportMode rtpMode,
+        int32_t remoteRTCPPort,
+        RTPSender::TransportMode rtcpMode,
+        int32_t *localRTPPort) {
+    if (trackIndex < 0) {
+        if (mMode != MODE_UNDEFINED) {
+            return INVALID_OPERATION;
+        }
+
+        uint32_t flags = 0;
+        if (mHDCP != NULL) {
+            // XXX Determine proper HDCP version.
+            flags |= TSPacketizer::EMIT_HDCP20_DESCRIPTOR;
+        }
+        mTSPacketizer = new TSPacketizer(flags);
+
+        status_t err = OK;
+        for (size_t i = 0; i < mTrackInfos.size(); ++i) {
+            TrackInfo *info = &mTrackInfos.editItemAt(i);
+
+            ssize_t packetizerTrackIndex =
+                mTSPacketizer->addTrack(info->mFormat);
+
+            if (packetizerTrackIndex < 0) {
+                err = packetizerTrackIndex;
+                break;
+            }
+
+            info->mPacketizerTrackIndex = packetizerTrackIndex;
+        }
+
+        if (err == OK) {
+            sp<AMessage> notify = new AMessage(kWhatSenderNotify, this);
+            notify->setInt32("generation", mGeneration);
+            mTSSender = new RTPSender(mNetSession, notify);
+            looper()->registerHandler(mTSSender);
+
+            err = mTSSender->initAsync(
+                    remoteHost,
+                    remoteRTPPort,
+                    rtpMode,
+                    remoteRTCPPort,
+                    rtcpMode,
+                    localRTPPort);
+
+            if (err != OK) {
+                looper()->unregisterHandler(mTSSender->id());
+                mTSSender.clear();
+            }
+        }
+
+        if (err != OK) {
+            for (size_t i = 0; i < mTrackInfos.size(); ++i) {
+                TrackInfo *info = &mTrackInfos.editItemAt(i);
+                info->mPacketizerTrackIndex = -1;
+            }
+
+            mTSPacketizer.clear();
+            return err;
+        }
+
+        mMode = MODE_TRANSPORT_STREAM;
+        mInitDoneCount = 1;
+
+        return OK;
+    }
+
+    if (mMode == MODE_TRANSPORT_STREAM) {
+        return INVALID_OPERATION;
+    }
+
+    if ((size_t)trackIndex >= mTrackInfos.size()) {
+        return -ERANGE;
+    }
+
+    TrackInfo *info = &mTrackInfos.editItemAt(trackIndex);
+
+    if (info->mSender != NULL) {
+        return INVALID_OPERATION;
+    }
+
+    sp<AMessage> notify = new AMessage(kWhatSenderNotify, this);
+    notify->setInt32("generation", mGeneration);
+    notify->setSize("trackIndex", trackIndex);
+
+    info->mSender = new RTPSender(mNetSession, notify);
+    looper()->registerHandler(info->mSender);
+
+    status_t err = info->mSender->initAsync(
+            remoteHost,
+            remoteRTPPort,
+            rtpMode,
+            remoteRTCPPort,
+            rtcpMode,
+            localRTPPort);
+
+    if (err != OK) {
+        looper()->unregisterHandler(info->mSender->id());
+        info->mSender.clear();
+
+        return err;
+    }
+
+    if (mMode == MODE_UNDEFINED) {
+        mInitDoneCount = mTrackInfos.size();
+    }
+
+    mMode = MODE_ELEMENTARY_STREAMS;
+
+    return OK;
+}
+
+status_t MediaSender::queueAccessUnit(
+        size_t trackIndex, const sp<ABuffer> &accessUnit) {
+    if (mMode == MODE_UNDEFINED) {
+        return INVALID_OPERATION;
+    }
+
+    if (trackIndex >= mTrackInfos.size()) {
+        return -ERANGE;
+    }
+
+    if (mMode == MODE_TRANSPORT_STREAM) {
+        TrackInfo *info = &mTrackInfos.editItemAt(trackIndex);
+        info->mAccessUnits.push_back(accessUnit);
+
+        mTSPacketizer->extractCSDIfNecessary(info->mPacketizerTrackIndex);
+
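+        // Drain queued access units across all tracks in timestamp order;
+        // stop as soon as any track has nothing queued so the transport
+        // stream stays interleaved in time order.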
+        for (;;) {
+            ssize_t minTrackIndex = -1;
+            int64_t minTimeUs = -1ll;
+
+            for (size_t i = 0; i < mTrackInfos.size(); ++i) {
+                const TrackInfo &info = mTrackInfos.itemAt(i);
+
+                if (info.mAccessUnits.empty()) {
+                    minTrackIndex = -1;
+                    minTimeUs = -1ll;
+                    break;
+                }
+
+                int64_t timeUs;
+                const sp<ABuffer> &accessUnit = *info.mAccessUnits.begin();
+                CHECK(accessUnit->meta()->findInt64("timeUs", &timeUs));
+
+                if (minTrackIndex < 0 || timeUs < minTimeUs) {
+                    minTrackIndex = i;
+                    minTimeUs = timeUs;
+                }
+            }
+
+            if (minTrackIndex < 0) {
+                return OK;
+            }
+
+            TrackInfo *info = &mTrackInfos.editItemAt(minTrackIndex);
+            sp<ABuffer> accessUnit = *info->mAccessUnits.begin();
+            info->mAccessUnits.erase(info->mAccessUnits.begin());
+
+            sp<ABuffer> tsPackets;
+            status_t err = packetizeAccessUnit(
+                    minTrackIndex, accessUnit, &tsPackets);
+
+            if (err == OK) {
+                if (mLogFile != NULL) {
+                    fwrite(tsPackets->data(), 1, tsPackets->size(), mLogFile);
+                }
+
+                int64_t timeUs;
+                CHECK(accessUnit->meta()->findInt64("timeUs", &timeUs));
+                tsPackets->meta()->setInt64("timeUs", timeUs);
+
+                err = mTSSender->queueBuffer(
+                        tsPackets,
+                        33 /* packetType */,
+                        RTPSender::PACKETIZATION_TRANSPORT_STREAM);
+            }
+
+            if (err != OK) {
+                return err;
+            }
+        }
+    }
+
+    TrackInfo *info = &mTrackInfos.editItemAt(trackIndex);
+
+    return info->mSender->queueBuffer(
+            accessUnit,
+            info->mIsAudio ? 96 : 97 /* packetType */,
+            info->mIsAudio
+                ? RTPSender::PACKETIZATION_AAC : RTPSender::PACKETIZATION_H264);
+}
+
+void MediaSender::onMessageReceived(const sp<AMessage> &msg) {
+    switch (msg->what()) {
+        case kWhatSenderNotify:
+        {
+            int32_t generation;
+            CHECK(msg->findInt32("generation", &generation));
+            if (generation != mGeneration) {
+                break;
+            }
+
+            onSenderNotify(msg);
+            break;
+        }
+
+        default:
+            TRESPASS();
+    }
+}
+
+void MediaSender::onSenderNotify(const sp<AMessage> &msg) {
+    int32_t what;
+    CHECK(msg->findInt32("what", &what));
+
+    switch (what) {
+        case RTPSender::kWhatInitDone:
+        {
+            --mInitDoneCount;
+
+            int32_t err;
+            CHECK(msg->findInt32("err", &err));
+
+            if (err != OK) {
+                notifyInitDone(err);
+                ++mGeneration;
+                break;
+            }
+
+            if (mInitDoneCount == 0) {
+                notifyInitDone(OK);
+            }
+            break;
+        }
+
+        case RTPSender::kWhatError:
+        {
+            int32_t err;
+            CHECK(msg->findInt32("err", &err));
+
+            notifyError(err);
+            break;
+        }
+
+        case kWhatNetworkStall:
+        {
+            size_t numBytesQueued;
+            CHECK(msg->findSize("numBytesQueued", &numBytesQueued));
+
+            notifyNetworkStall(numBytesQueued);
+            break;
+        }
+
+        case kWhatInformSender:
+        {
+            int64_t avgLatencyUs;
+            CHECK(msg->findInt64("avgLatencyUs", &avgLatencyUs));
+
+            int64_t maxLatencyUs;
+            CHECK(msg->findInt64("maxLatencyUs", &maxLatencyUs));
+
+            sp<AMessage> notify = mNotify->dup();
+            notify->setInt32("what", kWhatInformSender);
+            notify->setInt64("avgLatencyUs", avgLatencyUs);
+            notify->setInt64("maxLatencyUs", maxLatencyUs);
+            notify->post();
+            break;
+        }
+
+        default:
+            TRESPASS();
+    }
+}
+
+void MediaSender::notifyInitDone(status_t err) {
+    sp<AMessage> notify = mNotify->dup();
+    notify->setInt32("what", kWhatInitDone);
+    notify->setInt32("err", err);
+    notify->post();
+}
+
+void MediaSender::notifyError(status_t err) {
+    sp<AMessage> notify = mNotify->dup();
+    notify->setInt32("what", kWhatError);
+    notify->setInt32("err", err);
+    notify->post();
+}
+
+void MediaSender::notifyNetworkStall(size_t numBytesQueued) {
+    sp<AMessage> notify = mNotify->dup();
+    notify->setInt32("what", kWhatNetworkStall);
+    notify->setSize("numBytesQueued", numBytesQueued);
+    notify->post();
+}
+
+status_t MediaSender::packetizeAccessUnit(
+        size_t trackIndex,
+        sp<ABuffer> accessUnit,
+        sp<ABuffer> *tsPackets) {
+    const TrackInfo &info = mTrackInfos.itemAt(trackIndex);
+
+    uint32_t flags = 0;
+
+    bool isHDCPEncrypted = false;
+    uint64_t inputCTR;
+    uint8_t HDCP_private_data[16];
+
+    bool manuallyPrependSPSPPS =
+        !info.mIsAudio
+        && (info.mFlags & FLAG_MANUALLY_PREPEND_SPS_PPS)
+        && IsIDR(accessUnit->data(), accessUnit->size());
+
+    if (mHDCP != NULL && !info.mIsAudio) {
+        isHDCPEncrypted = true;
+
+        if (manuallyPrependSPSPPS) {
+            accessUnit = mTSPacketizer->prependCSD(
+                    info.mPacketizerTrackIndex, accessUnit);
+        }
+
+        status_t err;
+        native_handle_t* handle;
+        if (accessUnit->meta()->findPointer("handle", (void**)&handle)
+                && handle != NULL) {
+            int32_t rangeLength, rangeOffset;
+            sp<AMessage> notify;
+            CHECK(accessUnit->meta()->findInt32("rangeOffset", &rangeOffset));
+            CHECK(accessUnit->meta()->findInt32("rangeLength", &rangeLength));
+            CHECK(accessUnit->meta()->findMessage("notify", &notify)
+                    && notify != NULL);
+            CHECK_GE((int32_t)accessUnit->size(), rangeLength);
+
+            sp<GraphicBuffer> grbuf(new GraphicBuffer(
+                    rangeOffset + rangeLength /* width */, 1 /* height */,
+                    HAL_PIXEL_FORMAT_Y8, 1 /* layerCount */,
+                    GRALLOC_USAGE_HW_VIDEO_ENCODER,
+                    rangeOffset + rangeLength /* stride */, handle,
+                    false /* keepOwnership */));
+
+            err = mHDCP->encryptNative(
+                    grbuf, rangeOffset, rangeLength,
+                    trackIndex  /* streamCTR */,
+                    &inputCTR,
+                    accessUnit->data());
+            notify->post();
+        } else {
+            err = mHDCP->encrypt(
+                    accessUnit->data(), accessUnit->size(),
+                    trackIndex  /* streamCTR */,
+                    &inputCTR,
+                    accessUnit->data());
+        }
+
+        if (err != OK) {
+            ALOGE("Failed to HDCP-encrypt media data (err %d)",
+                  err);
+
+            return err;
+        }
+
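+        // Pack the 32-bit streamCTR (the track index) and the 64-bit inputCTR
+        // returned by the encrypt call into the 16-byte private data field:
+        // full bytes alternate with bytes whose remaining bits are shifted
+        // left by one with the low "marker" bit set to 1.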
+        HDCP_private_data[0] = 0x00;
+
+        HDCP_private_data[1] =
+            (((trackIndex >> 30) & 3) << 1) | 1;
+
+        HDCP_private_data[2] = (trackIndex >> 22) & 0xff;
+
+        HDCP_private_data[3] =
+            (((trackIndex >> 15) & 0x7f) << 1) | 1;
+
+        HDCP_private_data[4] = (trackIndex >> 7) & 0xff;
+
+        HDCP_private_data[5] =
+            ((trackIndex & 0x7f) << 1) | 1;
+
+        HDCP_private_data[6] = 0x00;
+
+        HDCP_private_data[7] =
+            (((inputCTR >> 60) & 0x0f) << 1) | 1;
+
+        HDCP_private_data[8] = (inputCTR >> 52) & 0xff;
+
+        HDCP_private_data[9] =
+            (((inputCTR >> 45) & 0x7f) << 1) | 1;
+
+        HDCP_private_data[10] = (inputCTR >> 37) & 0xff;
+
+        HDCP_private_data[11] =
+            (((inputCTR >> 30) & 0x7f) << 1) | 1;
+
+        HDCP_private_data[12] = (inputCTR >> 22) & 0xff;
+
+        HDCP_private_data[13] =
+            (((inputCTR >> 15) & 0x7f) << 1) | 1;
+
+        HDCP_private_data[14] = (inputCTR >> 7) & 0xff;
+
+        HDCP_private_data[15] =
+            ((inputCTR & 0x7f) << 1) | 1;
+
+        flags |= TSPacketizer::IS_ENCRYPTED;
+    } else if (manuallyPrependSPSPPS) {
+        flags |= TSPacketizer::PREPEND_SPS_PPS_TO_IDR_FRAMES;
+    }
+
+    int64_t timeUs = ALooper::GetNowUs();
+    if (mPrevTimeUs < 0ll || mPrevTimeUs + 100000ll <= timeUs) {
+        flags |= TSPacketizer::EMIT_PCR;
+        flags |= TSPacketizer::EMIT_PAT_AND_PMT;
+
+        mPrevTimeUs = timeUs;
+    }
+
+    mTSPacketizer->packetize(
+            info.mPacketizerTrackIndex,
+            accessUnit,
+            tsPackets,
+            flags,
+            !isHDCPEncrypted ? NULL : HDCP_private_data,
+            !isHDCPEncrypted ? 0 : sizeof(HDCP_private_data),
+            info.mIsAudio ? 2 : 0 /* numStuffingBytes */);
+
+    return OK;
+}
+
+}  // namespace android
+
diff --git a/media/libstagefright/wifi-display/MediaSender.h b/media/libstagefright/wifi-display/MediaSender.h
new file mode 100644
index 0000000..04538ea
--- /dev/null
+++ b/media/libstagefright/wifi-display/MediaSender.h
@@ -0,0 +1,132 @@
+/*
+ * Copyright 2013, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef MEDIA_SENDER_H_
+
+#define MEDIA_SENDER_H_
+
+#include "rtp/RTPSender.h"
+
+#include <media/stagefright/foundation/ABase.h>
+#include <media/stagefright/foundation/AHandler.h>
+#include <utils/Errors.h>
+#include <utils/Vector.h>
+
+namespace android {
+
+struct ABuffer;
+struct ANetworkSession;
+struct AMessage;
+struct IHDCP;
+struct TSPacketizer;
+
+// This class facilitates sending of data from one or more media tracks
+// through one or more RTP channels, either providing a 1:1 mapping from
+// track to RTP channel or muxing all tracks into a single RTP channel and
+// using transport stream encapsulation.
+// Optionally the (video) data is encrypted using the provided hdcp object.
+struct MediaSender : public AHandler {
+    enum {
+        kWhatInitDone,
+        kWhatError,
+        kWhatNetworkStall,
+        kWhatInformSender,
+    };
+
+    MediaSender(
+            const sp<ANetworkSession> &netSession,
+            const sp<AMessage> &notify);
+
+    status_t setHDCP(const sp<IHDCP> &hdcp);
+
+    enum FlagBits {
+        FLAG_MANUALLY_PREPEND_SPS_PPS = 1,
+    };
+    ssize_t addTrack(const sp<AMessage> &format, uint32_t flags);
+
+    // If trackIndex == -1, initialize for transport stream muxing.
+    status_t initAsync(
+            ssize_t trackIndex,
+            const char *remoteHost,
+            int32_t remoteRTPPort,
+            RTPSender::TransportMode rtpMode,
+            int32_t remoteRTCPPort,
+            RTPSender::TransportMode rtcpMode,
+            int32_t *localRTPPort);
+
+    status_t queueAccessUnit(
+            size_t trackIndex, const sp<ABuffer> &accessUnit);
+
+protected:
+    virtual void onMessageReceived(const sp<AMessage> &msg);
+    virtual ~MediaSender();
+
+private:
+    enum {
+        kWhatSenderNotify,
+    };
+
+    enum Mode {
+        MODE_UNDEFINED,
+        MODE_TRANSPORT_STREAM,
+        MODE_ELEMENTARY_STREAMS,
+    };
+
+    struct TrackInfo {
+        sp<AMessage> mFormat;
+        uint32_t mFlags;
+        sp<RTPSender> mSender;
+        List<sp<ABuffer> > mAccessUnits;
+        ssize_t mPacketizerTrackIndex;
+        bool mIsAudio;
+    };
+
+    sp<ANetworkSession> mNetSession;
+    sp<AMessage> mNotify;
+
+    sp<IHDCP> mHDCP;
+
+    Mode mMode;
+    int32_t mGeneration;
+
+    Vector<TrackInfo> mTrackInfos;
+
+    sp<TSPacketizer> mTSPacketizer;
+    sp<RTPSender> mTSSender;
+    int64_t mPrevTimeUs;
+
+    size_t mInitDoneCount;
+
+    FILE *mLogFile;
+
+    void onSenderNotify(const sp<AMessage> &msg);
+
+    void notifyInitDone(status_t err);
+    void notifyError(status_t err);
+    void notifyNetworkStall(size_t numBytesQueued);
+
+    status_t packetizeAccessUnit(
+            size_t trackIndex,
+            sp<ABuffer> accessUnit,
+            sp<ABuffer> *tsPackets);
+
+    DISALLOW_EVIL_CONSTRUCTORS(MediaSender);
+};
+
+}  // namespace android
+
+#endif  // MEDIA_SENDER_H_
+
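The header above describes the two modes of operation. A minimal, hypothetical sketch of the transport-stream path (names such as session, notify, handler, looper, videoFormat and audioFormat are assumptions for illustration, not taken from this patch):

    // Sketch only: mux a video and an audio track into one RTP channel as MPEG-TS.
    sp<ANetworkSession> session = new ANetworkSession;
    session->start();

    sp<AMessage> notify = new AMessage(kWhatMediaSenderNotify, handler);
    sp<MediaSender> sender = new MediaSender(session, notify);
    looper->registerHandler(sender);

    ssize_t videoTrack = sender->addTrack(
            videoFormat, MediaSender::FLAG_MANUALLY_PREPEND_SPS_PPS);
    ssize_t audioTrack = sender->addTrack(audioFormat, 0 /* flags */);

    int32_t localRTPPort;
    status_t err = sender->initAsync(
            -1 /* trackIndex: transport stream muxing */,
            remoteHost, remoteRTPPort, RTPSender::TRANSPORT_UDP,
            remoteRTCPPort, RTPSender::TRANSPORT_UDP,
            &localRTPPort);

    // Once kWhatInitDone reports OK, feed encoded access units:
    sender->queueAccessUnit(videoTrack, accessUnit);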
diff --git a/media/libstagefright/wifi-display/Parameters.cpp b/media/libstagefright/wifi-display/Parameters.cpp
new file mode 100644
index 0000000..d2a61ea
--- /dev/null
+++ b/media/libstagefright/wifi-display/Parameters.cpp
@@ -0,0 +1,92 @@
+/*
+ * Copyright 2012, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "Parameters.h"
+
+#include <media/stagefright/MediaErrors.h>
+
+namespace android {
+
+// static
+sp<Parameters> Parameters::Parse(const char *data, size_t size) {
+    sp<Parameters> params = new Parameters;
+    status_t err = params->parse(data, size);
+
+    if (err != OK) {
+        return NULL;
+    }
+
+    return params;
+}
+
+Parameters::Parameters() {}
+
+Parameters::~Parameters() {}
+
+status_t Parameters::parse(const char *data, size_t size) {
+    size_t i = 0;
+    while (i < size) {
+        size_t nameStart = i;
+        while (i < size && data[i] != ':') {
+            ++i;
+        }
+
+        if (i == size || i == nameStart) {
+            return ERROR_MALFORMED;
+        }
+
+        AString name(&data[nameStart], i - nameStart);
+        name.trim();
+        name.tolower();
+
+        ++i;
+
+        size_t valueStart = i;
+
+        while (i + 1 < size && (data[i] != '\r' || data[i + 1] != '\n')) {
+            ++i;
+        }
+
+        AString value(&data[valueStart], i - valueStart);
+        value.trim();
+
+        mDict.add(name, value);
+
+        while (i + 1 < size && data[i] == '\r' && data[i + 1] == '\n') {
+            i += 2;
+        }
+    }
+
+    return OK;
+}
+
+bool Parameters::findParameter(const char *name, AString *value) const {
+    AString key = name;
+    key.tolower();
+
+    ssize_t index = mDict.indexOfKey(key);
+
+    if (index < 0) {
+        value->clear();
+
+        return false;
+    }
+
+    *value = mDict.valueAt(index);
+    return true;
+}
+
+}  // namespace android
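Parse() above expects a block of CRLF-terminated "name: value" lines; names are lower-cased and both sides are trimmed. A minimal sketch of its intended use (the input text is a made-up example, not taken from this patch):

    const char body[] =
        "Content-Type: text/parameters\r\n"
        "wfd_uibc_capability: none\r\n";

    sp<Parameters> params = Parameters::Parse(body, strlen(body));
    AString value;
    if (params != NULL && params->findParameter("content-type", &value)) {
        // Lookups are effectively case-insensitive because keys are stored
        // lower-cased; value is "text/parameters" here.
    }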
diff --git a/media/libstagefright/wifi-display/Parameters.h b/media/libstagefright/wifi-display/Parameters.h
new file mode 100644
index 0000000..a5e787e
--- /dev/null
+++ b/media/libstagefright/wifi-display/Parameters.h
@@ -0,0 +1,41 @@
+/*
+ * Copyright 2012, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <media/stagefright/foundation/ABase.h>
+#include <media/stagefright/foundation/AString.h>
+#include <utils/KeyedVector.h>
+#include <utils/RefBase.h>
+
+namespace android {
+
+struct Parameters : public RefBase {
+    static sp<Parameters> Parse(const char *data, size_t size);
+
+    bool findParameter(const char *name, AString *value) const;
+
+protected:
+    virtual ~Parameters();
+
+private:
+    KeyedVector<AString, AString> mDict;
+
+    Parameters();
+    status_t parse(const char *data, size_t size);
+
+    DISALLOW_EVIL_CONSTRUCTORS(Parameters);
+};
+
+}  // namespace android
diff --git a/media/libstagefright/wifi-display/VideoFormats.cpp b/media/libstagefright/wifi-display/VideoFormats.cpp
new file mode 100644
index 0000000..dbc511c
--- /dev/null
+++ b/media/libstagefright/wifi-display/VideoFormats.cpp
@@ -0,0 +1,550 @@
+/*
+ * Copyright 2013, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "VideoFormats"
+#include <utils/Log.h>
+
+#include "VideoFormats.h"
+
+#include <media/stagefright/foundation/ADebug.h>
+
+namespace android {
+
+// static
+const VideoFormats::config_t VideoFormats::mResolutionTable[][32] = {
+    {
+        // CEA Resolutions
+        { 640, 480, 60, false, 0, 0},
+        { 720, 480, 60, false, 0, 0},
+        { 720, 480, 60, true, 0, 0},
+        { 720, 576, 50, false, 0, 0},
+        { 720, 576, 50, true, 0, 0},
+        { 1280, 720, 30, false, 0, 0},
+        { 1280, 720, 60, false, 0, 0},
+        { 1920, 1080, 30, false, 0, 0},
+        { 1920, 1080, 60, false, 0, 0},
+        { 1920, 1080, 60, true, 0, 0},
+        { 1280, 720, 25, false, 0, 0},
+        { 1280, 720, 50, false, 0, 0},
+        { 1920, 1080, 25, false, 0, 0},
+        { 1920, 1080, 50, false, 0, 0},
+        { 1920, 1080, 50, true, 0, 0},
+        { 1280, 720, 24, false, 0, 0},
+        { 1920, 1080, 24, false, 0, 0},
+        { 0, 0, 0, false, 0, 0},
+        { 0, 0, 0, false, 0, 0},
+        { 0, 0, 0, false, 0, 0},
+        { 0, 0, 0, false, 0, 0},
+        { 0, 0, 0, false, 0, 0},
+        { 0, 0, 0, false, 0, 0},
+        { 0, 0, 0, false, 0, 0},
+        { 0, 0, 0, false, 0, 0},
+        { 0, 0, 0, false, 0, 0},
+        { 0, 0, 0, false, 0, 0},
+        { 0, 0, 0, false, 0, 0},
+        { 0, 0, 0, false, 0, 0},
+        { 0, 0, 0, false, 0, 0},
+        { 0, 0, 0, false, 0, 0},
+        { 0, 0, 0, false, 0, 0},
+    },
+    {
+        // VESA Resolutions
+        { 800, 600, 30, false, 0, 0},
+        { 800, 600, 60, false, 0, 0},
+        { 1024, 768, 30, false, 0, 0},
+        { 1024, 768, 60, false, 0, 0},
+        { 1152, 864, 30, false, 0, 0},
+        { 1152, 864, 60, false, 0, 0},
+        { 1280, 768, 30, false, 0, 0},
+        { 1280, 768, 60, false, 0, 0},
+        { 1280, 800, 30, false, 0, 0},
+        { 1280, 800, 60, false, 0, 0},
+        { 1360, 768, 30, false, 0, 0},
+        { 1360, 768, 60, false, 0, 0},
+        { 1366, 768, 30, false, 0, 0},
+        { 1366, 768, 60, false, 0, 0},
+        { 1280, 1024, 30, false, 0, 0},
+        { 1280, 1024, 60, false, 0, 0},
+        { 1400, 1050, 30, false, 0, 0},
+        { 1400, 1050, 60, false, 0, 0},
+        { 1440, 900, 30, false, 0, 0},
+        { 1440, 900, 60, false, 0, 0},
+        { 1600, 900, 30, false, 0, 0},
+        { 1600, 900, 60, false, 0, 0},
+        { 1600, 1200, 30, false, 0, 0},
+        { 1600, 1200, 60, false, 0, 0},
+        { 1680, 1024, 30, false, 0, 0},
+        { 1680, 1024, 60, false, 0, 0},
+        { 1680, 1050, 30, false, 0, 0},
+        { 1680, 1050, 60, false, 0, 0},
+        { 1920, 1200, 30, false, 0, 0},
+        { 1920, 1200, 60, false, 0, 0},
+        { 0, 0, 0, false, 0, 0},
+        { 0, 0, 0, false, 0, 0},
+    },
+    {
+        // HH Resolutions
+        { 800, 480, 30, false, 0, 0},
+        { 800, 480, 60, false, 0, 0},
+        { 854, 480, 30, false, 0, 0},
+        { 854, 480, 60, false, 0, 0},
+        { 864, 480, 30, false, 0, 0},
+        { 864, 480, 60, false, 0, 0},
+        { 640, 360, 30, false, 0, 0},
+        { 640, 360, 60, false, 0, 0},
+        { 960, 540, 30, false, 0, 0},
+        { 960, 540, 60, false, 0, 0},
+        { 848, 480, 30, false, 0, 0},
+        { 848, 480, 60, false, 0, 0},
+        { 0, 0, 0, false, 0, 0},
+        { 0, 0, 0, false, 0, 0},
+        { 0, 0, 0, false, 0, 0},
+        { 0, 0, 0, false, 0, 0},
+        { 0, 0, 0, false, 0, 0},
+        { 0, 0, 0, false, 0, 0},
+        { 0, 0, 0, false, 0, 0},
+        { 0, 0, 0, false, 0, 0},
+        { 0, 0, 0, false, 0, 0},
+        { 0, 0, 0, false, 0, 0},
+        { 0, 0, 0, false, 0, 0},
+        { 0, 0, 0, false, 0, 0},
+        { 0, 0, 0, false, 0, 0},
+        { 0, 0, 0, false, 0, 0},
+        { 0, 0, 0, false, 0, 0},
+        { 0, 0, 0, false, 0, 0},
+        { 0, 0, 0, false, 0, 0},
+        { 0, 0, 0, false, 0, 0},
+        { 0, 0, 0, false, 0, 0},
+        { 0, 0, 0, false, 0, 0},
+    }
+};
+
+VideoFormats::VideoFormats() {
+    memcpy(mConfigs, mResolutionTable, sizeof(mConfigs));
+
+    for (size_t i = 0; i < kNumResolutionTypes; ++i) {
+        mResolutionEnabled[i] = 0;
+    }
+
+    setNativeResolution(RESOLUTION_CEA, 0);  // default to 640x480 p60
+}
+
+void VideoFormats::setNativeResolution(ResolutionType type, size_t index) {
+    CHECK_LT(type, kNumResolutionTypes);
+    CHECK(GetConfiguration(type, index, NULL, NULL, NULL, NULL));
+
+    mNativeType = type;
+    mNativeIndex = index;
+
+    setResolutionEnabled(type, index);
+}
+
+void VideoFormats::getNativeResolution(
+        ResolutionType *type, size_t *index) const {
+    *type = mNativeType;
+    *index = mNativeIndex;
+}
+
+void VideoFormats::disableAll() {
+    for (size_t i = 0; i < kNumResolutionTypes; ++i) {
+        mResolutionEnabled[i] = 0;
+        for (size_t j = 0; j < 32; j++) {
+            mConfigs[i][j].profile = mConfigs[i][j].level = 0;
+        }
+    }
+}
+
+void VideoFormats::enableAll() {
+    for (size_t i = 0; i < kNumResolutionTypes; ++i) {
+        mResolutionEnabled[i] = 0xffffffff;
+        for (size_t j = 0; j < 32; j++) {
+            mConfigs[i][j].profile = (1ul << PROFILE_CBP);
+            mConfigs[i][j].level = (1ul << LEVEL_31);
+        }
+    }
+}
+
+void VideoFormats::enableResolutionUpto(
+        ResolutionType type, size_t index,
+        ProfileType profile, LevelType level) {
+    size_t width, height, fps, score;
+    bool interlaced;
+    if (!GetConfiguration(type, index, &width, &height,
+            &fps, &interlaced)) {
+        ALOGE("Maximum resolution not found!");
+        return;
+    }
+    score = width * height * fps * (!interlaced + 1);
+    for (size_t i = 0; i < kNumResolutionTypes; ++i) {
+        for (size_t j = 0; j < 32; j++) {
+            if (GetConfiguration((ResolutionType)i, j,
+                    &width, &height, &fps, &interlaced)
+                    && score >= width * height * fps * (!interlaced + 1)) {
+                setResolutionEnabled((ResolutionType)i, j);
+                setProfileLevel((ResolutionType)i, j, profile, level);
+            }
+        }
+    }
+}
+
+void VideoFormats::setResolutionEnabled(
+        ResolutionType type, size_t index, bool enabled) {
+    CHECK_LT(type, kNumResolutionTypes);
+    CHECK(GetConfiguration(type, index, NULL, NULL, NULL, NULL));
+
+    if (enabled) {
+        mResolutionEnabled[type] |= (1ul << index);
+        mConfigs[type][index].profile = (1ul << PROFILE_CBP);
+        mConfigs[type][index].level = (1ul << LEVEL_31);
+    } else {
+        mResolutionEnabled[type] &= ~(1ul << index);
+        mConfigs[type][index].profile = 0;
+        mConfigs[type][index].level = 0;
+    }
+}
+
+void VideoFormats::setProfileLevel(
+        ResolutionType type, size_t index,
+        ProfileType profile, LevelType level) {
+    CHECK_LT(type, kNumResolutionTypes);
+    CHECK(GetConfiguration(type, index, NULL, NULL, NULL, NULL));
+
+    mConfigs[type][index].profile = (1ul << profile);
+    mConfigs[type][index].level = (1ul << level);
+}
+
+void VideoFormats::getProfileLevel(
+        ResolutionType type, size_t index,
+        ProfileType *profile, LevelType *level) const {
+    CHECK_LT(type, kNumResolutionTypes);
+    CHECK(GetConfiguration(type, index, NULL, NULL, NULL, NULL));
+
+    int i, bestProfile = -1, bestLevel = -1;
+
+    for (i = 0; i < kNumProfileTypes; ++i) {
+        if (mConfigs[type][index].profile & (1ul << i)) {
+            bestProfile = i;
+        }
+    }
+
+    for (i = 0; i < kNumLevelTypes; ++i) {
+        if (mConfigs[type][index].level & (1ul << i)) {
+            bestLevel = i;
+        }
+    }
+
+    if (bestProfile == -1 || bestLevel == -1) {
+        ALOGE("Profile or level not set for resolution type %d, index %zu",
+                type, index);
+        bestProfile = PROFILE_CBP;
+        bestLevel = LEVEL_31;
+    }
+
+    *profile = (ProfileType) bestProfile;
+    *level = (LevelType) bestLevel;
+}
+
+bool VideoFormats::isResolutionEnabled(
+        ResolutionType type, size_t index) const {
+    CHECK_LT(type, kNumResolutionTypes);
+    CHECK(GetConfiguration(type, index, NULL, NULL, NULL, NULL));
+
+    return mResolutionEnabled[type] & (1ul << index);
+}
+
+// static
+bool VideoFormats::GetConfiguration(
+        ResolutionType type,
+        size_t index,
+        size_t *width, size_t *height, size_t *framesPerSecond,
+        bool *interlaced) {
+    CHECK_LT(type, kNumResolutionTypes);
+
+    if (index >= 32) {
+        return false;
+    }
+
+    const config_t *config = &mResolutionTable[type][index];
+
+    if (config->width == 0) {
+        return false;
+    }
+
+    if (width) {
+        *width = config->width;
+    }
+
+    if (height) {
+        *height = config->height;
+    }
+
+    if (framesPerSecond) {
+        *framesPerSecond = config->framesPerSecond;
+    }
+
+    if (interlaced) {
+        *interlaced = config->interlaced;
+    }
+
+    return true;
+}
+
+bool VideoFormats::parseH264Codec(const char *spec) {
+    unsigned profile, level, res[3];
+
+    if (sscanf(
+            spec,
+            "%02x %02x %08X %08X %08X",
+            &profile,
+            &level,
+            &res[0],
+            &res[1],
+            &res[2]) != 5) {
+        return false;
+    }
+
+    for (size_t i = 0; i < kNumResolutionTypes; ++i) {
+        for (size_t j = 0; j < 32; ++j) {
+            if (res[i] & (1ul << j)) {
+                mResolutionEnabled[i] |= (1ul << j);
+                if (profile > mConfigs[i][j].profile) {
+                    // prefer higher profile (even if level is lower)
+                    mConfigs[i][j].profile = profile;
+                    mConfigs[i][j].level = level;
+                } else if (profile == mConfigs[i][j].profile &&
+                           level > mConfigs[i][j].level) {
+                    mConfigs[i][j].level = level;
+                }
+            }
+        }
+    }
+
+    return true;
+}
+
+// static
+bool VideoFormats::GetProfileLevel(
+        ProfileType profile, LevelType level, unsigned *profileIdc,
+        unsigned *levelIdc, unsigned *constraintSet) {
+    CHECK_LT(profile, kNumProfileTypes);
+    CHECK_LT(level, kNumLevelTypes);
+
+    static const unsigned kProfileIDC[kNumProfileTypes] = {
+        66,     // PROFILE_CBP
+        100,    // PROFILE_CHP
+    };
+
+    static const unsigned kLevelIDC[kNumLevelTypes] = {
+        31,     // LEVEL_31
+        32,     // LEVEL_32
+        40,     // LEVEL_40
+        41,     // LEVEL_41
+        42,     // LEVEL_42
+    };
+
+    static const unsigned kConstraintSet[kNumProfileTypes] = {
+        0xc0,   // PROFILE_CBP
+        0x0c,   // PROFILE_CHP
+    };
+
+    if (profileIdc) {
+        *profileIdc = kProfileIDC[profile];
+    }
+
+    if (levelIdc) {
+        *levelIdc = kLevelIDC[level];
+    }
+
+    if (constraintSet) {
+        *constraintSet = kConstraintSet[profile];
+    }
+
+    return true;
+}
+
+bool VideoFormats::parseFormatSpec(const char *spec) {
+    CHECK_EQ(kNumResolutionTypes, 3);
+
+    disableAll();
+
+    unsigned native, dummy;
+    size_t size = strlen(spec);
+    size_t offset = 0;
+
+    if (sscanf(spec, "%02x %02x ", &native, &dummy) != 2) {
+        return false;
+    }
+
+    offset += 6; // skip native and preferred-display-mode-supported
+    CHECK_LE(offset + 58, size);
+    while (offset < size) {
+        parseH264Codec(spec + offset);
+        offset += 60; // skip H.264-codec + ", "
+    }
+
+    mNativeIndex = native >> 3;
+    mNativeType = (ResolutionType)(native & 7);
+
+    bool success;
+    if (mNativeType >= kNumResolutionTypes) {
+        success = false;
+    } else {
+        success = GetConfiguration(
+                mNativeType, mNativeIndex, NULL, NULL, NULL, NULL);
+    }
+
+    if (!success) {
+        ALOGW("sink advertised an illegal native resolution, fortunately "
+              "this value is ignored for the time being...");
+    }
+
+    return true;
+}
+
+AString VideoFormats::getFormatSpec(bool forM4Message) const {
+    CHECK_EQ(kNumResolutionTypes, 3);
+
+    // wfd_video_formats:
+    // 1 byte "native"
+    // 1 byte "preferred-display-mode-supported" 0 or 1
+    // one or more avc codec structures
+    //   1 byte profile
+    //   1 byte level
+    //   4 byte CEA mask
+    //   4 byte VESA mask
+    //   4 byte HH mask
+    //   1 byte latency
+    //   2 byte min-slice-size
+    //   2 byte slice-enc-params
+    //   1 byte framerate-control-support
+    //   max-hres (none or 2 byte)
+    //   max-vres (none or 2 byte)
+
+    return AStringPrintf(
+            "%02x 00 %02x %02x %08x %08x %08x 00 0000 0000 00 none none",
+            forM4Message ? 0x00 : ((mNativeIndex << 3) | mNativeType),
+            mConfigs[mNativeType][mNativeIndex].profile,
+            mConfigs[mNativeType][mNativeIndex].level,
+            mResolutionEnabled[0],
+            mResolutionEnabled[1],
+            mResolutionEnabled[2]);
+}
+
+// static
+bool VideoFormats::PickBestFormat(
+        const VideoFormats &sinkSupported,
+        const VideoFormats &sourceSupported,
+        ResolutionType *chosenType,
+        size_t *chosenIndex,
+        ProfileType *chosenProfile,
+        LevelType *chosenLevel) {
+#if 0
+    // Support for the native format is a great idea; the spec includes
+    // these features, but nobody supports it and the tests don't validate it.
+
+    ResolutionType nativeType;
+    size_t nativeIndex;
+    sinkSupported.getNativeResolution(&nativeType, &nativeIndex);
+    if (sinkSupported.isResolutionEnabled(nativeType, nativeIndex)) {
+        if (sourceSupported.isResolutionEnabled(nativeType, nativeIndex)) {
+            ALOGI("Choosing sink's native resolution");
+            *chosenType = nativeType;
+            *chosenIndex = nativeIndex;
+            return true;
+        }
+    } else {
+        ALOGW("Sink advertised native resolution that it doesn't "
+              "actually support... ignoring");
+    }
+
+    sourceSupported.getNativeResolution(&nativeType, &nativeIndex);
+    if (sourceSupported.isResolutionEnabled(nativeType, nativeIndex)) {
+        if (sinkSupported.isResolutionEnabled(nativeType, nativeIndex)) {
+            ALOGI("Choosing source's native resolution");
+            *chosenType = nativeType;
+            *chosenIndex = nativeIndex;
+            return true;
+        }
+    } else {
+        ALOGW("Source advertised native resolution that it doesn't "
+              "actually support... ignoring");
+    }
+#endif
+
+    bool first = true;
+    uint32_t bestScore = 0;
+    size_t bestType = 0;
+    size_t bestIndex = 0;
+    for (size_t i = 0; i < kNumResolutionTypes; ++i) {
+        for (size_t j = 0; j < 32; ++j) {
+            size_t width, height, framesPerSecond;
+            bool interlaced;
+            if (!GetConfiguration(
+                        (ResolutionType)i,
+                        j,
+                        &width, &height, &framesPerSecond, &interlaced)) {
+                break;
+            }
+
+            if (!sinkSupported.isResolutionEnabled((ResolutionType)i, j)
+                    || !sourceSupported.isResolutionEnabled(
+                        (ResolutionType)i, j)) {
+                continue;
+            }
+
+            ALOGV("type %zu, index %zu, %zu x %zu %c%zu supported",
+                  i, j, width, height, interlaced ? 'i' : 'p', framesPerSecond);
+
+            uint32_t score = width * height * framesPerSecond;
+            if (!interlaced) {
+                score *= 2;
+            }
+
+            if (first || score > bestScore) {
+                bestScore = score;
+                bestType = i;
+                bestIndex = j;
+
+                first = false;
+            }
+        }
+    }
+
+    if (first) {
+        return false;
+    }
+
+    *chosenType = (ResolutionType)bestType;
+    *chosenIndex = bestIndex;
+
+    // Pick the best profile/level supported by both sink and source.
+    ProfileType srcProfile, sinkProfile;
+    LevelType srcLevel, sinkLevel;
+    sourceSupported.getProfileLevel(
+                        (ResolutionType)bestType, bestIndex,
+                        &srcProfile, &srcLevel);
+    sinkSupported.getProfileLevel(
+                        (ResolutionType)bestType, bestIndex,
+                        &sinkProfile, &sinkLevel);
+    *chosenProfile = srcProfile < sinkProfile ? srcProfile : sinkProfile;
+    *chosenLevel = srcLevel < sinkLevel ? srcLevel : sinkLevel;
+
+    return true;
+}
+
+}  // namespace android
+
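As a worked example of the wfd_video_formats layout described above: a freshly constructed VideoFormats object enables only CEA index 0 (640x480p60) with PROFILE_CBP and LEVEL_31, so

    VideoFormats formats;               // constructor defaults to CEA index 0
    AString spec = formats.getFormatSpec();
    // spec == "00 00 01 01 00000001 00000000 00000000 00 0000 0000 00 none none"

i.e. native byte 0x00 (CEA type, index 0), profile mask 0x01, level mask 0x01, and a CEA resolution bitmask with only bit 0 set.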
diff --git a/media/libstagefright/wifi-display/VideoFormats.h b/media/libstagefright/wifi-display/VideoFormats.h
new file mode 100644
index 0000000..fd38fd1
--- /dev/null
+++ b/media/libstagefright/wifi-display/VideoFormats.h
@@ -0,0 +1,125 @@
+/*
+ * Copyright 2013, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef VIDEO_FORMATS_H_
+
+#define VIDEO_FORMATS_H_
+
+#include <media/stagefright/foundation/ABase.h>
+
+#include <stdint.h>
+
+namespace android {
+
+struct AString;
+
+// This class encapsulates the video resolution capabilities of a wfd source
+// or sink as outlined in the wfd specs. Currently three sets of resolutions
+// are specified, each of which supports up to 32 resolutions.
+// In addition to its capabilities each sink/source also publishes its
+// "native" resolution, presumably one that is preferred among all others
+// because it wouldn't require any scaling and directly corresponds to the
+// display capabilities/pixels.
+struct VideoFormats {
+    VideoFormats();
+
+    struct config_t {
+        size_t width, height, framesPerSecond;
+        bool interlaced;
+        unsigned char profile, level;
+    };
+
+    enum ProfileType {
+        PROFILE_CBP = 0,
+        PROFILE_CHP,
+        kNumProfileTypes,
+    };
+
+    enum LevelType {
+        LEVEL_31 = 0,
+        LEVEL_32,
+        LEVEL_40,
+        LEVEL_41,
+        LEVEL_42,
+        kNumLevelTypes,
+    };
+
+    enum ResolutionType {
+        RESOLUTION_CEA,
+        RESOLUTION_VESA,
+        RESOLUTION_HH,
+        kNumResolutionTypes,
+    };
+
+    void setNativeResolution(ResolutionType type, size_t index);
+    void getNativeResolution(ResolutionType *type, size_t *index) const;
+
+    void disableAll();
+    void enableAll();
+    void enableResolutionUpto(
+            ResolutionType type, size_t index,
+            ProfileType profile, LevelType level);
+
+    void setResolutionEnabled(
+            ResolutionType type, size_t index, bool enabled = true);
+
+    bool isResolutionEnabled(ResolutionType type, size_t index) const;
+
+    void setProfileLevel(
+            ResolutionType type, size_t index,
+            ProfileType profile, LevelType level);
+
+    void getProfileLevel(
+            ResolutionType type, size_t index,
+            ProfileType *profile, LevelType *level) const;
+
+    static bool GetConfiguration(
+            ResolutionType type, size_t index,
+            size_t *width, size_t *height, size_t *framesPerSecond,
+            bool *interlaced);
+
+    static bool GetProfileLevel(
+            ProfileType profile, LevelType level,
+            unsigned *profileIdc, unsigned *levelIdc,
+            unsigned *constraintSet);
+
+    bool parseFormatSpec(const char *spec);
+    AString getFormatSpec(bool forM4Message = false) const;
+
+    static bool PickBestFormat(
+            const VideoFormats &sinkSupported,
+            const VideoFormats &sourceSupported,
+            ResolutionType *chosenType,
+            size_t *chosenIndex,
+            ProfileType *chosenProfile,
+            LevelType *chosenLevel);
+
+private:
+    bool parseH264Codec(const char *spec);
+    ResolutionType mNativeType;
+    size_t mNativeIndex;
+
+    uint32_t mResolutionEnabled[kNumResolutionTypes];
+    static const config_t mResolutionTable[kNumResolutionTypes][32];
+    config_t mConfigs[kNumResolutionTypes][32];
+
+    DISALLOW_EVIL_CONSTRUCTORS(VideoFormats);
+};
+
+}  // namespace android
+
+#endif  // VIDEO_FORMATS_H_
+
diff --git a/media/libstagefright/wifi-display/rtp/RTPBase.h b/media/libstagefright/wifi-display/rtp/RTPBase.h
new file mode 100644
index 0000000..194f1ee
--- /dev/null
+++ b/media/libstagefright/wifi-display/rtp/RTPBase.h
@@ -0,0 +1,49 @@
+/*
+ * Copyright 2013, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef RTP_BASE_H_
+
+#define RTP_BASE_H_
+
+namespace android {
+
+struct RTPBase {
+    enum PacketizationMode {
+        PACKETIZATION_TRANSPORT_STREAM,
+        PACKETIZATION_H264,
+        PACKETIZATION_AAC,
+        PACKETIZATION_NONE,
+    };
+
+    enum TransportMode {
+        TRANSPORT_UNDEFINED,
+        TRANSPORT_NONE,
+        TRANSPORT_UDP,
+        TRANSPORT_TCP,
+        TRANSPORT_TCP_INTERLEAVED,
+    };
+
+    // Really UDP _payload_ size
+    const unsigned int kMaxUDPPacketSize = 1472;   // 1472 good, 1473 bad on Android@Home
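+    // (1472 = 1500-byte Ethernet MTU - 20-byte IPv4 header - 8-byte UDP header,
+    // i.e. the largest UDP payload that avoids IP fragmentation.)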
+
+    static int32_t PickRandomRTPPort();
+};
+
+}  // namespace android
+
+#endif  // RTP_BASE_H_
+
+
diff --git a/media/libstagefright/wifi-display/rtp/RTPSender.cpp b/media/libstagefright/wifi-display/rtp/RTPSender.cpp
new file mode 100644
index 0000000..f7d141f
--- /dev/null
+++ b/media/libstagefright/wifi-display/rtp/RTPSender.cpp
@@ -0,0 +1,809 @@
+/*
+ * Copyright 2013, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "RTPSender"
+#include <utils/Log.h>
+
+#include "RTPSender.h"
+
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/foundation/ANetworkSession.h>
+#include <media/stagefright/foundation/hexdump.h>
+#include <media/stagefright/MediaErrors.h>
+#include <media/stagefright/Utils.h>
+
+#include <media/stagefright/foundation/avc_utils.h>
+#include <media/stagefright/foundation/ByteUtils.h>
+
+namespace android {
+
+RTPSender::RTPSender(
+        const sp<ANetworkSession> &netSession,
+        const sp<AMessage> &notify)
+    : mNetSession(netSession),
+      mNotify(notify),
+      mRTPMode(TRANSPORT_UNDEFINED),
+      mRTCPMode(TRANSPORT_UNDEFINED),
+      mRTPSessionID(0),
+      mRTCPSessionID(0),
+      mRTPConnected(false),
+      mRTCPConnected(false),
+      mLastNTPTime(0),
+      mLastRTPTime(0),
+      mNumRTPSent(0),
+      mNumRTPOctetsSent(0),
+      mNumSRsSent(0),
+      mRTPSeqNo(0),
+      mHistorySize(0) {
+}
+
+RTPSender::~RTPSender() {
+    if (mRTCPSessionID != 0) {
+        mNetSession->destroySession(mRTCPSessionID);
+        mRTCPSessionID = 0;
+    }
+
+    if (mRTPSessionID != 0) {
+        mNetSession->destroySession(mRTPSessionID);
+        mRTPSessionID = 0;
+    }
+}
+
+// static
+int32_t RTPBase::PickRandomRTPPort() {
+    // Pick an even integer in range [1024, 65534)
+
+    static const size_t kRange = (65534 - 1024) / 2;
+
+    return (int32_t)(((float)(kRange + 1) * rand()) / RAND_MAX) * 2 + 1024;
+}
+
+status_t RTPSender::initAsync(
+        const char *remoteHost,
+        int32_t remoteRTPPort,
+        TransportMode rtpMode,
+        int32_t remoteRTCPPort,
+        TransportMode rtcpMode,
+        int32_t *outLocalRTPPort) {
+    if (mRTPMode != TRANSPORT_UNDEFINED
+            || rtpMode == TRANSPORT_UNDEFINED
+            || rtpMode == TRANSPORT_NONE
+            || rtcpMode == TRANSPORT_UNDEFINED) {
+        return INVALID_OPERATION;
+    }
+
+    CHECK_NE(rtpMode, TRANSPORT_TCP_INTERLEAVED);
+    CHECK_NE(rtcpMode, TRANSPORT_TCP_INTERLEAVED);
+
+    if ((rtcpMode == TRANSPORT_NONE && remoteRTCPPort >= 0)
+            || (rtcpMode != TRANSPORT_NONE && remoteRTCPPort < 0)) {
+        return INVALID_OPERATION;
+    }
+
+    sp<AMessage> rtpNotify = new AMessage(kWhatRTPNotify, this);
+
+    sp<AMessage> rtcpNotify;
+    if (remoteRTCPPort >= 0) {
+        rtcpNotify = new AMessage(kWhatRTCPNotify, this);
+    }
+
+    CHECK_EQ(mRTPSessionID, 0);
+    CHECK_EQ(mRTCPSessionID, 0);
+
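+    // Keep picking a random even local port until the RTP socket (and, if
+    // RTCP was requested, the RTCP socket on the adjacent port + 1) can be
+    // created, following the usual RTP/RTCP port pairing convention.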
+    int32_t localRTPPort;
+
+    for (;;) {
+        localRTPPort = PickRandomRTPPort();
+
+        status_t err;
+        if (rtpMode == TRANSPORT_UDP) {
+            err = mNetSession->createUDPSession(
+                    localRTPPort,
+                    remoteHost,
+                    remoteRTPPort,
+                    rtpNotify,
+                    &mRTPSessionID);
+        } else {
+            CHECK_EQ(rtpMode, TRANSPORT_TCP);
+            err = mNetSession->createTCPDatagramSession(
+                    localRTPPort,
+                    remoteHost,
+                    remoteRTPPort,
+                    rtpNotify,
+                    &mRTPSessionID);
+        }
+
+        if (err != OK) {
+            continue;
+        }
+
+        if (remoteRTCPPort < 0) {
+            break;
+        }
+
+        if (rtcpMode == TRANSPORT_UDP) {
+            err = mNetSession->createUDPSession(
+                    localRTPPort + 1,
+                    remoteHost,
+                    remoteRTCPPort,
+                    rtcpNotify,
+                    &mRTCPSessionID);
+        } else {
+            CHECK_EQ(rtcpMode, TRANSPORT_TCP);
+            err = mNetSession->createTCPDatagramSession(
+                    localRTPPort + 1,
+                    remoteHost,
+                    remoteRTCPPort,
+                    rtcpNotify,
+                    &mRTCPSessionID);
+        }
+
+        if (err == OK) {
+            break;
+        }
+
+        mNetSession->destroySession(mRTPSessionID);
+        mRTPSessionID = 0;
+    }
+
+    if (rtpMode == TRANSPORT_UDP) {
+        mRTPConnected = true;
+    }
+
+    if (rtcpMode == TRANSPORT_UDP) {
+        mRTCPConnected = true;
+    }
+
+    mRTPMode = rtpMode;
+    mRTCPMode = rtcpMode;
+    *outLocalRTPPort = localRTPPort;
+
+    if (mRTPMode == TRANSPORT_UDP
+            && (mRTCPMode == TRANSPORT_UDP || mRTCPMode == TRANSPORT_NONE)) {
+        notifyInitDone(OK);
+    }
+
+    return OK;
+}
+
+status_t RTPSender::queueBuffer(
+        const sp<ABuffer> &buffer, uint8_t packetType, PacketizationMode mode) {
+    status_t err;
+
+    switch (mode) {
+        case PACKETIZATION_NONE:
+            err = queueRawPacket(buffer, packetType);
+            break;
+
+        case PACKETIZATION_TRANSPORT_STREAM:
+            err = queueTSPackets(buffer, packetType);
+            break;
+
+        case PACKETIZATION_H264:
+            err = queueAVCBuffer(buffer, packetType);
+            break;
+
+        default:
+            TRESPASS();
+    }
+
+    return err;
+}
+
+status_t RTPSender::queueRawPacket(
+        const sp<ABuffer> &packet, uint8_t packetType) {
+    CHECK_LE(packet->size(), kMaxUDPPacketSize - 12);
+
+    int64_t timeUs;
+    CHECK(packet->meta()->findInt64("timeUs", &timeUs));
+
+    sp<ABuffer> udpPacket = new ABuffer(12 + packet->size());
+
+    udpPacket->setInt32Data(mRTPSeqNo);
+
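+    // Fixed 12-byte RTP header (RFC 3550): version 2 with no padding,
+    // extension or CSRCs (0x80), the payload type, a 16-bit sequence number,
+    // a 32-bit timestamp and the 32-bit SSRC.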
+    uint8_t *rtp = udpPacket->data();
+    rtp[0] = 0x80;
+    rtp[1] = packetType;
+
+    rtp[2] = (mRTPSeqNo >> 8) & 0xff;
+    rtp[3] = mRTPSeqNo & 0xff;
+    ++mRTPSeqNo;
+
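+    // Convert the media time in microseconds to the 90 kHz RTP clock:
+    // timeUs * 90000 / 1000000 == timeUs * 9 / 100.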
+    uint32_t rtpTime = (timeUs * 9) / 100ll;
+
+    rtp[4] = rtpTime >> 24;
+    rtp[5] = (rtpTime >> 16) & 0xff;
+    rtp[6] = (rtpTime >> 8) & 0xff;
+    rtp[7] = rtpTime & 0xff;
+
+    rtp[8] = kSourceID >> 24;
+    rtp[9] = (kSourceID >> 16) & 0xff;
+    rtp[10] = (kSourceID >> 8) & 0xff;
+    rtp[11] = kSourceID & 0xff;
+
+    memcpy(&rtp[12], packet->data(), packet->size());
+
+    return sendRTPPacket(
+            udpPacket,
+            true /* storeInHistory */,
+            true /* timeValid */,
+            ALooper::GetNowUs());
+}
+
+status_t RTPSender::queueTSPackets(
+        const sp<ABuffer> &tsPackets, uint8_t packetType) {
+    CHECK_EQ(0u, tsPackets->size() % 188);
+
+    int64_t timeUs;
+    CHECK(tsPackets->meta()->findInt64("timeUs", &timeUs));
+
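+    // Pack as many whole 188-byte transport stream packets as fit into one
+    // RTP payload (kMaxNumTSPacketsPerRTPPacket of them) and emit one RTP
+    // packet per group.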
+    size_t srcOffset = 0;
+    while (srcOffset < tsPackets->size()) {
+        sp<ABuffer> udpPacket =
+            new ABuffer(12 + kMaxNumTSPacketsPerRTPPacket * 188);
+
+        udpPacket->setInt32Data(mRTPSeqNo);
+
+        uint8_t *rtp = udpPacket->data();
+        rtp[0] = 0x80;
+        rtp[1] = packetType;
+
+        rtp[2] = (mRTPSeqNo >> 8) & 0xff;
+        rtp[3] = mRTPSeqNo & 0xff;
+        ++mRTPSeqNo;
+
+        int64_t nowUs = ALooper::GetNowUs();
+        uint32_t rtpTime = (nowUs * 9) / 100ll;
+
+        rtp[4] = rtpTime >> 24;
+        rtp[5] = (rtpTime >> 16) & 0xff;
+        rtp[6] = (rtpTime >> 8) & 0xff;
+        rtp[7] = rtpTime & 0xff;
+
+        rtp[8] = kSourceID >> 24;
+        rtp[9] = (kSourceID >> 16) & 0xff;
+        rtp[10] = (kSourceID >> 8) & 0xff;
+        rtp[11] = kSourceID & 0xff;
+
+        size_t numTSPackets = (tsPackets->size() - srcOffset) / 188;
+        if (numTSPackets > kMaxNumTSPacketsPerRTPPacket) {
+            numTSPackets = kMaxNumTSPacketsPerRTPPacket;
+        }
+
+        memcpy(&rtp[12], tsPackets->data() + srcOffset, numTSPackets * 188);
+
+        udpPacket->setRange(0, 12 + numTSPackets * 188);
+
+        srcOffset += numTSPackets * 188;
+        bool isLastPacket = (srcOffset == tsPackets->size());
+
+        status_t err = sendRTPPacket(
+                udpPacket,
+                true /* storeInHistory */,
+                isLastPacket /* timeValid */,
+                timeUs);
+
+        if (err != OK) {
+            return err;
+        }
+    }
+
+    return OK;
+}
+
+status_t RTPSender::queueAVCBuffer(
+        const sp<ABuffer> &accessUnit, uint8_t packetType) {
+    int64_t timeUs;
+    CHECK(accessUnit->meta()->findInt64("timeUs", &timeUs));
+
+    uint32_t rtpTime = (timeUs * 9 / 100ll);
+
+    List<sp<ABuffer> > packets;
+
+    sp<ABuffer> out = new ABuffer(kMaxUDPPacketSize);
+    size_t outBytesUsed = 12;  // Placeholder for RTP header.
+
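+    // Walk the NAL units of the access unit and packetize them as in
+    // RFC 3984 (non-interleaved mode): aggregate small NAL units into STAP-A
+    // packets, emit a NAL unit that only fits on its own as a
+    // single-NAL-unit packet, and fragment anything larger into FU-A packets.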
+    const uint8_t *data = accessUnit->data();
+    size_t size = accessUnit->size();
+    const uint8_t *nalStart;
+    size_t nalSize;
+    while (getNextNALUnit(
+                &data, &size, &nalStart, &nalSize,
+                true /* startCodeFollows */) == OK) {
+        size_t bytesNeeded = nalSize + 2;
+        if (outBytesUsed == 12) {
+            ++bytesNeeded;
+        }
+
+        if (outBytesUsed + bytesNeeded > out->capacity()) {
+            bool emitSingleNALPacket = false;
+
+            if (outBytesUsed == 12
+                    && outBytesUsed + nalSize <= out->capacity()) {
+                // We haven't emitted anything into the current packet yet and
+                // this NAL unit fits into a single-NAL-unit-packet while
+                // it wouldn't have fit as part of a STAP-A packet.
+
+                memcpy(out->data() + outBytesUsed, nalStart, nalSize);
+                outBytesUsed += nalSize;
+
+                emitSingleNALPacket = true;
+            }
+
+            if (outBytesUsed > 12) {
+                out->setRange(0, outBytesUsed);
+                packets.push_back(out);
+                out = new ABuffer(kMaxUDPPacketSize);
+                outBytesUsed = 12;  // Placeholder for RTP header
+            }
+
+            if (emitSingleNALPacket) {
+                continue;
+            }
+        }
+
+        if (outBytesUsed + bytesNeeded <= out->capacity()) {
+            uint8_t *dst = out->data() + outBytesUsed;
+
+            if (outBytesUsed == 12) {
+                *dst++ = 24;  // STAP-A header
+            }
+
+            *dst++ = (nalSize >> 8) & 0xff;
+            *dst++ = nalSize & 0xff;
+            memcpy(dst, nalStart, nalSize);
+
+            outBytesUsed += bytesNeeded;
+            continue;
+        }
+
+        // This single NAL unit does not fit into a single RTP packet,
+        // we need to emit an FU-A.
+
+        CHECK_EQ(outBytesUsed, 12u);
+
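+        // Each FU-A fragment starts with an FU indicator (the original NRI
+        // bits combined with NAL type 28) followed by an FU header carrying
+        // the original NAL type plus start (0x80) and end (0x40) bits.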
+        uint8_t nalType = nalStart[0] & 0x1f;
+        uint8_t nri = (nalStart[0] >> 5) & 3;
+
+        size_t srcOffset = 1;
+        while (srcOffset < nalSize) {
+            size_t copy = out->capacity() - outBytesUsed - 2;
+            if (copy > nalSize - srcOffset) {
+                copy = nalSize - srcOffset;
+            }
+
+            uint8_t *dst = out->data() + outBytesUsed;
+            dst[0] = (nri << 5) | 28;
+
+            dst[1] = nalType;
+
+            if (srcOffset == 1) {
+                dst[1] |= 0x80;
+            }
+
+            if (srcOffset + copy == nalSize) {
+                dst[1] |= 0x40;
+            }
+
+            memcpy(&dst[2], nalStart + srcOffset, copy);
+            srcOffset += copy;
+
+            out->setRange(0, outBytesUsed + copy + 2);
+
+            packets.push_back(out);
+            out = new ABuffer(kMaxUDPPacketSize);
+            outBytesUsed = 12;  // Placeholder for RTP header
+        }
+    }
+
+    if (outBytesUsed > 12) {
+        out->setRange(0, outBytesUsed);
+        packets.push_back(out);
+    }
+
+    while (!packets.empty()) {
+        sp<ABuffer> out = *packets.begin();
+        packets.erase(packets.begin());
+
+        out->setInt32Data(mRTPSeqNo);
+
+        bool last = packets.empty();
+
+        uint8_t *dst = out->data();
+
+        dst[0] = 0x80;
+
+        dst[1] = packetType;
+        if (last) {
+            dst[1] |= 1 << 7;  // M-bit
+        }
+
+        dst[2] = (mRTPSeqNo >> 8) & 0xff;
+        dst[3] = mRTPSeqNo & 0xff;
+        ++mRTPSeqNo;
+
+        dst[4] = rtpTime >> 24;
+        dst[5] = (rtpTime >> 16) & 0xff;
+        dst[6] = (rtpTime >> 8) & 0xff;
+        dst[7] = rtpTime & 0xff;
+        dst[8] = kSourceID >> 24;
+        dst[9] = (kSourceID >> 16) & 0xff;
+        dst[10] = (kSourceID >> 8) & 0xff;
+        dst[11] = kSourceID & 0xff;
+
+        status_t err = sendRTPPacket(out, true /* storeInHistory */);
+
+        if (err != OK) {
+            return err;
+        }
+    }
+
+    return OK;
+}
+
+status_t RTPSender::sendRTPPacket(
+        const sp<ABuffer> &buffer, bool storeInHistory,
+        bool timeValid, int64_t timeUs) {
+    CHECK(mRTPConnected);
+
+    status_t err = mNetSession->sendRequest(
+            mRTPSessionID, buffer->data(), buffer->size(),
+            timeValid, timeUs);
+
+    if (err != OK) {
+        return err;
+    }
+
+    mLastNTPTime = GetNowNTP();
+    mLastRTPTime = U32_AT(buffer->data() + 4);
+
+    ++mNumRTPSent;
+    mNumRTPOctetsSent += buffer->size() - 12;
+
+    if (storeInHistory) {
+        if (mHistorySize == kMaxHistorySize) {
+            mHistory.erase(mHistory.begin());
+        } else {
+            ++mHistorySize;
+        }
+        mHistory.push_back(buffer);
+    }
+
+    return OK;
+}
+
+// static
+uint64_t RTPSender::GetNowNTP() {
+    struct timeval tv;
+    gettimeofday(&tv, NULL /* timezone */);
+
+    uint64_t nowUs = tv.tv_sec * 1000000ll + tv.tv_usec;
+
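+    // NTP timestamps count seconds since Jan 1 1900, so shift the Unix epoch
+    // time by 70 years plus 17 leap days and express it as 32 bits of seconds
+    // and 32 bits of fractional seconds.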
+    nowUs += ((70ll * 365 + 17) * 24) * 60 * 60 * 1000000ll;
+
+    uint64_t hi = nowUs / 1000000ll;
+    uint64_t lo = ((1ll << 32) * (nowUs % 1000000ll)) / 1000000ll;
+
+    return (hi << 32) | lo;
+}
+
+void RTPSender::onMessageReceived(const sp<AMessage> &msg) {
+    switch (msg->what()) {
+        case kWhatRTPNotify:
+        case kWhatRTCPNotify:
+            onNetNotify(msg->what() == kWhatRTPNotify, msg);
+            break;
+
+        default:
+            TRESPASS();
+    }
+}
+
+void RTPSender::onNetNotify(bool isRTP, const sp<AMessage> &msg) {
+    int32_t reason;
+    CHECK(msg->findInt32("reason", &reason));
+
+    switch (reason) {
+        case ANetworkSession::kWhatError:
+        {
+            int32_t sessionID;
+            CHECK(msg->findInt32("sessionID", &sessionID));
+
+            int32_t err;
+            CHECK(msg->findInt32("err", &err));
+
+            int32_t errorOccuredDuringSend;
+            CHECK(msg->findInt32("send", &errorOccuredDuringSend));
+
+            AString detail;
+            CHECK(msg->findString("detail", &detail));
+
+            ALOGE("An error occurred during %s in session %d "
+                  "(%d, '%s' (%s)).",
+                  errorOccuredDuringSend ? "send" : "receive",
+                  sessionID,
+                  err,
+                  detail.c_str(),
+                  strerror(-err));
+
+            mNetSession->destroySession(sessionID);
+
+            if (sessionID == mRTPSessionID) {
+                mRTPSessionID = 0;
+            } else if (sessionID == mRTCPSessionID) {
+                mRTCPSessionID = 0;
+            }
+
+            if (!mRTPConnected
+                    || (mRTPMode != TRANSPORT_NONE && !mRTCPConnected)) {
+                // We haven't completed initialization, attach the error
+                // to the notification instead.
+                notifyInitDone(err);
+                break;
+            }
+
+            notifyError(err);
+            break;
+        }
+
+        case ANetworkSession::kWhatDatagram:
+        {
+            sp<ABuffer> data;
+            CHECK(msg->findBuffer("data", &data));
+
+            if (isRTP) {
+                ALOGW("Huh? Received data on RTP connection...");
+            } else {
+                onRTCPData(data);
+            }
+            break;
+        }
+
+        case ANetworkSession::kWhatConnected:
+        {
+            int32_t sessionID;
+            CHECK(msg->findInt32("sessionID", &sessionID));
+
+            if (isRTP) {
+                CHECK_EQ(mRTPMode, TRANSPORT_TCP);
+                CHECK_EQ(sessionID, mRTPSessionID);
+                mRTPConnected = true;
+            } else {
+                CHECK_EQ(mRTCPMode, TRANSPORT_TCP);
+                CHECK_EQ(sessionID, mRTCPSessionID);
+                mRTCPConnected = true;
+            }
+
+            if (mRTPConnected
+                    && (mRTCPMode == TRANSPORT_NONE || mRTCPConnected)) {
+                notifyInitDone(OK);
+            }
+            break;
+        }
+
+        case ANetworkSession::kWhatNetworkStall:
+        {
+            size_t numBytesQueued;
+            CHECK(msg->findSize("numBytesQueued", &numBytesQueued));
+
+            notifyNetworkStall(numBytesQueued);
+            break;
+        }
+
+        default:
+            TRESPASS();
+    }
+}
+
+status_t RTPSender::onRTCPData(const sp<ABuffer> &buffer) {
+    const uint8_t *data = buffer->data();
+    size_t size = buffer->size();
+
+    while (size > 0) {
+        if (size < 8) {
+            // Too short to be a valid RTCP header
+            return ERROR_MALFORMED;
+        }
+
+        if ((data[0] >> 6) != 2) {
+            // Unsupported version.
+            return ERROR_UNSUPPORTED;
+        }
+
+        if (data[0] & 0x20) {
+            // Padding present.
+
+            size_t paddingLength = data[size - 1];
+
+            if (paddingLength + 12 > size) {
+                // If we removed this much padding we'd end up with something
+                // that's too short to be a valid RTP header.
+                return ERROR_MALFORMED;
+            }
+
+            size -= paddingLength;
+        }
+
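+        // The RTCP length field counts 32-bit words minus one, so the packet
+        // occupies (length + 1) * 4 bytes.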
+        size_t headerLength = 4 * (data[2] << 8 | data[3]) + 4;
+
+        if (size < headerLength) {
+            // Only received a partial packet?
+            return ERROR_MALFORMED;
+        }
+
+        switch (data[1]) {
+            case 200:  // SR
+            case 201:  // RR
+                parseReceiverReport(data, headerLength);
+                break;
+
+            case 202:  // SDES
+            case 203:  // BYE
+                break;
+
+            case 204:  // APP
+                parseAPP(data, headerLength);
+                break;
+
+            case 205:  // TSFB (transport layer specific feedback)
+                parseTSFB(data, headerLength);
+                break;
+
+            case 206:  // PSFB (payload specific feedback)
+                // hexdump(data, headerLength);
+                break;
+
+            default:
+            {
+                ALOGW("Unknown RTCP packet type %u of size %zu",
+                        (unsigned)data[1], headerLength);
+                break;
+            }
+        }
+
+        data += headerLength;
+        size -= headerLength;
+    }
+
+    return OK;
+}
+
+status_t RTPSender::parseReceiverReport(
+        const uint8_t *data, size_t /* size */) {
+    float fractionLost = data[12] / 256.0f;
+
+    ALOGI("lost %.2f %% of packets during report interval.",
+          100.0f * fractionLost);
+
+    return OK;
+}
+
+status_t RTPSender::parseTSFB(const uint8_t *data, size_t size) {
+    if ((data[0] & 0x1f) != 1) {
+        return ERROR_UNSUPPORTED;  // We only support NACK for now.
+    }
+
+    uint32_t srcId = U32_AT(&data[8]);
+    if (srcId != kSourceID) {
+        return ERROR_MALFORMED;
+    }
+
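+    // Generic NACK (RFC 4585): each 4-byte FCI entry carries the 16-bit
+    // sequence number of a lost packet followed by a 16-bit bitmask (BLP) of
+    // subsequently lost packets.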
+    for (size_t i = 12; i < size; i += 4) {
+        uint16_t seqNo = U16_AT(&data[i]);
+        uint16_t blp = U16_AT(&data[i + 2]);
+
+        List<sp<ABuffer> >::iterator it = mHistory.begin();
+        bool foundSeqNo = false;
+        while (it != mHistory.end()) {
+            const sp<ABuffer> &buffer = *it;
+
+            uint16_t bufferSeqNo = buffer->int32Data() & 0xffff;
+
+            bool retransmit = false;
+            if (bufferSeqNo == seqNo) {
+                retransmit = true;
+            } else if (blp != 0) {
+                for (size_t i = 0; i < 16; ++i) {
+                    if ((blp & (1 << i))
+                        && (bufferSeqNo == ((seqNo + i + 1) & 0xffff))) {
+                        blp &= ~(1 << i);
+                        retransmit = true;
+                    }
+                }
+            }
+
+            if (retransmit) {
+                ALOGV("retransmitting seqNo %d", bufferSeqNo);
+
+                CHECK_EQ((status_t)OK,
+                         sendRTPPacket(buffer, false /* storeInHistory */));
+
+                if (bufferSeqNo == seqNo) {
+                    foundSeqNo = true;
+                }
+
+                if (foundSeqNo && blp == 0) {
+                    break;
+                }
+            }
+
+            ++it;
+        }
+
+        if (!foundSeqNo || blp != 0) {
+            ALOGI("Some sequence numbers were no longer available for "
+                  "retransmission (seqNo = %d, foundSeqNo = %d, blp = 0x%04x)",
+                  seqNo, foundSeqNo, blp);
+
+            if (!mHistory.empty()) {
+                int32_t earliest = (*mHistory.begin())->int32Data() & 0xffff;
+                int32_t latest = (*--mHistory.end())->int32Data() & 0xffff;
+
+                ALOGI("have seq numbers from %d - %d", earliest, latest);
+            }
+        }
+    }
+
+    return OK;
+}
+
+status_t RTPSender::parseAPP(const uint8_t *data, size_t size) {
+    static const size_t late_offset = 8;
+    static const char late_string[] = "late";
+    static const size_t avgLatencyUs_offset = late_offset + sizeof(late_string) - 1;
+    static const size_t maxLatencyUs_offset = avgLatencyUs_offset + sizeof(int64_t);
+
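+    // Application-defined RTCP packet named "late" that carries the
+    // receiver's average and maximum latency in microseconds; forward those
+    // values to the observer as kWhatInformSender.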
+    if ((size >= (maxLatencyUs_offset + sizeof(int64_t)))
+            && !memcmp(late_string, &data[late_offset], sizeof(late_string) - 1)) {
+        int64_t avgLatencyUs = (int64_t)U64_AT(&data[avgLatencyUs_offset]);
+        int64_t maxLatencyUs = (int64_t)U64_AT(&data[maxLatencyUs_offset]);
+
+        sp<AMessage> notify = mNotify->dup();
+        notify->setInt32("what", kWhatInformSender);
+        notify->setInt64("avgLatencyUs", avgLatencyUs);
+        notify->setInt64("maxLatencyUs", maxLatencyUs);
+        notify->post();
+    }
+
+    return OK;
+}
+
+void RTPSender::notifyInitDone(status_t err) {
+    sp<AMessage> notify = mNotify->dup();
+    notify->setInt32("what", kWhatInitDone);
+    notify->setInt32("err", err);
+    notify->post();
+}
+
+void RTPSender::notifyError(status_t err) {
+    sp<AMessage> notify = mNotify->dup();
+    notify->setInt32("what", kWhatError);
+    notify->setInt32("err", err);
+    notify->post();
+}
+
+void RTPSender::notifyNetworkStall(size_t numBytesQueued) {
+    sp<AMessage> notify = mNotify->dup();
+    notify->setInt32("what", kWhatNetworkStall);
+    notify->setSize("numBytesQueued", numBytesQueued);
+    notify->post();
+}
+
+}  // namespace android
+
diff --git a/media/libstagefright/wifi-display/rtp/RTPSender.h b/media/libstagefright/wifi-display/rtp/RTPSender.h
new file mode 100644
index 0000000..bedfd01
--- /dev/null
+++ b/media/libstagefright/wifi-display/rtp/RTPSender.h
@@ -0,0 +1,119 @@
+/*
+ * Copyright 2013, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef RTP_SENDER_H_
+
+#define RTP_SENDER_H_
+
+#include "RTPBase.h"
+
+#include <media/stagefright/foundation/AHandler.h>
+
+namespace android {
+
+struct ABuffer;
+struct ANetworkSession;
+
+// An object of this class facilitates sending of media data over an RTP
+// channel. The channel is established over a UDP or TCP connection depending
+// on which "TransportMode" was chosen. In addition, different RTP packetization
+// schemes are supported, such as "Transport Stream Packets over RTP"
+// or "AVC/H.264 encapsulation as specified in RFC 3984 (non-interleaved mode)".
+struct RTPSender : public RTPBase, public AHandler {
+    enum {
+        kWhatInitDone,
+        kWhatError,
+        kWhatNetworkStall,
+        kWhatInformSender,
+    };
+    RTPSender(
+            const sp<ANetworkSession> &netSession,
+            const sp<AMessage> &notify);
+
+    status_t initAsync(
+              const char *remoteHost,
+              int32_t remoteRTPPort,
+              TransportMode rtpMode,
+              int32_t remoteRTCPPort,
+              TransportMode rtcpMode,
+              int32_t *outLocalRTPPort);
+
+    status_t queueBuffer(
+            const sp<ABuffer> &buffer,
+            uint8_t packetType,
+            PacketizationMode mode);
+
+protected:
+    virtual ~RTPSender();
+    virtual void onMessageReceived(const sp<AMessage> &msg);
+
+private:
+    enum {
+        kWhatRTPNotify,
+        kWhatRTCPNotify,
+    };
+
+    const unsigned int kMaxNumTSPacketsPerRTPPacket = (kMaxUDPPacketSize - 12) / 188;
+    const unsigned int kMaxHistorySize              = 1024;
+    const unsigned int kSourceID                    = 0xdeadbeef;
+
+    sp<ANetworkSession> mNetSession;
+    sp<AMessage> mNotify;
+    TransportMode mRTPMode;
+    TransportMode mRTCPMode;
+    int32_t mRTPSessionID;
+    int32_t mRTCPSessionID;
+    bool mRTPConnected;
+    bool mRTCPConnected;
+
+    uint64_t mLastNTPTime;
+    uint32_t mLastRTPTime;
+    uint32_t mNumRTPSent;
+    uint32_t mNumRTPOctetsSent;
+    uint32_t mNumSRsSent;
+
+    uint32_t mRTPSeqNo;
+
+    List<sp<ABuffer> > mHistory;
+    size_t mHistorySize;
+
+    static uint64_t GetNowNTP();
+
+    status_t queueRawPacket(const sp<ABuffer> &tsPackets, uint8_t packetType);
+    status_t queueTSPackets(const sp<ABuffer> &tsPackets, uint8_t packetType);
+    status_t queueAVCBuffer(const sp<ABuffer> &accessUnit, uint8_t packetType);
+
+    status_t sendRTPPacket(
+            const sp<ABuffer> &packet, bool storeInHistory,
+            bool timeValid = false, int64_t timeUs = -1ll);
+
+    void onNetNotify(bool isRTP, const sp<AMessage> &msg);
+
+    status_t onRTCPData(const sp<ABuffer> &data);
+    status_t parseReceiverReport(const uint8_t *data, size_t size);
+    status_t parseTSFB(const uint8_t *data, size_t size);
+    status_t parseAPP(const uint8_t *data, size_t size);
+
+    void notifyInitDone(status_t err);
+    void notifyError(status_t err);
+    void notifyNetworkStall(size_t numBytesQueued);
+
+    DISALLOW_EVIL_CONSTRUCTORS(RTPSender);
+};
+
+}  // namespace android
+
+#endif  // RTP_SENDER_H_
diff --git a/media/libstagefright/wifi-display/source/Converter.cpp b/media/libstagefright/wifi-display/source/Converter.cpp
new file mode 100644
index 0000000..d5c7ae2
--- /dev/null
+++ b/media/libstagefright/wifi-display/source/Converter.cpp
@@ -0,0 +1,826 @@
+/*
+ * Copyright 2012, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "Converter"
+#include <utils/Log.h>
+
+#include "Converter.h"
+
+#include "MediaPuller.h"
+
+#include <cutils/properties.h>
+#include <gui/Surface.h>
+#include <media/ICrypto.h>
+#include <media/MediaCodecBuffer.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/MediaBufferBase.h>
+#include <media/stagefright/MediaCodec.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MediaErrors.h>
+#include <media/MediaBufferHolder.h>
+#include <media/stagefright/foundation/avc_utils.h>
+
+#include <arpa/inet.h>
+
+#include <OMX_Video.h>
+
+namespace android {
+
+Converter::Converter(
+        const sp<AMessage> &notify,
+        const sp<ALooper> &codecLooper,
+        const sp<AMessage> &outputFormat,
+        uint32_t flags)
+    : mNotify(notify),
+      mCodecLooper(codecLooper),
+      mOutputFormat(outputFormat),
+      mFlags(flags),
+      mIsVideo(false),
+      mIsH264(false),
+      mIsPCMAudio(false),
+      mNeedToManuallyPrependSPSPPS(false),
+      mDoMoreWorkPending(false)
+#if ENABLE_SILENCE_DETECTION
+      ,mFirstSilentFrameUs(-1ll)
+      ,mInSilentMode(false)
+#endif
+      ,mPrevVideoBitrate(-1)
+      ,mNumFramesToDrop(0)
+      ,mEncodingSuspended(false)
+    {
+    AString mime;
+    CHECK(mOutputFormat->findString("mime", &mime));
+
+    if (!strncasecmp("video/", mime.c_str(), 6)) {
+        mIsVideo = true;
+
+        mIsH264 = !strcasecmp(mime.c_str(), MEDIA_MIMETYPE_VIDEO_AVC);
+    } else if (!strcasecmp(MEDIA_MIMETYPE_AUDIO_RAW, mime.c_str())) {
+        mIsPCMAudio = true;
+    }
+}
+
+void Converter::releaseEncoder() {
+    if (mEncoder == NULL) {
+        return;
+    }
+
+    mEncoder->release();
+    mEncoder.clear();
+
+    mInputBufferQueue.clear();
+    mEncoderInputBuffers.clear();
+    mEncoderOutputBuffers.clear();
+}
+
+Converter::~Converter() {
+    CHECK(mEncoder == NULL);
+}
+
+void Converter::shutdownAsync() {
+    ALOGV("shutdown");
+    (new AMessage(kWhatShutdown, this))->post();
+}
+
+status_t Converter::init() {
+    status_t err = initEncoder();
+
+    if (err != OK) {
+        releaseEncoder();
+    }
+
+    return err;
+}
+
+sp<IGraphicBufferProducer> Converter::getGraphicBufferProducer() {
+    CHECK(mFlags & FLAG_USE_SURFACE_INPUT);
+    return mGraphicBufferProducer;
+}
+
+size_t Converter::getInputBufferCount() const {
+    return mEncoderInputBuffers.size();
+}
+
+sp<AMessage> Converter::getOutputFormat() const {
+    return mOutputFormat;
+}
+
+bool Converter::needToManuallyPrependSPSPPS() const {
+    return mNeedToManuallyPrependSPSPPS;
+}
+
+// static
+int32_t Converter::GetInt32Property(
+        const char *propName, int32_t defaultValue) {
+    char val[PROPERTY_VALUE_MAX];
+    if (property_get(propName, val, NULL)) {
+        char *end;
+        unsigned long x = strtoul(val, &end, 10);
+
+        if (*end == '\0' && end > val && x > 0) {
+            return x;
+        }
+    }
+
+    return defaultValue;
+}
+
+status_t Converter::initEncoder() {
+    AString outputMIME;
+    CHECK(mOutputFormat->findString("mime", &outputMIME));
+
+    bool isAudio = !strncasecmp(outputMIME.c_str(), "audio/", 6);
+
+    if (!mIsPCMAudio) {
+        mEncoder = MediaCodec::CreateByType(
+                mCodecLooper, outputMIME.c_str(), true /* encoder */);
+
+        if (mEncoder == NULL) {
+            return ERROR_UNSUPPORTED;
+        }
+    }
+
+    if (mIsPCMAudio) {
+        return OK;
+    }
+
+    int32_t audioBitrate = GetInt32Property("media.wfd.audio-bitrate", 128000);
+    int32_t videoBitrate = GetInt32Property("media.wfd.video-bitrate", 5000000);
+    mPrevVideoBitrate = videoBitrate;
+
+    ALOGI("using audio bitrate of %d bps, video bitrate of %d bps",
+          audioBitrate, videoBitrate);
+
+    if (isAudio) {
+        mOutputFormat->setInt32("bitrate", audioBitrate);
+    } else {
+        mOutputFormat->setInt32("bitrate", videoBitrate);
+        mOutputFormat->setInt32("bitrate-mode", OMX_Video_ControlRateConstant);
+        mOutputFormat->setInt32("frame-rate", 30);
+        mOutputFormat->setInt32("i-frame-interval", 15);  // Iframes every 15 secs
+
+        // Configure encoder to use intra macroblock refresh mode
+        mOutputFormat->setInt32("intra-refresh-mode", OMX_VIDEO_IntraRefreshCyclic);
+
+        int width, height, mbs;
+        if (!mOutputFormat->findInt32("width", &width)
+                || !mOutputFormat->findInt32("height", &height)) {
+            return ERROR_UNSUPPORTED;
+        }
+
+        // Update macroblocks in a cyclic fashion, with roughly 10% of all MBs
+        // in the frame refreshed at a time. It takes about 10 frames to
+        // completely update a whole video frame. If the frame rate is 30,
+        // it takes about 333 ms in the best case (if next frame is not an IDR)
+        // to recover from a lost/corrupted packet.
+        mbs = (((width + 15) / 16) * ((height + 15) / 16) * 10) / 100;
+        mOutputFormat->setInt32("intra-refresh-CIR-mbs", mbs);
+    }
+
+    ALOGV("output format is '%s'", mOutputFormat->debugString(0).c_str());
+
+    mNeedToManuallyPrependSPSPPS = false;
+
+    status_t err = NO_INIT;
+
+    if (!isAudio) {
+        sp<AMessage> tmp = mOutputFormat->dup();
+        tmp->setInt32("prepend-sps-pps-to-idr-frames", 1);
+
+        err = mEncoder->configure(
+                tmp,
+                NULL /* nativeWindow */,
+                NULL /* crypto */,
+                MediaCodec::CONFIGURE_FLAG_ENCODE);
+
+        if (err == OK) {
+            // The encoder supports prepending SPS/PPS itself, so we don't need
+            // to emulate it.
+            mOutputFormat = tmp;
+        } else {
+            mNeedToManuallyPrependSPSPPS = true;
+
+            ALOGI("We're going to manually prepend SPS and PPS to IDR frames.");
+        }
+    }
+
+    if (err != OK) {
+        // We'll get here for audio or if we failed to configure the encoder
+        // to automatically prepend SPS/PPS in the case of video.
+
+        err = mEncoder->configure(
+                    mOutputFormat,
+                    NULL /* nativeWindow */,
+                    NULL /* crypto */,
+                    MediaCodec::CONFIGURE_FLAG_ENCODE);
+    }
+
+    if (err != OK) {
+        return err;
+    }
+
+    if (mFlags & FLAG_USE_SURFACE_INPUT) {
+        CHECK(mIsVideo);
+
+        err = mEncoder->createInputSurface(&mGraphicBufferProducer);
+
+        if (err != OK) {
+            return err;
+        }
+    }
+
+    err = mEncoder->start();
+
+    if (err != OK) {
+        return err;
+    }
+
+    err = mEncoder->getInputBuffers(&mEncoderInputBuffers);
+
+    if (err != OK) {
+        return err;
+    }
+
+    err = mEncoder->getOutputBuffers(&mEncoderOutputBuffers);
+
+    if (err != OK) {
+        return err;
+    }
+
+    if (mFlags & FLAG_USE_SURFACE_INPUT) {
+        scheduleDoMoreWork();
+    }
+
+    return OK;
+}
+
+void Converter::notifyError(status_t err) {
+    sp<AMessage> notify = mNotify->dup();
+    notify->setInt32("what", kWhatError);
+    notify->setInt32("err", err);
+    notify->post();
+}
+
+// static
+bool Converter::IsSilence(const sp<ABuffer> &accessUnit) {
+    const uint8_t *ptr = accessUnit->data();
+    const uint8_t *end = ptr + accessUnit->size();
+    while (ptr < end) {
+        if (*ptr != 0) {
+            return false;
+        }
+        ++ptr;
+    }
+
+    return true;
+}
+
+void Converter::onMessageReceived(const sp<AMessage> &msg) {
+    switch (msg->what()) {
+        case kWhatMediaPullerNotify:
+        {
+            int32_t what;
+            CHECK(msg->findInt32("what", &what));
+
+            if (!mIsPCMAudio && mEncoder == NULL) {
+                ALOGV("got msg '%s' after encoder shutdown.",
+                      msg->debugString().c_str());
+
+                if (what == MediaPuller::kWhatAccessUnit) {
+                    sp<ABuffer> accessUnit;
+                    CHECK(msg->findBuffer("accessUnit", &accessUnit));
+
+                    accessUnit->meta()->setObject("mediaBufferHolder", sp<MediaBufferHolder>(nullptr));
+                }
+                break;
+            }
+
+            if (what == MediaPuller::kWhatEOS) {
+                mInputBufferQueue.push_back(NULL);
+
+                feedEncoderInputBuffers();
+
+                scheduleDoMoreWork();
+            } else {
+                CHECK_EQ(what, MediaPuller::kWhatAccessUnit);
+
+                sp<ABuffer> accessUnit;
+                CHECK(msg->findBuffer("accessUnit", &accessUnit));
+
+                if (mNumFramesToDrop > 0 || mEncodingSuspended) {
+                    if (mNumFramesToDrop > 0) {
+                        --mNumFramesToDrop;
+                        ALOGI("dropping frame.");
+                    }
+
+                    accessUnit->meta()->setObject("mediaBufferHolder", sp<MediaBufferHolder>(nullptr));
+                    break;
+                }
+
+#if 0
+                MediaBuffer *mbuf =
+                    (MediaBuffer *)(accessUnit->getMediaBufferBase());
+                if (mbuf != NULL) {
+                    ALOGI("queueing mbuf %p", mbuf);
+                    mbuf->release();
+                }
+#endif
+
+#if ENABLE_SILENCE_DETECTION
+                if (!mIsVideo) {
+                    if (IsSilence(accessUnit)) {
+                        if (mInSilentMode) {
+                            break;
+                        }
+
+                        int64_t nowUs = ALooper::GetNowUs();
+
+                        if (mFirstSilentFrameUs < 0ll) {
+                            mFirstSilentFrameUs = nowUs;
+                        } else if (nowUs >= mFirstSilentFrameUs + 10000000ll) {
+                            mInSilentMode = true;
+                            ALOGI("audio in silent mode now.");
+                            break;
+                        }
+                    } else {
+                        if (mInSilentMode) {
+                            ALOGI("audio no longer in silent mode.");
+                        }
+                        mInSilentMode = false;
+                        mFirstSilentFrameUs = -1ll;
+                    }
+                }
+#endif
+
+                mInputBufferQueue.push_back(accessUnit);
+
+                feedEncoderInputBuffers();
+
+                scheduleDoMoreWork();
+            }
+            break;
+        }
+
+        case kWhatEncoderActivity:
+        {
+#if 0
+            int64_t whenUs;
+            if (msg->findInt64("whenUs", &whenUs)) {
+                int64_t nowUs = ALooper::GetNowUs();
+                ALOGI("[%s] kWhatEncoderActivity after %lld us",
+                      mIsVideo ? "video" : "audio", nowUs - whenUs);
+            }
+#endif
+
+            mDoMoreWorkPending = false;
+
+            if (mEncoder == NULL) {
+                break;
+            }
+
+            status_t err = doMoreWork();
+
+            if (err != OK) {
+                notifyError(err);
+            } else {
+                scheduleDoMoreWork();
+            }
+            break;
+        }
+
+        case kWhatRequestIDRFrame:
+        {
+            if (mEncoder == NULL) {
+                break;
+            }
+
+            if (mIsVideo) {
+                ALOGV("requesting IDR frame");
+                mEncoder->requestIDRFrame();
+            }
+            break;
+        }
+
+        case kWhatShutdown:
+        {
+            ALOGI("shutting down %s encoder", mIsVideo ? "video" : "audio");
+
+            releaseEncoder();
+
+            AString mime;
+            CHECK(mOutputFormat->findString("mime", &mime));
+            ALOGI("encoder (%s) shut down.", mime.c_str());
+
+            sp<AMessage> notify = mNotify->dup();
+            notify->setInt32("what", kWhatShutdownCompleted);
+            notify->post();
+            break;
+        }
+
+        case kWhatDropAFrame:
+        {
+            ++mNumFramesToDrop;
+            break;
+        }
+
+        case kWhatReleaseOutputBuffer:
+        {
+            if (mEncoder != NULL) {
+                size_t bufferIndex;
+                CHECK(msg->findInt32("bufferIndex", (int32_t*)&bufferIndex));
+                CHECK(bufferIndex < mEncoderOutputBuffers.size());
+                mEncoder->releaseOutputBuffer(bufferIndex);
+            }
+            break;
+        }
+
+        case kWhatSuspendEncoding:
+        {
+            int32_t suspend;
+            CHECK(msg->findInt32("suspend", &suspend));
+
+            mEncodingSuspended = suspend;
+
+            if (mFlags & FLAG_USE_SURFACE_INPUT) {
+                sp<AMessage> params = new AMessage;
+                params->setInt32("drop-input-frames", suspend);
+                mEncoder->setParameters(params);
+            }
+            break;
+        }
+
+        default:
+            TRESPASS();
+    }
+}
+
+void Converter::scheduleDoMoreWork() {
+    if (mIsPCMAudio) {
+        // There's no encoder involved in this case.
+        return;
+    }
+
+    if (mDoMoreWorkPending) {
+        return;
+    }
+
+    mDoMoreWorkPending = true;
+
+#if 1
+    if (mEncoderActivityNotify == NULL) {
+        mEncoderActivityNotify = new AMessage(kWhatEncoderActivity, this);
+    }
+    mEncoder->requestActivityNotification(mEncoderActivityNotify->dup());
+#else
+    sp<AMessage> notify = new AMessage(kWhatEncoderActivity, this);
+    notify->setInt64("whenUs", ALooper::GetNowUs());
+    mEncoder->requestActivityNotification(notify);
+#endif
+}
+
+status_t Converter::feedRawAudioInputBuffers() {
+    // Split incoming PCM audio into buffers of 6 AUs of 80 audio frames each
+    // and add a 4 byte header according to the wifi display specs.
+
+    while (!mInputBufferQueue.empty()) {
+        sp<ABuffer> buffer = *mInputBufferQueue.begin();
+        mInputBufferQueue.erase(mInputBufferQueue.begin());
+
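+        // LPCM samples are carried big-endian in the transport stream, so
+        // byte-swap each 16-bit sample from host order.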
+        int16_t *ptr = (int16_t *)buffer->data();
+        int16_t *stop = (int16_t *)(buffer->data() + buffer->size());
+        while (ptr < stop) {
+            *ptr = htons(*ptr);
+            ++ptr;
+        }
+
+        static const size_t kFrameSize = 2 * sizeof(int16_t);  // stereo
+        static const size_t kFramesPerAU = 80;
+        static const size_t kNumAUsPerPESPacket = 6;
+
+        if (mPartialAudioAU != NULL) {
+            size_t bytesMissingForFullAU =
+                kNumAUsPerPESPacket * kFramesPerAU * kFrameSize
+                - mPartialAudioAU->size() + 4;
+
+            size_t copy = buffer->size();
+            if (copy > bytesMissingForFullAU) {
+                copy = bytesMissingForFullAU;
+            }
+
+            memcpy(mPartialAudioAU->data() + mPartialAudioAU->size(),
+                   buffer->data(),
+                   copy);
+
+            mPartialAudioAU->setRange(0, mPartialAudioAU->size() + copy);
+
+            buffer->setRange(buffer->offset() + copy, buffer->size() - copy);
+
+            int64_t timeUs;
+            CHECK(buffer->meta()->findInt64("timeUs", &timeUs));
+
+            int64_t copyUs = (int64_t)((copy / kFrameSize) * 1E6 / 48000.0);
+            timeUs += copyUs;
+            buffer->meta()->setInt64("timeUs", timeUs);
+
+            if (bytesMissingForFullAU == copy) {
+                sp<AMessage> notify = mNotify->dup();
+                notify->setInt32("what", kWhatAccessUnit);
+                notify->setBuffer("accessUnit", mPartialAudioAU);
+                notify->post();
+
+                mPartialAudioAU.clear();
+            }
+        }
+
+        while (buffer->size() > 0) {
+            sp<ABuffer> partialAudioAU =
+                new ABuffer(
+                        4
+                        + kNumAUsPerPESPacket * kFrameSize * kFramesPerAU);
+
+            uint8_t *ptr = partialAudioAU->data();
+            ptr[0] = 0xa0;  // 10100000b
+            ptr[1] = kNumAUsPerPESPacket;
+            ptr[2] = 0;  // reserved, audio _emphasis_flag = 0
+
+            static const unsigned kQuantizationWordLength = 0;  // 16-bit
+            static const unsigned kAudioSamplingFrequency = 2;  // 48Khz
+            static const unsigned kNumberOfAudioChannels = 1;  // stereo
+
+            ptr[3] = (kQuantizationWordLength << 6)
+                    | (kAudioSamplingFrequency << 3)
+                    | kNumberOfAudioChannels;
+
+            size_t copy = buffer->size();
+            if (copy > partialAudioAU->size() - 4) {
+                copy = partialAudioAU->size() - 4;
+            }
+
+            memcpy(&ptr[4], buffer->data(), copy);
+
+            partialAudioAU->setRange(0, 4 + copy);
+            buffer->setRange(buffer->offset() + copy, buffer->size() - copy);
+
+            int64_t timeUs;
+            CHECK(buffer->meta()->findInt64("timeUs", &timeUs));
+
+            partialAudioAU->meta()->setInt64("timeUs", timeUs);
+
+            int64_t copyUs = (int64_t)((copy / kFrameSize) * 1E6 / 48000.0);
+            timeUs += copyUs;
+            buffer->meta()->setInt64("timeUs", timeUs);
+
+            if (copy == partialAudioAU->capacity() - 4) {
+                sp<AMessage> notify = mNotify->dup();
+                notify->setInt32("what", kWhatAccessUnit);
+                notify->setBuffer("accessUnit", partialAudioAU);
+                notify->post();
+
+                partialAudioAU.clear();
+                continue;
+            }
+
+            mPartialAudioAU = partialAudioAU;
+        }
+    }
+
+    return OK;
+}
+
+status_t Converter::feedEncoderInputBuffers() {
+    if (mIsPCMAudio) {
+        return feedRawAudioInputBuffers();
+    }
+
+    while (!mInputBufferQueue.empty()
+            && !mAvailEncoderInputIndices.empty()) {
+        sp<ABuffer> buffer = *mInputBufferQueue.begin();
+        mInputBufferQueue.erase(mInputBufferQueue.begin());
+
+        size_t bufferIndex = *mAvailEncoderInputIndices.begin();
+        mAvailEncoderInputIndices.erase(mAvailEncoderInputIndices.begin());
+
+        int64_t timeUs = 0ll;
+        uint32_t flags = 0;
+
+        if (buffer != NULL) {
+            CHECK(buffer->meta()->findInt64("timeUs", &timeUs));
+
+            memcpy(mEncoderInputBuffers.itemAt(bufferIndex)->data(),
+                   buffer->data(),
+                   buffer->size());
+
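+            // Hand the backing MediaBuffer (if any) over to the encoder input
+            // buffer's metadata so it stays referenced until the encoder has
+            // consumed the data.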
+            MediaBufferBase *mediaBuffer = NULL;
+            sp<RefBase> holder;
+
+            if (buffer->meta()->findObject("mediaBufferHolder", &holder)) {
+                mediaBuffer = (holder != nullptr) ?
+                    static_cast<MediaBufferHolder*>(holder.get())->mediaBuffer() : nullptr;
+            }
+            if (mediaBuffer != NULL) {
+                mEncoderInputBuffers.itemAt(bufferIndex)->meta()->setObject("mediaBufferHolder", new MediaBufferHolder(mediaBuffer));
+
+                buffer->meta()->setObject("mediaBufferHolder", sp<MediaBufferHolder>(nullptr));
+            }
+        } else {
+            flags = MediaCodec::BUFFER_FLAG_EOS;
+        }
+
+        status_t err = mEncoder->queueInputBuffer(
+                bufferIndex, 0, (buffer == NULL) ? 0 : buffer->size(),
+                timeUs, flags);
+
+        if (err != OK) {
+            return err;
+        }
+    }
+
+    return OK;
+}
+
+sp<ABuffer> Converter::prependCSD(const sp<ABuffer> &accessUnit) const {
+    CHECK(mCSD0 != NULL);
+
+    sp<ABuffer> dup = new ABuffer(accessUnit->size() + mCSD0->size());
+    memcpy(dup->data(), mCSD0->data(), mCSD0->size());
+    memcpy(dup->data() + mCSD0->size(), accessUnit->data(), accessUnit->size());
+
+    int64_t timeUs;
+    CHECK(accessUnit->meta()->findInt64("timeUs", &timeUs));
+
+    dup->meta()->setInt64("timeUs", timeUs);
+
+    return dup;
+}
+
+status_t Converter::doMoreWork() {
+    status_t err;
+
+    if (!(mFlags & FLAG_USE_SURFACE_INPUT)) {
+        for (;;) {
+            size_t bufferIndex;
+            err = mEncoder->dequeueInputBuffer(&bufferIndex);
+
+            if (err != OK) {
+                break;
+            }
+
+            mAvailEncoderInputIndices.push_back(bufferIndex);
+        }
+
+        feedEncoderInputBuffers();
+    }
+
+    for (;;) {
+        size_t bufferIndex;
+        size_t offset;
+        size_t size;
+        int64_t timeUs;
+        uint32_t flags;
+        native_handle_t* handle = NULL;
+        err = mEncoder->dequeueOutputBuffer(
+                &bufferIndex, &offset, &size, &timeUs, &flags);
+
+        if (err != OK) {
+            if (err == INFO_FORMAT_CHANGED) {
+                continue;
+            } else if (err == INFO_OUTPUT_BUFFERS_CHANGED) {
+                mEncoder->getOutputBuffers(&mEncoderOutputBuffers);
+                continue;
+            }
+
+            if (err == -EAGAIN) {
+                err = OK;
+            }
+            break;
+        }
+
+        if (flags & MediaCodec::BUFFER_FLAG_EOS) {
+            sp<AMessage> notify = mNotify->dup();
+            notify->setInt32("what", kWhatEOS);
+            notify->post();
+        } else {
+#if 0
+            if (mIsVideo) {
+                int32_t videoBitrate = GetInt32Property(
+                        "media.wfd.video-bitrate", 5000000);
+
+                setVideoBitrate(videoBitrate);
+            }
+#endif
+
+            sp<ABuffer> buffer;
+            sp<MediaCodecBuffer> outbuf = mEncoderOutputBuffers.itemAt(bufferIndex);
+
+            if (outbuf->meta()->findPointer("handle", (void**)&handle) &&
+                    handle != NULL) {
+                int32_t rangeLength, rangeOffset;
+                CHECK(outbuf->meta()->findInt32("rangeOffset", &rangeOffset));
+                CHECK(outbuf->meta()->findInt32("rangeLength", &rangeLength));
+                outbuf->meta()->setPointer("handle", NULL);
+
+                // MediaSender will post the following message when HDCP
+                // is done, to release the output buffer back to encoder.
+                sp<AMessage> notify(new AMessage(kWhatReleaseOutputBuffer, this));
+                notify->setInt32("bufferIndex", bufferIndex);
+
+                buffer = new ABuffer(
+                        rangeLength > (int32_t)size ? rangeLength : size);
+                buffer->meta()->setPointer("handle", handle);
+                buffer->meta()->setInt32("rangeOffset", rangeOffset);
+                buffer->meta()->setInt32("rangeLength", rangeLength);
+                buffer->meta()->setMessage("notify", notify);
+            } else {
+                buffer = new ABuffer(size);
+            }
+
+            buffer->meta()->setInt64("timeUs", timeUs);
+
+            ALOGV("[%s] time %lld us (%.2f secs)",
+                    mIsVideo ? "video" : "audio", (long long)timeUs, timeUs / 1E6);
+
+            memcpy(buffer->data(), outbuf->base() + offset, size);
+
+            if (flags & MediaCodec::BUFFER_FLAG_CODECCONFIG) {
+                if (!handle) {
+                    if (mIsH264) {
+                        mCSD0 = buffer;
+                    }
+                    mOutputFormat->setBuffer("csd-0", buffer);
+                }
+            } else {
+                if (mNeedToManuallyPrependSPSPPS
+                        && mIsH264
+                        && (mFlags & FLAG_PREPEND_CSD_IF_NECESSARY)
+                        && IsIDR(buffer->data(), buffer->size())) {
+                    buffer = prependCSD(buffer);
+                }
+
+                sp<AMessage> notify = mNotify->dup();
+                notify->setInt32("what", kWhatAccessUnit);
+                notify->setBuffer("accessUnit", buffer);
+                notify->post();
+            }
+        }
+
+        if (!handle) {
+            mEncoder->releaseOutputBuffer(bufferIndex);
+        }
+
+        if (flags & MediaCodec::BUFFER_FLAG_EOS) {
+            break;
+        }
+    }
+
+    return err;
+}
+
+void Converter::requestIDRFrame() {
+    (new AMessage(kWhatRequestIDRFrame, this))->post();
+}
+
+void Converter::dropAFrame() {
+    // Unsupported in surface input mode.
+    CHECK(!(mFlags & FLAG_USE_SURFACE_INPUT));
+
+    (new AMessage(kWhatDropAFrame, this))->post();
+}
+
+void Converter::suspendEncoding(bool suspend) {
+    sp<AMessage> msg = new AMessage(kWhatSuspendEncoding, this);
+    msg->setInt32("suspend", suspend);
+    msg->post();
+}
+
+int32_t Converter::getVideoBitrate() const {
+    return mPrevVideoBitrate;
+}
+
+void Converter::setVideoBitrate(int32_t bitRate) {
+    if (mIsVideo && mEncoder != NULL && bitRate != mPrevVideoBitrate) {
+        sp<AMessage> params = new AMessage;
+        params->setInt32("video-bitrate", bitRate);
+
+        mEncoder->setParameters(params);
+
+        mPrevVideoBitrate = bitRate;
+    }
+}
+
+}  // namespace android
diff --git a/media/libstagefright/wifi-display/source/Converter.h b/media/libstagefright/wifi-display/source/Converter.h
new file mode 100644
index 0000000..ad95ab5
--- /dev/null
+++ b/media/libstagefright/wifi-display/source/Converter.h
@@ -0,0 +1,157 @@
+/*
+ * Copyright 2012, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef CONVERTER_H_
+
+#define CONVERTER_H_
+
+#include <media/stagefright/foundation/AHandler.h>
+
+namespace android {
+
+struct ABuffer;
+class IGraphicBufferProducer;
+struct MediaCodec;
+class MediaCodecBuffer;
+
+#define ENABLE_SILENCE_DETECTION        0
+
+// Utility class that receives media access units and converts them into
+// media access units of a different format.
+// Right now this'll convert raw video into H.264 and raw audio into AAC.
+struct Converter : public AHandler {
+    enum {
+        kWhatAccessUnit,
+        kWhatEOS,
+        kWhatError,
+        kWhatShutdownCompleted,
+    };
+
+    enum FlagBits {
+        FLAG_USE_SURFACE_INPUT          = 1,
+        FLAG_PREPEND_CSD_IF_NECESSARY   = 2,
+    };
+    Converter(const sp<AMessage> &notify,
+              const sp<ALooper> &codecLooper,
+              const sp<AMessage> &outputFormat,
+              uint32_t flags = 0);
+
+    status_t init();
+
+    sp<IGraphicBufferProducer> getGraphicBufferProducer();
+
+    size_t getInputBufferCount() const;
+
+    sp<AMessage> getOutputFormat() const;
+    bool needToManuallyPrependSPSPPS() const;
+
+    void feedAccessUnit(const sp<ABuffer> &accessUnit);
+    void signalEOS();
+
+    void requestIDRFrame();
+
+    void dropAFrame();
+    void suspendEncoding(bool suspend);
+
+    void shutdownAsync();
+
+    int32_t getVideoBitrate() const;
+    void setVideoBitrate(int32_t bitrate);
+
+    static int32_t GetInt32Property(const char *propName, int32_t defaultValue);
+
+    enum {
+        // MUST not conflict with private enums below.
+        kWhatMediaPullerNotify = 'pulN',
+    };
+
+protected:
+    virtual ~Converter();
+    virtual void onMessageReceived(const sp<AMessage> &msg);
+
+private:
+    enum {
+        kWhatDoMoreWork,
+        kWhatRequestIDRFrame,
+        kWhatSuspendEncoding,
+        kWhatShutdown,
+        kWhatEncoderActivity,
+        kWhatDropAFrame,
+        kWhatReleaseOutputBuffer,
+    };
+
+    sp<AMessage> mNotify;
+    sp<ALooper> mCodecLooper;
+    sp<AMessage> mOutputFormat;
+    uint32_t mFlags;
+    bool mIsVideo;
+    bool mIsH264;
+    bool mIsPCMAudio;
+    bool mNeedToManuallyPrependSPSPPS;
+
+    sp<MediaCodec> mEncoder;
+    sp<AMessage> mEncoderActivityNotify;
+
+    sp<IGraphicBufferProducer> mGraphicBufferProducer;
+
+    Vector<sp<MediaCodecBuffer> > mEncoderInputBuffers;
+    Vector<sp<MediaCodecBuffer> > mEncoderOutputBuffers;
+
+    List<size_t> mAvailEncoderInputIndices;
+
+    List<sp<ABuffer> > mInputBufferQueue;
+
+    sp<ABuffer> mCSD0;
+
+    bool mDoMoreWorkPending;
+
+#if ENABLE_SILENCE_DETECTION
+    int64_t mFirstSilentFrameUs;
+    bool mInSilentMode;
+#endif
+
+    sp<ABuffer> mPartialAudioAU;
+
+    int32_t mPrevVideoBitrate;
+
+    int32_t mNumFramesToDrop;
+    bool mEncodingSuspended;
+
+    status_t initEncoder();
+    void releaseEncoder();
+
+    status_t feedEncoderInputBuffers();
+
+    void scheduleDoMoreWork();
+    status_t doMoreWork();
+
+    void notifyError(status_t err);
+
+    // Packetizes raw PCM audio data available in mInputBufferQueue
+    // into a format suitable for transport stream inclusion and
+    // notifies the observer.
+    status_t feedRawAudioInputBuffers();
+
+    static bool IsSilence(const sp<ABuffer> &accessUnit);
+
+    sp<ABuffer> prependCSD(const sp<ABuffer> &accessUnit) const;
+
+    DISALLOW_EVIL_CONSTRUCTORS(Converter);
+};
+
+}  // namespace android
+
+#endif  // CONVERTER_H_
diff --git a/media/libstagefright/wifi-display/source/MediaPuller.cpp b/media/libstagefright/wifi-display/source/MediaPuller.cpp
new file mode 100644
index 0000000..ba836e1
--- /dev/null
+++ b/media/libstagefright/wifi-display/source/MediaPuller.cpp
@@ -0,0 +1,227 @@
+/*
+ * Copyright 2012, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "MediaPuller"
+#include <utils/Log.h>
+
+#include "MediaPuller.h"
+
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/MediaBufferBase.h>
+#include <media/MediaBufferHolder.h>
+#include <media/MediaSource.h>
+#include <media/stagefright/MetaData.h>
+
+namespace android {
+
+MediaPuller::MediaPuller(
+        const sp<MediaSource> &source, const sp<AMessage> &notify)
+    : mSource(source),
+      mNotify(notify),
+      mPullGeneration(0),
+      mIsAudio(false),
+      mPaused(false) {
+    sp<MetaData> meta = source->getFormat();
+    const char *mime;
+    CHECK(meta->findCString(kKeyMIMEType, &mime));
+
+    mIsAudio = !strncasecmp(mime, "audio/", 6);
+}
+
+MediaPuller::~MediaPuller() {
+}
+
+status_t MediaPuller::postSynchronouslyAndReturnError(
+        const sp<AMessage> &msg) {
+    sp<AMessage> response;
+    status_t err = msg->postAndAwaitResponse(&response);
+
+    if (err != OK) {
+        return err;
+    }
+
+    if (!response->findInt32("err", &err)) {
+        err = OK;
+    }
+
+    return err;
+}
+
+status_t MediaPuller::start() {
+    return postSynchronouslyAndReturnError(new AMessage(kWhatStart, this));
+}
+
+void MediaPuller::stopAsync(const sp<AMessage> &notify) {
+    sp<AMessage> msg = new AMessage(kWhatStop, this);
+    msg->setMessage("notify", notify);
+    msg->post();
+}
+
+void MediaPuller::pause() {
+    (new AMessage(kWhatPause, this))->post();
+}
+
+void MediaPuller::resume() {
+    (new AMessage(kWhatResume, this))->post();
+}
+
+void MediaPuller::onMessageReceived(const sp<AMessage> &msg) {
+    switch (msg->what()) {
+        case kWhatStart:
+        {
+            status_t err;
+            if (mIsAudio) {
+                // This atrocity causes AudioSource to deliver absolute
+                // systemTime() based timestamps (off by 1 us).
+                sp<MetaData> params = new MetaData;
+                params->setInt64(kKeyTime, 1ll);
+                err = mSource->start(params.get());
+            } else {
+                err = mSource->start();
+                if (err != OK) {
+                    ALOGE("source failed to start w/ err %d", err);
+                }
+            }
+
+            if (err == OK) {
+                schedulePull();
+            }
+
+            sp<AMessage> response = new AMessage;
+            response->setInt32("err", err);
+
+            sp<AReplyToken> replyID;
+            CHECK(msg->senderAwaitsResponse(&replyID));
+            response->postReply(replyID);
+            break;
+        }
+
+        case kWhatStop:
+        {
+            sp<MetaData> meta = mSource->getFormat();
+            const char *tmp;
+            CHECK(meta->findCString(kKeyMIMEType, &tmp));
+            AString mime = tmp;
+
+            ALOGI("MediaPuller(%s) stopping.", mime.c_str());
+            mSource->stop();
+            ALOGI("MediaPuller(%s) stopped.", mime.c_str());
+            ++mPullGeneration;
+
+            sp<AMessage> notify;
+            CHECK(msg->findMessage("notify", &notify));
+            notify->post();
+            break;
+        }
+
+        case kWhatPull:
+        {
+            int32_t generation;
+            CHECK(msg->findInt32("generation", &generation));
+
+            if (generation != mPullGeneration) {
+                break;
+            }
+
+            MediaBufferBase *mbuf;
+            status_t err = mSource->read(&mbuf);
+
+            if (mPaused) {
+                if (err == OK) {
+                    mbuf->release();
+                    mbuf = NULL;
+                }
+
+                schedulePull();
+                break;
+            }
+
+            if (err != OK) {
+                if (err == ERROR_END_OF_STREAM) {
+                    ALOGI("stream ended.");
+                } else {
+                    ALOGE("error %d reading stream.", err);
+                }
+
+                sp<AMessage> notify = mNotify->dup();
+                notify->setInt32("what", kWhatEOS);
+                notify->post();
+            } else {
+                int64_t timeUs;
+                CHECK(mbuf->meta_data().findInt64(kKeyTime, &timeUs));
+
+                sp<ABuffer> accessUnit = new ABuffer(mbuf->range_length());
+
+                memcpy(accessUnit->data(),
+                       (const uint8_t *)mbuf->data() + mbuf->range_offset(),
+                       mbuf->range_length());
+
+                accessUnit->meta()->setInt64("timeUs", timeUs);
+
+                if (mIsAudio) {
+                    mbuf->release();
+                    mbuf = NULL;
+                } else {
+                    // video encoder will release MediaBufferBase when done
+                    // with underlying data.
+                    accessUnit->meta()->setObject("mediaBufferHolder", new MediaBufferHolder(mbuf));
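+                    // The MediaBufferHolder above keeps its own reference to
+                    // mbuf, so this release() only drops ours; the encoder can
+                    // still reach the data through accessUnit's meta.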
+                    mbuf->release();
+                    mbuf = NULL;
+                }
+
+                sp<AMessage> notify = mNotify->dup();
+
+                notify->setInt32("what", kWhatAccessUnit);
+                notify->setBuffer("accessUnit", accessUnit);
+                notify->post();
+
+                if (mbuf != NULL) {
+                    ALOGV("posted mbuf %p", mbuf);
+                }
+
+                schedulePull();
+            }
+            break;
+        }
+
+        case kWhatPause:
+        {
+            mPaused = true;
+            break;
+        }
+
+        case kWhatResume:
+        {
+            mPaused = false;
+            break;
+        }
+
+        default:
+            TRESPASS();
+    }
+}
+
+void MediaPuller::schedulePull() {
+    sp<AMessage> msg = new AMessage(kWhatPull, this);
+    msg->setInt32("generation", mPullGeneration);
+    msg->post();
+}
+
+}  // namespace android
+
diff --git a/media/libstagefright/wifi-display/source/MediaPuller.h b/media/libstagefright/wifi-display/source/MediaPuller.h
new file mode 100644
index 0000000..1291bb3
--- /dev/null
+++ b/media/libstagefright/wifi-display/source/MediaPuller.h
@@ -0,0 +1,68 @@
+/*
+ * Copyright 2012, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef MEDIA_PULLER_H_
+
+#define MEDIA_PULLER_H_
+
+#include <media/stagefright/foundation/AHandler.h>
+
+namespace android {
+
+struct MediaSource;
+
+struct MediaPuller : public AHandler {
+    enum {
+        kWhatEOS,
+        kWhatAccessUnit
+    };
+
+    MediaPuller(const sp<MediaSource> &source, const sp<AMessage> &notify);
+
+    status_t start();
+    void stopAsync(const sp<AMessage> &notify);
+
+    void pause();
+    void resume();
+
+protected:
+    virtual void onMessageReceived(const sp<AMessage> &msg);
+    virtual ~MediaPuller();
+
+private:
+    enum {
+        kWhatStart,
+        kWhatStop,
+        kWhatPull,
+        kWhatPause,
+        kWhatResume,
+    };
+
+    sp<MediaSource> mSource;
+    sp<AMessage> mNotify;
+    int32_t mPullGeneration;
+    bool mIsAudio;
+    bool mPaused;
+
+    status_t postSynchronouslyAndReturnError(const sp<AMessage> &msg);
+    void schedulePull();
+
+    DISALLOW_EVIL_CONSTRUCTORS(MediaPuller);
+};
+
+}  // namespace android
+
+#endif  // MEDIA_PULLER_H_
diff --git a/media/libstagefright/wifi-display/source/PlaybackSession.cpp b/media/libstagefright/wifi-display/source/PlaybackSession.cpp
new file mode 100644
index 0000000..4183a0d
--- /dev/null
+++ b/media/libstagefright/wifi-display/source/PlaybackSession.cpp
@@ -0,0 +1,1113 @@
+/*
+ * Copyright 2012, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "PlaybackSession"
+#include <utils/Log.h>
+
+#include "PlaybackSession.h"
+
+#include "Converter.h"
+#include "MediaPuller.h"
+#include "RepeaterSource.h"
+#include <media/stagefright/foundation/avc_utils.h>
+#include "WifiDisplaySource.h"
+
+#include <binder/IServiceManager.h>
+#include <cutils/properties.h>
+#include <media/IHDCP.h>
+#include <media/IMediaHTTPService.h>
+#include <media/stagefright/foundation/ABitReader.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/foundation/hexdump.h>
+#include <media/stagefright/AudioSource.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MediaErrors.h>
+#include <media/MediaSource.h>
+#include <media/stagefright/MetaData.h>
+#include <media/stagefright/NuMediaExtractor.h>
+#include <media/stagefright/SurfaceMediaSource.h>
+#include <media/stagefright/Utils.h>
+#include <media/IOMX.h>
+
+#include <OMX_IVCommon.h>
+
+namespace android {
+
+struct WifiDisplaySource::PlaybackSession::Track : public AHandler {
+    enum {
+        kWhatStopped,
+    };
+
+    Track(const sp<AMessage> &notify,
+          const sp<ALooper> &pullLooper,
+          const sp<ALooper> &codecLooper,
+          const sp<MediaPuller> &mediaPuller,
+          const sp<Converter> &converter);
+
+    Track(const sp<AMessage> &notify, const sp<AMessage> &format);
+
+    void setRepeaterSource(const sp<RepeaterSource> &source);
+
+    sp<AMessage> getFormat();
+    bool isAudio() const;
+
+    const sp<Converter> &converter() const;
+    const sp<RepeaterSource> &repeaterSource() const;
+
+    ssize_t mediaSenderTrackIndex() const;
+    void setMediaSenderTrackIndex(size_t index);
+
+    status_t start();
+    void stopAsync();
+
+    void pause();
+    void resume();
+
+    void queueAccessUnit(const sp<ABuffer> &accessUnit);
+    sp<ABuffer> dequeueAccessUnit();
+
+    bool hasOutputBuffer(int64_t *timeUs) const;
+    void queueOutputBuffer(const sp<ABuffer> &accessUnit);
+    sp<ABuffer> dequeueOutputBuffer();
+
+#if SUSPEND_VIDEO_IF_IDLE
+    bool isSuspended() const;
+#endif
+
+    size_t countQueuedOutputBuffers() const {
+        return mQueuedOutputBuffers.size();
+    }
+
+    void requestIDRFrame();
+
+protected:
+    virtual void onMessageReceived(const sp<AMessage> &msg);
+    virtual ~Track();
+
+private:
+    enum {
+        kWhatMediaPullerStopped,
+    };
+
+    sp<AMessage> mNotify;
+    sp<ALooper> mPullLooper;
+    sp<ALooper> mCodecLooper;
+    sp<MediaPuller> mMediaPuller;
+    sp<Converter> mConverter;
+    sp<AMessage> mFormat;
+    bool mStarted;
+    ssize_t mMediaSenderTrackIndex;
+    bool mIsAudio;
+    List<sp<ABuffer> > mQueuedAccessUnits;
+    sp<RepeaterSource> mRepeaterSource;
+    List<sp<ABuffer> > mQueuedOutputBuffers;
+    int64_t mLastOutputBufferQueuedTimeUs;
+
+    static bool IsAudioFormat(const sp<AMessage> &format);
+
+    DISALLOW_EVIL_CONSTRUCTORS(Track);
+};
+
+WifiDisplaySource::PlaybackSession::Track::Track(
+        const sp<AMessage> &notify,
+        const sp<ALooper> &pullLooper,
+        const sp<ALooper> &codecLooper,
+        const sp<MediaPuller> &mediaPuller,
+        const sp<Converter> &converter)
+    : mNotify(notify),
+      mPullLooper(pullLooper),
+      mCodecLooper(codecLooper),
+      mMediaPuller(mediaPuller),
+      mConverter(converter),
+      mStarted(false),
+      mIsAudio(IsAudioFormat(mConverter->getOutputFormat())),
+      mLastOutputBufferQueuedTimeUs(-1ll) {
+}
+
+WifiDisplaySource::PlaybackSession::Track::Track(
+        const sp<AMessage> &notify, const sp<AMessage> &format)
+    : mNotify(notify),
+      mFormat(format),
+      mStarted(false),
+      mIsAudio(IsAudioFormat(format)),
+      mLastOutputBufferQueuedTimeUs(-1ll) {
+}
+
+WifiDisplaySource::PlaybackSession::Track::~Track() {
+    CHECK(!mStarted);
+}
+
+// static
+bool WifiDisplaySource::PlaybackSession::Track::IsAudioFormat(
+        const sp<AMessage> &format) {
+    AString mime;
+    CHECK(format->findString("mime", &mime));
+
+    return !strncasecmp(mime.c_str(), "audio/", 6);
+}
+
+sp<AMessage> WifiDisplaySource::PlaybackSession::Track::getFormat() {
+    return mFormat != NULL ? mFormat : mConverter->getOutputFormat();
+}
+
+bool WifiDisplaySource::PlaybackSession::Track::isAudio() const {
+    return mIsAudio;
+}
+
+const sp<Converter> &WifiDisplaySource::PlaybackSession::Track::converter() const {
+    return mConverter;
+}
+
+const sp<RepeaterSource> &
+WifiDisplaySource::PlaybackSession::Track::repeaterSource() const {
+    return mRepeaterSource;
+}
+
+ssize_t WifiDisplaySource::PlaybackSession::Track::mediaSenderTrackIndex() const {
+    CHECK_GE(mMediaSenderTrackIndex, 0);
+    return mMediaSenderTrackIndex;
+}
+
+void WifiDisplaySource::PlaybackSession::Track::setMediaSenderTrackIndex(
+        size_t index) {
+    mMediaSenderTrackIndex = index;
+}
+
+status_t WifiDisplaySource::PlaybackSession::Track::start() {
+    ALOGV("Track::start isAudio=%d", mIsAudio);
+
+    CHECK(!mStarted);
+
+    status_t err = OK;
+
+    if (mMediaPuller != NULL) {
+        err = mMediaPuller->start();
+    }
+
+    if (err == OK) {
+        mStarted = true;
+    }
+
+    return err;
+}
+
+void WifiDisplaySource::PlaybackSession::Track::stopAsync() {
+    ALOGV("Track::stopAsync isAudio=%d", mIsAudio);
+
+    if (mConverter != NULL) {
+        mConverter->shutdownAsync();
+    }
+
+    sp<AMessage> msg = new AMessage(kWhatMediaPullerStopped, this);
+
+    if (mStarted && mMediaPuller != NULL) {
+        if (mRepeaterSource != NULL) {
+            // Let's unblock MediaPuller's MediaSource::read().
+            mRepeaterSource->wakeUp();
+        }
+
+        mMediaPuller->stopAsync(msg);
+    } else {
+        mStarted = false;
+        msg->post();
+    }
+}
+
+void WifiDisplaySource::PlaybackSession::Track::pause() {
+    mMediaPuller->pause();
+}
+
+void WifiDisplaySource::PlaybackSession::Track::resume() {
+    mMediaPuller->resume();
+}
+
+void WifiDisplaySource::PlaybackSession::Track::onMessageReceived(
+        const sp<AMessage> &msg) {
+    switch (msg->what()) {
+        case kWhatMediaPullerStopped:
+        {
+            mConverter.clear();
+
+            mStarted = false;
+
+            sp<AMessage> notify = mNotify->dup();
+            notify->setInt32("what", kWhatStopped);
+            notify->post();
+
+            ALOGI("kWhatStopped %s posted", mIsAudio ? "audio" : "video");
+            break;
+        }
+
+        default:
+            TRESPASS();
+    }
+}
+
+void WifiDisplaySource::PlaybackSession::Track::queueAccessUnit(
+        const sp<ABuffer> &accessUnit) {
+    mQueuedAccessUnits.push_back(accessUnit);
+}
+
+sp<ABuffer> WifiDisplaySource::PlaybackSession::Track::dequeueAccessUnit() {
+    if (mQueuedAccessUnits.empty()) {
+        return NULL;
+    }
+
+    sp<ABuffer> accessUnit = *mQueuedAccessUnits.begin();
+    CHECK(accessUnit != NULL);
+
+    mQueuedAccessUnits.erase(mQueuedAccessUnits.begin());
+
+    return accessUnit;
+}
+
+void WifiDisplaySource::PlaybackSession::Track::setRepeaterSource(
+        const sp<RepeaterSource> &source) {
+    mRepeaterSource = source;
+}
+
+void WifiDisplaySource::PlaybackSession::Track::requestIDRFrame() {
+    if (mIsAudio) {
+        return;
+    }
+
+    if (mRepeaterSource != NULL) {
+        mRepeaterSource->wakeUp();
+    }
+
+    mConverter->requestIDRFrame();
+}
+
+bool WifiDisplaySource::PlaybackSession::Track::hasOutputBuffer(
+        int64_t *timeUs) const {
+    *timeUs = 0ll;
+
+    if (mQueuedOutputBuffers.empty()) {
+        return false;
+    }
+
+    const sp<ABuffer> &outputBuffer = *mQueuedOutputBuffers.begin();
+
+    CHECK(outputBuffer->meta()->findInt64("timeUs", timeUs));
+
+    return true;
+}
+
+void WifiDisplaySource::PlaybackSession::Track::queueOutputBuffer(
+        const sp<ABuffer> &accessUnit) {
+    mQueuedOutputBuffers.push_back(accessUnit);
+    mLastOutputBufferQueuedTimeUs = ALooper::GetNowUs();
+}
+
+sp<ABuffer> WifiDisplaySource::PlaybackSession::Track::dequeueOutputBuffer() {
+    CHECK(!mQueuedOutputBuffers.empty());
+
+    sp<ABuffer> outputBuffer = *mQueuedOutputBuffers.begin();
+    mQueuedOutputBuffers.erase(mQueuedOutputBuffers.begin());
+
+    return outputBuffer;
+}
+
+#if SUSPEND_VIDEO_IF_IDLE
+bool WifiDisplaySource::PlaybackSession::Track::isSuspended() const {
+    if (!mQueuedOutputBuffers.empty()) {
+        return false;
+    }
+
+    if (mLastOutputBufferQueuedTimeUs < 0ll) {
+        // We've never seen an output buffer queued, but tracks start
+        // out live, not suspended.
+        return false;
+    }
+
+    // If we've not seen new output data for 60ms or more, we consider
+    // this track suspended for the time being.
+    return (ALooper::GetNowUs() - mLastOutputBufferQueuedTimeUs) > 60000ll;
+}
+#endif
+
+////////////////////////////////////////////////////////////////////////////////
+
+WifiDisplaySource::PlaybackSession::PlaybackSession(
+        const String16 &opPackageName,
+        const sp<ANetworkSession> &netSession,
+        const sp<AMessage> &notify,
+        const in_addr &interfaceAddr,
+        const sp<IHDCP> &hdcp,
+        const char *path)
+    : mOpPackageName(opPackageName),
+      mNetSession(netSession),
+      mNotify(notify),
+      mInterfaceAddr(interfaceAddr),
+      mHDCP(hdcp),
+      mLocalRTPPort(-1),
+      mWeAreDead(false),
+      mPaused(false),
+      mLastLifesignUs(),
+      mVideoTrackIndex(-1),
+      mPrevTimeUs(-1ll),
+      mPullExtractorPending(false),
+      mPullExtractorGeneration(0),
+      mFirstSampleTimeRealUs(-1ll),
+      mFirstSampleTimeUs(-1ll) {
+    if (path != NULL) {
+        mMediaPath.setTo(path);
+    }
+}
+
+status_t WifiDisplaySource::PlaybackSession::init(
+        const char *clientIP,
+        int32_t clientRtp,
+        RTPSender::TransportMode rtpMode,
+        int32_t clientRtcp,
+        RTPSender::TransportMode rtcpMode,
+        bool enableAudio,
+        bool usePCMAudio,
+        bool enableVideo,
+        VideoFormats::ResolutionType videoResolutionType,
+        size_t videoResolutionIndex,
+        VideoFormats::ProfileType videoProfileType,
+        VideoFormats::LevelType videoLevelType) {
+    sp<AMessage> notify = new AMessage(kWhatMediaSenderNotify, this);
+    mMediaSender = new MediaSender(mNetSession, notify);
+    looper()->registerHandler(mMediaSender);
+
+    mMediaSender->setHDCP(mHDCP);
+
+    status_t err = setupPacketizer(
+            enableAudio,
+            usePCMAudio,
+            enableVideo,
+            videoResolutionType,
+            videoResolutionIndex,
+            videoProfileType,
+            videoLevelType);
+
+    if (err == OK) {
+        err = mMediaSender->initAsync(
+                -1 /* trackIndex */,
+                clientIP,
+                clientRtp,
+                rtpMode,
+                clientRtcp,
+                rtcpMode,
+                &mLocalRTPPort);
+    }
+
+    if (err != OK) {
+        mLocalRTPPort = -1;
+
+        looper()->unregisterHandler(mMediaSender->id());
+        mMediaSender.clear();
+
+        return err;
+    }
+
+    updateLiveness();
+
+    return OK;
+}
+
+WifiDisplaySource::PlaybackSession::~PlaybackSession() {
+}
+
+int32_t WifiDisplaySource::PlaybackSession::getRTPPort() const {
+    return mLocalRTPPort;
+}
+
+int64_t WifiDisplaySource::PlaybackSession::getLastLifesignUs() const {
+    return mLastLifesignUs;
+}
+
+void WifiDisplaySource::PlaybackSession::updateLiveness() {
+    mLastLifesignUs = ALooper::GetNowUs();
+}
+
+status_t WifiDisplaySource::PlaybackSession::play() {
+    updateLiveness();
+
+    (new AMessage(kWhatResume, this))->post();
+
+    return OK;
+}
+
+status_t WifiDisplaySource::PlaybackSession::onMediaSenderInitialized() {
+    for (size_t i = 0; i < mTracks.size(); ++i) {
+        CHECK_EQ((status_t)OK, mTracks.editValueAt(i)->start());
+    }
+
+    sp<AMessage> notify = mNotify->dup();
+    notify->setInt32("what", kWhatSessionEstablished);
+    notify->post();
+
+    return OK;
+}
+
+status_t WifiDisplaySource::PlaybackSession::pause() {
+    updateLiveness();
+
+    (new AMessage(kWhatPause, this))->post();
+
+    return OK;
+}
+
+void WifiDisplaySource::PlaybackSession::destroyAsync() {
+    ALOGI("destroyAsync");
+
+    for (size_t i = 0; i < mTracks.size(); ++i) {
+        mTracks.valueAt(i)->stopAsync();
+    }
+}
+
+void WifiDisplaySource::PlaybackSession::onMessageReceived(
+        const sp<AMessage> &msg) {
+    switch (msg->what()) {
+        case kWhatConverterNotify:
+        {
+            if (mWeAreDead) {
+                ALOGV("dropping msg '%s' because we're dead",
+                      msg->debugString().c_str());
+
+                break;
+            }
+
+            int32_t what;
+            CHECK(msg->findInt32("what", &what));
+
+            size_t trackIndex;
+            CHECK(msg->findSize("trackIndex", &trackIndex));
+
+            if (what == Converter::kWhatAccessUnit) {
+                sp<ABuffer> accessUnit;
+                CHECK(msg->findBuffer("accessUnit", &accessUnit));
+
+                const sp<Track> &track = mTracks.valueFor(trackIndex);
+
+                status_t err = mMediaSender->queueAccessUnit(
+                        track->mediaSenderTrackIndex(),
+                        accessUnit);
+
+                if (err != OK) {
+                    notifySessionDead();
+                }
+                break;
+            } else if (what == Converter::kWhatEOS) {
+                CHECK_EQ(what, Converter::kWhatEOS);
+
+                ALOGI("output EOS on track %zu", trackIndex);
+
+                ssize_t index = mTracks.indexOfKey(trackIndex);
+                CHECK_GE(index, 0);
+
+                const sp<Converter> &converter =
+                    mTracks.valueAt(index)->converter();
+                looper()->unregisterHandler(converter->id());
+
+                mTracks.removeItemsAt(index);
+
+                if (mTracks.isEmpty()) {
+                    ALOGI("Reached EOS");
+                }
+            } else if (what != Converter::kWhatShutdownCompleted) {
+                CHECK_EQ(what, Converter::kWhatError);
+
+                status_t err;
+                CHECK(msg->findInt32("err", &err));
+
+                ALOGE("converter signaled error %d", err);
+
+                notifySessionDead();
+            }
+            break;
+        }
+
+        case kWhatMediaSenderNotify:
+        {
+            int32_t what;
+            CHECK(msg->findInt32("what", &what));
+
+            if (what == MediaSender::kWhatInitDone) {
+                status_t err;
+                CHECK(msg->findInt32("err", &err));
+
+                if (err == OK) {
+                    onMediaSenderInitialized();
+                } else {
+                    notifySessionDead();
+                }
+            } else if (what == MediaSender::kWhatError) {
+                notifySessionDead();
+            } else if (what == MediaSender::kWhatNetworkStall) {
+                size_t numBytesQueued;
+                CHECK(msg->findSize("numBytesQueued", &numBytesQueued));
+
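+                // The sender reports that its queue is backing up; ask the
+                // video encoder to drop a frame rather than letting the
+                // backlog keep growing.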
+                if (mVideoTrackIndex >= 0) {
+                    const sp<Track> &videoTrack =
+                        mTracks.valueFor(mVideoTrackIndex);
+
+                    sp<Converter> converter = videoTrack->converter();
+                    if (converter != NULL) {
+                        converter->dropAFrame();
+                    }
+                }
+            } else if (what == MediaSender::kWhatInformSender) {
+                onSinkFeedback(msg);
+            } else {
+                TRESPASS();
+            }
+            break;
+        }
+
+        case kWhatTrackNotify:
+        {
+            int32_t what;
+            CHECK(msg->findInt32("what", &what));
+
+            size_t trackIndex;
+            CHECK(msg->findSize("trackIndex", &trackIndex));
+
+            if (what == Track::kWhatStopped) {
+                ALOGI("Track %zu stopped", trackIndex);
+
+                sp<Track> track = mTracks.valueFor(trackIndex);
+                looper()->unregisterHandler(track->id());
+                mTracks.removeItem(trackIndex);
+                track.clear();
+
+                if (!mTracks.isEmpty()) {
+                    ALOGI("not all tracks are stopped yet");
+                    break;
+                }
+
+                looper()->unregisterHandler(mMediaSender->id());
+                mMediaSender.clear();
+
+                sp<AMessage> notify = mNotify->dup();
+                notify->setInt32("what", kWhatSessionDestroyed);
+                notify->post();
+            }
+            break;
+        }
+
+        case kWhatPause:
+        {
+            if (mExtractor != NULL) {
+                ++mPullExtractorGeneration;
+                mFirstSampleTimeRealUs = -1ll;
+                mFirstSampleTimeUs = -1ll;
+            }
+
+            if (mPaused) {
+                break;
+            }
+
+            for (size_t i = 0; i < mTracks.size(); ++i) {
+                mTracks.editValueAt(i)->pause();
+            }
+
+            mPaused = true;
+            break;
+        }
+
+        case kWhatResume:
+        {
+            if (mExtractor != NULL) {
+                schedulePullExtractor();
+            }
+
+            if (!mPaused) {
+                break;
+            }
+
+            for (size_t i = 0; i < mTracks.size(); ++i) {
+                mTracks.editValueAt(i)->resume();
+            }
+
+            mPaused = false;
+            break;
+        }
+
+        case kWhatPullExtractorSample:
+        {
+            int32_t generation;
+            CHECK(msg->findInt32("generation", &generation));
+
+            if (generation != mPullExtractorGeneration) {
+                break;
+            }
+
+            mPullExtractorPending = false;
+
+            onPullExtractor();
+            break;
+        }
+
+        default:
+            TRESPASS();
+    }
+}
+
+void WifiDisplaySource::PlaybackSession::onSinkFeedback(const sp<AMessage> &msg) {
+    int64_t avgLatencyUs;
+    CHECK(msg->findInt64("avgLatencyUs", &avgLatencyUs));
+
+    int64_t maxLatencyUs;
+    CHECK(msg->findInt64("maxLatencyUs", &maxLatencyUs));
+
+    ALOGI("sink reports avg. latency of %lld ms (max %lld ms)",
+          avgLatencyUs / 1000ll,
+          maxLatencyUs / 1000ll);
+
+    if (mVideoTrackIndex >= 0) {
+        const sp<Track> &videoTrack = mTracks.valueFor(mVideoTrackIndex);
+        sp<Converter> converter = videoTrack->converter();
+
+        if (converter != NULL) {
+            int32_t videoBitrate =
+                Converter::GetInt32Property("media.wfd.video-bitrate", -1);
+
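+            // In "adaptive" mode, nudge the encoder bitrate down 40% when the
+            // sink reports more than 300ms of average latency, up 10% when it
+            // drops below 100ms, then clamp to [500 kbps, 10 Mbps] below.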
+            char val[PROPERTY_VALUE_MAX];
+            if (videoBitrate < 0
+                    && property_get("media.wfd.video-bitrate", val, NULL)
+                    && !strcasecmp("adaptive", val)) {
+                videoBitrate = converter->getVideoBitrate();
+
+                if (avgLatencyUs > 300000ll) {
+                    videoBitrate *= 0.6;
+                } else if (avgLatencyUs < 100000ll) {
+                    videoBitrate *= 1.1;
+                }
+            }
+
+            if (videoBitrate > 0) {
+                if (videoBitrate < 500000) {
+                    videoBitrate = 500000;
+                } else if (videoBitrate > 10000000) {
+                    videoBitrate = 10000000;
+                }
+
+                if (videoBitrate != converter->getVideoBitrate()) {
+                    ALOGI("setting video bitrate to %d bps", videoBitrate);
+
+                    converter->setVideoBitrate(videoBitrate);
+                }
+            }
+        }
+
+        sp<RepeaterSource> repeaterSource = videoTrack->repeaterSource();
+        if (repeaterSource != NULL) {
+            double rateHz =
+                Converter::GetInt32Property(
+                        "media.wfd.video-framerate", -1);
+
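+            // Same idea for the capture frame rate: 0.9x above 300ms of
+            // latency, 1.1x below 200ms, clamped to 5..30 Hz below.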
+            char val[PROPERTY_VALUE_MAX];
+            if (rateHz < 0.0
+                    && property_get("media.wfd.video-framerate", val, NULL)
+                    && !strcasecmp("adaptive", val)) {
+                rateHz = repeaterSource->getFrameRate();
+
+                if (avgLatencyUs > 300000ll) {
+                    rateHz *= 0.9;
+                } else if (avgLatencyUs < 200000ll) {
+                    rateHz *= 1.1;
+                }
+            }
+
+            if (rateHz > 0) {
+                if (rateHz < 5.0) {
+                    rateHz = 5.0;
+                } else if (rateHz > 30.0) {
+                    rateHz = 30.0;
+                }
+
+                if (rateHz != repeaterSource->getFrameRate()) {
+                    ALOGI("setting frame rate to %.2f Hz", rateHz);
+
+                    repeaterSource->setFrameRate(rateHz);
+                }
+            }
+        }
+    }
+}
+
+status_t WifiDisplaySource::PlaybackSession::setupMediaPacketizer(
+        bool enableAudio, bool enableVideo) {
+    mExtractor = new NuMediaExtractor;
+
+    status_t err = mExtractor->setDataSource(
+            NULL /* httpService */, mMediaPath.c_str());
+
+    if (err != OK) {
+        return err;
+    }
+
+    size_t n = mExtractor->countTracks();
+    bool haveAudio = false;
+    bool haveVideo = false;
+    for (size_t i = 0; i < n; ++i) {
+        sp<AMessage> format;
+        err = mExtractor->getTrackFormat(i, &format);
+
+        if (err != OK) {
+            continue;
+        }
+
+        AString mime;
+        CHECK(format->findString("mime", &mime));
+
+        bool isAudio = !strncasecmp(mime.c_str(), "audio/", 6);
+        bool isVideo = !strncasecmp(mime.c_str(), "video/", 6);
+
+        if (isAudio && enableAudio && !haveAudio) {
+            haveAudio = true;
+        } else if (isVideo && enableVideo && !haveVideo) {
+            haveVideo = true;
+        } else {
+            continue;
+        }
+
+        err = mExtractor->selectTrack(i);
+
+        size_t trackIndex = mTracks.size();
+
+        sp<AMessage> notify = new AMessage(kWhatTrackNotify, this);
+        notify->setSize("trackIndex", trackIndex);
+
+        sp<Track> track = new Track(notify, format);
+        looper()->registerHandler(track);
+
+        mTracks.add(trackIndex, track);
+
+        mExtractorTrackToInternalTrack.add(i, trackIndex);
+
+        if (isVideo) {
+            mVideoTrackIndex = trackIndex;
+        }
+
+        uint32_t flags = MediaSender::FLAG_MANUALLY_PREPEND_SPS_PPS;
+
+        ssize_t mediaSenderTrackIndex =
+            mMediaSender->addTrack(format, flags);
+        CHECK_GE(mediaSenderTrackIndex, 0);
+
+        track->setMediaSenderTrackIndex(mediaSenderTrackIndex);
+
+        if ((haveAudio || !enableAudio) && (haveVideo || !enableVideo)) {
+            break;
+        }
+    }
+
+    return OK;
+}
+
+void WifiDisplaySource::PlaybackSession::schedulePullExtractor() {
+    if (mPullExtractorPending) {
+        return;
+    }
+
+    int64_t delayUs = 1000000; // default delay is 1 sec
+    int64_t sampleTimeUs;
+    status_t err = mExtractor->getSampleTime(&sampleTimeUs);
+
+    if (err == OK) {
+        int64_t nowUs = ALooper::GetNowUs();
+
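+        // Pace extraction against the wall clock: the first sample's media time
+        // is anchored to "now", and every later sample is posted at
+        // (sampleTimeUs - mFirstSampleTimeUs) after that anchor.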
+        if (mFirstSampleTimeRealUs < 0ll) {
+            mFirstSampleTimeRealUs = nowUs;
+            mFirstSampleTimeUs = sampleTimeUs;
+        }
+
+        int64_t whenUs = sampleTimeUs - mFirstSampleTimeUs + mFirstSampleTimeRealUs;
+        delayUs = whenUs - nowUs;
+    } else {
+        ALOGW("could not get sample time (%d)", err);
+    }
+
+    sp<AMessage> msg = new AMessage(kWhatPullExtractorSample, this);
+    msg->setInt32("generation", mPullExtractorGeneration);
+    msg->post(delayUs);
+
+    mPullExtractorPending = true;
+}
+
+void WifiDisplaySource::PlaybackSession::onPullExtractor() {
+    sp<ABuffer> accessUnit = new ABuffer(1024 * 1024);
+    status_t err = mExtractor->readSampleData(accessUnit);
+    if (err != OK) {
+        // EOS.
+        return;
+    }
+
+    int64_t timeUs;
+    CHECK_EQ((status_t)OK, mExtractor->getSampleTime(&timeUs));
+
+    accessUnit->meta()->setInt64(
+            "timeUs", mFirstSampleTimeRealUs + timeUs - mFirstSampleTimeUs);
+
+    size_t trackIndex;
+    CHECK_EQ((status_t)OK, mExtractor->getSampleTrackIndex(&trackIndex));
+
+    sp<AMessage> msg = new AMessage(kWhatConverterNotify, this);
+
+    msg->setSize(
+            "trackIndex", mExtractorTrackToInternalTrack.valueFor(trackIndex));
+
+    msg->setInt32("what", Converter::kWhatAccessUnit);
+    msg->setBuffer("accessUnit", accessUnit);
+    msg->post();
+
+    mExtractor->advance();
+
+    schedulePullExtractor();
+}
+
+status_t WifiDisplaySource::PlaybackSession::setupPacketizer(
+        bool enableAudio,
+        bool usePCMAudio,
+        bool enableVideo,
+        VideoFormats::ResolutionType videoResolutionType,
+        size_t videoResolutionIndex,
+        VideoFormats::ProfileType videoProfileType,
+        VideoFormats::LevelType videoLevelType) {
+    CHECK(enableAudio || enableVideo);
+
+    if (!mMediaPath.empty()) {
+        return setupMediaPacketizer(enableAudio, enableVideo);
+    }
+
+    if (enableVideo) {
+        status_t err = addVideoSource(
+                videoResolutionType, videoResolutionIndex, videoProfileType,
+                videoLevelType);
+
+        if (err != OK) {
+            return err;
+        }
+    }
+
+    if (!enableAudio) {
+        return OK;
+    }
+
+    return addAudioSource(usePCMAudio);
+}
+
+status_t WifiDisplaySource::PlaybackSession::addSource(
+        bool isVideo, const sp<MediaSource> &source, bool isRepeaterSource,
+        bool usePCMAudio, unsigned profileIdc, unsigned levelIdc,
+        unsigned constraintSet, size_t *numInputBuffers) {
+    CHECK(!usePCMAudio || !isVideo);
+    CHECK(!isRepeaterSource || isVideo);
+    CHECK(!profileIdc || isVideo);
+    CHECK(!levelIdc || isVideo);
+    CHECK(!constraintSet || isVideo);
+
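+    // The puller runs on its own looper because MediaSource::read() may block
+    // waiting for the next frame (RepeaterSource does exactly that); the
+    // encoder gets a separate codec looper so neither stalls the other.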
+    sp<ALooper> pullLooper = new ALooper;
+    pullLooper->setName("pull_looper");
+
+    pullLooper->start(
+            false /* runOnCallingThread */,
+            false /* canCallJava */,
+            PRIORITY_AUDIO);
+
+    sp<ALooper> codecLooper = new ALooper;
+    codecLooper->setName("codec_looper");
+
+    codecLooper->start(
+            false /* runOnCallingThread */,
+            false /* canCallJava */,
+            PRIORITY_AUDIO);
+
+    size_t trackIndex;
+
+    sp<AMessage> notify;
+
+    trackIndex = mTracks.size();
+
+    sp<AMessage> format;
+    status_t err = convertMetaDataToMessage(source->getFormat(), &format);
+    CHECK_EQ(err, (status_t)OK);
+
+    if (isVideo) {
+        format->setString("mime", MEDIA_MIMETYPE_VIDEO_AVC);
+        format->setInt32(
+                "android._input-metadata-buffer-type", kMetadataBufferTypeANWBuffer);
+        format->setInt32("android._store-metadata-in-buffers-output", (mHDCP != NULL)
+                && (mHDCP->getCaps() & HDCPModule::HDCP_CAPS_ENCRYPT_NATIVE));
+        format->setInt32(
+                "color-format", OMX_COLOR_FormatAndroidOpaque);
+        format->setInt32("profile-idc", profileIdc);
+        format->setInt32("level-idc", levelIdc);
+        format->setInt32("constraint-set", constraintSet);
+    } else {
+        if (usePCMAudio) {
+            format->setInt32("pcm-encoding", kAudioEncodingPcm16bit);
+            format->setString("mime", MEDIA_MIMETYPE_AUDIO_RAW);
+        } else {
+            format->setString("mime", MEDIA_MIMETYPE_AUDIO_AAC);
+        }
+    }
+
+    notify = new AMessage(kWhatConverterNotify, this);
+    notify->setSize("trackIndex", trackIndex);
+
+    sp<Converter> converter = new Converter(notify, codecLooper, format);
+
+    looper()->registerHandler(converter);
+
+    err = converter->init();
+    if (err != OK) {
+        ALOGE("%s converter returned err %d", isVideo ? "video" : "audio", err);
+
+        looper()->unregisterHandler(converter->id());
+        return err;
+    }
+
+    notify = new AMessage(Converter::kWhatMediaPullerNotify, converter);
+    notify->setSize("trackIndex", trackIndex);
+
+    sp<MediaPuller> puller = new MediaPuller(source, notify);
+    pullLooper->registerHandler(puller);
+
+    if (numInputBuffers != NULL) {
+        *numInputBuffers = converter->getInputBufferCount();
+    }
+
+    notify = new AMessage(kWhatTrackNotify, this);
+    notify->setSize("trackIndex", trackIndex);
+
+    sp<Track> track = new Track(
+            notify, pullLooper, codecLooper, puller, converter);
+
+    if (isRepeaterSource) {
+        track->setRepeaterSource(static_cast<RepeaterSource *>(source.get()));
+    }
+
+    looper()->registerHandler(track);
+
+    mTracks.add(trackIndex, track);
+
+    if (isVideo) {
+        mVideoTrackIndex = trackIndex;
+    }
+
+    uint32_t flags = 0;
+    if (converter->needToManuallyPrependSPSPPS()) {
+        flags |= MediaSender::FLAG_MANUALLY_PREPEND_SPS_PPS;
+    }
+
+    ssize_t mediaSenderTrackIndex =
+        mMediaSender->addTrack(converter->getOutputFormat(), flags);
+    CHECK_GE(mediaSenderTrackIndex, 0);
+
+    track->setMediaSenderTrackIndex(mediaSenderTrackIndex);
+
+    return OK;
+}
+
+status_t WifiDisplaySource::PlaybackSession::addVideoSource(
+        VideoFormats::ResolutionType videoResolutionType,
+        size_t videoResolutionIndex,
+        VideoFormats::ProfileType videoProfileType,
+        VideoFormats::LevelType videoLevelType) {
+    size_t width, height, framesPerSecond;
+    bool interlaced;
+    CHECK(VideoFormats::GetConfiguration(
+                videoResolutionType,
+                videoResolutionIndex,
+                &width,
+                &height,
+                &framesPerSecond,
+                &interlaced));
+
+    unsigned profileIdc, levelIdc, constraintSet;
+    CHECK(VideoFormats::GetProfileLevel(
+                videoProfileType,
+                videoLevelType,
+                &profileIdc,
+                &levelIdc,
+                &constraintSet));
+
+    sp<SurfaceMediaSource> source = new SurfaceMediaSource(width, height);
+
+    source->setUseAbsoluteTimestamps();
+
+    sp<RepeaterSource> videoSource =
+        new RepeaterSource(source, framesPerSecond);
+
+    size_t numInputBuffers;
+    status_t err = addSource(
+            true /* isVideo */, videoSource, true /* isRepeaterSource */,
+            false /* usePCMAudio */, profileIdc, levelIdc, constraintSet,
+            &numInputBuffers);
+
+    if (err != OK) {
+        return err;
+    }
+
+    err = source->setMaxAcquiredBufferCount(numInputBuffers);
+    CHECK_EQ(err, (status_t)OK);
+
+    mProducer = source->getProducer();
+
+    return OK;
+}
+
+status_t WifiDisplaySource::PlaybackSession::addAudioSource(bool usePCMAudio) {
+    sp<AudioSource> audioSource = new AudioSource(
+            AUDIO_SOURCE_REMOTE_SUBMIX,
+            mOpPackageName,
+            48000 /* sampleRate */,
+            2 /* channelCount */);
+
+    if (audioSource->initCheck() == OK) {
+        return addSource(
+                false /* isVideo */, audioSource, false /* isRepeaterSource */,
+                usePCMAudio, 0 /* profileIdc */, 0 /* levelIdc */,
+                0 /* constraintSet */, NULL /* numInputBuffers */);
+    }
+
+    ALOGW("Unable to instantiate audio source");
+
+    return OK;
+}
+
+sp<IGraphicBufferProducer> WifiDisplaySource::PlaybackSession::getSurfaceTexture() {
+    return mProducer;
+}
+
+void WifiDisplaySource::PlaybackSession::requestIDRFrame() {
+    for (size_t i = 0; i < mTracks.size(); ++i) {
+        const sp<Track> &track = mTracks.valueAt(i);
+
+        track->requestIDRFrame();
+    }
+}
+
+void WifiDisplaySource::PlaybackSession::notifySessionDead() {
+    // Inform WifiDisplaySource of our premature death (wish).
+    sp<AMessage> notify = mNotify->dup();
+    notify->setInt32("what", kWhatSessionDead);
+    notify->post();
+
+    mWeAreDead = true;
+}
+
+}  // namespace android
+
diff --git a/media/libstagefright/wifi-display/source/PlaybackSession.h b/media/libstagefright/wifi-display/source/PlaybackSession.h
new file mode 100644
index 0000000..f6673df
--- /dev/null
+++ b/media/libstagefright/wifi-display/source/PlaybackSession.h
@@ -0,0 +1,176 @@
+/*
+ * Copyright 2012, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef PLAYBACK_SESSION_H_
+
+#define PLAYBACK_SESSION_H_
+
+#include "MediaSender.h"
+#include "VideoFormats.h"
+#include "WifiDisplaySource.h"
+
+#include <utils/String16.h>
+
+namespace android {
+
+struct ABuffer;
+struct IHDCP;
+class IGraphicBufferProducer;
+struct MediaPuller;
+struct MediaSource;
+struct MediaSender;
+struct NuMediaExtractor;
+
+// Encapsulates the state of an RTP/RTCP session in the context of wifi
+// display.
+struct WifiDisplaySource::PlaybackSession : public AHandler {
+    PlaybackSession(
+            const String16 &opPackageName,
+            const sp<ANetworkSession> &netSession,
+            const sp<AMessage> &notify,
+            const struct in_addr &interfaceAddr,
+            const sp<IHDCP> &hdcp,
+            const char *path = NULL);
+
+    status_t init(
+            const char *clientIP,
+            int32_t clientRtp,
+            RTPSender::TransportMode rtpMode,
+            int32_t clientRtcp,
+            RTPSender::TransportMode rtcpMode,
+            bool enableAudio,
+            bool usePCMAudio,
+            bool enableVideo,
+            VideoFormats::ResolutionType videoResolutionType,
+            size_t videoResolutionIndex,
+            VideoFormats::ProfileType videoProfileType,
+            VideoFormats::LevelType videoLevelType);
+
+    void destroyAsync();
+
+    int32_t getRTPPort() const;
+
+    int64_t getLastLifesignUs() const;
+    void updateLiveness();
+
+    status_t play();
+    status_t finishPlay();
+    status_t pause();
+
+    sp<IGraphicBufferProducer> getSurfaceTexture();
+
+    void requestIDRFrame();
+
+    enum {
+        kWhatSessionDead,
+        kWhatBinaryData,
+        kWhatSessionEstablished,
+        kWhatSessionDestroyed,
+    };
+
+protected:
+    virtual void onMessageReceived(const sp<AMessage> &msg);
+    virtual ~PlaybackSession();
+
+private:
+    struct Track;
+
+    enum {
+        kWhatMediaPullerNotify,
+        kWhatConverterNotify,
+        kWhatTrackNotify,
+        kWhatUpdateSurface,
+        kWhatPause,
+        kWhatResume,
+        kWhatMediaSenderNotify,
+        kWhatPullExtractorSample,
+    };
+
+    String16 mOpPackageName;
+
+    sp<ANetworkSession> mNetSession;
+    sp<AMessage> mNotify;
+    in_addr mInterfaceAddr;
+    sp<IHDCP> mHDCP;
+    AString mMediaPath;
+
+    sp<MediaSender> mMediaSender;
+    int32_t mLocalRTPPort;
+
+    bool mWeAreDead;
+    bool mPaused;
+
+    int64_t mLastLifesignUs;
+
+    sp<IGraphicBufferProducer> mProducer;
+
+    KeyedVector<size_t, sp<Track> > mTracks;
+    ssize_t mVideoTrackIndex;
+
+    int64_t mPrevTimeUs;
+
+    sp<NuMediaExtractor> mExtractor;
+    KeyedVector<size_t, size_t> mExtractorTrackToInternalTrack;
+    bool mPullExtractorPending;
+    int32_t mPullExtractorGeneration;
+    int64_t mFirstSampleTimeRealUs;
+    int64_t mFirstSampleTimeUs;
+
+    status_t setupMediaPacketizer(bool enableAudio, bool enableVideo);
+
+    status_t setupPacketizer(
+            bool enableAudio,
+            bool usePCMAudio,
+            bool enableVideo,
+            VideoFormats::ResolutionType videoResolutionType,
+            size_t videoResolutionIndex,
+            VideoFormats::ProfileType videoProfileType,
+            VideoFormats::LevelType videoLevelType);
+
+    status_t addSource(
+            bool isVideo,
+            const sp<MediaSource> &source,
+            bool isRepeaterSource,
+            bool usePCMAudio,
+            unsigned profileIdc,
+            unsigned levelIdc,
+            unsigned constraintSet,
+            size_t *numInputBuffers);
+
+    status_t addVideoSource(
+            VideoFormats::ResolutionType videoResolutionType,
+            size_t videoResolutionIndex,
+            VideoFormats::ProfileType videoProfileType,
+            VideoFormats::LevelType videoLevelType);
+
+    status_t addAudioSource(bool usePCMAudio);
+
+    status_t onMediaSenderInitialized();
+
+    void notifySessionDead();
+
+    void schedulePullExtractor();
+    void onPullExtractor();
+
+    void onSinkFeedback(const sp<AMessage> &msg);
+
+    DISALLOW_EVIL_CONSTRUCTORS(PlaybackSession);
+};
+
+}  // namespace android
+
+#endif  // PLAYBACK_SESSION_H_
+
diff --git a/media/libstagefright/wifi-display/source/RepeaterSource.cpp b/media/libstagefright/wifi-display/source/RepeaterSource.cpp
new file mode 100644
index 0000000..e225a02
--- /dev/null
+++ b/media/libstagefright/wifi-display/source/RepeaterSource.cpp
@@ -0,0 +1,219 @@
+//#define LOG_NDEBUG 0
+#define LOG_TAG "RepeaterSource"
+#include <utils/Log.h>
+
+#include "RepeaterSource.h"
+
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/ALooper.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/MediaBufferBase.h>
+#include <media/stagefright/MetaData.h>
+
+namespace android {
+
+RepeaterSource::RepeaterSource(const sp<MediaSource> &source, double rateHz)
+    : mStarted(false),
+      mSource(source),
+      mRateHz(rateHz),
+      mBuffer(NULL),
+      mResult(OK),
+      mLastBufferUpdateUs(-1ll),
+      mStartTimeUs(-1ll),
+      mFrameCount(0) {
+}
+
+RepeaterSource::~RepeaterSource() {
+    CHECK(!mStarted);
+}
+
+double RepeaterSource::getFrameRate() const {
+    return mRateHz;
+}
+
+void RepeaterSource::setFrameRate(double rateHz) {
+    Mutex::Autolock autoLock(mLock);
+
+    if (rateHz == mRateHz) {
+        return;
+    }
+
+    if (mStartTimeUs >= 0ll) {
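+        // Rebase the start time to where the next frame would have landed at
+        // the old rate, so timestamps stay monotonic across the rate change.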
+        int64_t nextTimeUs = mStartTimeUs + (mFrameCount * 1000000ll) / mRateHz;
+        mStartTimeUs = nextTimeUs;
+        mFrameCount = 0;
+    }
+    mRateHz = rateHz;
+}
+
+status_t RepeaterSource::start(MetaData *params) {
+    CHECK(!mStarted);
+
+    status_t err = mSource->start(params);
+
+    if (err != OK) {
+        return err;
+    }
+
+    mBuffer = NULL;
+    mResult = OK;
+    mStartTimeUs = -1ll;
+    mFrameCount = 0;
+
+    mLooper = new ALooper;
+    mLooper->setName("repeater_looper");
+    mLooper->start();
+
+    mReflector = new AHandlerReflector<RepeaterSource>(this);
+    mLooper->registerHandler(mReflector);
+
+    postRead();
+
+    mStarted = true;
+
+    return OK;
+}
+
+status_t RepeaterSource::stop() {
+    CHECK(mStarted);
+
+    ALOGV("stopping");
+
+    status_t err = mSource->stop();
+
+    if (mLooper != NULL) {
+        mLooper->stop();
+        mLooper.clear();
+
+        mReflector.clear();
+    }
+
+    if (mBuffer != NULL) {
+        ALOGV("releasing mbuf %p", mBuffer);
+        mBuffer->release();
+        mBuffer = NULL;
+    }
+
+
+
+    mStarted = false;
+
+    return err;
+}
+
+sp<MetaData> RepeaterSource::getFormat() {
+    return mSource->getFormat();
+}
+
+status_t RepeaterSource::read(
+        MediaBufferBase **buffer, const ReadOptions *options) {
+    int64_t seekTimeUs;
+    ReadOptions::SeekMode seekMode;
+    CHECK(options == NULL || !options->getSeekTo(&seekTimeUs, &seekMode));
+
+    for (;;) {
+        int64_t bufferTimeUs = -1ll;
+
+        if (mStartTimeUs < 0ll) {
+            Mutex::Autolock autoLock(mLock);
+            while ((mLastBufferUpdateUs < 0ll || mBuffer == NULL)
+                    && mResult == OK) {
+                mCondition.wait(mLock);
+            }
+
+            ALOGV("now resuming.");
+            mStartTimeUs = ALooper::GetNowUs();
+            bufferTimeUs = mStartTimeUs;
+        } else {
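+            // Frame N is due at mStartTimeUs + N / mRateHz; sleep off whatever
+            // time remains so buffers are delivered at the nominal rate.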
+            bufferTimeUs = mStartTimeUs + (mFrameCount * 1000000ll) / mRateHz;
+
+            int64_t nowUs = ALooper::GetNowUs();
+            int64_t delayUs = bufferTimeUs - nowUs;
+
+            if (delayUs > 0ll) {
+                usleep(delayUs);
+            }
+        }
+
+        bool stale = false;
+
+        {
+            Mutex::Autolock autoLock(mLock);
+            if (mResult != OK) {
+                CHECK(mBuffer == NULL);
+                return mResult;
+            }
+
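+            // With idle suspend enabled, a buffer that hasn't been refreshed
+            // for a second means SurfaceFlinger has gone quiet; mark this read
+            // stale and go dormant instead of repeating the old frame.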
+#if SUSPEND_VIDEO_IF_IDLE
+            int64_t nowUs = ALooper::GetNowUs();
+            if (nowUs - mLastBufferUpdateUs > 1000000ll) {
+                mLastBufferUpdateUs = -1ll;
+                stale = true;
+            } else
+#endif
+            {
+                mBuffer->add_ref();
+                *buffer = mBuffer;
+                (*buffer)->meta_data().setInt64(kKeyTime, bufferTimeUs);
+                ++mFrameCount;
+            }
+        }
+
+        if (!stale) {
+            break;
+        }
+
+        mStartTimeUs = -1ll;
+        mFrameCount = 0;
+        ALOGV("now dormant");
+    }
+
+    return OK;
+}
+
+void RepeaterSource::postRead() {
+    (new AMessage(kWhatRead, mReflector))->post();
+}
+
+void RepeaterSource::onMessageReceived(const sp<AMessage> &msg) {
+    switch (msg->what()) {
+        case kWhatRead:
+        {
+            MediaBufferBase *buffer;
+            status_t err = mSource->read(&buffer);
+
+            ALOGV("read mbuf %p", buffer);
+
+            Mutex::Autolock autoLock(mLock);
+            if (mBuffer != NULL) {
+                mBuffer->release();
+                mBuffer = NULL;
+            }
+            mBuffer = buffer;
+            mResult = err;
+            mLastBufferUpdateUs = ALooper::GetNowUs();
+
+            mCondition.broadcast();
+
+            if (err == OK) {
+                postRead();
+            }
+            break;
+        }
+
+        default:
+            TRESPASS();
+    }
+}
+
+void RepeaterSource::wakeUp() {
+    ALOGV("wakeUp");
+    Mutex::Autolock autoLock(mLock);
+    if (mLastBufferUpdateUs < 0ll && mBuffer != NULL) {
+        mLastBufferUpdateUs = ALooper::GetNowUs();
+        mCondition.broadcast();
+    }
+}
+
+}  // namespace android
diff --git a/media/libstagefright/wifi-display/source/RepeaterSource.h b/media/libstagefright/wifi-display/source/RepeaterSource.h
new file mode 100644
index 0000000..c1cb633
--- /dev/null
+++ b/media/libstagefright/wifi-display/source/RepeaterSource.h
@@ -0,0 +1,67 @@
+#ifndef REPEATER_SOURCE_H_
+
+#define REPEATER_SOURCE_H_
+
+#include <media/stagefright/foundation/ABase.h>
+#include <media/stagefright/foundation/AHandlerReflector.h>
+#include <media/MediaSource.h>
+
+#define SUSPEND_VIDEO_IF_IDLE   0
+
+namespace android {
+
+// This MediaSource delivers frames at a constant rate by repeating buffers
+// if necessary.
+struct RepeaterSource : public MediaSource {
+    RepeaterSource(const sp<MediaSource> &source, double rateHz);
+
+    virtual status_t start(MetaData *params);
+    virtual status_t stop();
+    virtual sp<MetaData> getFormat();
+
+    virtual status_t read(
+            MediaBufferBase **buffer, const ReadOptions *options);
+
+    void onMessageReceived(const sp<AMessage> &msg);
+
+    // If RepeaterSource is currently dormant, because SurfaceFlinger didn't
+    // send updates in a while, this is its wakeup call.
+    void wakeUp();
+
+    double getFrameRate() const;
+    void setFrameRate(double rateHz);
+
+protected:
+    virtual ~RepeaterSource();
+
+private:
+    enum {
+        kWhatRead,
+    };
+
+    Mutex mLock;
+    Condition mCondition;
+
+    bool mStarted;
+
+    sp<MediaSource> mSource;
+    double mRateHz;
+
+    sp<ALooper> mLooper;
+    sp<AHandlerReflector<RepeaterSource> > mReflector;
+
+    MediaBufferBase *mBuffer;
+    status_t mResult;
+    int64_t mLastBufferUpdateUs;
+
+    int64_t mStartTimeUs;
+    int32_t mFrameCount;
+
+    void postRead();
+
+    DISALLOW_EVIL_CONSTRUCTORS(RepeaterSource);
+};
+
+}  // namespace android
+
+#endif // REPEATER_SOURCE_H_
diff --git a/media/libstagefright/wifi-display/source/TSPacketizer.cpp b/media/libstagefright/wifi-display/source/TSPacketizer.cpp
new file mode 100644
index 0000000..9791ed7
--- /dev/null
+++ b/media/libstagefright/wifi-display/source/TSPacketizer.cpp
@@ -0,0 +1,1055 @@
+/*
+ * Copyright 2012, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "TSPacketizer"
+#include <utils/Log.h>
+
+#include "TSPacketizer.h"
+#include <media/stagefright/foundation/avc_utils.h>
+
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/foundation/hexdump.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MediaErrors.h>
+
+#include <arpa/inet.h>
+
+namespace android {
+
+struct TSPacketizer::Track : public RefBase {
+    Track(const sp<AMessage> &format,
+          unsigned PID, unsigned streamType, unsigned streamID);
+
+    unsigned PID() const;
+    unsigned streamType() const;
+    unsigned streamID() const;
+
+    // Returns the previous value.
+    unsigned incrementContinuityCounter();
+
+    bool isAudio() const;
+    bool isVideo() const;
+
+    bool isH264() const;
+    bool isAAC() const;
+    bool lacksADTSHeader() const;
+    bool isPCMAudio() const;
+
+    sp<ABuffer> prependCSD(const sp<ABuffer> &accessUnit) const;
+    sp<ABuffer> prependADTSHeader(const sp<ABuffer> &accessUnit) const;
+
+    size_t countDescriptors() const;
+    sp<ABuffer> descriptorAt(size_t index) const;
+
+    void finalize();
+    void extractCSDIfNecessary();
+
+protected:
+    virtual ~Track();
+
+private:
+    sp<AMessage> mFormat;
+
+    unsigned mPID;
+    unsigned mStreamType;
+    unsigned mStreamID;
+    unsigned mContinuityCounter;
+
+    AString mMIME;
+    Vector<sp<ABuffer> > mCSD;
+
+    Vector<sp<ABuffer> > mDescriptors;
+
+    bool mAudioLacksADTSHeaders;
+    bool mFinalized;
+    bool mExtractedCSD;
+
+    DISALLOW_EVIL_CONSTRUCTORS(Track);
+};
+
+TSPacketizer::Track::Track(
+        const sp<AMessage> &format,
+        unsigned PID, unsigned streamType, unsigned streamID)
+    : mFormat(format),
+      mPID(PID),
+      mStreamType(streamType),
+      mStreamID(streamID),
+      mContinuityCounter(0),
+      mAudioLacksADTSHeaders(false),
+      mFinalized(false),
+      mExtractedCSD(false) {
+    CHECK(format->findString("mime", &mMIME));
+}
+
+void TSPacketizer::Track::extractCSDIfNecessary() {
+    if (mExtractedCSD) {
+        return;
+    }
+
+    if (!strcasecmp(mMIME.c_str(), MEDIA_MIMETYPE_VIDEO_AVC)
+            || !strcasecmp(mMIME.c_str(), MEDIA_MIMETYPE_AUDIO_AAC)) {
+        for (size_t i = 0;; ++i) {
+            sp<ABuffer> csd;
+            if (!mFormat->findBuffer(AStringPrintf("csd-%d", i).c_str(), &csd)) {
+                break;
+            }
+
+            mCSD.push(csd);
+        }
+
+        if (!strcasecmp(mMIME.c_str(), MEDIA_MIMETYPE_AUDIO_AAC)) {
+            int32_t isADTS;
+            if (!mFormat->findInt32("is-adts", &isADTS) || isADTS == 0) {
+                mAudioLacksADTSHeaders = true;
+            }
+        }
+    }
+
+    mExtractedCSD = true;
+}
+
+TSPacketizer::Track::~Track() {
+}
+
+unsigned TSPacketizer::Track::PID() const {
+    return mPID;
+}
+
+unsigned TSPacketizer::Track::streamType() const {
+    return mStreamType;
+}
+
+unsigned TSPacketizer::Track::streamID() const {
+    return mStreamID;
+}
+
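+// The MPEG-TS continuity_counter is a 4-bit field, hence the wrap at 16.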
+unsigned TSPacketizer::Track::incrementContinuityCounter() {
+    unsigned prevCounter = mContinuityCounter;
+
+    if (++mContinuityCounter == 16) {
+        mContinuityCounter = 0;
+    }
+
+    return prevCounter;
+}
+
+bool TSPacketizer::Track::isAudio() const {
+    return !strncasecmp("audio/", mMIME.c_str(), 6);
+}
+
+bool TSPacketizer::Track::isVideo() const {
+    return !strncasecmp("video/", mMIME.c_str(), 6);
+}
+
+bool TSPacketizer::Track::isH264() const {
+    return !strcasecmp(mMIME.c_str(), MEDIA_MIMETYPE_VIDEO_AVC);
+}
+
+bool TSPacketizer::Track::isAAC() const {
+    return !strcasecmp(mMIME.c_str(), MEDIA_MIMETYPE_AUDIO_AAC);
+}
+
+bool TSPacketizer::Track::isPCMAudio() const {
+    return !strcasecmp(mMIME.c_str(), MEDIA_MIMETYPE_AUDIO_RAW);
+}
+
+bool TSPacketizer::Track::lacksADTSHeader() const {
+    return mAudioLacksADTSHeaders;
+}
+
+sp<ABuffer> TSPacketizer::Track::prependCSD(
+        const sp<ABuffer> &accessUnit) const {
+    size_t size = 0;
+    for (size_t i = 0; i < mCSD.size(); ++i) {
+        size += mCSD.itemAt(i)->size();
+    }
+
+    sp<ABuffer> dup = new ABuffer(accessUnit->size() + size);
+    size_t offset = 0;
+    for (size_t i = 0; i < mCSD.size(); ++i) {
+        const sp<ABuffer> &csd = mCSD.itemAt(i);
+
+        memcpy(dup->data() + offset, csd->data(), csd->size());
+        offset += csd->size();
+    }
+
+    memcpy(dup->data() + offset, accessUnit->data(), accessUnit->size());
+
+    return dup;
+}
+
+sp<ABuffer> TSPacketizer::Track::prependADTSHeader(
+        const sp<ABuffer> &accessUnit) const {
+    CHECK_EQ(mCSD.size(), 1u);
+
+    const uint8_t *codec_specific_data = mCSD.itemAt(0)->data();
+
+    const uint32_t aac_frame_length = accessUnit->size() + 7;
+
+    sp<ABuffer> dup = new ABuffer(aac_frame_length);
+
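+    // The first two bytes of csd-0 are the AudioSpecificConfig:
+    // 5 bits of audioObjectType, 4 bits of samplingFrequencyIndex and
+    // 4 bits of channelConfiguration; the ADTS profile field extracted
+    // below is audioObjectType - 1.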
+    unsigned profile = (codec_specific_data[0] >> 3) - 1;
+
+    unsigned sampling_freq_index =
+        ((codec_specific_data[0] & 7) << 1)
+        | (codec_specific_data[1] >> 7);
+
+    unsigned channel_configuration =
+        (codec_specific_data[1] >> 3) & 0x0f;
+
+    uint8_t *ptr = dup->data();
+
+    *ptr++ = 0xff;
+    *ptr++ = 0xf9;  // b11111001, ID=1(MPEG-2), layer=0, protection_absent=1
+
+    *ptr++ =
+        profile << 6
+        | sampling_freq_index << 2
+        | ((channel_configuration >> 2) & 1);  // private_bit=0
+
+    // original_copy=0, home=0, copyright_id_bit=0, copyright_id_start=0
+    *ptr++ =
+        (channel_configuration & 3) << 6
+        | aac_frame_length >> 11;
+    *ptr++ = (aac_frame_length >> 3) & 0xff;
+    *ptr++ = (aac_frame_length & 7) << 5;
+
+    // adts_buffer_fullness=0, number_of_raw_data_blocks_in_frame=0
+    *ptr++ = 0;
+
+    memcpy(ptr, accessUnit->data(), accessUnit->size());
+
+    return dup;
+}
+
+size_t TSPacketizer::Track::countDescriptors() const {
+    return mDescriptors.size();
+}
+
+sp<ABuffer> TSPacketizer::Track::descriptorAt(size_t index) const {
+    CHECK_LT(index, mDescriptors.size());
+    return mDescriptors.itemAt(index);
+}
+
+void TSPacketizer::Track::finalize() {
+    if (mFinalized) {
+        return;
+    }
+
+    if (isH264()) {
+        {
+            // AVC video descriptor (40)
+
+            sp<ABuffer> descriptor = new ABuffer(6);
+            uint8_t *data = descriptor->data();
+            data[0] = 40;  // descriptor_tag
+            data[1] = 4;  // descriptor_length
+
+            if (mCSD.size() > 0) {
+                CHECK_GE(mCSD.size(), 1u);
+                const sp<ABuffer> &sps = mCSD.itemAt(0);
+                CHECK(!memcmp("\x00\x00\x00\x01", sps->data(), 4));
+                CHECK_GE(sps->size(), 7u);
+                // profile_idc, constraint_set*, level_idc
+                memcpy(&data[2], sps->data() + 4, 3);
+            } else {
+                int32_t profileIdc, levelIdc, constraintSet;
+                CHECK(mFormat->findInt32("profile-idc", &profileIdc));
+                CHECK(mFormat->findInt32("level-idc", &levelIdc));
+                CHECK(mFormat->findInt32("constraint-set", &constraintSet));
+                CHECK_GE(profileIdc, 0);
+                CHECK_GE(levelIdc, 0);
+                data[2] = profileIdc;    // profile_idc
+                data[3] = constraintSet; // constraint_set*
+                data[4] = levelIdc;      // level_idc
+            }
+
+            // AVC_still_present=0, AVC_24_hour_picture_flag=0, reserved
+            data[5] = 0x3f;
+
+            mDescriptors.push_back(descriptor);
+        }
+
+        {
+            // AVC timing and HRD descriptor (42)
+
+            sp<ABuffer> descriptor = new ABuffer(4);
+            uint8_t *data = descriptor->data();
+            data[0] = 42;  // descriptor_tag
+            data[1] = 2;  // descriptor_length
+
+            // hrd_management_valid_flag = 0
+            // reserved = 111111b
+            // picture_and_timing_info_present = 0
+
+            data[2] = 0x7e;
+
+            // fixed_frame_rate_flag = 0
+            // temporal_poc_flag = 0
+            // picture_to_display_conversion_flag = 0
+            // reserved = 11111b
+            data[3] = 0x1f;
+
+            mDescriptors.push_back(descriptor);
+        }
+    } else if (isPCMAudio()) {
+        // LPCM audio stream descriptor (0x83)
+
+        int32_t channelCount;
+        CHECK(mFormat->findInt32("channel-count", &channelCount));
+        CHECK_EQ(channelCount, 2);
+
+        int32_t sampleRate;
+        CHECK(mFormat->findInt32("sample-rate", &sampleRate));
+        CHECK(sampleRate == 44100 || sampleRate == 48000);
+
+        sp<ABuffer> descriptor = new ABuffer(4);
+        uint8_t *data = descriptor->data();
+        data[0] = 0x83;  // descriptor_tag
+        data[1] = 2;  // descriptor_length
+
+        unsigned sampling_frequency = (sampleRate == 44100) ? 1 : 2;
+
+        data[2] = (sampling_frequency << 5)
+                    | (3 /* reserved */ << 1)
+                    | 0 /* emphasis_flag */;
+
+        data[3] =
+            (1 /* number_of_channels = stereo */ << 5)
+            | 0xf /* reserved */;
+
+        mDescriptors.push_back(descriptor);
+    }
+
+    mFinalized = true;
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+TSPacketizer::TSPacketizer(uint32_t flags)
+    : mFlags(flags),
+      mPATContinuityCounter(0),
+      mPMTContinuityCounter(0) {
+    initCrcTable();
+
+    if (flags & (EMIT_HDCP20_DESCRIPTOR | EMIT_HDCP21_DESCRIPTOR)) {
+        int32_t hdcpVersion;
+        if (flags & EMIT_HDCP20_DESCRIPTOR) {
+            CHECK(!(flags & EMIT_HDCP21_DESCRIPTOR));
+            hdcpVersion = 0x20;
+        } else {
+            CHECK(!(flags & EMIT_HDCP20_DESCRIPTOR));
+
+            // HDCP2.0 _and_ HDCP 2.1 specs say to set the version
+            // inside the HDCP descriptor to 0x20!!!
+            hdcpVersion = 0x20;
+        }
+
+        // HDCP descriptor
+        sp<ABuffer> descriptor = new ABuffer(7);
+        uint8_t *data = descriptor->data();
+        data[0] = 0x05;  // descriptor_tag
+        data[1] = 5;  // descriptor_length
+        data[2] = 'H';
+        data[3] = 'D';
+        data[4] = 'C';
+        data[5] = 'P';
+        data[6] = hdcpVersion;
+
+        mProgramInfoDescriptors.push_back(descriptor);
+    }
+}
+
+TSPacketizer::~TSPacketizer() {
+}
+
+ssize_t TSPacketizer::addTrack(const sp<AMessage> &format) {
+    AString mime;
+    CHECK(format->findString("mime", &mime));
+
+    unsigned PIDStart;
+    bool isVideo = !strncasecmp("video/", mime.c_str(), 6);
+    bool isAudio = !strncasecmp("audio/", mime.c_str(), 6);
+
+    if (isVideo) {
+        PIDStart = 0x1011;
+    } else if (isAudio) {
+        PIDStart = 0x1100;
+    } else {
+        return ERROR_UNSUPPORTED;
+    }
+
+    unsigned streamType;
+    unsigned streamIDStart;
+    unsigned streamIDStop;
+
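+    // Stream types follow ISO/IEC 13818-1: 0x1b is AVC video, 0x0f is AAC
+    // with ADTS framing; 0x83 is a user-private stream_type, used here for
+    // LPCM. The PES stream_ids come from 0xe0-0xef for video, 0xc0-0xdf for
+    // audio and 0xbd (private_stream_1) for LPCM.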
+    if (!strcasecmp(mime.c_str(), MEDIA_MIMETYPE_VIDEO_AVC)) {
+        streamType = 0x1b;
+        streamIDStart = 0xe0;
+        streamIDStop = 0xef;
+    } else if (!strcasecmp(mime.c_str(), MEDIA_MIMETYPE_AUDIO_AAC)) {
+        streamType = 0x0f;
+        streamIDStart = 0xc0;
+        streamIDStop = 0xdf;
+    } else if (!strcasecmp(mime.c_str(), MEDIA_MIMETYPE_AUDIO_RAW)) {
+        streamType = 0x83;
+        streamIDStart = 0xbd;
+        streamIDStop = 0xbd;
+    } else {
+        return ERROR_UNSUPPORTED;
+    }
+
+    size_t numTracksOfThisType = 0;
+    unsigned PID = PIDStart;
+
+    for (size_t i = 0; i < mTracks.size(); ++i) {
+        const sp<Track> &track = mTracks.itemAt(i);
+
+        if (track->streamType() == streamType) {
+            ++numTracksOfThisType;
+        }
+
+        if ((isAudio && track->isAudio()) || (isVideo && track->isVideo())) {
+            ++PID;
+        }
+    }
+
+    unsigned streamID = streamIDStart + numTracksOfThisType;
+    if (streamID > streamIDStop) {
+        return -ERANGE;
+    }
+
+    sp<Track> track = new Track(format, PID, streamType, streamID);
+    return mTracks.add(track);
+}
+
+status_t TSPacketizer::extractCSDIfNecessary(size_t trackIndex) {
+    if (trackIndex >= mTracks.size()) {
+        return -ERANGE;
+    }
+
+    const sp<Track> &track = mTracks.itemAt(trackIndex);
+    track->extractCSDIfNecessary();
+
+    return OK;
+}
+
+status_t TSPacketizer::packetize(
+        size_t trackIndex,
+        const sp<ABuffer> &_accessUnit,
+        sp<ABuffer> *packets,
+        uint32_t flags,
+        const uint8_t *PES_private_data, size_t PES_private_data_len,
+        size_t numStuffingBytes) {
+    sp<ABuffer> accessUnit = _accessUnit;
+
+    int64_t timeUs;
+    CHECK(accessUnit->meta()->findInt64("timeUs", &timeUs));
+
+    packets->clear();
+
+    if (trackIndex >= mTracks.size()) {
+        return -ERANGE;
+    }
+
+    const sp<Track> &track = mTracks.itemAt(trackIndex);
+
+    if (track->isH264() && (flags & PREPEND_SPS_PPS_TO_IDR_FRAMES)
+            && IsIDR(accessUnit->data(), accessUnit->size())) {
+        // prepend codec specific data, i.e. SPS and PPS.
+        accessUnit = track->prependCSD(accessUnit);
+    } else if (track->isAAC() && track->lacksADTSHeader()) {
+        CHECK(!(flags & IS_ENCRYPTED));
+        accessUnit = track->prependADTSHeader(accessUnit);
+    }
+
+    // 0x47
+    // transport_error_indicator = b0
+    // payload_unit_start_indicator = b1
+    // transport_priority = b0
+    // PID
+    // transport_scrambling_control = b00
+    // adaptation_field_control = b??
+    // continuity_counter = b????
+    // -- payload follows
+    // packet_startcode_prefix = 0x000001
+    // stream_id
+    // PES_packet_length = 0x????
+    // reserved = b10
+    // PES_scrambling_control = b00
+    // PES_priority = b0
+    // data_alignment_indicator = b1
+    // copyright = b0
+    // original_or_copy = b0
+    // PTS_DTS_flags = b10  (PTS only)
+    // ESCR_flag = b0
+    // ES_rate_flag = b0
+    // DSM_trick_mode_flag = b0
+    // additional_copy_info_flag = b0
+    // PES_CRC_flag = b0
+    // PES_extension_flag = b0
+    // PES_header_data_length = 0x05
+    // reserved = b0010 (PTS)
+    // PTS[32..30] = b???
+    // reserved = b1
+    // PTS[29..15] = b??? ???? ???? ???? (15 bits)
+    // reserved = b1
+    // PTS[14..0] = b??? ???? ???? ???? (15 bits)
+    // reserved = b1
+    // the first fragment of "buffer" follows
+
+    // Each transport packet (except for the last one contributing to the PES
+    // payload) must contain a multiple of 16 bytes of payload per HDCP spec.
+    bool alignPayload =
+        (mFlags & (EMIT_HDCP20_DESCRIPTOR | EMIT_HDCP21_DESCRIPTOR));
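+    // e.g. a follow-up packet has 184 payload bytes available, so an aligned
+    // fragment uses at most 176 of them (11 * 16); only the final fragment of
+    // the PES packet may fall short of a multiple of 16.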
+
+    /*
+       a) The very first PES transport stream packet contains
+
+       4 bytes of TS header
+       ... padding
+       14 bytes of static PES header
+       PES_private_data_len + 1 bytes (only if PES_private_data_len > 0)
+       numStuffingBytes bytes
+
+       followed by the payload
+
+       b) Subsequent PES transport stream packets contain
+
+       4 bytes of TS header
+       ... padding
+
+       followed by the payload
+    */
+
+    size_t PES_packet_length = accessUnit->size() + 8 + numStuffingBytes;
+    if (PES_private_data_len > 0) {
+        PES_packet_length += PES_private_data_len + 1;
+    }
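+    // PES_packet_length counts everything after the length field itself:
+    // 3 bytes of PES flags and header-length, the 5-byte PTS, an optional
+    // flag byte plus PES_private_data, the stuffing bytes and the payload.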
+
+    size_t numTSPackets = 1;
+
+    {
+        // Make sure the PES header fits into a single TS packet:
+        size_t PES_header_size = 14 + numStuffingBytes;
+        if (PES_private_data_len > 0) {
+            PES_header_size += PES_private_data_len + 1;
+        }
+
+        CHECK_LE(PES_header_size, 188u - 4u);
+
+        size_t sizeAvailableForPayload = 188 - 4 - PES_header_size;
+        size_t numBytesOfPayload = accessUnit->size();
+
+        if (numBytesOfPayload > sizeAvailableForPayload) {
+            numBytesOfPayload = sizeAvailableForPayload;
+
+            if (alignPayload && numBytesOfPayload > 16) {
+                numBytesOfPayload -= (numBytesOfPayload % 16);
+            }
+        }
+
+        size_t numPaddingBytes = sizeAvailableForPayload - numBytesOfPayload;
+        ALOGV("packet 1 contains %zd padding bytes and %zd bytes of payload",
+              numPaddingBytes, numBytesOfPayload);
+
+        size_t numBytesOfPayloadRemaining = accessUnit->size() - numBytesOfPayload;
+
+#if 0
+        // The following hopefully illustrates the logic that led to the
+        // more efficient computation in the #else block...
+
+        while (numBytesOfPayloadRemaining > 0) {
+            size_t sizeAvailableForPayload = 188 - 4;
+
+            size_t numBytesOfPayload = numBytesOfPayloadRemaining;
+
+            if (numBytesOfPayload > sizeAvailableForPayload) {
+                numBytesOfPayload = sizeAvailableForPayload;
+
+                if (alignPayload && numBytesOfPayload > 16) {
+                    numBytesOfPayload -= (numBytesOfPayload % 16);
+                }
+            }
+
+            size_t numPaddingBytes = sizeAvailableForPayload - numBytesOfPayload;
+            ALOGI("packet %zd contains %zd padding bytes and %zd bytes of payload",
+                    numTSPackets + 1, numPaddingBytes, numBytesOfPayload);
+
+            numBytesOfPayloadRemaining -= numBytesOfPayload;
+            ++numTSPackets;
+        }
+#else
+        // This is how many bytes of payload each subsequent TS packet
+        // can contain at most.
+        sizeAvailableForPayload = 188 - 4;
+        size_t sizeAvailableForAlignedPayload = sizeAvailableForPayload;
+        if (alignPayload) {
+            // We're only going to use a subset of the available space
+            // since we need to make each fragment a multiple of 16 in size.
+            sizeAvailableForAlignedPayload -=
+                (sizeAvailableForAlignedPayload % 16);
+        }
+
+        size_t numFullTSPackets =
+            numBytesOfPayloadRemaining / sizeAvailableForAlignedPayload;
+
+        numTSPackets += numFullTSPackets;
+
+        numBytesOfPayloadRemaining -=
+            numFullTSPackets * sizeAvailableForAlignedPayload;
+
+        // numBytesOfPayloadRemaining < sizeAvailableForAlignedPayload
+        if (numFullTSPackets == 0 && numBytesOfPayloadRemaining > 0) {
+            // There wasn't enough payload left to form a full aligned
+            // payload; the last packet doesn't have to be aligned.
+            ++numTSPackets;
+        } else if (numFullTSPackets > 0
+                && numBytesOfPayloadRemaining
+                    + sizeAvailableForAlignedPayload > sizeAvailableForPayload) {
+            // The last packet emitted carried a full aligned payload; adding
+            // the remaining bytes to it would exceed the unaligned payload
+            // size, so we need another packet.
+            ++numTSPackets;
+        }
+#endif
+    }
+
+    if (flags & EMIT_PAT_AND_PMT) {
+        numTSPackets += 2;
+    }
+
+    if (flags & EMIT_PCR) {
+        ++numTSPackets;
+    }
+
+    sp<ABuffer> buffer = new ABuffer(numTSPackets * 188);
+    uint8_t *packetDataStart = buffer->data();
+
+    if (flags & EMIT_PAT_AND_PMT) {
+        // Program Association Table (PAT):
+        // 0x47
+        // transport_error_indicator = b0
+        // payload_unit_start_indicator = b1
+        // transport_priority = b0
+        // PID = b0000000000000 (13 bits)
+        // transport_scrambling_control = b00
+        // adaptation_field_control = b01 (no adaptation field, payload only)
+        // continuity_counter = b????
+        // pointer_field = 0x00
+        // --- payload follows
+        // table_id = 0x00
+        // section_syntax_indicator = b1
+        // must_be_zero = b0
+        // reserved = b11
+        // section_length = 0x00d
+        // transport_stream_id = 0x0000
+        // reserved = b11
+        // version_number = b00001
+        // current_next_indicator = b1
+        // section_number = 0x00
+        // last_section_number = 0x00
+        //   one program follows:
+        //   program_number = 0x0001
+        //   reserved = b111
+        //   program_map_PID = kPID_PMT (13 bits!)
+        // CRC = 0x????????
+
+        if (++mPATContinuityCounter == 16) {
+            mPATContinuityCounter = 0;
+        }
+
+        uint8_t *ptr = packetDataStart;
+        *ptr++ = 0x47;
+        *ptr++ = 0x40;
+        *ptr++ = 0x00;
+        *ptr++ = 0x10 | mPATContinuityCounter;
+        *ptr++ = 0x00;
+
+        uint8_t *crcDataStart = ptr;
+        *ptr++ = 0x00;
+        *ptr++ = 0xb0;
+        *ptr++ = 0x0d;
+        *ptr++ = 0x00;
+        *ptr++ = 0x00;
+        *ptr++ = 0xc3;
+        *ptr++ = 0x00;
+        *ptr++ = 0x00;
+        *ptr++ = 0x00;
+        *ptr++ = 0x01;
+        *ptr++ = 0xe0 | (kPID_PMT >> 8);
+        *ptr++ = kPID_PMT & 0xff;
+
+        CHECK_EQ(ptr - crcDataStart, 12);
+        uint32_t crc = htonl(crc32(crcDataStart, ptr - crcDataStart));
+        memcpy(ptr, &crc, 4);
+        ptr += 4;
+
+        size_t sizeLeft = packetDataStart + 188 - ptr;
+        memset(ptr, 0xff, sizeLeft);
+
+        packetDataStart += 188;
+
+        // Program Map (PMT):
+        // 0x47
+        // transport_error_indicator = b0
+        // payload_unit_start_indicator = b1
+        // transport_priority = b0
+        // PID = kPID_PMT (13 bits)
+        // transport_scrambling_control = b00
+        // adaptation_field_control = b01 (no adaptation field, payload only)
+        // continuity_counter = b????
+        // pointer_field = 0x00
+        // -- payload follows
+        // table_id = 0x02
+        // section_syntax_indicator = b1
+        // must_be_zero = b0
+        // reserved = b11
+        // section_length = 0x???
+        // program_number = 0x0001
+        // reserved = b11
+        // version_number = b00001
+        // current_next_indicator = b1
+        // section_number = 0x00
+        // last_section_number = 0x00
+        // reserved = b111
+        // PCR_PID = kPCR_PID (13 bits)
+        // reserved = b1111
+        // program_info_length = 0x???
+        //   program_info_descriptors follow
+        // one or more elementary stream descriptions follow:
+        //   stream_type = 0x??
+        //   reserved = b111
+        //   elementary_PID = b? ???? ???? ???? (13 bits)
+        //   reserved = b1111
+        //   ES_info_length = 0x000
+        // CRC = 0x????????
+
+        if (++mPMTContinuityCounter == 16) {
+            mPMTContinuityCounter = 0;
+        }
+
+        ptr = packetDataStart;
+        *ptr++ = 0x47;
+        *ptr++ = 0x40 | (kPID_PMT >> 8);
+        *ptr++ = kPID_PMT & 0xff;
+        *ptr++ = 0x10 | mPMTContinuityCounter;
+        *ptr++ = 0x00;
+
+        crcDataStart = ptr;
+        *ptr++ = 0x02;
+
+        *ptr++ = 0x00;  // section_length to be filled in below.
+        *ptr++ = 0x00;
+
+        *ptr++ = 0x00;
+        *ptr++ = 0x01;
+        *ptr++ = 0xc3;
+        *ptr++ = 0x00;
+        *ptr++ = 0x00;
+        *ptr++ = 0xe0 | (kPID_PCR >> 8);
+        *ptr++ = kPID_PCR & 0xff;
+
+        size_t program_info_length = 0;
+        for (size_t i = 0; i < mProgramInfoDescriptors.size(); ++i) {
+            program_info_length += mProgramInfoDescriptors.itemAt(i)->size();
+        }
+
+        CHECK_LT(program_info_length, 0x400u);
+        *ptr++ = 0xf0 | (program_info_length >> 8);
+        *ptr++ = (program_info_length & 0xff);
+
+        for (size_t i = 0; i < mProgramInfoDescriptors.size(); ++i) {
+            const sp<ABuffer> &desc = mProgramInfoDescriptors.itemAt(i);
+            memcpy(ptr, desc->data(), desc->size());
+            ptr += desc->size();
+        }
+
+        for (size_t i = 0; i < mTracks.size(); ++i) {
+            const sp<Track> &track = mTracks.itemAt(i);
+
+            // Make sure all the descriptors have been added.
+            track->finalize();
+
+            *ptr++ = track->streamType();
+            *ptr++ = 0xe0 | (track->PID() >> 8);
+            *ptr++ = track->PID() & 0xff;
+
+            size_t ES_info_length = 0;
+            for (size_t i = 0; i < track->countDescriptors(); ++i) {
+                ES_info_length += track->descriptorAt(i)->size();
+            }
+            CHECK_LE(ES_info_length, 0xfffu);
+
+            *ptr++ = 0xf0 | (ES_info_length >> 8);
+            *ptr++ = (ES_info_length & 0xff);
+
+            for (size_t i = 0; i < track->countDescriptors(); ++i) {
+                const sp<ABuffer> &descriptor = track->descriptorAt(i);
+                memcpy(ptr, descriptor->data(), descriptor->size());
+                ptr += descriptor->size();
+            }
+        }
+
+        size_t section_length = ptr - (crcDataStart + 3) + 4 /* CRC */;
+
+        crcDataStart[1] = 0xb0 | (section_length >> 8);
+        crcDataStart[2] = section_length & 0xff;
+
+        crc = htonl(crc32(crcDataStart, ptr - crcDataStart));
+        memcpy(ptr, &crc, 4);
+        ptr += 4;
+
+        sizeLeft = packetDataStart + 188 - ptr;
+        memset(ptr, 0xff, sizeLeft);
+
+        packetDataStart += 188;
+    }
+
+    if (flags & EMIT_PCR) {
+        // PCR stream
+        // 0x47
+        // transport_error_indicator = b0
+        // payload_unit_start_indicator = b1
+        // transport_priority = b0
+        // PID = kPCR_PID (13 bits)
+        // transport_scrambling_control = b00
+        // adaptation_field_control = b10 (adaptation field only, no payload)
+        // continuity_counter = b0000 (does not increment)
+        // adaptation_field_length = 183
+        // discontinuity_indicator = b0
+        // random_access_indicator = b0
+        // elementary_stream_priority_indicator = b0
+        // PCR_flag = b1
+        // OPCR_flag = b0
+        // splicing_point_flag = b0
+        // transport_private_data_flag = b0
+        // adaptation_field_extension_flag = b0
+        // program_clock_reference_base = b?????????????????????????????????
+        // reserved = b111111
+        // program_clock_reference_extension = b?????????
+
+        int64_t nowUs = ALooper::GetNowUs();
+
+        uint64_t PCR = nowUs * 27;  // PCR based on a 27MHz clock
+        uint64_t PCR_base = PCR / 300;
+        uint32_t PCR_ext = PCR % 300;
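+        // e.g. nowUs = 1000000 gives PCR = 27000000, i.e. PCR_base = 90000
+        // (in 90kHz units) and PCR_ext = 0.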
+
+        uint8_t *ptr = packetDataStart;
+        *ptr++ = 0x47;
+        *ptr++ = 0x40 | (kPID_PCR >> 8);
+        *ptr++ = kPID_PCR & 0xff;
+        *ptr++ = 0x20;
+        *ptr++ = 0xb7;  // adaptation_field_length
+        *ptr++ = 0x10;
+        *ptr++ = (PCR_base >> 25) & 0xff;
+        *ptr++ = (PCR_base >> 17) & 0xff;
+        *ptr++ = (PCR_base >> 9) & 0xff;
+        *ptr++ = ((PCR_base & 1) << 7) | 0x7e | ((PCR_ext >> 8) & 1);
+        *ptr++ = (PCR_ext & 0xff);
+
+        size_t sizeLeft = packetDataStart + 188 - ptr;
+        memset(ptr, 0xff, sizeLeft);
+
+        packetDataStart += 188;
+    }
+
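+    // PTS uses a 90kHz clock: timeUs * 90000 / 1000000 == timeUs * 9 / 100.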
+    uint64_t PTS = (timeUs * 9ll) / 100ll;
+
+    if (PES_packet_length >= 65536) {
+        // This really should only happen for video.
+        CHECK(track->isVideo());
+
+        // It's valid to set this to 0 for video according to the specs.
+        PES_packet_length = 0;
+    }
+
+    size_t sizeAvailableForPayload = 188 - 4 - 14 - numStuffingBytes;
+    if (PES_private_data_len > 0) {
+        sizeAvailableForPayload -= PES_private_data_len + 1;
+    }
+
+    size_t copy = accessUnit->size();
+
+    if (copy > sizeAvailableForPayload) {
+        copy = sizeAvailableForPayload;
+
+        if (alignPayload && copy > 16) {
+            copy -= (copy % 16);
+        }
+    }
+
+    size_t numPaddingBytes = sizeAvailableForPayload - copy;
+
+    uint8_t *ptr = packetDataStart;
+    *ptr++ = 0x47;
+    *ptr++ = 0x40 | (track->PID() >> 8);
+    *ptr++ = track->PID() & 0xff;
+
+    *ptr++ = (numPaddingBytes > 0 ? 0x30 : 0x10)
+                | track->incrementContinuityCounter();
+
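+    // 0x30 sets adaptation_field_control to b11: an adaptation field of
+    // stuffing bytes precedes the payload so the packet still totals 188
+    // bytes.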
+    if (numPaddingBytes > 0) {
+        *ptr++ = numPaddingBytes - 1;
+        if (numPaddingBytes >= 2) {
+            *ptr++ = 0x00;
+            memset(ptr, 0xff, numPaddingBytes - 2);
+            ptr += numPaddingBytes - 2;
+        }
+    }
+
+    *ptr++ = 0x00;
+    *ptr++ = 0x00;
+    *ptr++ = 0x01;
+    *ptr++ = track->streamID();
+    *ptr++ = PES_packet_length >> 8;
+    *ptr++ = PES_packet_length & 0xff;
+    *ptr++ = 0x84;
+    *ptr++ = (PES_private_data_len > 0) ? 0x81 : 0x80;
+
+    size_t headerLength = 0x05 + numStuffingBytes;
+    if (PES_private_data_len > 0) {
+        headerLength += 1 + PES_private_data_len;
+    }
+
+    *ptr++ = headerLength;
+
+    *ptr++ = 0x20 | (((PTS >> 30) & 7) << 1) | 1;
+    *ptr++ = (PTS >> 22) & 0xff;
+    *ptr++ = (((PTS >> 15) & 0x7f) << 1) | 1;
+    *ptr++ = (PTS >> 7) & 0xff;
+    *ptr++ = ((PTS & 0x7f) << 1) | 1;
+
+    if (PES_private_data_len > 0) {
+        *ptr++ = 0x8e;  // PES_private_data_flag, reserved.
+        memcpy(ptr, PES_private_data, PES_private_data_len);
+        ptr += PES_private_data_len;
+    }
+
+    for (size_t i = 0; i < numStuffingBytes; ++i) {
+        *ptr++ = 0xff;
+    }
+
+    memcpy(ptr, accessUnit->data(), copy);
+    ptr += copy;
+
+    CHECK_EQ(ptr, packetDataStart + 188);
+    packetDataStart += 188;
+
+    size_t offset = copy;
+    while (offset < accessUnit->size()) {
+        // for subsequent fragments of "buffer":
+        // 0x47
+        // transport_error_indicator = b0
+        // payload_unit_start_indicator = b0
+        // transport_priority = b0
+        // PID = b0 0001 1110 ???? (13 bits) [0x1e0 + 1 + sourceIndex]
+        // transport_scrambling_control = b00
+        // adaptation_field_control = b??
+        // continuity_counter = b????
+        // the fragment of "buffer" follows.
+
+        size_t sizeAvailableForPayload = 188 - 4;
+
+        size_t copy = accessUnit->size() - offset;
+
+        if (copy > sizeAvailableForPayload) {
+            copy = sizeAvailableForPayload;
+
+            if (alignPayload && copy > 16) {
+                copy -= (copy % 16);
+            }
+        }
+
+        size_t numPaddingBytes = sizeAvailableForPayload - copy;
+
+        uint8_t *ptr = packetDataStart;
+        *ptr++ = 0x47;
+        *ptr++ = 0x00 | (track->PID() >> 8);
+        *ptr++ = track->PID() & 0xff;
+
+        *ptr++ = (numPaddingBytes > 0 ? 0x30 : 0x10)
+                    | track->incrementContinuityCounter();
+
+        if (numPaddingBytes > 0) {
+            *ptr++ = numPaddingBytes - 1;
+            if (numPaddingBytes >= 2) {
+                *ptr++ = 0x00;
+                memset(ptr, 0xff, numPaddingBytes - 2);
+                ptr += numPaddingBytes - 2;
+            }
+        }
+
+        memcpy(ptr, accessUnit->data() + offset, copy);
+        ptr += copy;
+        CHECK_EQ(ptr, packetDataStart + 188);
+
+        offset += copy;
+        packetDataStart += 188;
+    }
+
+    CHECK(packetDataStart == buffer->data() + buffer->capacity());
+
+    *packets = buffer;
+
+    return OK;
+}
+
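+// The PSI CRC is CRC-32/MPEG-2: polynomial 0x04C11DB7, initial value
+// 0xffffffff, MSB-first, with no reflection and no final XOR.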
+void TSPacketizer::initCrcTable() {
+    uint32_t poly = 0x04C11DB7;
+
+    for (int i = 0; i < 256; i++) {
+        uint32_t crc = i << 24;
+        for (int j = 0; j < 8; j++) {
+            crc = (crc << 1) ^ ((crc & 0x80000000) ? (poly) : 0);
+        }
+        mCrcTable[i] = crc;
+    }
+}
+
+uint32_t TSPacketizer::crc32(const uint8_t *start, size_t size) const {
+    uint32_t crc = 0xFFFFFFFF;
+    const uint8_t *p;
+
+    for (p = start; p < start + size; ++p) {
+        crc = (crc << 8) ^ mCrcTable[((crc >> 24) ^ *p) & 0xFF];
+    }
+
+    return crc;
+}
+
+sp<ABuffer> TSPacketizer::prependCSD(
+        size_t trackIndex, const sp<ABuffer> &accessUnit) const {
+    CHECK_LT(trackIndex, mTracks.size());
+
+    const sp<Track> &track = mTracks.itemAt(trackIndex);
+    CHECK(track->isH264() && IsIDR(accessUnit->data(), accessUnit->size()));
+
+    int64_t timeUs;
+    CHECK(accessUnit->meta()->findInt64("timeUs", &timeUs));
+
+    sp<ABuffer> accessUnit2 = track->prependCSD(accessUnit);
+
+    accessUnit2->meta()->setInt64("timeUs", timeUs);
+
+    return accessUnit2;
+}
+
+}  // namespace android
+
diff --git a/media/libstagefright/wifi-display/source/TSPacketizer.h b/media/libstagefright/wifi-display/source/TSPacketizer.h
new file mode 100644
index 0000000..0dcb179
--- /dev/null
+++ b/media/libstagefright/wifi-display/source/TSPacketizer.h
@@ -0,0 +1,94 @@
+/*
+ * Copyright 2012, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef TS_PACKETIZER_H_
+
+#define TS_PACKETIZER_H_
+
+#include <media/stagefright/foundation/ABase.h>
+#include <utils/Errors.h>
+#include <utils/RefBase.h>
+#include <utils/Vector.h>
+
+namespace android {
+
+struct ABuffer;
+struct AMessage;
+
+// Forms the packets of a transport stream given access units.
+// Emits metadata tables (PAT and PMT) and timestamp stream (PCR) based
+// on flags.
+struct TSPacketizer : public RefBase {
+    enum {
+        EMIT_HDCP20_DESCRIPTOR = 1,
+        EMIT_HDCP21_DESCRIPTOR = 2,
+    };
+    explicit TSPacketizer(uint32_t flags);
+
+    // Returns trackIndex or error.
+    ssize_t addTrack(const sp<AMessage> &format);
+
+    enum {
+        EMIT_PAT_AND_PMT                = 1,
+        EMIT_PCR                        = 2,
+        IS_ENCRYPTED                    = 4,
+        PREPEND_SPS_PPS_TO_IDR_FRAMES   = 8,
+    };
+    status_t packetize(
+            size_t trackIndex, const sp<ABuffer> &accessUnit,
+            sp<ABuffer> *packets,
+            uint32_t flags,
+            const uint8_t *PES_private_data, size_t PES_private_data_len,
+            size_t numStuffingBytes = 0);
+
+    status_t extractCSDIfNecessary(size_t trackIndex);
+
+    // XXX to be removed once encoder config option takes care of this for
+    // encrypted mode.
+    sp<ABuffer> prependCSD(
+            size_t trackIndex, const sp<ABuffer> &accessUnit) const;
+
+protected:
+    virtual ~TSPacketizer();
+
+private:
+    enum {
+        kPID_PMT = 0x100,
+        kPID_PCR = 0x1000,
+    };
+
+    struct Track;
+
+    uint32_t mFlags;
+    Vector<sp<Track> > mTracks;
+
+    Vector<sp<ABuffer> > mProgramInfoDescriptors;
+
+    unsigned mPATContinuityCounter;
+    unsigned mPMTContinuityCounter;
+
+    uint32_t mCrcTable[256];
+
+    void initCrcTable();
+    uint32_t crc32(const uint8_t *start, size_t size) const;
+
+    DISALLOW_EVIL_CONSTRUCTORS(TSPacketizer);
+};
+
+}  // namespace android
+
+#endif  // TS_PACKETIZER_H_
+
diff --git a/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp b/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp
new file mode 100644
index 0000000..4695e5d
--- /dev/null
+++ b/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp
@@ -0,0 +1,1737 @@
+/*
+ * Copyright 2012, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "WifiDisplaySource"
+#include <utils/Log.h>
+
+#include "WifiDisplaySource.h"
+#include "PlaybackSession.h"
+#include "Parameters.h"
+#include "rtp/RTPSender.h"
+
+#include <binder/IServiceManager.h>
+#include <gui/IGraphicBufferProducer.h>
+#include <media/IHDCP.h>
+#include <media/IMediaPlayerService.h>
+#include <media/IRemoteDisplayClient.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/foundation/ParsedMessage.h>
+#include <media/stagefright/MediaErrors.h>
+#include <media/stagefright/Utils.h>
+
+#include <arpa/inet.h>
+#include <cutils/properties.h>
+
+#include <ctype.h>
+
+namespace android {
+
+// static
+const int64_t WifiDisplaySource::kReaperIntervalUs;
+const int64_t WifiDisplaySource::kTeardownTriggerTimeouSecs;
+const int64_t WifiDisplaySource::kPlaybackSessionTimeoutSecs;
+const int64_t WifiDisplaySource::kPlaybackSessionTimeoutUs;
+const AString WifiDisplaySource::sUserAgent = MakeUserAgent();
+
+WifiDisplaySource::WifiDisplaySource(
+        const String16 &opPackageName,
+        const sp<ANetworkSession> &netSession,
+        const sp<IRemoteDisplayClient> &client,
+        const char *path)
+    : mOpPackageName(opPackageName),
+      mState(INITIALIZED),
+      mNetSession(netSession),
+      mClient(client),
+      mSessionID(0),
+      mStopReplyID(NULL),
+      mChosenRTPPort(-1),
+      mUsingPCMAudio(false),
+      mClientSessionID(0),
+      mReaperPending(false),
+      mNextCSeq(1),
+      mUsingHDCP(false),
+      mIsHDCP2_0(false),
+      mHDCPPort(0),
+      mHDCPInitializationComplete(false),
+      mSetupTriggerDeferred(false),
+      mPlaybackSessionEstablished(false) {
+    if (path != NULL) {
+        mMediaPath.setTo(path);
+    }
+
+    mSupportedSourceVideoFormats.disableAll();
+
+    mSupportedSourceVideoFormats.setNativeResolution(
+            VideoFormats::RESOLUTION_CEA, 5);  // 1280x720 p30
+
+    // Enable all resolutions up to 1280x720p30
+    mSupportedSourceVideoFormats.enableResolutionUpto(
+            VideoFormats::RESOLUTION_CEA, 5,
+            VideoFormats::PROFILE_CHP,  // Constrained High Profile
+            VideoFormats::LEVEL_32);    // Level 3.2
+}
+
+WifiDisplaySource::~WifiDisplaySource() {
+}
+
+static status_t PostAndAwaitResponse(
+        const sp<AMessage> &msg, sp<AMessage> *response) {
+    status_t err = msg->postAndAwaitResponse(response);
+
+    if (err != OK) {
+        return err;
+    }
+
+    if (response == NULL || !(*response)->findInt32("err", &err)) {
+        err = OK;
+    }
+
+    return err;
+}
+
+status_t WifiDisplaySource::start(const char *iface) {
+    CHECK_EQ(mState, INITIALIZED);
+
+    sp<AMessage> msg = new AMessage(kWhatStart, this);
+    msg->setString("iface", iface);
+
+    sp<AMessage> response;
+    return PostAndAwaitResponse(msg, &response);
+}
+
+status_t WifiDisplaySource::stop() {
+    sp<AMessage> msg = new AMessage(kWhatStop, this);
+
+    sp<AMessage> response;
+    return PostAndAwaitResponse(msg, &response);
+}
+
+status_t WifiDisplaySource::pause() {
+    sp<AMessage> msg = new AMessage(kWhatPause, this);
+
+    sp<AMessage> response;
+    return PostAndAwaitResponse(msg, &response);
+}
+
+status_t WifiDisplaySource::resume() {
+    sp<AMessage> msg = new AMessage(kWhatResume, this);
+
+    sp<AMessage> response;
+    return PostAndAwaitResponse(msg, &response);
+}
+
+void WifiDisplaySource::onMessageReceived(const sp<AMessage> &msg) {
+    switch (msg->what()) {
+        case kWhatStart:
+        {
+            sp<AReplyToken> replyID;
+            CHECK(msg->senderAwaitsResponse(&replyID));
+
+            AString iface;
+            CHECK(msg->findString("iface", &iface));
+
+            status_t err = OK;
+
+            ssize_t colonPos = iface.find(":");
+
+            unsigned long port;
+
+            if (colonPos >= 0) {
+                const char *s = iface.c_str() + colonPos + 1;
+
+                char *end;
+                port = strtoul(s, &end, 10);
+
+                if (end == s || *end != '\0' || port > 65535) {
+                    err = -EINVAL;
+                } else {
+                    iface.erase(colonPos, iface.size() - colonPos);
+                }
+            } else {
+                port = kWifiDisplayDefaultPort;
+            }
+
+            if (err == OK) {
+                if (inet_aton(iface.c_str(), &mInterfaceAddr) != 0) {
+                    sp<AMessage> notify = new AMessage(kWhatRTSPNotify, this);
+
+                    err = mNetSession->createRTSPServer(
+                            mInterfaceAddr, port, notify, &mSessionID);
+                } else {
+                    err = -EINVAL;
+                }
+            }
+
+            mState = AWAITING_CLIENT_CONNECTION;
+
+            sp<AMessage> response = new AMessage;
+            response->setInt32("err", err);
+            response->postReply(replyID);
+            break;
+        }
+
+        case kWhatRTSPNotify:
+        {
+            int32_t reason;
+            CHECK(msg->findInt32("reason", &reason));
+
+            switch (reason) {
+                case ANetworkSession::kWhatError:
+                {
+                    int32_t sessionID;
+                    CHECK(msg->findInt32("sessionID", &sessionID));
+
+                    int32_t err;
+                    CHECK(msg->findInt32("err", &err));
+
+                    AString detail;
+                    CHECK(msg->findString("detail", &detail));
+
+                    ALOGE("An error occurred in session %d (%d, '%s/%s').",
+                          sessionID,
+                          err,
+                          detail.c_str(),
+                          strerror(-err));
+
+                    mNetSession->destroySession(sessionID);
+
+                    if (sessionID == mClientSessionID) {
+                        mClientSessionID = 0;
+
+                        mClient->onDisplayError(
+                                IRemoteDisplayClient::kDisplayErrorUnknown);
+                    }
+                    break;
+                }
+
+                case ANetworkSession::kWhatClientConnected:
+                {
+                    int32_t sessionID;
+                    CHECK(msg->findInt32("sessionID", &sessionID));
+
+                    if (mClientSessionID > 0) {
+                        ALOGW("A client tried to connect, but we already "
+                              "have one.");
+
+                        mNetSession->destroySession(sessionID);
+                        break;
+                    }
+
+                    CHECK_EQ(mState, AWAITING_CLIENT_CONNECTION);
+
+                    CHECK(msg->findString("client-ip", &mClientInfo.mRemoteIP));
+                    CHECK(msg->findString("server-ip", &mClientInfo.mLocalIP));
+
+                    if (mClientInfo.mRemoteIP == mClientInfo.mLocalIP) {
+                        // Disallow connections from the local interface
+                        // for security reasons.
+                        mNetSession->destroySession(sessionID);
+                        break;
+                    }
+
+                    CHECK(msg->findInt32(
+                                "server-port", &mClientInfo.mLocalPort));
+                    mClientInfo.mPlaybackSessionID = -1;
+
+                    mClientSessionID = sessionID;
+
+                    ALOGI("We now have a client (%d) connected.", sessionID);
+
+                    mState = AWAITING_CLIENT_SETUP;
+
+                    status_t err = sendM1(sessionID);
+                    CHECK_EQ(err, (status_t)OK);
+                    break;
+                }
+
+                case ANetworkSession::kWhatData:
+                {
+                    status_t err = onReceiveClientData(msg);
+
+                    if (err != OK) {
+                        mClient->onDisplayError(
+                                IRemoteDisplayClient::kDisplayErrorUnknown);
+                    }
+
+#if 0
+                    // testing only.
+                    char val[PROPERTY_VALUE_MAX];
+                    if (property_get("media.wfd.trigger", val, NULL)) {
+                        if (!strcasecmp(val, "pause") && mState == PLAYING) {
+                            mState = PLAYING_TO_PAUSED;
+                            sendTrigger(mClientSessionID, TRIGGER_PAUSE);
+                        } else if (!strcasecmp(val, "play")
+                                    && mState == PAUSED) {
+                            mState = PAUSED_TO_PLAYING;
+                            sendTrigger(mClientSessionID, TRIGGER_PLAY);
+                        }
+                    }
+#endif
+                    break;
+                }
+
+                case ANetworkSession::kWhatNetworkStall:
+                {
+                    break;
+                }
+
+                default:
+                    TRESPASS();
+            }
+            break;
+        }
+
+        case kWhatStop:
+        {
+            CHECK(msg->senderAwaitsResponse(&mStopReplyID));
+
+            CHECK_LT(mState, AWAITING_CLIENT_TEARDOWN);
+
+            if (mState >= AWAITING_CLIENT_PLAY) {
+                // We have a session, i.e. a previous SETUP succeeded.
+
+                status_t err = sendTrigger(
+                        mClientSessionID, TRIGGER_TEARDOWN);
+
+                if (err == OK) {
+                    mState = AWAITING_CLIENT_TEARDOWN;
+
+                    (new AMessage(kWhatTeardownTriggerTimedOut, this))->post(
+                            kTeardownTriggerTimeouSecs * 1000000ll);
+
+                    break;
+                }
+
+                // fall through.
+            }
+
+            finishStop();
+            break;
+        }
+
+        case kWhatPause:
+        {
+            sp<AReplyToken> replyID;
+            CHECK(msg->senderAwaitsResponse(&replyID));
+
+            status_t err = OK;
+
+            if (mState != PLAYING) {
+                err = INVALID_OPERATION;
+            } else {
+                mState = PLAYING_TO_PAUSED;
+                sendTrigger(mClientSessionID, TRIGGER_PAUSE);
+            }
+
+            sp<AMessage> response = new AMessage;
+            response->setInt32("err", err);
+            response->postReply(replyID);
+            break;
+        }
+
+        case kWhatResume:
+        {
+            sp<AReplyToken> replyID;
+            CHECK(msg->senderAwaitsResponse(&replyID));
+
+            status_t err = OK;
+
+            if (mState != PAUSED) {
+                err = INVALID_OPERATION;
+            } else {
+                mState = PAUSED_TO_PLAYING;
+                sendTrigger(mClientSessionID, TRIGGER_PLAY);
+            }
+
+            sp<AMessage> response = new AMessage;
+            response->setInt32("err", err);
+            response->postReply(replyID);
+            break;
+        }
+
+        case kWhatReapDeadClients:
+        {
+            mReaperPending = false;
+
+            if (mClientSessionID == 0
+                    || mClientInfo.mPlaybackSession == NULL) {
+                break;
+            }
+
+            if (mClientInfo.mPlaybackSession->getLastLifesignUs()
+                    + kPlaybackSessionTimeoutUs < ALooper::GetNowUs()) {
+                ALOGI("playback session timed out, reaping.");
+
+                mNetSession->destroySession(mClientSessionID);
+                mClientSessionID = 0;
+
+                mClient->onDisplayError(
+                        IRemoteDisplayClient::kDisplayErrorUnknown);
+            } else {
+                scheduleReaper();
+            }
+            break;
+        }
+
+        case kWhatPlaybackSessionNotify:
+        {
+            int32_t playbackSessionID;
+            CHECK(msg->findInt32("playbackSessionID", &playbackSessionID));
+
+            int32_t what;
+            CHECK(msg->findInt32("what", &what));
+
+            if (what == PlaybackSession::kWhatSessionDead) {
+                ALOGI("playback session wants to quit.");
+
+                mClient->onDisplayError(
+                        IRemoteDisplayClient::kDisplayErrorUnknown);
+            } else if (what == PlaybackSession::kWhatSessionEstablished) {
+                mPlaybackSessionEstablished = true;
+
+                if (mClient != NULL) {
+                    if (!mSinkSupportsVideo) {
+                        mClient->onDisplayConnected(
+                                NULL,  // SurfaceTexture
+                                0, // width,
+                                0, // height,
+                                mUsingHDCP
+                                    ? IRemoteDisplayClient::kDisplayFlagSecure
+                                    : 0,
+                                0);
+                    } else {
+                        size_t width, height;
+
+                        CHECK(VideoFormats::GetConfiguration(
+                                    mChosenVideoResolutionType,
+                                    mChosenVideoResolutionIndex,
+                                    &width,
+                                    &height,
+                                    NULL /* framesPerSecond */,
+                                    NULL /* interlaced */));
+
+                        mClient->onDisplayConnected(
+                                mClientInfo.mPlaybackSession
+                                    ->getSurfaceTexture(),
+                                width,
+                                height,
+                                mUsingHDCP
+                                    ? IRemoteDisplayClient::kDisplayFlagSecure
+                                    : 0,
+                                playbackSessionID);
+                    }
+                }
+
+                finishPlay();
+
+                if (mState == ABOUT_TO_PLAY) {
+                    mState = PLAYING;
+                }
+            } else if (what == PlaybackSession::kWhatSessionDestroyed) {
+                disconnectClient2();
+            } else {
+                CHECK_EQ(what, PlaybackSession::kWhatBinaryData);
+
+                int32_t channel;
+                CHECK(msg->findInt32("channel", &channel));
+
+                sp<ABuffer> data;
+                CHECK(msg->findBuffer("data", &data));
+
+                CHECK_LE(channel, 0xff);
+                CHECK_LE(data->size(), 0xffffu);
+
+                int32_t sessionID;
+                CHECK(msg->findInt32("sessionID", &sessionID));
+
+                char header[4];
+                header[0] = '$';
+                header[1] = channel;
+                header[2] = data->size() >> 8;
+                header[3] = data->size() & 0xff;
+
+                mNetSession->sendRequest(
+                        sessionID, header, sizeof(header));
+
+                mNetSession->sendRequest(
+                        sessionID, data->data(), data->size());
+            }
+            break;
+        }
+
+        case kWhatKeepAlive:
+        {
+            int32_t sessionID;
+            CHECK(msg->findInt32("sessionID", &sessionID));
+
+            if (mClientSessionID != sessionID) {
+                // Obsolete event, client is already gone.
+                break;
+            }
+
+            sendM16(sessionID);
+            break;
+        }
+
+        case kWhatTeardownTriggerTimedOut:
+        {
+            if (mState == AWAITING_CLIENT_TEARDOWN) {
+                ALOGI("TEARDOWN trigger timed out, forcing disconnection.");
+
+                CHECK(mStopReplyID != NULL);
+                finishStop();
+                break;
+            }
+            break;
+        }
+
+        case kWhatHDCPNotify:
+        {
+            int32_t msgCode, ext1, ext2;
+            CHECK(msg->findInt32("msg", &msgCode));
+            CHECK(msg->findInt32("ext1", &ext1));
+            CHECK(msg->findInt32("ext2", &ext2));
+
+            ALOGI("Saw HDCP notification code %d, ext1 %d, ext2 %d",
+                    msgCode, ext1, ext2);
+
+            switch (msgCode) {
+                case HDCPModule::HDCP_INITIALIZATION_COMPLETE:
+                {
+                    mHDCPInitializationComplete = true;
+
+                    if (mSetupTriggerDeferred) {
+                        mSetupTriggerDeferred = false;
+
+                        sendTrigger(mClientSessionID, TRIGGER_SETUP);
+                    }
+                    break;
+                }
+
+                case HDCPModule::HDCP_SHUTDOWN_COMPLETE:
+                case HDCPModule::HDCP_SHUTDOWN_FAILED:
+                {
+                    // Ugly hack to make sure that the call to
+                    // HDCPObserver::notify is completely handled before
+                    // we clear the HDCP instance and unload the shared
+                    // library :(
+                    (new AMessage(kWhatFinishStop2, this))->post(300000ll);
+                    break;
+                }
+
+                default:
+                {
+                    ALOGE("HDCP failure, shutting down.");
+
+                    mClient->onDisplayError(
+                            IRemoteDisplayClient::kDisplayErrorUnknown);
+                    break;
+                }
+            }
+            break;
+        }
+
+        case kWhatFinishStop2:
+        {
+            finishStop2();
+            break;
+        }
+
+        default:
+            TRESPASS();
+    }
+}
+
+void WifiDisplaySource::registerResponseHandler(
+        int32_t sessionID, int32_t cseq, HandleRTSPResponseFunc func) {
+    ResponseID id;
+    id.mSessionID = sessionID;
+    id.mCSeq = cseq;
+    mResponseHandlers.add(id, func);
+}
+
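+// RTSP/WFD source-side requests: M1 is an OPTIONS probe, M3 queries the
+// sink's capabilities via GET_PARAMETER, M4 pushes the chosen formats via
+// SET_PARAMETER, wfd_trigger_method requests (SETUP/PLAY/PAUSE/TEARDOWN) go
+// out as M5 and M16 is the periodic keep-alive GET_PARAMETER.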
+status_t WifiDisplaySource::sendM1(int32_t sessionID) {
+    AString request = "OPTIONS * RTSP/1.0\r\n";
+    AppendCommonResponse(&request, mNextCSeq);
+
+    request.append(
+            "Require: org.wfa.wfd1.0\r\n"
+            "\r\n");
+
+    status_t err =
+        mNetSession->sendRequest(sessionID, request.c_str(), request.size());
+
+    if (err != OK) {
+        return err;
+    }
+
+    registerResponseHandler(
+            sessionID, mNextCSeq, &WifiDisplaySource::onReceiveM1Response);
+
+    ++mNextCSeq;
+
+    return OK;
+}
+
+status_t WifiDisplaySource::sendM3(int32_t sessionID) {
+    AString body =
+        "wfd_content_protection\r\n"
+        "wfd_video_formats\r\n"
+        "wfd_audio_codecs\r\n"
+        "wfd_client_rtp_ports\r\n";
+
+    AString request = "GET_PARAMETER rtsp://localhost/wfd1.0 RTSP/1.0\r\n";
+    AppendCommonResponse(&request, mNextCSeq);
+
+    request.append("Content-Type: text/parameters\r\n");
+    request.append(AStringPrintf("Content-Length: %d\r\n", body.size()));
+    request.append("\r\n");
+    request.append(body);
+
+    status_t err =
+        mNetSession->sendRequest(sessionID, request.c_str(), request.size());
+
+    if (err != OK) {
+        return err;
+    }
+
+    registerResponseHandler(
+            sessionID, mNextCSeq, &WifiDisplaySource::onReceiveM3Response);
+
+    ++mNextCSeq;
+
+    return OK;
+}
+
+status_t WifiDisplaySource::sendM4(int32_t sessionID) {
+    CHECK_EQ(sessionID, mClientSessionID);
+
+    AString body;
+
+    if (mSinkSupportsVideo) {
+        body.append("wfd_video_formats: ");
+
+        VideoFormats chosenVideoFormat;
+        chosenVideoFormat.disableAll();
+        chosenVideoFormat.setNativeResolution(
+                mChosenVideoResolutionType, mChosenVideoResolutionIndex);
+        chosenVideoFormat.setProfileLevel(
+                mChosenVideoResolutionType, mChosenVideoResolutionIndex,
+                mChosenVideoProfile, mChosenVideoLevel);
+
+        body.append(chosenVideoFormat.getFormatSpec(true /* forM4Message */));
+        body.append("\r\n");
+    }
+
+    if (mSinkSupportsAudio) {
+        body.append(
+                AStringPrintf("wfd_audio_codecs: %s\r\n",
+                             (mUsingPCMAudio
+                                ? "LPCM 00000002 00" // 2 ch PCM 48kHz
+                                : "AAC 00000001 00")));  // 2 ch AAC 48kHz
+    }
+
+    body.append(
+            AStringPrintf(
+                "wfd_presentation_URL: rtsp://%s/wfd1.0/streamid=0 none\r\n",
+                mClientInfo.mLocalIP.c_str()));
+
+    body.append(
+            AStringPrintf(
+                "wfd_client_rtp_ports: %s\r\n", mWfdClientRtpPorts.c_str()));
+
+    AString request = "SET_PARAMETER rtsp://localhost/wfd1.0 RTSP/1.0\r\n";
+    AppendCommonResponse(&request, mNextCSeq);
+
+    request.append("Content-Type: text/parameters\r\n");
+    request.append(AStringPrintf("Content-Length: %d\r\n", body.size()));
+    request.append("\r\n");
+    request.append(body);
+
+    status_t err =
+        mNetSession->sendRequest(sessionID, request.c_str(), request.size());
+
+    if (err != OK) {
+        return err;
+    }
+
+    registerResponseHandler(
+            sessionID, mNextCSeq, &WifiDisplaySource::onReceiveM4Response);
+
+    ++mNextCSeq;
+
+    return OK;
+}
+
+status_t WifiDisplaySource::sendTrigger(
+        int32_t sessionID, TriggerType triggerType) {
+    AString body = "wfd_trigger_method: ";
+    switch (triggerType) {
+        case TRIGGER_SETUP:
+            body.append("SETUP");
+            break;
+        case TRIGGER_TEARDOWN:
+            ALOGI("Sending TEARDOWN trigger.");
+            body.append("TEARDOWN");
+            break;
+        case TRIGGER_PAUSE:
+            body.append("PAUSE");
+            break;
+        case TRIGGER_PLAY:
+            body.append("PLAY");
+            break;
+        default:
+            TRESPASS();
+    }
+
+    body.append("\r\n");
+
+    AString request = "SET_PARAMETER rtsp://localhost/wfd1.0 RTSP/1.0\r\n";
+    AppendCommonResponse(&request, mNextCSeq);
+
+    request.append("Content-Type: text/parameters\r\n");
+    request.append(AStringPrintf("Content-Length: %d\r\n", body.size()));
+    request.append("\r\n");
+    request.append(body);
+
+    status_t err =
+        mNetSession->sendRequest(sessionID, request.c_str(), request.size());
+
+    if (err != OK) {
+        return err;
+    }
+
+    registerResponseHandler(
+            sessionID, mNextCSeq, &WifiDisplaySource::onReceiveM5Response);
+
+    ++mNextCSeq;
+
+    return OK;
+}
+
+status_t WifiDisplaySource::sendM16(int32_t sessionID) {
+    AString request = "GET_PARAMETER rtsp://localhost/wfd1.0 RTSP/1.0\r\n";
+    AppendCommonResponse(&request, mNextCSeq);
+
+    CHECK_EQ(sessionID, mClientSessionID);
+    request.append(
+            AStringPrintf("Session: %d\r\n", mClientInfo.mPlaybackSessionID));
+    request.append("\r\n");  // Empty body
+
+    status_t err =
+        mNetSession->sendRequest(sessionID, request.c_str(), request.size());
+
+    if (err != OK) {
+        return err;
+    }
+
+    registerResponseHandler(
+            sessionID, mNextCSeq, &WifiDisplaySource::onReceiveM16Response);
+
+    ++mNextCSeq;
+
+    scheduleKeepAlive(sessionID);
+
+    return OK;
+}
+
+status_t WifiDisplaySource::onReceiveM1Response(
+        int32_t /* sessionID */, const sp<ParsedMessage> &msg) {
+    int32_t statusCode;
+    if (!msg->getStatusCode(&statusCode)) {
+        return ERROR_MALFORMED;
+    }
+
+    if (statusCode != 200) {
+        return ERROR_UNSUPPORTED;
+    }
+
+    return OK;
+}
+
+// sink_audio_list := ("LPCM"|"AAC"|"AC3" HEXDIGIT*8 HEXDIGIT*2)
+//                       (", " sink_audio_list)*
+static void GetAudioModes(const char *s, const char *prefix, uint32_t *modes) {
+    *modes = 0;
+
+    size_t prefixLen = strlen(prefix);
+
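+    // e.g. s = "LPCM 00000002 00, AAC 00000001 00" with prefix "AAC"
+    // yields *modes = 0x00000001.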
+    while (*s != '\0') {
+        if (!strncmp(s, prefix, prefixLen) && s[prefixLen] == ' ') {
+            unsigned latency;
+            if (sscanf(&s[prefixLen + 1], "%08x %02x", modes, &latency) != 2) {
+                *modes = 0;
+            }
+
+            return;
+        }
+
+        const char *commaPos = strchr(s, ',');
+        if (commaPos != NULL) {
+            s = commaPos + 1;
+
+            while (isspace(*s)) {
+                ++s;
+            }
+        } else {
+            break;
+        }
+    }
+}
+
+status_t WifiDisplaySource::onReceiveM3Response(
+        int32_t sessionID, const sp<ParsedMessage> &msg) {
+    int32_t statusCode;
+    if (!msg->getStatusCode(&statusCode)) {
+        return ERROR_MALFORMED;
+    }
+
+    if (statusCode != 200) {
+        return ERROR_UNSUPPORTED;
+    }
+
+    sp<Parameters> params =
+        Parameters::Parse(msg->getContent(), strlen(msg->getContent()));
+
+    if (params == NULL) {
+        return ERROR_MALFORMED;
+    }
+
+    AString value;
+    if (!params->findParameter("wfd_client_rtp_ports", &value)) {
+        ALOGE("Sink doesn't report its choice of wfd_client_rtp_ports.");
+        return ERROR_MALFORMED;
+    }
+
+    unsigned port0 = 0, port1 = 0;
+    if (sscanf(value.c_str(),
+               "RTP/AVP/UDP;unicast %u %u mode=play",
+               &port0,
+               &port1) == 2
+        || sscanf(value.c_str(),
+               "RTP/AVP/TCP;unicast %u %u mode=play",
+               &port0,
+               &port1) == 2) {
+            if (port0 == 0 || port0 > 65535 || port1 != 0) {
+                ALOGE("Sink chose its wfd_client_rtp_ports poorly (%s)",
+                      value.c_str());
+
+                return ERROR_MALFORMED;
+            }
+    } else if (strcmp(value.c_str(), "RTP/AVP/TCP;interleaved mode=play")) {
+        ALOGE("Unsupported value for wfd_client_rtp_ports (%s)",
+              value.c_str());
+
+        return ERROR_UNSUPPORTED;
+    }
+
+    mWfdClientRtpPorts = value;
+    mChosenRTPPort = port0;
+
+    if (!params->findParameter("wfd_video_formats", &value)) {
+        ALOGE("Sink doesn't report its choice of wfd_video_formats.");
+        return ERROR_MALFORMED;
+    }
+
+    mSinkSupportsVideo = false;
+
+    if  (!(value == "none")) {
+        mSinkSupportsVideo = true;
+        if (!mSupportedSinkVideoFormats.parseFormatSpec(value.c_str())) {
+            ALOGE("Failed to parse sink provided wfd_video_formats (%s)",
+                  value.c_str());
+
+            return ERROR_MALFORMED;
+        }
+
+        if (!VideoFormats::PickBestFormat(
+                    mSupportedSinkVideoFormats,
+                    mSupportedSourceVideoFormats,
+                    &mChosenVideoResolutionType,
+                    &mChosenVideoResolutionIndex,
+                    &mChosenVideoProfile,
+                    &mChosenVideoLevel)) {
+            ALOGE("Sink and source share no commonly supported video "
+                  "formats.");
+
+            return ERROR_UNSUPPORTED;
+        }
+
+        size_t width, height, framesPerSecond;
+        bool interlaced;
+        CHECK(VideoFormats::GetConfiguration(
+                    mChosenVideoResolutionType,
+                    mChosenVideoResolutionIndex,
+                    &width,
+                    &height,
+                    &framesPerSecond,
+                    &interlaced));
+
+        ALOGI("Picked video resolution %zu x %zu %c%zu",
+              width, height, interlaced ? 'i' : 'p', framesPerSecond);
+
+        ALOGI("Picked AVC profile %d, level %d",
+              mChosenVideoProfile, mChosenVideoLevel);
+    } else {
+        ALOGI("Sink doesn't support video at all.");
+    }
+
+    if (!params->findParameter("wfd_audio_codecs", &value)) {
+        ALOGE("Sink doesn't report its choice of wfd_audio_codecs.");
+        return ERROR_MALFORMED;
+    }
+
+    mSinkSupportsAudio = false;
+
+    if  (!(value == "none")) {
+        mSinkSupportsAudio = true;
+
+        uint32_t modes;
+        GetAudioModes(value.c_str(), "AAC", &modes);
+
+        bool supportsAAC = (modes & 1) != 0;  // AAC 2ch 48kHz
+
+        GetAudioModes(value.c_str(), "LPCM", &modes);
+
+        bool supportsPCM = (modes & 2) != 0;  // LPCM 2ch 48kHz
+
+        if (supportsPCM
+                && property_get_bool("media.wfd.use-pcm-audio", false)) {
+            ALOGI("Using PCM audio.");
+            mUsingPCMAudio = true;
+        } else if (supportsAAC) {
+            ALOGI("Using AAC audio.");
+            mUsingPCMAudio = false;
+        } else if (supportsPCM) {
+            ALOGI("Using PCM audio.");
+            mUsingPCMAudio = true;
+        } else {
+            ALOGI("Sink doesn't support an audio format we do.");
+            return ERROR_UNSUPPORTED;
+        }
+    } else {
+        ALOGI("Sink doesn't support audio at all.");
+    }
+
+    if (!mSinkSupportsVideo && !mSinkSupportsAudio) {
+        ALOGE("Sink supports neither video nor audio...");
+        return ERROR_UNSUPPORTED;
+    }
+
+    mUsingHDCP = false;
+    if (!params->findParameter("wfd_content_protection", &value)) {
+        ALOGI("Sink doesn't appear to support content protection.");
+    } else if (value == "none") {
+        ALOGI("Sink does not support content protection.");
+    } else {
+        mUsingHDCP = true;
+
+        bool isHDCP2_0 = false;
+        if (value.startsWith("HDCP2.0 ")) {
+            isHDCP2_0 = true;
+        } else if (!value.startsWith("HDCP2.1 ")) {
+            ALOGE("malformed wfd_content_protection: '%s'", value.c_str());
+
+            return ERROR_MALFORMED;
+        }
+
+        int32_t hdcpPort;
+        if (!ParsedMessage::GetInt32Attribute(
+                    value.c_str() + 8, "port", &hdcpPort)
+                || hdcpPort < 1 || hdcpPort > 65535) {
+            return ERROR_MALFORMED;
+        }
+
+        mIsHDCP2_0 = isHDCP2_0;
+        mHDCPPort = hdcpPort;
+
+        status_t err = makeHDCP();
+        if (err != OK) {
+            ALOGE("Unable to instantiate HDCP component. "
+                  "Not using HDCP after all.");
+
+            mUsingHDCP = false;
+        }
+    }
+
+    return sendM4(sessionID);
+}
+
+status_t WifiDisplaySource::onReceiveM4Response(
+        int32_t sessionID, const sp<ParsedMessage> &msg) {
+    int32_t statusCode;
+    if (!msg->getStatusCode(&statusCode)) {
+        return ERROR_MALFORMED;
+    }
+
+    if (statusCode != 200) {
+        return ERROR_UNSUPPORTED;
+    }
+
+    if (mUsingHDCP && !mHDCPInitializationComplete) {
+        ALOGI("Deferring SETUP trigger until HDCP initialization completes.");
+
+        mSetupTriggerDeferred = true;
+        return OK;
+    }
+
+    return sendTrigger(sessionID, TRIGGER_SETUP);
+}
+
+status_t WifiDisplaySource::onReceiveM5Response(
+        int32_t /* sessionID */, const sp<ParsedMessage> &msg) {
+    int32_t statusCode;
+    if (!msg->getStatusCode(&statusCode)) {
+        return ERROR_MALFORMED;
+    }
+
+    if (statusCode != 200) {
+        return ERROR_UNSUPPORTED;
+    }
+
+    return OK;
+}
+
+status_t WifiDisplaySource::onReceiveM16Response(
+        int32_t sessionID, const sp<ParsedMessage> & /* msg */) {
+    // If only the response was required to include a "Session:" header...
+
+    CHECK_EQ(sessionID, mClientSessionID);
+
+    if (mClientInfo.mPlaybackSession != NULL) {
+        mClientInfo.mPlaybackSession->updateLiveness();
+    }
+
+    return OK;
+}
+
+void WifiDisplaySource::scheduleReaper() {
+    if (mReaperPending) {
+        return;
+    }
+
+    mReaperPending = true;
+    (new AMessage(kWhatReapDeadClients, this))->post(kReaperIntervalUs);
+}
+
+void WifiDisplaySource::scheduleKeepAlive(int32_t sessionID) {
+    // We need to send updates at least 5 secs before the timeout is set to
+    // expire, so make sure the timeout is greater than 5 secs to begin with.
+    CHECK_GT(kPlaybackSessionTimeoutUs, 5000000ll);
+
+    sp<AMessage> msg = new AMessage(kWhatKeepAlive, this);
+    msg->setInt32("sessionID", sessionID);
+    msg->post(kPlaybackSessionTimeoutUs - 5000000ll);
+}
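+// With kPlaybackSessionTimeoutSecs == 30 this posts the keep-alive message
+// 30s - 5s = 25 seconds out, i.e. 5 seconds before the playback session
+// timeout advertised to the sink would expire.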
+
+status_t WifiDisplaySource::onReceiveClientData(const sp<AMessage> &msg) {
+    int32_t sessionID;
+    CHECK(msg->findInt32("sessionID", &sessionID));
+
+    sp<RefBase> obj;
+    CHECK(msg->findObject("data", &obj));
+
+    sp<ParsedMessage> data =
+        static_cast<ParsedMessage *>(obj.get());
+
+    ALOGV("session %d received '%s'",
+          sessionID, data->debugString().c_str());
+
+    AString method;
+    AString uri;
+    data->getRequestField(0, &method);
+
+    int32_t cseq;
+    if (!data->findInt32("cseq", &cseq)) {
+        sendErrorResponse(sessionID, "400 Bad Request", -1 /* cseq */);
+        return ERROR_MALFORMED;
+    }
+
+    if (method.startsWith("RTSP/")) {
+        // This is a response.
+
+        ResponseID id;
+        id.mSessionID = sessionID;
+        id.mCSeq = cseq;
+
+        ssize_t index = mResponseHandlers.indexOfKey(id);
+
+        if (index < 0) {
+            ALOGW("Received unsolicited server response, cseq %d", cseq);
+            return ERROR_MALFORMED;
+        }
+
+        HandleRTSPResponseFunc func = mResponseHandlers.valueAt(index);
+        mResponseHandlers.removeItemsAt(index);
+
+        status_t err = (this->*func)(sessionID, data);
+
+        if (err != OK) {
+            ALOGW("Response handler for session %d, cseq %d returned "
+                  "err %d (%s)",
+                  sessionID, cseq, err, strerror(-err));
+
+            return err;
+        }
+
+        return OK;
+    }
+
+    AString version;
+    data->getRequestField(2, &version);
+    if (!(version == AString("RTSP/1.0"))) {
+        sendErrorResponse(sessionID, "505 RTSP Version not supported", cseq);
+        return ERROR_UNSUPPORTED;
+    }
+
+    status_t err;
+    if (method == "OPTIONS") {
+        err = onOptionsRequest(sessionID, cseq, data);
+    } else if (method == "SETUP") {
+        err = onSetupRequest(sessionID, cseq, data);
+    } else if (method == "PLAY") {
+        err = onPlayRequest(sessionID, cseq, data);
+    } else if (method == "PAUSE") {
+        err = onPauseRequest(sessionID, cseq, data);
+    } else if (method == "TEARDOWN") {
+        err = onTeardownRequest(sessionID, cseq, data);
+    } else if (method == "GET_PARAMETER") {
+        err = onGetParameterRequest(sessionID, cseq, data);
+    } else if (method == "SET_PARAMETER") {
+        err = onSetParameterRequest(sessionID, cseq, data);
+    } else {
+        sendErrorResponse(sessionID, "405 Method Not Allowed", cseq);
+
+        err = ERROR_UNSUPPORTED;
+    }
+
+    return err;
+}
+
+status_t WifiDisplaySource::onOptionsRequest(
+        int32_t sessionID,
+        int32_t cseq,
+        const sp<ParsedMessage> &data) {
+    int32_t playbackSessionID;
+    sp<PlaybackSession> playbackSession =
+        findPlaybackSession(data, &playbackSessionID);
+
+    if (playbackSession != NULL) {
+        playbackSession->updateLiveness();
+    }
+
+    AString response = "RTSP/1.0 200 OK\r\n";
+    AppendCommonResponse(&response, cseq);
+
+    response.append(
+            "Public: org.wfa.wfd1.0, SETUP, TEARDOWN, PLAY, PAUSE, "
+            "GET_PARAMETER, SET_PARAMETER\r\n");
+
+    response.append("\r\n");
+
+    status_t err = mNetSession->sendRequest(sessionID, response.c_str());
+
+    if (err == OK) {
+        err = sendM3(sessionID);
+    }
+
+    return err;
+}
+
+status_t WifiDisplaySource::onSetupRequest(
+        int32_t sessionID,
+        int32_t cseq,
+        const sp<ParsedMessage> &data) {
+    CHECK_EQ(sessionID, mClientSessionID);
+    if (mClientInfo.mPlaybackSessionID != -1) {
+        // We only support a single playback session per client.
+        // This is due to the reversed keep-alive design in the wfd specs...
+        sendErrorResponse(sessionID, "400 Bad Request", cseq);
+        return ERROR_MALFORMED;
+    }
+
+    AString transport;
+    if (!data->findString("transport", &transport)) {
+        sendErrorResponse(sessionID, "400 Bad Request", cseq);
+        return ERROR_MALFORMED;
+    }
+
+    RTPSender::TransportMode rtpMode = RTPSender::TRANSPORT_UDP;
+
+    int clientRtp, clientRtcp;
+    if (transport.startsWith("RTP/AVP/TCP;")) {
+        AString interleaved;
+        if (ParsedMessage::GetAttribute(
+                    transport.c_str(), "interleaved", &interleaved)
+                && sscanf(interleaved.c_str(), "%d-%d",
+                          &clientRtp, &clientRtcp) == 2) {
+            rtpMode = RTPSender::TRANSPORT_TCP_INTERLEAVED;
+        } else {
+            bool badRequest = false;
+
+            AString clientPort;
+            if (!ParsedMessage::GetAttribute(
+                        transport.c_str(), "client_port", &clientPort)) {
+                badRequest = true;
+            } else if (sscanf(clientPort.c_str(), "%d-%d",
+                              &clientRtp, &clientRtcp) == 2) {
+            } else if (sscanf(clientPort.c_str(), "%d", &clientRtp) == 1) {
+                // No RTCP.
+                clientRtcp = -1;
+            } else {
+                badRequest = true;
+            }
+
+            if (badRequest) {
+                sendErrorResponse(sessionID, "400 Bad Request", cseq);
+                return ERROR_MALFORMED;
+            }
+
+            rtpMode = RTPSender::TRANSPORT_TCP;
+        }
+    } else if (transport.startsWith("RTP/AVP;unicast;")
+            || transport.startsWith("RTP/AVP/UDP;unicast;")) {
+        bool badRequest = false;
+
+        AString clientPort;
+        if (!ParsedMessage::GetAttribute(
+                    transport.c_str(), "client_port", &clientPort)) {
+            badRequest = true;
+        } else if (sscanf(clientPort.c_str(), "%d-%d",
+                          &clientRtp, &clientRtcp) == 2) {
+        } else if (sscanf(clientPort.c_str(), "%d", &clientRtp) == 1) {
+            // No RTCP.
+            clientRtcp = -1;
+        } else {
+            badRequest = true;
+        }
+
+        if (badRequest) {
+            sendErrorResponse(sessionID, "400 Bad Request", cseq);
+            return ERROR_MALFORMED;
+        }
+#if 1
+    // The older LG dongles don't specify client_port=xxx, apparently.
+    } else if (transport == "RTP/AVP/UDP;unicast") {
+        clientRtp = 19000;
+        clientRtcp = -1;
+#endif
+    } else {
+        sendErrorResponse(sessionID, "461 Unsupported Transport", cseq);
+        return ERROR_UNSUPPORTED;
+    }
+
+    int32_t playbackSessionID = makeUniquePlaybackSessionID();
+
+    sp<AMessage> notify = new AMessage(kWhatPlaybackSessionNotify, this);
+    notify->setInt32("playbackSessionID", playbackSessionID);
+    notify->setInt32("sessionID", sessionID);
+
+    sp<PlaybackSession> playbackSession =
+        new PlaybackSession(
+                mOpPackageName, mNetSession, notify, mInterfaceAddr, mHDCP, mMediaPath.c_str());
+
+    looper()->registerHandler(playbackSession);
+
+    AString uri;
+    data->getRequestField(1, &uri);
+
+    if (strncasecmp("rtsp://", uri.c_str(), 7)) {
+        sendErrorResponse(sessionID, "400 Bad Request", cseq);
+        return ERROR_MALFORMED;
+    }
+
+    if (!(uri.startsWith("rtsp://") && uri.endsWith("/wfd1.0/streamid=0"))) {
+        sendErrorResponse(sessionID, "404 Not found", cseq);
+        return ERROR_MALFORMED;
+    }
+
+    RTPSender::TransportMode rtcpMode = RTPSender::TRANSPORT_UDP;
+    if (clientRtcp < 0) {
+        rtcpMode = RTPSender::TRANSPORT_NONE;
+    }
+
+    status_t err = playbackSession->init(
+            mClientInfo.mRemoteIP.c_str(),
+            clientRtp,
+            rtpMode,
+            clientRtcp,
+            rtcpMode,
+            mSinkSupportsAudio,
+            mUsingPCMAudio,
+            mSinkSupportsVideo,
+            mChosenVideoResolutionType,
+            mChosenVideoResolutionIndex,
+            mChosenVideoProfile,
+            mChosenVideoLevel);
+
+    if (err != OK) {
+        looper()->unregisterHandler(playbackSession->id());
+        playbackSession.clear();
+    }
+
+    switch (err) {
+        case OK:
+            break;
+        case -ENOENT:
+            sendErrorResponse(sessionID, "404 Not Found", cseq);
+            return err;
+        default:
+            sendErrorResponse(sessionID, "403 Forbidden", cseq);
+            return err;
+    }
+
+    mClientInfo.mPlaybackSessionID = playbackSessionID;
+    mClientInfo.mPlaybackSession = playbackSession;
+
+    AString response = "RTSP/1.0 200 OK\r\n";
+    AppendCommonResponse(&response, cseq, playbackSessionID);
+
+    if (rtpMode == RTPSender::TRANSPORT_TCP_INTERLEAVED) {
+        response.append(
+                AStringPrintf(
+                    "Transport: RTP/AVP/TCP;interleaved=%d-%d;",
+                    clientRtp, clientRtcp));
+    } else {
+        int32_t serverRtp = playbackSession->getRTPPort();
+
+        AString transportString = "UDP";
+        if (rtpMode == RTPSender::TRANSPORT_TCP) {
+            transportString = "TCP";
+        }
+
+        if (clientRtcp >= 0) {
+            response.append(
+                    AStringPrintf(
+                        "Transport: RTP/AVP/%s;unicast;client_port=%d-%d;"
+                        "server_port=%d-%d\r\n",
+                        transportString.c_str(),
+                        clientRtp, clientRtcp, serverRtp, serverRtp + 1));
+        } else {
+            response.append(
+                    AStringPrintf(
+                        "Transport: RTP/AVP/%s;unicast;client_port=%d;"
+                        "server_port=%d\r\n",
+                        transportString.c_str(),
+                        clientRtp, serverRtp));
+        }
+    }
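+    // For example (port numbers are hypothetical): with rtpMode == TRANSPORT_UDP,
+    // clientRtp == 19000, clientRtcp == 19001 and serverRtp == 33000 this appends
+    //   Transport: RTP/AVP/UDP;unicast;client_port=19000-19001;server_port=33000-33001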
+
+    response.append("\r\n");
+
+    err = mNetSession->sendRequest(sessionID, response.c_str());
+
+    if (err != OK) {
+        return err;
+    }
+
+    mState = AWAITING_CLIENT_PLAY;
+
+    scheduleReaper();
+    scheduleKeepAlive(sessionID);
+
+    return OK;
+}
+
+status_t WifiDisplaySource::onPlayRequest(
+        int32_t sessionID,
+        int32_t cseq,
+        const sp<ParsedMessage> &data) {
+    int32_t playbackSessionID;
+    sp<PlaybackSession> playbackSession =
+        findPlaybackSession(data, &playbackSessionID);
+
+    if (playbackSession == NULL) {
+        sendErrorResponse(sessionID, "454 Session Not Found", cseq);
+        return ERROR_MALFORMED;
+    }
+
+    if (mState != AWAITING_CLIENT_PLAY
+     && mState != PAUSED_TO_PLAYING
+     && mState != PAUSED) {
+        ALOGW("Received PLAY request but we're in state %d", mState);
+
+        sendErrorResponse(
+                sessionID, "455 Method Not Valid in This State", cseq);
+
+        return INVALID_OPERATION;
+    }
+
+    ALOGI("Received PLAY request.");
+    if (mPlaybackSessionEstablished) {
+        finishPlay();
+    } else {
+        ALOGI("deferring PLAY request until session established.");
+    }
+
+    AString response = "RTSP/1.0 200 OK\r\n";
+    AppendCommonResponse(&response, cseq, playbackSessionID);
+    response.append("Range: npt=now-\r\n");
+    response.append("\r\n");
+
+    status_t err = mNetSession->sendRequest(sessionID, response.c_str());
+
+    if (err != OK) {
+        return err;
+    }
+
+    if (mState == PAUSED_TO_PLAYING || mPlaybackSessionEstablished) {
+        mState = PLAYING;
+        return OK;
+    }
+
+    CHECK_EQ(mState, AWAITING_CLIENT_PLAY);
+    mState = ABOUT_TO_PLAY;
+
+    return OK;
+}
+
+void WifiDisplaySource::finishPlay() {
+    const sp<PlaybackSession> &playbackSession =
+        mClientInfo.mPlaybackSession;
+
+    status_t err = playbackSession->play();
+    CHECK_EQ(err, (status_t)OK);
+}
+
+status_t WifiDisplaySource::onPauseRequest(
+        int32_t sessionID,
+        int32_t cseq,
+        const sp<ParsedMessage> &data) {
+    int32_t playbackSessionID;
+    sp<PlaybackSession> playbackSession =
+        findPlaybackSession(data, &playbackSessionID);
+
+    if (playbackSession == NULL) {
+        sendErrorResponse(sessionID, "454 Session Not Found", cseq);
+        return ERROR_MALFORMED;
+    }
+
+    ALOGI("Received PAUSE request.");
+
+    if (mState != PLAYING_TO_PAUSED && mState != PLAYING) {
+        return INVALID_OPERATION;
+    }
+
+    status_t err = playbackSession->pause();
+    CHECK_EQ(err, (status_t)OK);
+
+    AString response = "RTSP/1.0 200 OK\r\n";
+    AppendCommonResponse(&response, cseq, playbackSessionID);
+    response.append("\r\n");
+
+    err = mNetSession->sendRequest(sessionID, response.c_str());
+
+    if (err != OK) {
+        return err;
+    }
+
+    mState = PAUSED;
+
+    return err;
+}
+
+status_t WifiDisplaySource::onTeardownRequest(
+        int32_t sessionID,
+        int32_t cseq,
+        const sp<ParsedMessage> &data) {
+    ALOGI("Received TEARDOWN request.");
+
+    int32_t playbackSessionID;
+    sp<PlaybackSession> playbackSession =
+        findPlaybackSession(data, &playbackSessionID);
+
+    if (playbackSession == NULL) {
+        sendErrorResponse(sessionID, "454 Session Not Found", cseq);
+        return ERROR_MALFORMED;
+    }
+
+    AString response = "RTSP/1.0 200 OK\r\n";
+    AppendCommonResponse(&response, cseq, playbackSessionID);
+    response.append("Connection: close\r\n");
+    response.append("\r\n");
+
+    mNetSession->sendRequest(sessionID, response.c_str());
+
+    if (mState == AWAITING_CLIENT_TEARDOWN) {
+        CHECK(mStopReplyID != NULL);
+        finishStop();
+    } else {
+        mClient->onDisplayError(IRemoteDisplayClient::kDisplayErrorUnknown);
+    }
+
+    return OK;
+}
+
+void WifiDisplaySource::finishStop() {
+    ALOGV("finishStop");
+
+    mState = STOPPING;
+
+    disconnectClientAsync();
+}
+
+void WifiDisplaySource::finishStopAfterDisconnectingClient() {
+    ALOGV("finishStopAfterDisconnectingClient");
+
+    if (mHDCP != NULL) {
+        ALOGI("Initiating HDCP shutdown.");
+        mHDCP->shutdownAsync();
+        return;
+    }
+
+    finishStop2();
+}
+
+void WifiDisplaySource::finishStop2() {
+    ALOGV("finishStop2");
+
+    if (mHDCP != NULL) {
+        mHDCP->setObserver(NULL);
+        mHDCPObserver.clear();
+        mHDCP.clear();
+    }
+
+    if (mSessionID != 0) {
+        mNetSession->destroySession(mSessionID);
+        mSessionID = 0;
+    }
+
+    ALOGI("We're stopped.");
+    mState = STOPPED;
+
+    status_t err = OK;
+
+    sp<AMessage> response = new AMessage;
+    response->setInt32("err", err);
+    response->postReply(mStopReplyID);
+}
+
+status_t WifiDisplaySource::onGetParameterRequest(
+        int32_t sessionID,
+        int32_t cseq,
+        const sp<ParsedMessage> &data) {
+    int32_t playbackSessionID;
+    sp<PlaybackSession> playbackSession =
+        findPlaybackSession(data, &playbackSessionID);
+
+    if (playbackSession == NULL) {
+        sendErrorResponse(sessionID, "454 Session Not Found", cseq);
+        return ERROR_MALFORMED;
+    }
+
+    playbackSession->updateLiveness();
+
+    AString response = "RTSP/1.0 200 OK\r\n";
+    AppendCommonResponse(&response, cseq, playbackSessionID);
+    response.append("\r\n");
+
+    status_t err = mNetSession->sendRequest(sessionID, response.c_str());
+    return err;
+}
+
+status_t WifiDisplaySource::onSetParameterRequest(
+        int32_t sessionID,
+        int32_t cseq,
+        const sp<ParsedMessage> &data) {
+    int32_t playbackSessionID;
+    sp<PlaybackSession> playbackSession =
+        findPlaybackSession(data, &playbackSessionID);
+
+    if (playbackSession == NULL) {
+        sendErrorResponse(sessionID, "454 Session Not Found", cseq);
+        return ERROR_MALFORMED;
+    }
+
+    if (strstr(data->getContent(), "wfd_idr_request\r\n")) {
+        playbackSession->requestIDRFrame();
+    }
+
+    playbackSession->updateLiveness();
+
+    AString response = "RTSP/1.0 200 OK\r\n";
+    AppendCommonResponse(&response, cseq, playbackSessionID);
+    response.append("\r\n");
+
+    status_t err = mNetSession->sendRequest(sessionID, response.c_str());
+    return err;
+}
+
+// static
+void WifiDisplaySource::AppendCommonResponse(
+        AString *response, int32_t cseq, int32_t playbackSessionID) {
+    time_t now = time(NULL);
+    struct tm *now2 = gmtime(&now);
+    char buf[128];
+    strftime(buf, sizeof(buf), "%a, %d %b %Y %H:%M:%S %z", now2);
+
+    response->append("Date: ");
+    response->append(buf);
+    response->append("\r\n");
+
+    response->append(AStringPrintf("Server: %s\r\n", sUserAgent.c_str()));
+
+    if (cseq >= 0) {
+        response->append(AStringPrintf("CSeq: %d\r\n", cseq));
+    }
+
+    if (playbackSessionID >= 0ll) {
+        response->append(
+                AStringPrintf(
+                    "Session: %d;timeout=%lld\r\n",
+                    playbackSessionID, kPlaybackSessionTimeoutSecs));
+    }
+}
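+// As an example (the Date line depends on the current time and the Server line
+// on sUserAgent), AppendCommonResponse(&response, 2, 12345) appends:
+//   Date: Fri, 01 Jan 2021 00:00:00 +0000
+//   Server: <sUserAgent>
+//   CSeq: 2
+//   Session: 12345;timeout=30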
+
+void WifiDisplaySource::sendErrorResponse(
+        int32_t sessionID,
+        const char *errorDetail,
+        int32_t cseq) {
+    AString response;
+    response.append("RTSP/1.0 ");
+    response.append(errorDetail);
+    response.append("\r\n");
+
+    AppendCommonResponse(&response, cseq);
+
+    response.append("\r\n");
+
+    mNetSession->sendRequest(sessionID, response.c_str());
+}
+
+int32_t WifiDisplaySource::makeUniquePlaybackSessionID() const {
+    return rand();
+}
+
+sp<WifiDisplaySource::PlaybackSession> WifiDisplaySource::findPlaybackSession(
+        const sp<ParsedMessage> &data, int32_t *playbackSessionID) const {
+    if (!data->findInt32("session", playbackSessionID)) {
+        // XXX the older dongles do not always include a "Session:" header.
+        *playbackSessionID = mClientInfo.mPlaybackSessionID;
+        return mClientInfo.mPlaybackSession;
+    }
+
+    if (*playbackSessionID != mClientInfo.mPlaybackSessionID) {
+        return NULL;
+    }
+
+    return mClientInfo.mPlaybackSession;
+}
+
+void WifiDisplaySource::disconnectClientAsync() {
+    ALOGV("disconnectClient");
+
+    if (mClientInfo.mPlaybackSession == NULL) {
+        disconnectClient2();
+        return;
+    }
+
+    if (mClientInfo.mPlaybackSession != NULL) {
+        ALOGV("Destroying PlaybackSession");
+        mClientInfo.mPlaybackSession->destroyAsync();
+    }
+}
+
+void WifiDisplaySource::disconnectClient2() {
+    ALOGV("disconnectClient2");
+
+    if (mClientInfo.mPlaybackSession != NULL) {
+        looper()->unregisterHandler(mClientInfo.mPlaybackSession->id());
+        mClientInfo.mPlaybackSession.clear();
+    }
+
+    if (mClientSessionID != 0) {
+        mNetSession->destroySession(mClientSessionID);
+        mClientSessionID = 0;
+    }
+
+    mClient->onDisplayDisconnected();
+
+    finishStopAfterDisconnectingClient();
+}
+
+struct WifiDisplaySource::HDCPObserver : public BnHDCPObserver {
+    explicit HDCPObserver(const sp<AMessage> &notify);
+
+    virtual void notify(
+            int msg, int ext1, int ext2, const Parcel *obj);
+
+private:
+    sp<AMessage> mNotify;
+
+    DISALLOW_EVIL_CONSTRUCTORS(HDCPObserver);
+};
+
+WifiDisplaySource::HDCPObserver::HDCPObserver(
+        const sp<AMessage> &notify)
+    : mNotify(notify) {
+}
+
+void WifiDisplaySource::HDCPObserver::notify(
+        int msg, int ext1, int ext2, const Parcel * /* obj */) {
+    sp<AMessage> notify = mNotify->dup();
+    notify->setInt32("msg", msg);
+    notify->setInt32("ext1", ext1);
+    notify->setInt32("ext2", ext2);
+    notify->post();
+}
+
+status_t WifiDisplaySource::makeHDCP() {
+    sp<IServiceManager> sm = defaultServiceManager();
+    sp<IBinder> binder = sm->getService(String16("media.player"));
+
+    sp<IMediaPlayerService> service =
+        interface_cast<IMediaPlayerService>(binder);
+
+    CHECK(service != NULL);
+
+    mHDCP = service->makeHDCP(true /* createEncryptionModule */);
+
+    if (mHDCP == NULL) {
+        return ERROR_UNSUPPORTED;
+    }
+
+    sp<AMessage> notify = new AMessage(kWhatHDCPNotify, this);
+    mHDCPObserver = new HDCPObserver(notify);
+
+    status_t err = mHDCP->setObserver(mHDCPObserver);
+
+    if (err != OK) {
+        ALOGE("Failed to set HDCP observer.");
+
+        mHDCPObserver.clear();
+        mHDCP.clear();
+
+        return err;
+    }
+
+    ALOGI("Initiating HDCP negotiation w/ host %s:%d",
+            mClientInfo.mRemoteIP.c_str(), mHDCPPort);
+
+    err = mHDCP->initAsync(mClientInfo.mRemoteIP.c_str(), mHDCPPort);
+
+    if (err != OK) {
+        return err;
+    }
+
+    return OK;
+}
+
+}  // namespace android
+
diff --git a/media/libstagefright/wifi-display/source/WifiDisplaySource.h b/media/libstagefright/wifi-display/source/WifiDisplaySource.h
new file mode 100644
index 0000000..c25a675
--- /dev/null
+++ b/media/libstagefright/wifi-display/source/WifiDisplaySource.h
@@ -0,0 +1,278 @@
+/*
+ * Copyright 2012, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef WIFI_DISPLAY_SOURCE_H_
+
+#define WIFI_DISPLAY_SOURCE_H_
+
+#include "VideoFormats.h"
+
+#include <media/stagefright/foundation/AHandler.h>
+#include <media/stagefright/foundation/ANetworkSession.h>
+
+#include <netinet/in.h>
+
+#include <utils/String16.h>
+
+namespace android {
+
+struct AReplyToken;
+struct IHDCP;
+class IRemoteDisplayClient;
+struct ParsedMessage;
+
+// Represents the RTSP server acting as a wifi display source.
+// Manages incoming connections, sets up Playback sessions as necessary.
+struct WifiDisplaySource : public AHandler {
+    static const unsigned kWifiDisplayDefaultPort = 7236;
+
+    WifiDisplaySource(
+            const String16 &opPackageName,
+            const sp<ANetworkSession> &netSession,
+            const sp<IRemoteDisplayClient> &client,
+            const char *path = NULL);
+
+    status_t start(const char *iface);
+    status_t stop();
+
+    status_t pause();
+    status_t resume();
+
+protected:
+    virtual ~WifiDisplaySource();
+    virtual void onMessageReceived(const sp<AMessage> &msg);
+
+private:
+    struct PlaybackSession;
+    struct HDCPObserver;
+
+    enum State {
+        INITIALIZED,
+        AWAITING_CLIENT_CONNECTION,
+        AWAITING_CLIENT_SETUP,
+        AWAITING_CLIENT_PLAY,
+        ABOUT_TO_PLAY,
+        PLAYING,
+        PLAYING_TO_PAUSED,
+        PAUSED,
+        PAUSED_TO_PLAYING,
+        AWAITING_CLIENT_TEARDOWN,
+        STOPPING,
+        STOPPED,
+    };
+
+    enum {
+        kWhatStart,
+        kWhatRTSPNotify,
+        kWhatStop,
+        kWhatPause,
+        kWhatResume,
+        kWhatReapDeadClients,
+        kWhatPlaybackSessionNotify,
+        kWhatKeepAlive,
+        kWhatHDCPNotify,
+        kWhatFinishStop2,
+        kWhatTeardownTriggerTimedOut,
+    };
+
+    struct ResponseID {
+        int32_t mSessionID;
+        int32_t mCSeq;
+
+        bool operator<(const ResponseID &other) const {
+            return mSessionID < other.mSessionID
+                || (mSessionID == other.mSessionID
+                        && mCSeq < other.mCSeq);
+        }
+    };
+
+    typedef status_t (WifiDisplaySource::*HandleRTSPResponseFunc)(
+            int32_t sessionID, const sp<ParsedMessage> &msg);
+
+    static const int64_t kReaperIntervalUs = 1000000ll;
+
+    // We request that the dongle send us a "TEARDOWN" in order to
+    // perform an orderly shutdown. We're willing to wait up to 2 secs
+    // for this message to arrive; after that we'll force a disconnect
+    // instead.
+    static const int64_t kTeardownTriggerTimeouSecs = 2;
+
+    static const int64_t kPlaybackSessionTimeoutSecs = 30;
+
+    static const int64_t kPlaybackSessionTimeoutUs =
+        kPlaybackSessionTimeoutSecs * 1000000ll;
+
+    static const AString sUserAgent;
+
+    String16 mOpPackageName;
+
+    State mState;
+    VideoFormats mSupportedSourceVideoFormats;
+    sp<ANetworkSession> mNetSession;
+    sp<IRemoteDisplayClient> mClient;
+    AString mMediaPath;
+    struct in_addr mInterfaceAddr;
+    int32_t mSessionID;
+
+    sp<AReplyToken> mStopReplyID;
+
+    AString mWfdClientRtpPorts;
+    int32_t mChosenRTPPort;  // extracted from "wfd_client_rtp_ports"
+
+    bool mSinkSupportsVideo;
+    VideoFormats mSupportedSinkVideoFormats;
+
+    VideoFormats::ResolutionType mChosenVideoResolutionType;
+    size_t mChosenVideoResolutionIndex;
+    VideoFormats::ProfileType mChosenVideoProfile;
+    VideoFormats::LevelType mChosenVideoLevel;
+
+    bool mSinkSupportsAudio;
+
+    bool mUsingPCMAudio;
+    int32_t mClientSessionID;
+
+    struct ClientInfo {
+        AString mRemoteIP;
+        AString mLocalIP;
+        int32_t mLocalPort;
+        int32_t mPlaybackSessionID;
+        sp<PlaybackSession> mPlaybackSession;
+    };
+    ClientInfo mClientInfo;
+
+    bool mReaperPending;
+
+    int32_t mNextCSeq;
+
+    KeyedVector<ResponseID, HandleRTSPResponseFunc> mResponseHandlers;
+
+    // HDCP specific section >>>>
+    bool mUsingHDCP;
+    bool mIsHDCP2_0;
+    int32_t mHDCPPort;
+    sp<IHDCP> mHDCP;
+    sp<HDCPObserver> mHDCPObserver;
+
+    bool mHDCPInitializationComplete;
+    bool mSetupTriggerDeferred;
+
+    bool mPlaybackSessionEstablished;
+
+    status_t makeHDCP();
+    // <<<< HDCP specific section
+
+    status_t sendM1(int32_t sessionID);
+    status_t sendM3(int32_t sessionID);
+    status_t sendM4(int32_t sessionID);
+
+    enum TriggerType {
+        TRIGGER_SETUP,
+        TRIGGER_TEARDOWN,
+        TRIGGER_PAUSE,
+        TRIGGER_PLAY,
+    };
+
+    // M5
+    status_t sendTrigger(int32_t sessionID, TriggerType triggerType);
+
+    status_t sendM16(int32_t sessionID);
+
+    status_t onReceiveM1Response(
+            int32_t sessionID, const sp<ParsedMessage> &msg);
+
+    status_t onReceiveM3Response(
+            int32_t sessionID, const sp<ParsedMessage> &msg);
+
+    status_t onReceiveM4Response(
+            int32_t sessionID, const sp<ParsedMessage> &msg);
+
+    status_t onReceiveM5Response(
+            int32_t sessionID, const sp<ParsedMessage> &msg);
+
+    status_t onReceiveM16Response(
+            int32_t sessionID, const sp<ParsedMessage> &msg);
+
+    void registerResponseHandler(
+            int32_t sessionID, int32_t cseq, HandleRTSPResponseFunc func);
+
+    status_t onReceiveClientData(const sp<AMessage> &msg);
+
+    status_t onOptionsRequest(
+            int32_t sessionID,
+            int32_t cseq,
+            const sp<ParsedMessage> &data);
+
+    status_t onSetupRequest(
+            int32_t sessionID,
+            int32_t cseq,
+            const sp<ParsedMessage> &data);
+
+    status_t onPlayRequest(
+            int32_t sessionID,
+            int32_t cseq,
+            const sp<ParsedMessage> &data);
+
+    status_t onPauseRequest(
+            int32_t sessionID,
+            int32_t cseq,
+            const sp<ParsedMessage> &data);
+
+    status_t onTeardownRequest(
+            int32_t sessionID,
+            int32_t cseq,
+            const sp<ParsedMessage> &data);
+
+    status_t onGetParameterRequest(
+            int32_t sessionID,
+            int32_t cseq,
+            const sp<ParsedMessage> &data);
+
+    status_t onSetParameterRequest(
+            int32_t sessionID,
+            int32_t cseq,
+            const sp<ParsedMessage> &data);
+
+    void sendErrorResponse(
+            int32_t sessionID,
+            const char *errorDetail,
+            int32_t cseq);
+
+    static void AppendCommonResponse(
+            AString *response, int32_t cseq, int32_t playbackSessionID = -1ll);
+
+    void scheduleReaper();
+    void scheduleKeepAlive(int32_t sessionID);
+
+    int32_t makeUniquePlaybackSessionID() const;
+
+    sp<PlaybackSession> findPlaybackSession(
+            const sp<ParsedMessage> &data, int32_t *playbackSessionID) const;
+
+    void finishStop();
+    void disconnectClientAsync();
+    void disconnectClient2();
+    void finishStopAfterDisconnectingClient();
+    void finishStop2();
+
+    void finishPlay();
+
+    DISALLOW_EVIL_CONSTRUCTORS(WifiDisplaySource);
+};
+
+}  // namespace android
+
+#endif  // WIFI_DISPLAY_SOURCE_H_
diff --git a/media/libstagefright/xmlparser/api/current.txt b/media/libstagefright/xmlparser/api/current.txt
index 9d7c57d..16c8af8 100644
--- a/media/libstagefright/xmlparser/api/current.txt
+++ b/media/libstagefright/xmlparser/api/current.txt
@@ -138,7 +138,12 @@
 
   public class Variant {
     ctor public Variant();
+    method public java.util.List<media.codecs.Alias> getAlias_optional();
+    method public java.util.List<media.codecs.Quirk> getAttribute_optional();
+    method public java.util.List<media.codecs.Feature> getFeature_optional();
+    method public java.util.List<media.codecs.Limit> getLimit_optional();
     method public String getName();
+    method public java.util.List<media.codecs.Quirk> getQuirk_optional();
     method public void setName(String);
   }
 
diff --git a/media/libstagefright/xmlparser/media_codecs.xsd b/media/libstagefright/xmlparser/media_codecs.xsd
index 63ec5d0..3b5681f 100644
--- a/media/libstagefright/xmlparser/media_codecs.xsd
+++ b/media/libstagefright/xmlparser/media_codecs.xsd
@@ -107,6 +107,13 @@
         <xs:attribute name="value" type="xs:string"/>
     </xs:complexType>
     <xs:complexType name="Variant">
+        <xs:choice minOccurs="0" maxOccurs="unbounded">
+            <xs:element name="Quirk" type="Quirk"/>
+            <xs:element name="Attribute" type="Quirk"/>
+            <xs:element name="Alias" type="Alias"/>
+            <xs:element name="Limit" type="Limit"/>
+            <xs:element name="Feature" type="Feature"/>
+        </xs:choice>
         <xs:attribute name="name" type="xs:string"/>
     </xs:complexType>
     <xs:complexType name="Setting">
diff --git a/media/libwatchdog/Android.bp b/media/libwatchdog/Android.bp
new file mode 100644
index 0000000..2bdf172
--- /dev/null
+++ b/media/libwatchdog/Android.bp
@@ -0,0 +1,35 @@
+// Copyright 2020 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+cc_library {
+    name: "libwatchdog",
+    srcs: [
+        "Watchdog.cpp",
+    ],
+    export_include_dirs: ["include"],
+    shared_libs: [
+        "liblog",
+    ],
+    static_libs: [
+        "libbase",
+    ],
+    target: {
+        windows: {
+            enabled: false,
+        },
+        darwin: {
+            enabled: false,
+        },
+    },
+}
diff --git a/media/libwatchdog/Watchdog.cpp b/media/libwatchdog/Watchdog.cpp
new file mode 100644
index 0000000..bb012b9
--- /dev/null
+++ b/media/libwatchdog/Watchdog.cpp
@@ -0,0 +1,63 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "Watchdog"
+
+#include <watchdog/Watchdog.h>
+
+#include <android-base/logging.h>
+#include <android-base/threads.h>
+#include <signal.h>
+#include <time.h>
+#include <cstring>
+#include <utils/Log.h>
+
+namespace android {
+
+Watchdog::Watchdog(::std::chrono::steady_clock::duration timeout) {
+    // Create the timer.
+    struct sigevent sev;
+    sev.sigev_notify = SIGEV_THREAD_ID;
+    sev.sigev_notify_thread_id = base::GetThreadId();
+    sev.sigev_signo = SIGABRT;
+    sev.sigev_value.sival_ptr = &mTimerId;
+    int err = timer_create(CLOCK_MONOTONIC, &sev, &mTimerId);
+    if (err != 0) {
+        PLOG(FATAL) << "Failed to create timer";
+    }
+
+    // Start the timer.
+    struct itimerspec spec;
+    memset(&spec, 0, sizeof(spec));
+    auto ns = std::chrono::duration_cast<std::chrono::nanoseconds>(timeout);
+    LOG_ALWAYS_FATAL_IF(timeout.count() <= 0, "Duration must be positive");
+    spec.it_value.tv_sec = ns.count() / 1000000000;
+    spec.it_value.tv_nsec = ns.count() % 1000000000;
+    err = timer_settime(mTimerId, 0, &spec, nullptr);
+    if (err != 0) {
+        PLOG(FATAL) << "Failed to start timer";
+    }
+}
+
+Watchdog::~Watchdog() {
+    // Delete the timer.
+    int err = timer_delete(mTimerId);
+    if (err != 0) {
+        PLOG(FATAL) << "Failed to delete timer";
+    }
+}
+
+}  // namespace android
diff --git a/media/libwatchdog/include/watchdog/Watchdog.h b/media/libwatchdog/include/watchdog/Watchdog.h
new file mode 100644
index 0000000..2819f8a
--- /dev/null
+++ b/media/libwatchdog/include/watchdog/Watchdog.h
@@ -0,0 +1,49 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_WATCHDOG_H
+#define ANDROID_WATCHDOG_H
+
+#include <chrono>
+#include <time.h>
+
+namespace android {
+
+/*
+ * An RAII-style object, which would crash the process if a timeout expires
+ * before the object is destroyed.
+ * The calling thread would be sent a SIGABRT, which would typically result in
+ * a stack trace.
+ *
+ * Sample usage:
+ * {
+ *     Watchdog watchdog(std::chrono::milliseconds(10));
+ *     DoSomething();
+ * }
+ * // If we got here, the function completed in time.
+ */
+class Watchdog final {
+public:
+    Watchdog(std::chrono::steady_clock::duration timeout);
+    ~Watchdog();
+
+private:
+    timer_t mTimerId;
+};
+
+}  // namespace android
+
+#endif  // ANDROID_WATCHDOG_H
diff --git a/media/mtp/MtpServer.cpp b/media/mtp/MtpServer.cpp
index ca8cb78..dd69496 100644
--- a/media/mtp/MtpServer.cpp
+++ b/media/mtp/MtpServer.cpp
@@ -44,6 +44,7 @@
 #include "MtpStringBuffer.h"
 
 namespace android {
+static const int SN_EVENT_LOG_ID = 0x534e4554;
 
 static const MtpOperationCode kSupportedOperationCodes[] = {
     MTP_OPERATION_GET_DEVICE_INFO,
@@ -961,6 +962,17 @@
     if (!parseDateTime(modified, modifiedTime))
         modifiedTime = 0;
 
+    if ((strcmp(name, ".") == 0) || (strcmp(name, "..") == 0) ||
+        (strchr(name, '/') != NULL)) {
+        char errMsg[80];
+
+        snprintf(errMsg, sizeof(errMsg), "Invalid name: %s", (const char *) name);
+        ALOGE("%s (b/130656917)", errMsg);
+        android_errorWriteWithInfoLog(SN_EVENT_LOG_ID, "130656917", -1, errMsg,
+                                      strlen(errMsg));
+
+        return MTP_RESPONSE_INVALID_PARAMETER;
+    }
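+    // e.g. an object name of ".", ".." or "a/../b" is rejected here rather than
+    // being appended to 'path' below, where it could otherwise escape the
+    // intended destination directory (b/130656917).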
     if (path[path.size() - 1] != '/')
         path.append("/");
     path.append(name);
diff --git a/media/ndk/Android.bp b/media/ndk/Android.bp
index a3cabd8..9c7a630 100644
--- a/media/ndk/Android.bp
+++ b/media/ndk/Android.bp
@@ -110,10 +110,6 @@
         symbol_file: "libmediandk.map.txt",
         versions: ["29"],
     },
-
-    // Bug: http://b/124522995 libmediandk has linker errors when built with
-    // coverage
-    native_coverage: false,
 }
 
 llndk_library {
diff --git a/media/ndk/NdkImage.cpp b/media/ndk/NdkImage.cpp
index 1883f63..1145b7b 100644
--- a/media/ndk/NdkImage.cpp
+++ b/media/ndk/NdkImage.cpp
@@ -35,6 +35,7 @@
         int64_t timestamp, int32_t width, int32_t height, int32_t numPlanes) :
         mReader(reader), mFormat(format), mUsage(usage), mBuffer(buffer), mLockedBuffer(nullptr),
         mTimestamp(timestamp), mWidth(width), mHeight(height), mNumPlanes(numPlanes) {
+    LOG_FATAL_IF(reader == nullptr, "AImageReader shouldn't be null while creating AImage");
 }
 
 AImage::~AImage() {
@@ -57,14 +58,9 @@
     if (mIsClosed) {
         return;
     }
-    sp<AImageReader> reader = mReader.promote();
-    if (reader != nullptr) {
-        reader->releaseImageLocked(this, releaseFenceFd);
-    } else if (mBuffer != nullptr) {
-        LOG_ALWAYS_FATAL("%s: parent AImageReader closed without releasing image %p",
-                __FUNCTION__, this);
+    if (!mReader->mIsClosed) {
+        mReader->releaseImageLocked(this, releaseFenceFd);
     }
-
     // Should have been set to nullptr in releaseImageLocked
     // Set to nullptr here for extra safety only
     mBuffer = nullptr;
@@ -83,22 +79,12 @@
 
 void
 AImage::lockReader() const {
-    sp<AImageReader> reader = mReader.promote();
-    if (reader == nullptr) {
-        // Reader has been closed
-        return;
-    }
-    reader->mLock.lock();
+    mReader->mLock.lock();
 }
 
 void
 AImage::unlockReader() const {
-    sp<AImageReader> reader = mReader.promote();
-    if (reader == nullptr) {
-        // Reader has been closed
-        return;
-    }
-    reader->mLock.unlock();
+    mReader->mLock.unlock();
 }
 
 media_status_t
diff --git a/media/ndk/NdkImagePriv.h b/media/ndk/NdkImagePriv.h
index e0f16da..0e8cbcb 100644
--- a/media/ndk/NdkImagePriv.h
+++ b/media/ndk/NdkImagePriv.h
@@ -72,7 +72,7 @@
     uint32_t getJpegSize() const;
 
     // When the reader is closed, AImage will only accept the close API call
-    wp<AImageReader>           mReader;
+    const sp<AImageReader>     mReader;
     const int32_t              mFormat;
     const uint64_t             mUsage;  // AHARDWAREBUFFER_USAGE_* flags.
     BufferItem*                mBuffer;
diff --git a/media/ndk/NdkImageReader.cpp b/media/ndk/NdkImageReader.cpp
index baa4fc7..c0ceb3d 100644
--- a/media/ndk/NdkImageReader.cpp
+++ b/media/ndk/NdkImageReader.cpp
@@ -113,12 +113,12 @@
 
 void
 AImageReader::FrameListener::onFrameAvailable(const BufferItem& /*item*/) {
-    Mutex::Autolock _l(mLock);
     sp<AImageReader> reader = mReader.promote();
     if (reader == nullptr) {
         ALOGW("A frame is available after AImageReader closed!");
         return; // reader has been closed
     }
+    Mutex::Autolock _l(mLock);
     if (mListener.onImageAvailable == nullptr) {
         return; // No callback registered
     }
@@ -143,12 +143,12 @@
 
 void
 AImageReader::BufferRemovedListener::onBufferFreed(const wp<GraphicBuffer>& graphicBuffer) {
-    Mutex::Autolock _l(mLock);
     sp<AImageReader> reader = mReader.promote();
     if (reader == nullptr) {
         ALOGW("A frame is available after AImageReader closed!");
         return; // reader has been closed
     }
+    Mutex::Autolock _l(mLock);
     if (mListener.onBufferRemoved == nullptr) {
         return; // No callback registered
     }
@@ -272,6 +272,11 @@
       mFrameListener(new FrameListener(this)),
       mBufferRemovedListener(new BufferRemovedListener(this)) {}
 
+AImageReader::~AImageReader() {
+    Mutex::Autolock _l(mLock);
+    LOG_FATAL_IF(mIsClosed != true, "AImageReader not closed before destruction");
+}
+
 media_status_t
 AImageReader::init() {
     PublicFormat publicFormat = static_cast<PublicFormat>(mFormat);
@@ -347,8 +352,12 @@
     return AMEDIA_OK;
 }
 
-AImageReader::~AImageReader() {
+void AImageReader::close() {
     Mutex::Autolock _l(mLock);
+    if (mIsClosed) {
+        return;
+    }
+    mIsClosed = true;
     AImageReader_ImageListener nullListener = {nullptr, nullptr};
     setImageListenerLocked(&nullListener);
 
@@ -741,6 +750,7 @@
 void AImageReader_delete(AImageReader* reader) {
     ALOGV("%s", __FUNCTION__);
     if (reader != nullptr) {
+        reader->close();
         reader->decStrong((void*) AImageReader_delete);
     }
     return;
diff --git a/media/ndk/NdkImageReaderPriv.h b/media/ndk/NdkImageReaderPriv.h
index e328cb1..0779a71 100644
--- a/media/ndk/NdkImageReaderPriv.h
+++ b/media/ndk/NdkImageReaderPriv.h
@@ -76,6 +76,7 @@
     int32_t        getHeight()    const { return mHeight; };
     int32_t        getFormat()    const { return mFormat; };
     int32_t        getMaxImages() const { return mMaxImages; };
+    void           close();
 
   private:
 
@@ -134,7 +135,7 @@
 
       private:
         AImageReader_ImageListener mListener = {nullptr, nullptr};
-        wp<AImageReader>           mReader;
+        const wp<AImageReader>     mReader;
         Mutex                      mLock;
     };
     sp<FrameListener> mFrameListener;
@@ -149,7 +150,7 @@
 
        private:
         AImageReader_BufferRemovedListener mListener = {nullptr, nullptr};
-        wp<AImageReader>           mReader;
+        const wp<AImageReader>     mReader;
         Mutex                      mLock;
     };
     sp<BufferRemovedListener> mBufferRemovedListener;
@@ -165,6 +166,7 @@
     native_handle_t*           mWindowHandle = nullptr;
 
     List<AImage*>              mAcquiredImages;
+    bool                       mIsClosed = false;
 
     Mutex                      mLock;
 };
diff --git a/media/ndk/NdkMediaCodec.cpp b/media/ndk/NdkMediaCodec.cpp
index c23f19b..e041533 100644
--- a/media/ndk/NdkMediaCodec.cpp
+++ b/media/ndk/NdkMediaCodec.cpp
@@ -221,7 +221,13 @@
                          break;
                      }
 
-                     AMediaFormat *aMediaFormat = AMediaFormat_fromMsg(&format);
+                     // Here format is MediaCodec's internal copy of output format.
+                     // Make a copy since the client might modify it.
+                     sp<AMessage> copy;
+                     if (format != nullptr) {
+                         copy = format->dup();
+                     }
+                     AMediaFormat *aMediaFormat = AMediaFormat_fromMsg(&copy);
 
                      Mutex::Autolock _l(mCodec->mAsyncCallbackLock);
                      if (mCodec->mAsyncCallbackUserData != NULL
diff --git a/media/ndk/NdkMediaDrm.cpp b/media/ndk/NdkMediaDrm.cpp
index 85dbffe..cd5a23a 100644
--- a/media/ndk/NdkMediaDrm.cpp
+++ b/media/ndk/NdkMediaDrm.cpp
@@ -89,7 +89,7 @@
 };
 
 void DrmListener::notify(DrmPlugin::EventType eventType, int extra, const Parcel *obj) {
-    if (!mEventListener && !mExpirationUpdateListener && !mKeysChangeListener) {
+    if (!mEventListener || !mExpirationUpdateListener || !mKeysChangeListener) {
         ALOGE("No listeners are specified");
         return;
     }
diff --git a/media/utils/Android.bp b/media/utils/Android.bp
index 3adb40f..e2cd4e3 100644
--- a/media/utils/Android.bp
+++ b/media/utils/Android.bp
@@ -27,6 +27,7 @@
     ],
     shared_libs: [
         "libbinder",
+        "libcutils",
         "liblog",
         "libutils",
         "libmemunreachable",
diff --git a/media/utils/ServiceUtilities.cpp b/media/utils/ServiceUtilities.cpp
index b824212..bc8fff6 100644
--- a/media/utils/ServiceUtilities.cpp
+++ b/media/utils/ServiceUtilities.cpp
@@ -63,7 +63,10 @@
         uid_t uid, bool start) {
     // Okay to not track in app ops as audio server is us and if
     // device is rooted security model is considered compromised.
-    if (isAudioServerOrRootUid(uid)) return true;
+    // system_server loses its RECORD_AUDIO permission when a secondary
+    // user is active, but it is a core system service so let it through.
+    // TODO(b/141210120): UserManager.DISALLOW_RECORD_AUDIO should not affect system user 0
+    if (isAudioServerOrSystemServerOrRootUid(uid)) return true;
 
     // We specify a pid and uid here as mediaserver (aka MediaRecorder or StageFrightRecorder)
     // may open a record track on behalf of a client.  Note that pid may be a tid.
diff --git a/media/utils/TimeCheck.cpp b/media/utils/TimeCheck.cpp
index 59cf4ef..f16776f 100644
--- a/media/utils/TimeCheck.cpp
+++ b/media/utils/TimeCheck.cpp
@@ -82,10 +82,10 @@
         if (waitTimeNs > 0) {
             status = mCond.waitRelative(mMutex, waitTimeNs);
         }
-    }
-    if (status != NO_ERROR) {
-        LOG_EVENT_STRING(LOGTAG_AUDIO_BINDER_TIMEOUT, tag);
-        LOG_ALWAYS_FATAL("TimeCheck timeout for %s", tag);
+        if (status != NO_ERROR) {
+            LOG_EVENT_STRING(LOGTAG_AUDIO_BINDER_TIMEOUT, tag);
+            LOG_ALWAYS_FATAL("TimeCheck timeout for %s", tag);
+        }
     }
     return true;
 }
diff --git a/media/utils/include/mediautils/ServiceUtilities.h b/media/utils/include/mediautils/ServiceUtilities.h
index 2a6e609..e1089d5 100644
--- a/media/utils/include/mediautils/ServiceUtilities.h
+++ b/media/utils/include/mediautils/ServiceUtilities.h
@@ -58,6 +58,12 @@
     return multiuser_get_app_id(uid) == AID_SYSTEM || uid == AID_AUDIOSERVER;
 }
 
+// used for calls that should come from system_server or audio_server and
+// include AID_ROOT for command-line tests.
+static inline bool isAudioServerOrSystemServerOrRootUid(uid_t uid) {
+    return multiuser_get_app_id(uid) == AID_SYSTEM || uid == AID_AUDIOSERVER || uid == AID_ROOT;
+}
+
 // Mediaserver may forward the client PID and UID as part of a binder interface call;
 // otherwise the calling UID must be equal to the client UID.
 static inline bool isAudioServerOrMediaServerUid(uid_t uid) {
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index d38190d..8bbdc69 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -810,7 +810,33 @@
                     continue;
                 }
 
-                size_t frameCount = std::lcm(thread->frameCount(), secondaryThread->frameCount());
+                size_t sourceFrameCount = thread->frameCount() * output.sampleRate
+                                          / thread->sampleRate();
+                size_t sinkFrameCount = secondaryThread->frameCount() * output.sampleRate
+                                          / secondaryThread->sampleRate();
+                // If the secondary output has just been opened, the first secondaryThread write
+                // will not block as it will fill the empty startup buffer of the HAL,
+                // so a second sink buffer needs to be ready for the immediate next blocking write.
+                // Additionally, have a margin of one main thread buffer as the scheduling jitter
+                // can reorder the writes (e.g. if threads A & B have the same write interval,
+                // the scheduler could schedule AB...BA)
+                size_t frameCountToBeReady = 2 * sinkFrameCount + sourceFrameCount;
+                // The total secondary output buffer must be at least as large as the
+                // frames read plus a margin of a few buffers on both sides in case the
+                // thread scheduling has some jitter.
+                // That value should not impact latency as the secondary track is started before
+                // its buffer is full, see frameCountToBeReady.
+                size_t frameCount = frameCountToBeReady + 2 * (sourceFrameCount + sinkFrameCount);
+                // The frameCount should also not be smaller than the secondary thread min frame
+                // count
+                size_t minFrameCount = AudioSystem::calculateMinFrameCount(
+                            [&] { Mutex::Autolock _l(secondaryThread->mLock);
+                                  return secondaryThread->latency_l(); }(),
+                            secondaryThread->mNormalFrameCount,
+                            secondaryThread->mSampleRate,
+                            output.sampleRate,
+                            input.speed);
+                frameCount = std::max(frameCount, minFrameCount);
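+                // Rough example with made-up values: if thread->frameCount() == 960,
+                // secondaryThread->frameCount() == 1024 and all sample rates are equal,
+                // then sourceFrameCount = 960, sinkFrameCount = 1024,
+                // frameCountToBeReady = 2 * 1024 + 960 = 3008 and
+                // frameCount = 3008 + 2 * (960 + 1024) = 6976, before applying minFrameCount.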
 
                 using namespace std::chrono_literals;
                 auto inChannelMask = audio_channel_mask_out_to_in(input.config.channel_mask);
@@ -843,7 +869,8 @@
                                                                patchRecord->buffer(),
                                                                patchRecord->bufferSize(),
                                                                outputFlags,
-                                                               0ns /* timeout */);
+                                                               0ns /* timeout */,
+                                                               frameCountToBeReady);
                 status = patchTrack->initCheck();
                 if (status != NO_ERROR) {
                     ALOGE("Secondary output patchTrack init failed: %d", status);
diff --git a/services/audioflinger/Effects.cpp b/services/audioflinger/Effects.cpp
index 3c4fbba..13152d0 100644
--- a/services/audioflinger/Effects.cpp
+++ b/services/audioflinger/Effects.cpp
@@ -24,6 +24,7 @@
 #include "Configuration.h"
 #include <utils/Log.h>
 #include <system/audio_effects/effect_aec.h>
+#include <system/audio_effects/effect_dynamicsprocessing.h>
 #include <system/audio_effects/effect_ns.h>
 #include <system/audio_effects/effect_visualizer.h>
 #include <audio_utils/channels.h>
@@ -2569,7 +2570,8 @@
     if ((mSessionId == AUDIO_SESSION_OUTPUT_MIX) &&
         (((desc.flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_AUXILIARY) ||
          (memcmp(&desc.type, SL_IID_VISUALIZATION, sizeof(effect_uuid_t)) == 0) ||
-         (memcmp(&desc.type, SL_IID_VOLUME, sizeof(effect_uuid_t)) == 0))) {
+         (memcmp(&desc.type, SL_IID_VOLUME, sizeof(effect_uuid_t)) == 0) ||
+         (memcmp(&desc.type, SL_IID_DYNAMICSPROCESSING, sizeof(effect_uuid_t)) == 0))) {
         return false;
     }
     return true;
diff --git a/services/audioflinger/PlaybackTracks.h b/services/audioflinger/PlaybackTracks.h
index a093893..17adba5 100644
--- a/services/audioflinger/PlaybackTracks.h
+++ b/services/audioflinger/PlaybackTracks.h
@@ -74,7 +74,10 @@
                                 uid_t uid,
                                 audio_output_flags_t flags,
                                 track_type type,
-                                audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE);
+                                audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE,
+                                /** Default behaviour is to start when there are as many frames
+                                  * ready as possible (i.e. the buffer is full). */
+                                size_t frameCountToBeReady = SIZE_MAX);
     virtual             ~Track();
     virtual status_t    initCheck() const;
 
@@ -263,6 +266,8 @@
     };
     sp<AudioVibrationController> mAudioVibrationController;
     sp<os::ExternalVibration>    mExternalVibration;
+    /** How many frames should be in the buffer before the track is considered ready */
+    const size_t        mFrameCountToBeReady;
 
 private:
     void                interceptBuffer(const AudioBufferProvider::Buffer& buffer);
@@ -384,7 +389,11 @@
                                    void *buffer,
                                    size_t bufferSize,
                                    audio_output_flags_t flags,
-                                   const Timeout& timeout = {});
+                                   const Timeout& timeout = {},
+                                   size_t frameCountToBeReady = 1 /** Default behaviour is to start
+                                                                    *  as soon as possible to have
+                                                                    *  the lowest possible latency
+                                                                    *  even if it might glitch. */);
     virtual             ~PatchTrack();
 
     virtual status_t    start(AudioSystem::sync_event_t event =
@@ -402,5 +411,4 @@
 
 private:
             void restartIfDisabled();
-
 };  // end of PatchTrack
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index 1e33057..eb299f2 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -3956,6 +3956,32 @@
     return INVALID_OPERATION;
 }
 
+// For dedicated VoIP outputs, let the HAL apply the stream volume. Track volume is
+// still applied by the mixer.
+// All tracks attached to a mixer with flag VOIP_RX are tied to the same
+// stream type STREAM_VOICE_CALL, so this will only change the HAL volume once even
+// if more than one track is active.
+status_t AudioFlinger::PlaybackThread::handleVoipVolume_l(float *volume)
+{
+    status_t result = NO_ERROR;
+    if ((mOutput->flags & AUDIO_OUTPUT_FLAG_VOIP_RX) != 0) {
+        if (*volume != mLeftVolFloat) {
+            result = mOutput->stream->setVolume(*volume, *volume);
+            ALOGE_IF(result != OK,
+                     "Error when setting output stream volume: %d", result);
+            if (result == NO_ERROR) {
+                mLeftVolFloat = *volume;
+            }
+        }
+        // if stream volume was successfully sent to the HAL, mLeftVolFloat == v here and we
+        // remove stream volume contribution from software volume.
+        if (mLeftVolFloat == *volume) {
+            *volume = 1.0f;
+        }
+    }
+    return result;
+}
+
 status_t AudioFlinger::MixerThread::createAudioPatch_l(const struct audio_patch *patch,
                                                           audio_patch_handle_t *handle)
 {
@@ -4758,22 +4784,25 @@
                     // no acknowledgement required for newly active tracks
                 }
                 sp<AudioTrackServerProxy> proxy = track->mAudioTrackServerProxy;
+                float volume;
+                if (track->isPlaybackRestricted() || mStreamTypes[track->streamType()].mute) {
+                    volume = 0.f;
+                } else {
+                    volume = masterVolume * mStreamTypes[track->streamType()].volume;
+                }
+
+                handleVoipVolume_l(&volume);
+
                 // cache the combined master volume and stream type volume for fast mixer; this
                 // lacks any synchronization or barrier so VolumeProvider may read a stale value
                 const float vh = track->getVolumeHandler()->getVolume(
-                        proxy->framesReleased()).first;
-                float volume;
-                if (track->isPlaybackRestricted()) {
-                    volume = 0.f;
-                } else {
-                    volume = masterVolume
-                        * mStreamTypes[track->streamType()].volume
-                        * vh;
-                }
+                    proxy->framesReleased()).first;
+                volume *= vh;
                 track->mCachedVolume = volume;
                 gain_minifloat_packed_t vlr = proxy->getVolumeLR();
                 float vlf = volume * float_from_gain(gain_minifloat_unpack_left(vlr));
                 float vrf = volume * float_from_gain(gain_minifloat_unpack_right(vlr));
+
                 track->setFinalVolume((vlf + vrf) / 2.f);
                 ++fastTracks;
             } else {
@@ -4916,20 +4945,22 @@
             uint32_t vl, vr;       // in U8.24 integer format
             float vlf, vrf, vaf;   // in [0.0, 1.0] float format
             // read original volumes with volume control
-            float typeVolume = mStreamTypes[track->streamType()].volume;
-            float v = masterVolume * typeVolume;
+            float v = masterVolume * mStreamTypes[track->streamType()].volume;
             // Always fetch volumeshaper volume to ensure state is updated.
             const sp<AudioTrackServerProxy> proxy = track->mAudioTrackServerProxy;
             const float vh = track->getVolumeHandler()->getVolume(
                     track->mAudioTrackServerProxy->framesReleased()).first;
 
-            if (track->isPausing() || mStreamTypes[track->streamType()].mute
-                    || track->isPlaybackRestricted()) {
+            if (mStreamTypes[track->streamType()].mute || track->isPlaybackRestricted()) {
+                v = 0;
+            }
+
+            handleVoipVolume_l(&v);
+
+            if (track->isPausing()) {
                 vl = vr = 0;
                 vlf = vrf = vaf = 0.;
-                if (track->isPausing()) {
-                    track->setPaused();
-                }
+                track->setPaused();
             } else {
                 gain_minifloat_packed_t vlr = proxy->getVolumeLR();
                 vlf = float_from_gain(gain_minifloat_unpack_left(vlr));
@@ -4981,25 +5012,6 @@
                 track->mHasVolumeController = false;
             }
 
-            // For dedicated VoIP outputs, let the HAL apply the stream volume. Track volume is
-            // still applied by the mixer.
-            if ((mOutput->flags & AUDIO_OUTPUT_FLAG_VOIP_RX) != 0) {
-                v = mStreamTypes[track->streamType()].mute ? 0.0f : v;
-                if (v != mLeftVolFloat) {
-                    status_t result = mOutput->stream->setVolume(v, v);
-                    ALOGE_IF(result != OK, "Error when setting output stream volume: %d", result);
-                    if (result == OK) {
-                        mLeftVolFloat = v;
-                    }
-                }
-                // if stream volume was successfully sent to the HAL, mLeftVolFloat == v here and we
-                // remove stream volume contribution from software volume.
-                if (v != 0.0f && mLeftVolFloat == v) {
-                   vlf = min(1.0f, vlf / v);
-                   vrf = min(1.0f, vrf / v);
-                   vaf = min(1.0f, vaf / v);
-               }
-            }
             // XXX: these things DON'T need to be done each time
             mAudioMixer->setBufferProvider(trackId, track);
             mAudioMixer->enable(trackId);
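
A minimal sketch (not the actual AudioFlinger code path) of the volume composition order
after this refactor, with hypothetical names and inputs: mute/restriction checks first,
then the VOIP_RX delegation to the HAL, then the VolumeShaper contribution in software:

    // Illustrative only; the real code operates on Track and AudioTrackServerProxy state.
    float composeTrackVolume(bool restricted, bool streamMuted, float masterVolume,
                             float streamVolume, float shaperVolume, bool voipRxOutput,
                             float *halVolume /* out: volume the HAL should apply */) {
        float v = (restricted || streamMuted) ? 0.f : masterVolume * streamVolume;
        if (voipRxOutput) {
            *halVolume = v;   // the HAL applies the stream volume on VOIP_RX outputs
            v = 1.f;          // so remove that contribution from the software path
        } else {
            *halVolume = 1.f;
        }
        return v * shaperVolume; // VolumeShaper volume stays applied in software
    }
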
diff --git a/services/audioflinger/Threads.h b/services/audioflinger/Threads.h
index 30063b2..df3b9d1 100644
--- a/services/audioflinger/Threads.h
+++ b/services/audioflinger/Threads.h
@@ -747,6 +747,7 @@
                 // is safe to do so. That will drop the final ref count and destroy the tracks.
     virtual     mixer_state prepareTracks_l(Vector< sp<Track> > *tracksToRemove) = 0;
                 void        removeTracks_l(const Vector< sp<Track> >& tracksToRemove);
+                status_t    handleVoipVolume_l(float *volume);
 
     // StreamOutHalInterfaceCallback implementation
     virtual     void        onWriteReady();
diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp
index 2a5a713..51e57b5 100644
--- a/services/audioflinger/Tracks.cpp
+++ b/services/audioflinger/Tracks.cpp
@@ -511,7 +511,8 @@
             uid_t uid,
             audio_output_flags_t flags,
             track_type type,
-            audio_port_handle_t portId)
+            audio_port_handle_t portId,
+            size_t frameCountToBeReady)
     :   TrackBase(thread, client, attr, sampleRate, format, channelMask, frameCount,
                   (sharedBuffer != 0) ? sharedBuffer->pointer() : buffer,
                   (sharedBuffer != 0) ? sharedBuffer->size() : bufferSize,
@@ -530,6 +531,7 @@
     mVolumeHandler(new media::VolumeHandler(sampleRate)),
     mOpPlayAudioMonitor(OpPlayAudioMonitor::createIfNeeded(uid, attr, id(), streamType)),
     // mSinkTimestamp
+    mFrameCountToBeReady(frameCountToBeReady),
     mFastIndex(-1),
     mCachedVolume(1.0),
     /* The track might not play immediately after being active, similarly as if its volume was 0.
@@ -837,7 +839,7 @@
     auto spent = ceil<std::chrono::microseconds>(std::chrono::steady_clock::now() - start);
     using namespace std::chrono_literals;
     // Average is ~20us per track, this should virtually never be logged (Logging takes >200us)
-    ALOGD_IF(spent > 200us, "%s: took %lldus to intercept %zu tracks", __func__,
+    ALOGD_IF(spent > 500us, "%s: took %lldus to intercept %zu tracks", __func__,
              spent.count(), mTeePatches.size());
 }
 
@@ -910,8 +912,12 @@
         return true;
     }
 
-    if (framesReady() >= mServerProxy->getBufferSizeInFrames() ||
-            (mCblk->mFlags & CBLK_FORCEREADY)) {
+    size_t bufferSizeInFrames = mServerProxy->getBufferSizeInFrames();
+    size_t framesToBeReady = std::min(mFrameCountToBeReady, bufferSizeInFrames);
+
+    if (framesReady() >= framesToBeReady || (mCblk->mFlags & CBLK_FORCEREADY)) {
+        ALOGV("%s(%d): consider track ready with %zu/%zu, target was %zu)",
+              __func__, mId, framesReady(), bufferSizeInFrames, framesToBeReady);
         mFillingUpStatus = FS_FILLED;
         android_atomic_and(~CBLK_FORCEREADY, &mCblk->mFlags);
         return true;
@@ -1413,6 +1419,7 @@
 
 void AudioFlinger::PlaybackThread::Track::disable()
 {
+    // TODO(b/142394888): the filling status should also be reset to filling
     signalClientFlag(CBLK_DISABLED);
 }
 
@@ -1790,12 +1797,14 @@
                                                      void *buffer,
                                                      size_t bufferSize,
                                                      audio_output_flags_t flags,
-                                                     const Timeout& timeout)
+                                                     const Timeout& timeout,
+                                                     size_t frameCountToBeReady)
     :   Track(playbackThread, NULL, streamType,
               audio_attributes_t{} /* currently unused for patch track */,
               sampleRate, format, channelMask, frameCount,
               buffer, bufferSize, nullptr /* sharedBuffer */,
-              AUDIO_SESSION_NONE, getpid(), AID_AUDIOSERVER, flags, TYPE_PATCH),
+              AUDIO_SESSION_NONE, getpid(), AID_AUDIOSERVER, flags, TYPE_PATCH,
+              AUDIO_PORT_HANDLE_NONE, frameCountToBeReady),
         PatchTrackBase(new ClientProxy(mCblk, mBuffer, frameCount, mFrameSize, true, true),
                        *playbackThread, timeout)
 {
@@ -1869,7 +1878,6 @@
 {
     mProxy->releaseBuffer(buffer);
     restartIfDisabled();
-    android_atomic_or(CBLK_FORCEREADY, &mCblk->mFlags);
 }
 
 void AudioFlinger::PlaybackThread::PatchTrack::restartIfDisabled()
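
The new readiness rule amounts to clamping the requested target to the buffer size, so the
SIZE_MAX default preserves the previous "wait until the buffer is full" behaviour. A
standalone sketch with illustrative numbers:

    #include <algorithm>
    #include <cstddef>
    #include <cstdint>

    // Sketch of the threshold used by Track::isReady(); values are illustrative only.
    static bool trackReady(size_t framesReady, size_t frameCountToBeReady,
                           size_t bufferSizeInFrames) {
        const size_t framesToBeReady = std::min(frameCountToBeReady, bufferSizeInFrames);
        return framesReady >= framesToBeReady;
    }
    // trackReady(3000, 2880, 6720)     -> true  (patch track with an explicit target)
    // trackReady(3000, SIZE_MAX, 6720) -> false (default: wait for a full buffer)
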
diff --git a/services/audiopolicy/common/include/policy.h b/services/audiopolicy/common/include/policy.h
index 605fc1c..6c64d6b 100644
--- a/services/audiopolicy/common/include/policy.h
+++ b/services/audiopolicy/common/include/policy.h
@@ -33,7 +33,7 @@
 
 // For mixed output and inputs, the policy will use max mixer sampling rates.
 // Do not limit sampling rate otherwise
-#define SAMPLE_RATE_HZ_MAX 192000
+#define SAMPLE_RATE_HZ_MAX 384000
 
 // Used when a client opens a capture stream, without specifying a desired sample rate.
 #define SAMPLE_RATE_HZ_DEFAULT 48000
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
index cd54085..2309f71 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
@@ -287,6 +287,7 @@
 
     DeviceVector mDevices; /**< current devices this output is routed to */
     wp<AudioPolicyMix> mPolicyMix;  // non NULL when used by a dynamic policy
+    audio_io_handle_t mIoHandle;           // output handle
 
 protected:
     const sp<AudioPort> mPort;
@@ -387,7 +388,6 @@
     DeviceVector filterSupportedDevices(const DeviceVector &devices) const;
 
     const sp<IOProfile> mProfile;          // I/O profile this output derives from
-    audio_io_handle_t mIoHandle;           // output handle
     uint32_t mLatency;                  //
     audio_output_flags_t mFlags;   //
     sp<SwAudioOutputDescriptor> mOutput1;    // used by duplicated outputs: first output
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
index 8a60cf2..96f685e 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
@@ -36,7 +36,7 @@
 
 AudioOutputDescriptor::AudioOutputDescriptor(const sp<AudioPort>& port,
                                              AudioPolicyClientInterface *clientInterface)
-    : mPort(port), mClientInterface(clientInterface)
+    : mIoHandle(AUDIO_IO_HANDLE_NONE), mPort(port), mClientInterface(clientInterface)
 {
     if (mPort.get() != nullptr) {
         mPort->pickAudioProfile(mSamplingRate, mChannelMask, mFormat);
@@ -254,7 +254,7 @@
 SwAudioOutputDescriptor::SwAudioOutputDescriptor(const sp<IOProfile>& profile,
                                                  AudioPolicyClientInterface *clientInterface)
     : AudioOutputDescriptor(profile, clientInterface),
-    mProfile(profile), mIoHandle(AUDIO_IO_HANDLE_NONE), mLatency(0),
+    mProfile(profile), mLatency(0),
     mFlags((audio_output_flags_t)0),
     mOutput1(0), mOutput2(0), mDirectOpenCount(0),
     mDirectClientSession(AUDIO_SESSION_NONE)
diff --git a/services/audiopolicy/common/managerdefinitions/src/HwModule.cpp b/services/audiopolicy/common/managerdefinitions/src/HwModule.cpp
index 96a8337..1f9b725 100644
--- a/services/audiopolicy/common/managerdefinitions/src/HwModule.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/HwModule.cpp
@@ -333,9 +333,10 @@
             if (encodedFormat != AUDIO_FORMAT_DEFAULT) {
                 moduleDevice->setEncodedFormat(encodedFormat);
             }
-            moduleDevice->setAddress(devAddress);
             if (allowToCreate) {
                 moduleDevice->attach(hwModule);
+                moduleDevice->setAddress(devAddress);
+                moduleDevice->setName(String8(name));
             }
             return moduleDevice;
         }
diff --git a/services/audiopolicy/engine/common/include/EngineBase.h b/services/audiopolicy/engine/common/include/EngineBase.h
index cedc78f..a511ad8 100644
--- a/services/audiopolicy/engine/common/include/EngineBase.h
+++ b/services/audiopolicy/engine/common/include/EngineBase.h
@@ -38,6 +38,11 @@
 
     audio_mode_t getPhoneState() const override { return mPhoneState; }
 
+    void setDpConnAndAllowedForVoice(bool connAndAllowed) override
+    {
+        mDpConnAndAllowedForVoice = connAndAllowed;
+    }
+
     status_t setForceUse(audio_policy_force_use_t usage, audio_policy_forced_cfg_t config) override
     {
         mForceUse[usage] = config;
@@ -106,6 +111,11 @@
         return is_state_in_call(getPhoneState());
     }
 
+    inline bool getDpConnAndAllowedForVoice() const
+    {
+        return mDpConnAndAllowedForVoice;
+    }
+
     VolumeSource toVolumeSource(audio_stream_type_t stream) const
     {
         return static_cast<VolumeSource>(getVolumeGroupForStreamType(stream));
@@ -121,6 +131,8 @@
     ProductStrategyMap mProductStrategies;
     VolumeGroupMap mVolumeGroups;
     audio_mode_t mPhoneState = AUDIO_MODE_NORMAL;  /**< current phone state. */
+    /** True if display-port is connected and can be used for voice/VoIP use cases. */
+    bool mDpConnAndAllowedForVoice = false;
 
     /** current forced use configuration. */
     audio_policy_forced_cfg_t mForceUse[AUDIO_POLICY_FORCE_USE_CNT] = {};
diff --git a/services/audiopolicy/engine/interface/AudioPolicyManagerInterface.h b/services/audiopolicy/engine/interface/AudioPolicyManagerInterface.h
index b7fd031..fafed02 100644
--- a/services/audiopolicy/engine/interface/AudioPolicyManagerInterface.h
+++ b/services/audiopolicy/engine/interface/AudioPolicyManagerInterface.h
@@ -73,6 +73,14 @@
     virtual audio_mode_t getPhoneState() const = 0;
 
     /**
+     * Set whether display-port is connected and allowed to be used
+     * for voice use cases.
+     *
+     * @param[in] connAndAllowed true if display-port is connected and can be used for voice
+     */
+    virtual void setDpConnAndAllowedForVoice(bool connAndAllowed) = 0;
+
+    /**
      * Set Force Use config for a given usage.
      *
      * @param[in] usage for which a configuration shall be forced.
diff --git a/services/audiopolicy/enginedefault/src/Engine.cpp b/services/audiopolicy/enginedefault/src/Engine.cpp
index 04170ac..e3e2dc9 100644
--- a/services/audiopolicy/enginedefault/src/Engine.cpp
+++ b/services/audiopolicy/enginedefault/src/Engine.cpp
@@ -254,6 +254,10 @@
             if (device) break;
             device = availableOutputDevicesType & AUDIO_DEVICE_OUT_USB_DEVICE;
             if (device) break;
+            if (getDpConnAndAllowedForVoice() && isInCall()) {
+                device = availableOutputDevicesType & AUDIO_DEVICE_OUT_AUX_DIGITAL;
+                if (device) break;
+            }
             if (!isInCall()) {
                 device = availableOutputDevicesType & AUDIO_DEVICE_OUT_USB_ACCESSORY;
                 if (device) break;
@@ -352,6 +356,15 @@
                 }
             }
         }
+        // if display-port is connected and being used in voice usecase,
+        // play ringtone over speaker and display-port
+        if ((strategy == STRATEGY_SONIFICATION) && getDpConnAndAllowedForVoice()) {
+            uint32_t device2 = availableOutputDevicesType & AUDIO_DEVICE_OUT_AUX_DIGITAL;
+            if (device2 != AUDIO_DEVICE_NONE) {
+                device |= device2;
+                break;
+            }
+        }
         // The second device used for sonification is the same as the device used by media strategy
         FALLTHROUGH_INTENDED;
 
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index 1d4dacd..6425590 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -4234,7 +4234,7 @@
 
 // Treblized audio policy xml config will be located in /odm/etc or /vendor/etc.
 static const char *kConfigLocationList[] =
-        {"/odm/etc", "/vendor/etc", "/system/etc"};
+        {"/odm/etc", "/vendor/etc/audio", "/vendor/etc", "/system/etc"};
 static const int kConfigLocationListSize =
         (sizeof(kConfigLocationList) / sizeof(kConfigLocationList[0]));
 
@@ -5535,7 +5535,10 @@
             patchBuilder.addSink(filteredDevice);
         }
 
-        installPatch(__func__, patchHandle, outputDesc.get(), patchBuilder.patch(), delayMs);
+        // Add half the reported latency to delayMs when muteWaitMs is zero in order
+        // to avoid a disordered sequence of muting volume and changing devices.
+        installPatch(__func__, patchHandle, outputDesc.get(), patchBuilder.patch(),
+                muteWaitMs == 0 ? (delayMs + (outputDesc->latency() / 2)) : delayMs);
     }
 
     // update stream volumes according to new device
@@ -5690,8 +5693,9 @@
     const auto ringVolumeSrc = toVolumeSource(AUDIO_STREAM_RING);
     const auto musicVolumeSrc = toVolumeSource(AUDIO_STREAM_MUSIC);
     const auto alarmVolumeSrc = toVolumeSource(AUDIO_STREAM_ALARM);
+    const auto a11yVolumeSrc = toVolumeSource(AUDIO_STREAM_ACCESSIBILITY);
 
-    if (volumeSource == toVolumeSource(AUDIO_STREAM_ACCESSIBILITY)
+    if (volumeSource == a11yVolumeSrc
             && (AUDIO_MODE_RINGTONE == mEngine->getPhoneState()) &&
             mOutputs.isActive(ringVolumeSrc, 0)) {
         auto &ringCurves = getVolumeCurves(AUDIO_STREAM_RING);
@@ -5708,7 +5712,7 @@
              volumeSource == toVolumeSource(AUDIO_STREAM_NOTIFICATION) ||
              volumeSource == toVolumeSource(AUDIO_STREAM_ENFORCED_AUDIBLE) ||
              volumeSource == toVolumeSource(AUDIO_STREAM_DTMF) ||
-             volumeSource == toVolumeSource(AUDIO_STREAM_ACCESSIBILITY))) {
+             volumeSource == a11yVolumeSrc)) {
         auto &voiceCurves = getVolumeCurves(callVolumeSrc);
         int voiceVolumeIndex = voiceCurves.getVolumeIndex(device);
         const float maxVoiceVolDb =
@@ -5720,7 +5724,9 @@
         // VOICE_CALL stream has minVolumeIndex > 0 : Users cannot set the volume of voice calls to
         // 0. We don't want to cap volume when the system has programmatically muted the voice call
         // stream. See setVolumeCurveIndex() for more information.
-        bool exemptFromCapping = (volumeSource == ringVolumeSrc) && (voiceVolumeIndex == 0);
+        bool exemptFromCapping =
+                ((volumeSource == ringVolumeSrc) || (volumeSource == a11yVolumeSrc))
+                && (voiceVolumeIndex == 0);
         ALOGV_IF(exemptFromCapping, "%s volume source %d at vol=%f not capped", __func__,
                  volumeSource, volumeDb);
         if ((volumeDb > maxVoiceVolDb) && !exemptFromCapping) {
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.h b/services/audiopolicy/managerdefault/AudioPolicyManager.h
index 612bd8f..39a4b8f 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.h
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.h
@@ -346,7 +346,7 @@
         }
         virtual const DeviceVector getAvailableOutputDevices() const
         {
-            return mAvailableOutputDevices;
+            return mAvailableOutputDevices.filterForEngine();
         }
         virtual const DeviceVector getAvailableInputDevices() const
         {
@@ -482,7 +482,7 @@
         // when a device is disconnected, checks if an output is not used any more and
         // returns its handle if any.
         // transfers the audio tracks and effects from one output thread to another accordingly.
-        status_t checkOutputsForDevice(const sp<DeviceDescriptor>& device,
+        virtual status_t checkOutputsForDevice(const sp<DeviceDescriptor>& device,
                                        audio_policy_dev_state_t state,
                                        SortedVector<audio_io_handle_t>& outputs);
 
@@ -658,10 +658,10 @@
                 const DeviceVector& devices, const char *moduleId) const;
         bool isDeviceOfModule(const sp<DeviceDescriptor>& devDesc, const char *moduleId) const;
 
-        status_t startSource(const sp<SwAudioOutputDescriptor>& outputDesc,
+        virtual status_t startSource(const sp<SwAudioOutputDescriptor>& outputDesc,
                              const sp<TrackClientDescriptor>& client,
                              uint32_t *delayMs);
-        status_t stopSource(const sp<SwAudioOutputDescriptor>& outputDesc,
+        virtual status_t stopSource(const sp<SwAudioOutputDescriptor>& outputDesc,
                             const sp<TrackClientDescriptor>& client);
 
         void clearAudioPatches(uid_t uid);
@@ -759,7 +759,7 @@
         std::unordered_set<audio_format_t> mManualSurroundFormats;
 
         std::unordered_map<uid_t, audio_flags_mask_t> mAllowedCapturePolicies;
-private:
+protected:
         // Add or remove AC3 DTS encodings based on user preferences.
         void modifySurroundFormats(const sp<DeviceDescriptor>& devDesc, FormatVector *formatsPtr);
         void modifySurroundChannelMasks(ChannelsVector *channelMasksPtr);
@@ -809,7 +809,7 @@
                 bool *isRequestedDeviceForExclusiveUse,
                 std::vector<sp<SwAudioOutputDescriptor>> *secondaryDescs);
         // internal method to return the output handle for the given device and format
-        audio_io_handle_t getOutputForDevices(
+        virtual audio_io_handle_t getOutputForDevices(
                 const DeviceVector &devices,
                 audio_session_t session,
                 audio_stream_type_t stream,
@@ -844,7 +844,7 @@
         bool     isValidAttributes(const audio_attributes_t *paa);
 
         // Called by setDeviceConnectionState().
-        status_t setDeviceConnectionStateInt(audio_devices_t deviceType,
+        virtual status_t setDeviceConnectionStateInt(audio_devices_t deviceType,
                                              audio_policy_dev_state_t state,
                                              const char *device_address,
                                              const char *device_name,
diff --git a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
index fa8da89..b6d1af2 100644
--- a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
+++ b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
@@ -378,7 +378,7 @@
     }
 
     // check calling permissions
-    if (!recordingAllowed(opPackageName, pid, uid)) {
+    if (!isAudioServerOrMediaServerUid(callingUid) && !recordingAllowed(opPackageName, pid, uid)) {
         ALOGE("%s permission denied: recording not allowed for uid %d pid %d",
                 __func__, uid, pid);
         return PERMISSION_DENIED;
@@ -425,7 +425,7 @@
             case AudioPolicyInterface::API_INPUT_TELEPHONY_RX:
                 // FIXME: use the same permission as for remote submix for now.
             case AudioPolicyInterface::API_INPUT_MIX_CAPTURE:
-                if (!canCaptureOutput) {
+                if (!isAudioServerOrMediaServerUid(callingUid) && !canCaptureOutput) {
                     ALOGE("getInputForAttr() permission denied: capture not allowed");
                     status = PERMISSION_DENIED;
                 }
@@ -494,7 +494,7 @@
     }
 
     // check calling permissions
-    if (!startRecording(client->opPackageName, client->pid, client->uid)) {
+    if (!isAudioServerOrMediaServerUid(IPCThreadState::self()->getCallingUid()) && !startRecording(client->opPackageName, client->pid, client->uid)) {
         ALOGE("%s permission denied: recording not allowed for uid %d pid %d",
                 __func__, client->uid, client->pid);
         return PERMISSION_DENIED;
@@ -976,11 +976,6 @@
         ALOGV("%s() mAudioPolicyManager == NULL", __func__);
         return NO_INIT;
     }
-    uint_t callingUid = IPCThreadState::self()->getCallingUid();
-    if (uid != callingUid) {
-        ALOGD("%s() uid invalid %d != %d", __func__, uid, callingUid);
-        return PERMISSION_DENIED;
-    }
     return mAudioPolicyManager->setAllowedCapturePolicy(uid, capturePolicy);
 }
 
diff --git a/services/audiopolicy/service/AudioPolicyService.cpp b/services/audiopolicy/service/AudioPolicyService.cpp
index 77f7997..a6cda20 100644
--- a/services/audiopolicy/service/AudioPolicyService.cpp
+++ b/services/audiopolicy/service/AudioPolicyService.cpp
@@ -410,12 +410,17 @@
 //    Another client in the same UID has already been allowed to capture
 //    OR The client is the assistant
 //        AND an accessibility service is on TOP or a RTT call is active
-//               AND the source is VOICE_RECOGNITION or HOTWORD
-//        OR uses VOICE_RECOGNITION AND is on TOP
-//               OR uses HOTWORD
+//                AND the source is VOICE_RECOGNITION or HOTWORD
+//            OR uses VOICE_RECOGNITION AND is on TOP
+//                OR uses HOTWORD
 //            AND there is no active privacy sensitive capture or call
 //                OR client has CAPTURE_AUDIO_OUTPUT privileged permission
 //    OR The client is an accessibility service
+//        AND Is on TOP
+//                AND the source is VOICE_RECOGNITION or HOTWORD
+//            OR The assistant is not on TOP
+//                AND there is no active privacy sensitive capture or call
+//                    OR client has CAPTURE_AUDIO_OUTPUT privileged permission
 //        AND is on TOP
 //        AND the source is VOICE_RECOGNITION or HOTWORD
 //    OR the client source is virtual (remote submix, call audio TX or RX...)
@@ -423,7 +428,7 @@
 //        AND The assistant is not on TOP
 //        AND is on TOP or latest started
 //        AND there is no active privacy sensitive capture or call
-//                OR client has CAPTURE_AUDIO_OUTPUT privileged permission
+//            OR client has CAPTURE_AUDIO_OUTPUT privileged permission
 
     sp<AudioRecordClient> topActive;
     sp<AudioRecordClient> latestActive;
@@ -459,16 +464,24 @@
             continue;
         }
 
-        if (appState == APP_STATE_TOP) {
+        bool isAssistant = mUidPolicy->isAssistantUid(current->uid);
+        bool isAccessibility = mUidPolicy->isA11yUid(current->uid);
+        if (appState == APP_STATE_TOP && !isAccessibility) {
             if (current->startTimeNs > topStartNs) {
                 topActive = current;
                 topStartNs = current->startTimeNs;
             }
-            if (mUidPolicy->isAssistantUid(current->uid)) {
+            if (isAssistant) {
                 isAssistantOnTop = true;
             }
         }
-        if (current->startTimeNs > latestStartNs) {
+        // Assistant clients capturing for HOTWORD and accessibility services are not
+        // considered for latest active, to avoid masking regular clients started before them.
+        if (current->startTimeNs > latestStartNs
+                && !((current->attributes.source == AUDIO_SOURCE_HOTWORD
+                        || isA11yOnTop || rttCallActive)
+                    && isAssistant)
+                && !isAccessibility) {
             latestActive = current;
             latestStartNs = current->startTimeNs;
         }
@@ -541,10 +554,20 @@
         } else if (mUidPolicy->isA11yUid(current->uid)) {
             // For accessibility service allow capture if:
             //     Is on TOP
-            //     AND the source is VOICE_RECOGNITION or HOTWORD
-            if (isA11yOnTop &&
-                    (source == AUDIO_SOURCE_VOICE_RECOGNITION || source == AUDIO_SOURCE_HOTWORD)) {
-                allowCapture = true;
+            //          AND the source is VOICE_RECOGNITION or HOTWORD
+            //     Or
+            //          The assistant is not on TOP
+            //          AND there is no active privacy sensitive capture or call
+            //             OR client has CAPTURE_AUDIO_OUTPUT privileged permission
+            if (isA11yOnTop) {
+                if (source == AUDIO_SOURCE_VOICE_RECOGNITION || source == AUDIO_SOURCE_HOTWORD) {
+                    allowCapture = true;
+                }
+            } else {
+                if (!isAssistantOnTop
+                        && (!(isSensitiveActive || isInCall) || current->canCaptureOutput)) {
+                    allowCapture = true;
+                }
             }
         }
         setAppState_l(current->uid,
@@ -703,11 +726,11 @@
     if (in == BAD_TYPE || out == BAD_TYPE || err == BAD_TYPE) {
         return BAD_VALUE;
     }
-    if (args.size() == 3 && args[0] == String16("set-uid-state")) {
+    if (args.size() >= 3 && args[0] == String16("set-uid-state")) {
         return handleSetUidState(args, err);
-    } else if (args.size() == 2 && args[0] == String16("reset-uid-state")) {
+    } else if (args.size() >= 2 && args[0] == String16("reset-uid-state")) {
         return handleResetUidState(args, err);
-    } else if (args.size() == 2 && args[0] == String16("get-uid-state")) {
+    } else if (args.size() >= 2 && args[0] == String16("get-uid-state")) {
         return handleGetUidState(args, out, err);
     } else if (args.size() == 1 && args[0] == String16("help")) {
         printHelp(out);
@@ -717,14 +740,32 @@
     return BAD_VALUE;
 }
 
-status_t AudioPolicyService::handleSetUidState(Vector<String16>& args, int err) {
-    PermissionController pc;
-    int uid = pc.getPackageUid(args[1], 0);
-    if (uid <= 0) {
-        ALOGE("Unknown package: '%s'", String8(args[1]).string());
-        dprintf(err, "Unknown package: '%s'\n", String8(args[1]).string());
+static status_t getUidForPackage(String16 packageName, int userId, /*inout*/uid_t& uid, int err) {
+    if (userId < 0) {
+        ALOGE("Invalid user: %d", userId);
+        dprintf(err, "Invalid user: %d\n", userId);
         return BAD_VALUE;
     }
+
+    PermissionController pc;
+    uid = pc.getPackageUid(packageName, 0);
+    if (uid <= 0) {
+        ALOGE("Unknown package: '%s'", String8(packageName).string());
+        dprintf(err, "Unknown package: '%s'\n", String8(packageName).string());
+        return BAD_VALUE;
+    }
+
+    uid = multiuser_get_uid(userId, uid);
+    return NO_ERROR;
+}
+
+status_t AudioPolicyService::handleSetUidState(Vector<String16>& args, int err) {
+    // Valid args.size() is 3 or 5; it is 5 when the --user option is given.
+    if (!(args.size() == 3 || args.size() == 5)) {
+        printHelp(err);
+        return BAD_VALUE;
+    }
+
     bool active = false;
     if (args[2] == String16("active")) {
         active = true;
@@ -732,30 +773,59 @@
         ALOGE("Expected active or idle but got: '%s'", String8(args[2]).string());
         return BAD_VALUE;
     }
+
+    int userId = 0;
+    if (args.size() >= 5 && args[3] == String16("--user")) {
+        userId = atoi(String8(args[4]));
+    }
+
+    uid_t uid;
+    if (getUidForPackage(args[1], userId, uid, err) == BAD_VALUE) {
+        return BAD_VALUE;
+    }
+
     mUidPolicy->addOverrideUid(uid, active);
     return NO_ERROR;
 }
 
 status_t AudioPolicyService::handleResetUidState(Vector<String16>& args, int err) {
-    PermissionController pc;
-    int uid = pc.getPackageUid(args[1], 0);
-    if (uid < 0) {
-        ALOGE("Unknown package: '%s'", String8(args[1]).string());
-        dprintf(err, "Unknown package: '%s'\n", String8(args[1]).string());
+    // Valid args.size() is 2 or 4; it is 4 when the --user option is given.
+    if (!(args.size() == 2 || args.size() == 4)) {
+        printHelp(err);
         return BAD_VALUE;
     }
+
+    int userId = 0;
+    if (args.size() >= 4 && args[2] == String16("--user")) {
+        userId = atoi(String8(args[3]));
+    }
+
+    uid_t uid;
+    if (getUidForPackage(args[1], userId, uid, err) == BAD_VALUE) {
+        return BAD_VALUE;
+    }
+
     mUidPolicy->removeOverrideUid(uid);
     return NO_ERROR;
 }
 
 status_t AudioPolicyService::handleGetUidState(Vector<String16>& args, int out, int err) {
-    PermissionController pc;
-    int uid = pc.getPackageUid(args[1], 0);
-    if (uid < 0) {
-        ALOGE("Unknown package: '%s'", String8(args[1]).string());
-        dprintf(err, "Unknown package: '%s'\n", String8(args[1]).string());
+    // Valid args.size() is 2 or 4; it is 4 when the --user option is given.
+    if (!(args.size() == 2 || args.size() == 4)) {
+        printHelp(err);
         return BAD_VALUE;
     }
+
+    int userId = 0;
+    if (args.size() >= 4 && args[2] == String16("--user")) {
+        userId = atoi(String8(args[3]));
+    }
+
+    uid_t uid;
+    if (getUidForPackage(args[1], userId, uid, err) == BAD_VALUE) {
+        return BAD_VALUE;
+    }
+
     if (mUidPolicy->isUidActive(uid)) {
         return dprintf(out, "active\n");
     } else {
@@ -765,9 +835,9 @@
 
 status_t AudioPolicyService::printHelp(int out) {
     return dprintf(out, "Audio policy service commands:\n"
-        "  get-uid-state <PACKAGE> gets the uid state\n"
-        "  set-uid-state <PACKAGE> <active|idle> overrides the uid state\n"
-        "  reset-uid-state <PACKAGE> clears the uid state override\n"
+        "  get-uid-state <PACKAGE> [--user USER_ID] gets the uid state\n"
+        "  set-uid-state <PACKAGE> <active|idle> [--user USER_ID] overrides the uid state\n"
+        "  reset-uid-state <PACKAGE> [--user USER_ID] clears the uid state override\n"
         "  help print this message\n");
 }
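
Assuming the audio policy service keeps its usual binder name (media.audio_policy) so that
its shell command handler is reachable through cmd, the extended commands could then be
exercised roughly as follows for a secondary user; the package name and user id here are
placeholders:

    adb shell cmd media.audio_policy set-uid-state com.example.app idle --user 10
    adb shell cmd media.audio_policy get-uid-state com.example.app --user 10
    adb shell cmd media.audio_policy reset-uid-state com.example.app --user 10
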
 
diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp
index 3e62102..670e026 100644
--- a/services/camera/libcameraservice/CameraService.cpp
+++ b/services/camera/libcameraservice/CameraService.cpp
@@ -118,6 +118,8 @@
 // ----------------------------------------------------------------------------
 
 static const String16 sManageCameraPermission("android.permission.MANAGE_CAMERA");
+static const String16 sCameraOpenCloseListenerPermission(
+        "android.permission.CAMERA_OPEN_CLOSE_LISTENER");
 
 // Matches with PERCEPTIBLE_APP_ADJ in ProcessList.java
 static constexpr int32_t kVendorClientScore = 200;
@@ -153,8 +155,6 @@
         mInitialized = true;
     }
 
-    CameraService::pingCameraServiceProxy();
-
     mUidPolicy = new UidPolicy(this);
     mUidPolicy->registerSelf();
     mSensorPrivacyPolicy = new SensorPrivacyPolicy(this);
@@ -164,6 +164,11 @@
         ALOGE("%s: Failed to register default android.frameworks.cameraservice.service@1.0",
               __FUNCTION__);
     }
+
+    // This needs to be the last call in this function, so that it's as close to
+    // ServiceManager::addService() as possible.
+    CameraService::pingCameraServiceProxy();
+    ALOGI("CameraService pinged cameraservice proxy");
 }
 
 status_t CameraService::enumerateProviders() {
@@ -253,6 +258,15 @@
     enumerateProviders();
 }
 
+bool CameraService::isPublicallyHiddenSecureCamera(const String8& cameraId) {
+    auto state = getCameraState(cameraId);
+    if (state != nullptr) {
+        return state->isPublicallyHiddenSecureCamera();
+    }
+    // Hidden physical camera ids won't have CameraState
+    return mCameraProviderManager->isPublicallyHiddenSecureCamera(cameraId.c_str());
+}
+
 void CameraService::updateCameraNumAndIds() {
     Mutex::Autolock l(mServiceLock);
     mNumberOfCameras = mCameraProviderManager->getCameraCount();
@@ -268,6 +282,8 @@
         ALOGE("Failed to query device resource cost: %s (%d)", strerror(-res), res);
         return;
     }
+    bool isPublicallyHiddenSecureCamera =
+            mCameraProviderManager->isPublicallyHiddenSecureCamera(id.string());
     std::set<String8> conflicting;
     for (size_t i = 0; i < cost.conflictingDevices.size(); i++) {
         conflicting.emplace(String8(cost.conflictingDevices[i].c_str()));
@@ -276,7 +292,8 @@
     {
         Mutex::Autolock lock(mCameraStatesLock);
         mCameraStates.emplace(id, std::make_shared<CameraState>(id, cost.resourceCost,
-                                                                conflicting));
+                                                                conflicting,
+                                                                isPublicallyHiddenSecureCamera));
     }
 
     if (mFlashlight->hasFlashUnit(id)) {
@@ -514,8 +531,16 @@
                 "Camera subsystem is not available");;
     }
 
-    Status ret{};
+    if (shouldRejectHiddenCameraConnection(String8(cameraId))) {
+        ALOGW("Attempting to retrieve characteristics for system-only camera id %s, rejected",
+              String8(cameraId).string());
+        return STATUS_ERROR_FMT(ERROR_DISCONNECTED,
+                                "No camera device with ID \"%s\" currently available",
+                                String8(cameraId).string());
 
+    }
+
+    Status ret{};
     status_t res = mCameraProviderManager->getCameraCharacteristics(
             String8(cameraId).string(), cameraInfo);
     if (res != OK) {
@@ -1149,6 +1174,8 @@
                 clientPid,
                 states[states.size() - 1]);
 
+        resource_policy::ClientPriority clientPriority = clientDescriptor->getPriority();
+
         // Find clients that would be evicted
         auto evicted = mActiveClientManager.wouldEvict(clientDescriptor);
 
@@ -1166,8 +1193,7 @@
             String8 msg = String8::format("%s : DENIED connect device %s client for package %s "
                     "(PID %d, score %d state %d) due to eviction policy", curTime.string(),
                     cameraId.string(), packageName.string(), clientPid,
-                    priorityScores[priorityScores.size() - 1],
-                    states[states.size() - 1]);
+                    clientPriority.getScore(), clientPriority.getState());
 
             for (auto& i : incompatibleClients) {
                 msg.appendFormat("\n   - Blocked by existing device %s client for package %s"
@@ -1212,9 +1238,8 @@
                     i->getKey().string(), String8{clientSp->getPackageName()}.string(),
                     i->getOwnerId(), i->getPriority().getScore(),
                     i->getPriority().getState(), cameraId.string(),
-                    packageName.string(), clientPid,
-                    priorityScores[priorityScores.size() - 1],
-                    states[states.size() - 1]));
+                    packageName.string(), clientPid, clientPriority.getScore(),
+                    clientPriority.getState()));
 
             // Notify the client of disconnection
             clientSp->notifyError(hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_DISCONNECTED,
@@ -1330,7 +1355,7 @@
     // publically hidden, we should reject the connection.
     if (!hardware::IPCThreadState::self()->isServingCall() &&
             CameraThreadState::getCallingPid() != getpid() &&
-            mCameraProviderManager->isPublicallyHiddenSecureCamera(cameraId.c_str())) {
+            isPublicallyHiddenSecureCamera(cameraId)) {
         return true;
     }
     return false;
@@ -1348,14 +1373,19 @@
     Status ret = Status::ok();
     String8 id = String8(cameraId);
     sp<CameraDeviceClient> client = nullptr;
-
+    String16 clientPackageNameAdj = clientPackageName;
+    if (hardware::IPCThreadState::self()->isServingCall()) {
+        std::string vendorClient =
+                StringPrintf("vendor.client.pid<%d>", CameraThreadState::getCallingPid());
+        clientPackageNameAdj = String16(vendorClient.c_str());
+    }
     ret = connectHelper<hardware::camera2::ICameraDeviceCallbacks,CameraDeviceClient>(cameraCb, id,
             /*api1CameraId*/-1,
-            CAMERA_HAL_API_VERSION_UNSPECIFIED, clientPackageName,
+            CAMERA_HAL_API_VERSION_UNSPECIFIED, clientPackageNameAdj,
             clientUid, USE_CALLING_PID, API_2, /*shimUpdateOnly*/ false, /*out*/client);
 
     if(!ret.isOk()) {
-        logRejected(id, CameraThreadState::getCallingPid(), String8(clientPackageName),
+        logRejected(id, CameraThreadState::getCallingPid(), String8(clientPackageNameAdj),
                 ret.toString8());
         return ret;
     }
@@ -1783,7 +1813,11 @@
         }
 
         auto clientUid = CameraThreadState::getCallingUid();
-        sp<ServiceListener> serviceListener = new ServiceListener(this, listener, clientUid);
+        auto clientPid = CameraThreadState::getCallingPid();
+        bool openCloseCallbackAllowed = checkPermission(sCameraOpenCloseListenerPermission,
+                clientPid, clientUid);
+        sp<ServiceListener> serviceListener = new ServiceListener(this, listener,
+                clientUid, clientPid, openCloseCallbackAllowed);
         auto ret = serviceListener->initialize();
         if (ret != NO_ERROR) {
             String8 msg = String8::format("Failed to initialize service listener: %s (%d)",
@@ -1799,16 +1833,25 @@
     {
         Mutex::Autolock lock(mCameraStatesLock);
         for (auto& i : mCameraStates) {
-            if (!isVendorListener &&
-                mCameraProviderManager->isPublicallyHiddenSecureCamera(i.first.c_str())) {
-                ALOGV("Cannot add public listener for hidden system-only %s for pid %d",
-                      i.first.c_str(), CameraThreadState::getCallingPid());
-                continue;
-            }
             cameraStatuses->emplace_back(i.first, mapToInterface(i.second->getStatus()));
         }
     }
 
+    // Remove the camera statuses that should be hidden from the client, we do
+    // this after collecting the states in order to avoid holding
+    // mCameraStatesLock and mInterfaceLock (held in
+    // isPublicallyHiddenSecureCamera()) at the same time.
+    cameraStatuses->erase(std::remove_if(cameraStatuses->begin(), cameraStatuses->end(),
+                [this, &isVendorListener](const hardware::CameraStatus& s) {
+                    bool ret = !isVendorListener && isPublicallyHiddenSecureCamera(s.cameraId);
+                    if (ret) {
+                        ALOGV("Cannot add public listener for hidden system-only %s for pid %d",
+                                s.cameraId.c_str(), CameraThreadState::getCallingPid());
+                    }
+                    return ret;
+                }),
+                cameraStatuses->end());
+
     /*
      * Immediately signal current torch status to this listener only
      * This may be a subset of all the devices, so don't include it in the response directly
@@ -2368,11 +2411,7 @@
         }
         mClientPackageName = packages[0];
     }
-    if (hardware::IPCThreadState::self()->isServingCall()) {
-        std::string vendorClient =
-                StringPrintf("vendor.client.pid<%d>", CameraThreadState::getCallingPid());
-        mClientPackageName = String16(vendorClient.c_str());
-    } else {
+    if (!hardware::IPCThreadState::self()->isServingCall()) {
         mAppOpsManager = std::make_unique<AppOpsManager>();
     }
 }
@@ -2482,6 +2521,9 @@
 
     sCameraService->mUidPolicy->registerMonitorUid(mClientUid);
 
+    // Notify listeners of camera open/close status
+    sCameraService->updateOpenCloseStatus(mCameraIdStr, true/*open*/, mClientPackageName);
+
     return OK;
 }
 
@@ -2522,6 +2564,9 @@
 
     sCameraService->mUidPolicy->unregisterMonitorUid(mClientUid);
 
+    // Notify listeners of camera open/close status
+    sCameraService->updateOpenCloseStatus(mCameraIdStr, false/*open*/, mClientPackageName);
+
     return OK;
 }
 
@@ -2872,8 +2917,9 @@
 // ----------------------------------------------------------------------------
 
 CameraService::CameraState::CameraState(const String8& id, int cost,
-        const std::set<String8>& conflicting) : mId(id),
-        mStatus(StatusInternal::NOT_PRESENT), mCost(cost), mConflicting(conflicting) {}
+        const std::set<String8>& conflicting, bool isHidden) : mId(id),
+        mStatus(StatusInternal::NOT_PRESENT), mCost(cost), mConflicting(conflicting),
+        mIsPublicallyHiddenSecureCamera(isHidden) {}
 
 CameraService::CameraState::~CameraState() {}
 
@@ -2902,6 +2948,10 @@
     return mId;
 }
 
+bool CameraService::CameraState::isPublicallyHiddenSecureCamera() const {
+    return mIsPublicallyHiddenSecureCamera;
+}
+
 // ----------------------------------------------------------------------------
 //                  ClientEventListener
 // ----------------------------------------------------------------------------
@@ -3237,10 +3287,10 @@
                 cameraId.string());
         return;
     }
-
+    bool isHidden = isPublicallyHiddenSecureCamera(cameraId);
     // Update the status for this camera state, then send the onStatusChangedCallbacks to each
     // of the listeners with both the mStatusStatus and mStatusListenerLock held
-    state->updateStatus(status, cameraId, rejectSourceStates, [this]
+    state->updateStatus(status, cameraId, rejectSourceStates, [this,&isHidden]
             (const String8& cameraId, StatusInternal status) {
 
             if (status != StatusInternal::ENUMERATING) {
@@ -3262,8 +3312,7 @@
             Mutex::Autolock lock(mStatusListenerLock);
 
             for (auto& listener : mListenerList) {
-                if (!listener.first &&
-                    mCameraProviderManager->isPublicallyHiddenSecureCamera(cameraId.c_str())) {
+                if (!listener.first && isHidden) {
                     ALOGV("Skipping camera discovery callback for system-only camera %s",
                           cameraId.c_str());
                     continue;
@@ -3274,6 +3323,29 @@
         });
 }
 
+void CameraService::updateOpenCloseStatus(const String8& cameraId, bool open,
+        const String16& clientPackageName) {
+    Mutex::Autolock lock(mStatusListenerLock);
+
+    for (const auto& it : mListenerList) {
+        if (!it.second->isOpenCloseCallbackAllowed()) {
+            continue;
+        }
+
+        binder::Status ret;
+        String16 cameraId64(cameraId);
+        if (open) {
+            ret = it.second->getListener()->onCameraOpened(cameraId64, clientPackageName);
+        } else {
+            ret = it.second->getListener()->onCameraClosed(cameraId64);
+        }
+        if (!ret.isOk()) {
+            ALOGE("%s: Failed to trigger onCameraOpened/onCameraClosed callback: %d", __FUNCTION__,
+                    ret.exceptionCode());
+        }
+    }
+}
+
 template<class Func>
 void CameraService::CameraState::updateStatus(StatusInternal status,
         const String8& cameraId,
diff --git a/services/camera/libcameraservice/CameraService.h b/services/camera/libcameraservice/CameraService.h
index 065157d..bcaca9f 100644
--- a/services/camera/libcameraservice/CameraService.h
+++ b/services/camera/libcameraservice/CameraService.h
@@ -470,7 +470,8 @@
          * Make a new CameraState and set the ID, cost, and conflicting devices using the values
          * returned in the HAL's camera_info struct for each device.
          */
-        CameraState(const String8& id, int cost, const std::set<String8>& conflicting);
+        CameraState(const String8& id, int cost, const std::set<String8>& conflicting,
+                bool isHidden);
         virtual ~CameraState();
 
         /**
@@ -522,6 +523,11 @@
          */
         String8 getId() const;
 
+        /**
+         * Return whether the camera device is a publically hidden secure camera.
+         */
+        bool isPublicallyHiddenSecureCamera() const;
+
     private:
         const String8 mId;
         StatusInternal mStatus; // protected by mStatusLock
@@ -529,6 +535,7 @@
         std::set<String8> mConflicting;
         mutable Mutex mStatusLock;
         CameraParameters mShimParams;
+        const bool mIsPublicallyHiddenSecureCamera;
     }; // class CameraState
 
     // Observer for UID lifecycle enforcing that UIDs in idle
@@ -633,7 +640,9 @@
 
     // Should an operation attempt on a cameraId be rejected, if the camera id is
     // advertised as a publically hidden secure camera, by the camera HAL ?
-    bool shouldRejectHiddenCameraConnection(const String8 & cameraId);
+    bool shouldRejectHiddenCameraConnection(const String8& cameraId);
+
+    bool isPublicallyHiddenSecureCamera(const String8& cameraId);
 
     // Single implementation shared between the various connect calls
     template<class CALLBACK, class CLIENT>
@@ -808,7 +817,9 @@
     class ServiceListener : public virtual IBinder::DeathRecipient {
         public:
             ServiceListener(sp<CameraService> parent, sp<hardware::ICameraServiceListener> listener,
-                    int uid) : mParent(parent), mListener(listener), mListenerUid(uid) {}
+                    int uid, int pid, bool openCloseCallbackAllowed) : mParent(parent),
+                    mListener(listener), mListenerUid(uid), mListenerPid(pid),
+                    mOpenCloseCallbackAllowed(openCloseCallbackAllowed) {}
 
             status_t initialize() {
                 return IInterface::asBinder(mListener)->linkToDeath(this);
@@ -822,12 +833,16 @@
             }
 
             int getListenerUid() { return mListenerUid; }
+            int getListenerPid() { return mListenerPid; }
             sp<hardware::ICameraServiceListener> getListener() { return mListener; }
+            bool isOpenCloseCallbackAllowed() { return mOpenCloseCallbackAllowed; }
 
         private:
             wp<CameraService> mParent;
             sp<hardware::ICameraServiceListener> mListener;
             int mListenerUid;
+            int mListenerPid;
+            bool mOpenCloseCallbackAllowed = false;
     };
 
     // Guarded by mStatusListenerMutex
@@ -850,6 +865,13 @@
     void updateStatus(StatusInternal status,
             const String8& cameraId);
 
+    /**
+     * Update the opened/closed status of the given camera id.
+     *
+     * This method acquires mStatusListenerLock.
+     */
+    void updateOpenCloseStatus(const String8& cameraId, bool open, const String16& packageName);
+
     // flashlight control
     sp<CameraFlashlight> mFlashlight;
     // guard mTorchStatusMap
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
index c7a4f2b..3587db3 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
@@ -2004,6 +2004,15 @@
         }
     }
 
+    for (size_t i = 0; i < mCompositeStreamMap.size(); i++) {
+        auto ret = mCompositeStreamMap.valueAt(i)->deleteInternalStreams();
+        if (ret != OK) {
+            ALOGE("%s: Failed removing composite stream  %s (%d)", __FUNCTION__,
+                    strerror(-ret), ret);
+        }
+    }
+    mCompositeStreamMap.clear();
+
     Camera2ClientBase::detachDevice();
 }
 
diff --git a/services/camera/libcameraservice/api2/DepthCompositeStream.cpp b/services/camera/libcameraservice/api2/DepthCompositeStream.cpp
index 8ebaa2b..0b91016 100644
--- a/services/camera/libcameraservice/api2/DepthCompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/DepthCompositeStream.cpp
@@ -247,7 +247,7 @@
     return ret;
 }
 
-status_t DepthCompositeStream::processInputFrame(const InputFrame &inputFrame) {
+status_t DepthCompositeStream::processInputFrame(nsecs_t ts, const InputFrame &inputFrame) {
     status_t res;
     sp<ANativeWindow> outputANW = mOutputSurface;
     ANativeWindowBuffer *anb;
@@ -370,6 +370,13 @@
         return NO_MEMORY;
     }
 
+    res = native_window_set_buffers_timestamp(mOutputSurface.get(), ts);
+    if (res != OK) {
+        ALOGE("%s: Stream %d: Error setting timestamp: %s (%d)", __FUNCTION__,
+                getStreamId(), strerror(-res), res);
+        return res;
+    }
+
     ALOGV("%s: Final jpeg size: %zu", __func__, finalJpegSize);
     uint8_t* header = static_cast<uint8_t *> (dstBuffer) +
         (gb->getWidth() - sizeof(struct camera3_jpeg_blob));
@@ -459,7 +466,7 @@
         }
     }
 
-    auto res = processInputFrame(mPendingInputFrames[currentTs]);
+    auto res = processInputFrame(currentTs, mPendingInputFrames[currentTs]);
     Mutex::Autolock l(mMutex);
     if (res != OK) {
         ALOGE("%s: Failed processing frame with timestamp: %" PRIu64 ": %s (%d)", __FUNCTION__,
diff --git a/services/camera/libcameraservice/api2/DepthCompositeStream.h b/services/camera/libcameraservice/api2/DepthCompositeStream.h
index 975c59b..28a7826 100644
--- a/services/camera/libcameraservice/api2/DepthCompositeStream.h
+++ b/services/camera/libcameraservice/api2/DepthCompositeStream.h
@@ -97,7 +97,7 @@
             size_t maxJpegSize, uint8_t jpegQuality,
             std::vector<std::unique_ptr<Item>>* items /*out*/);
     std::unique_ptr<ImagingModel> getImagingModel();
-    status_t processInputFrame(const InputFrame &inputFrame);
+    status_t processInputFrame(nsecs_t ts, const InputFrame &inputFrame);
 
     // Buffer/Results handling
     void compilePendingInputLocked();
diff --git a/services/camera/libcameraservice/api2/HeicCompositeStream.cpp b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
index 5a87134..9f15be0 100644
--- a/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
@@ -31,7 +31,6 @@
 #include <media/ICrypto.h>
 #include <media/MediaCodecBuffer.h>
 #include <media/stagefright/foundation/ABuffer.h>
-#include <media/stagefright/foundation/AMessage.h>
 #include <media/stagefright/foundation/MediaDefs.h>
 #include <media/stagefright/MediaCodecConstants.h>
 
@@ -61,12 +60,13 @@
         mUseGrid(false),
         mAppSegmentStreamId(-1),
         mAppSegmentSurfaceId(-1),
-        mAppSegmentBufferAcquired(false),
         mMainImageStreamId(-1),
         mMainImageSurfaceId(-1),
         mYuvBufferAcquired(false),
         mProducerListener(new ProducerListener()),
-        mOutputBufferCounter(0),
+        mDequeuedOutputBufferCnt(0),
+        mLockedAppSegmentBufferCnt(0),
+        mCodecOutputCounter(0),
         mGridTimestampUs(0) {
 }
 
@@ -132,7 +132,7 @@
     sp<IGraphicBufferProducer> producer;
     sp<IGraphicBufferConsumer> consumer;
     BufferQueue::createBufferQueue(&producer, &consumer);
-    mAppSegmentConsumer = new CpuConsumer(consumer, 1);
+    mAppSegmentConsumer = new CpuConsumer(consumer, kMaxAcquiredAppSegment);
     mAppSegmentConsumer->setFrameAvailableListener(this);
     mAppSegmentConsumer->setName(String8("Camera3-HeicComposite-AppSegmentStream"));
     mAppSegmentSurface = new Surface(producer);
@@ -231,6 +231,8 @@
     if (bufferInfo.mError) return;
 
     mCodecOutputBufferTimestamps.push(bufferInfo.mTimestamp);
+    ALOGV("%s: [%" PRId64 "]: Adding codecOutputBufferTimestamp (%zu timestamps in total)",
+            __FUNCTION__, bufferInfo.mTimestamp, mCodecOutputBufferTimestamps.size());
 }
 
 // We need to get the settings early to handle the case where the codec output
@@ -361,6 +363,8 @@
             mCodecOutputBuffers.push_back(outputBufferInfo);
             mInputReadyCondition.signal();
         } else {
+            ALOGV("%s: Releasing output buffer: size %d flags: 0x%x ", __FUNCTION__,
+                outputBufferInfo.size, outputBufferInfo.flags);
             mCodec->releaseOutputBuffer(outputBufferInfo.index);
         }
     } else {
@@ -414,8 +418,10 @@
         mNumOutputTiles = 1;
     }
 
-    ALOGV("%s: mNumOutputTiles is %zu", __FUNCTION__, mNumOutputTiles);
     mFormat = newFormat;
+
+    ALOGV("%s: mNumOutputTiles is %zu", __FUNCTION__, mNumOutputTiles);
+    mInputReadyCondition.signal();
 }
 
 void HeicCompositeStream::onHeicCodecError() {
@@ -459,9 +465,8 @@
 
     // Cannot use SourceSurface buffer count since it could be codec's 512*512 tile
     // buffer count.
-    int maxProducerBuffers = 1;
     if ((res = native_window_set_buffer_count(
-                    anwConsumer, maxProducerBuffers + maxConsumerBuffers)) != OK) {
+                    anwConsumer, kMaxOutputSurfaceProducerCount + maxConsumerBuffers)) != OK) {
         ALOGE("%s: Unable to set buffer count for stream %d", __FUNCTION__, mMainImageStreamId);
         return res;
     }
@@ -505,6 +510,8 @@
     }
 
     if (mSettingsByFrameNumber.find(resultExtras.frameNumber) != mSettingsByFrameNumber.end()) {
+        ALOGV("%s: [%" PRId64 "]: frameNumber %" PRId64, __FUNCTION__,
+                timestamp, resultExtras.frameNumber);
         mFrameNumberMap.emplace(resultExtras.frameNumber, timestamp);
         mSettingsByTimestamp[timestamp] = mSettingsByFrameNumber[resultExtras.frameNumber];
         mSettingsByFrameNumber.erase(resultExtras.frameNumber);
@@ -520,12 +527,12 @@
         mSettingsByTimestamp.erase(it);
     }
 
-    while (!mInputAppSegmentBuffers.empty() && !mAppSegmentBufferAcquired) {
+    while (!mInputAppSegmentBuffers.empty()) {
         CpuConsumer::LockedBuffer imgBuffer;
         auto it = mInputAppSegmentBuffers.begin();
         auto res = mAppSegmentConsumer->lockNextBuffer(&imgBuffer);
         if (res == NOT_ENOUGH_DATA) {
-            // Canot not lock any more buffers.
+            // Cannot lock any more buffers.
             break;
         } else if ((res != OK) || (*it != imgBuffer.timestamp)) {
             if (res != OK) {
@@ -535,6 +542,7 @@
                 ALOGE("%s: Expecting JPEG_APP_SEGMENTS buffer with time stamp: %" PRId64
                         " received buffer with time stamp: %" PRId64, __FUNCTION__,
                         *it, imgBuffer.timestamp);
+                mAppSegmentConsumer->unlockBuffer(imgBuffer);
             }
             mPendingInputFrames[*it].error = true;
             mInputAppSegmentBuffers.erase(it);
@@ -546,7 +554,7 @@
             mAppSegmentConsumer->unlockBuffer(imgBuffer);
         } else {
             mPendingInputFrames[imgBuffer.timestamp].appSegmentBuffer = imgBuffer;
-            mAppSegmentBufferAcquired = true;
+            mLockedAppSegmentBufferCnt++;
         }
         mInputAppSegmentBuffers.erase(it);
     }
@@ -556,7 +564,7 @@
         auto it = mInputYuvBuffers.begin();
         auto res = mMainImageConsumer->lockNextBuffer(&imgBuffer);
         if (res == NOT_ENOUGH_DATA) {
-            // Canot not lock any more buffers.
+            // Cannot lock any more buffers.
             break;
         } else if (res != OK) {
             ALOGE("%s: Error locking YUV_888 image buffer: %s (%d)", __FUNCTION__,
@@ -589,17 +597,20 @@
         // to look up timestamp.
         int64_t bufferTime = -1;
         if (mCodecOutputBufferTimestamps.empty()) {
-            ALOGE("%s: Failed to find buffer timestamp for codec output buffer!", __FUNCTION__);
+            ALOGV("%s: Failed to find buffer timestamp for codec output buffer!", __FUNCTION__);
+            break;
         } else {
             // Direct mapping between camera timestamp (in ns) and codec timestamp (in us).
             bufferTime = mCodecOutputBufferTimestamps.front();
-            mOutputBufferCounter++;
-            if (mOutputBufferCounter == mNumOutputTiles) {
+            mCodecOutputCounter++;
+            if (mCodecOutputCounter == mNumOutputTiles) {
                 mCodecOutputBufferTimestamps.pop();
-                mOutputBufferCounter = 0;
+                mCodecOutputCounter = 0;
             }
 
             mPendingInputFrames[bufferTime].codecOutputBuffers.push_back(*it);
+            ALOGV("%s: [%" PRId64 "]: Pushing codecOutputBuffers (time %" PRId64 " us)",
+                    __FUNCTION__, bufferTime, it->timeUs);
         }
         mCodecOutputBuffers.erase(it);
     }
@@ -607,6 +618,7 @@
     while (!mFrameNumberMap.empty()) {
         auto it = mFrameNumberMap.begin();
         mPendingInputFrames[it->second].frameNumber = it->first;
+        ALOGV("%s: [%" PRId64 "]: frameNumber is %" PRId64, __FUNCTION__, it->second, it->first);
         mFrameNumberMap.erase(it);
     }
 
@@ -675,16 +687,29 @@
     }
 
     bool newInputAvailable = false;
-    for (const auto& it : mPendingInputFrames) {
+    for (auto& it : mPendingInputFrames) {
+        // New input is considered to be available only if:
+        // 1. input buffers are ready, or
+        // 2. App segment is ready and muxer is created, or
+        // 3. A codec output tile is ready, and an output buffer is available.
+        // This makes sure that muxer gets created only when an output tile is
+        // generated, because right now we only handle 1 HEIC output buffer at a
+        // time (max dequeued buffer count is 1).
         bool appSegmentReady = (it.second.appSegmentBuffer.data != nullptr) &&
-                !it.second.appSegmentWritten && it.second.result != nullptr;
+                !it.second.appSegmentWritten && it.second.result != nullptr &&
+                it.second.muxer != nullptr;
         bool codecOutputReady = !it.second.codecOutputBuffers.empty();
         bool codecInputReady = (it.second.yuvBuffer.data != nullptr) &&
                 (!it.second.codecInputBuffers.empty());
+        bool hasOutputBuffer = it.second.muxer != nullptr ||
+                (mDequeuedOutputBufferCnt < kMaxOutputSurfaceProducerCount);
         if ((!it.second.error) &&
                 (it.first < *currentTs) &&
-                (appSegmentReady || codecOutputReady || codecInputReady)) {
+                (appSegmentReady || (codecOutputReady && hasOutputBuffer) || codecInputReady)) {
             *currentTs = it.first;
+            if (it.second.format == nullptr && mFormat != nullptr) {
+                it.second.format = mFormat->dup();
+            }
             newInputAvailable = true;
             break;
         }
@@ -716,15 +741,17 @@
     status_t res = OK;
 
     bool appSegmentReady = inputFrame.appSegmentBuffer.data != nullptr &&
-            !inputFrame.appSegmentWritten && inputFrame.result != nullptr;
+            !inputFrame.appSegmentWritten && inputFrame.result != nullptr &&
+            inputFrame.muxer != nullptr;
     bool codecOutputReady = inputFrame.codecOutputBuffers.size() > 0;
     bool codecInputReady = inputFrame.yuvBuffer.data != nullptr &&
-           !inputFrame.codecInputBuffers.empty();
+            !inputFrame.codecInputBuffers.empty();
+    bool hasOutputBuffer = inputFrame.muxer != nullptr ||
+            (mDequeuedOutputBufferCnt < kMaxOutputSurfaceProducerCount);
 
-    if (!appSegmentReady && !codecOutputReady && !codecInputReady) {
-        ALOGW("%s: No valid appSegmentBuffer/codec input/outputBuffer available!", __FUNCTION__);
-        return OK;
-    }
+    ALOGV("%s: [%" PRId64 "]: appSegmentReady %d, codecOutputReady %d, codecInputReady %d,"
+            " dequeuedOutputBuffer %d", __FUNCTION__, timestamp, appSegmentReady,
+            codecOutputReady, codecInputReady, mDequeuedOutputBufferCnt);
 
     // Handle inputs for Hevc tiling
     if (codecInputReady) {
@@ -736,7 +763,13 @@
         }
     }
 
-    // Initialize and start muxer if not yet done so
+    if (!(codecOutputReady && hasOutputBuffer) && !appSegmentReady) {
+        return OK;
+    }
+
+    // Initialize and start muxer if not yet done so. In this case,
+    // codecOutputReady must be true. Otherwise, appSegmentReady is guaranteed
+    // to be false, and the function must have returned early.
     if (inputFrame.muxer == nullptr) {
         res = startMuxerForInputFrame(timestamp, inputFrame);
         if (res != OK) {
@@ -747,7 +780,7 @@
     }
 
     // Write JPEG APP segments data to the muxer.
-    if (appSegmentReady && inputFrame.muxer != nullptr) {
+    if (appSegmentReady) {
         res = processAppSegment(timestamp, inputFrame);
         if (res != OK) {
             ALOGE("%s: Failed to process JPEG APP segments: %s (%d)", __FUNCTION__,
@@ -766,12 +799,18 @@
         }
     }
 
-    if (inputFrame.appSegmentWritten && inputFrame.pendingOutputTiles == 0) {
-        res = processCompletedInputFrame(timestamp, inputFrame);
-        if (res != OK) {
-            ALOGE("%s: Failed to process completed input frame: %s (%d)", __FUNCTION__,
-                    strerror(-res), res);
-            return res;
+    if (inputFrame.pendingOutputTiles == 0) {
+        if (inputFrame.appSegmentWritten) {
+            res = processCompletedInputFrame(timestamp, inputFrame);
+            if (res != OK) {
+                ALOGE("%s: Failed to process completed input frame: %s (%d)", __FUNCTION__,
+                        strerror(-res), res);
+                return res;
+            }
+        } else if (mLockedAppSegmentBufferCnt == kMaxAcquiredAppSegment) {
+            ALOGE("%s: Out-of-order app segment buffer count reaches limit %u", __FUNCTION__,
+                    kMaxAcquiredAppSegment);
+            return INVALID_OPERATION;
         }
     }
 
@@ -780,11 +819,6 @@
 
 status_t HeicCompositeStream::startMuxerForInputFrame(nsecs_t timestamp, InputFrame &inputFrame) {
     sp<ANativeWindow> outputANW = mOutputSurface;
-    if (inputFrame.codecOutputBuffers.size() == 0) {
-        // No single codec output buffer has been generated. Continue to
-        // wait.
-        return OK;
-    }
 
     auto res = outputANW->dequeueBuffer(mOutputSurface.get(), &inputFrame.anb, &inputFrame.fenceFd);
     if (res != OK) {
@@ -792,6 +826,7 @@
                 res);
         return res;
     }
+    mDequeuedOutputBufferCnt++;
 
     // Combine current thread id, stream id and timestamp to uniquely identify image.
     std::ostringstream tempOutputFile;
@@ -828,7 +863,7 @@
         }
     }
 
-    ssize_t trackId = inputFrame.muxer->addTrack(mFormat);
+    ssize_t trackId = inputFrame.muxer->addTrack(inputFrame.format);
     if (trackId < 0) {
         ALOGE("%s: Failed to addTrack to the muxer: %zd", __FUNCTION__, trackId);
         return NO_INIT;
@@ -844,6 +879,8 @@
         return res;
     }
 
+    ALOGV("%s: [%" PRId64 "]: Muxer started for inputFrame", __FUNCTION__,
+            timestamp);
     return OK;
 }
 
@@ -852,9 +889,6 @@
     auto appSegmentSize = findAppSegmentsSize(inputFrame.appSegmentBuffer.data,
             inputFrame.appSegmentBuffer.width * inputFrame.appSegmentBuffer.height,
             &app1Size);
-    ALOGV("%s: appSegmentSize is %zu, width %d, height %d, app1Size %zu", __FUNCTION__,
-          appSegmentSize, inputFrame.appSegmentBuffer.width,
-          inputFrame.appSegmentBuffer.height, app1Size);
     if (appSegmentSize == 0) {
         ALOGE("%s: Failed to find JPEG APP segment size", __FUNCTION__);
         return NO_INIT;
@@ -910,7 +944,16 @@
                 __FUNCTION__, strerror(-res), res);
         return res;
     }
+
+    ALOGV("%s: [%" PRId64 "]: appSegmentSize is %zu, width %d, height %d, app1Size %zu",
+          __FUNCTION__, timestamp, appSegmentSize, inputFrame.appSegmentBuffer.width,
+          inputFrame.appSegmentBuffer.height, app1Size);
+
     inputFrame.appSegmentWritten = true;
+    // Release the buffer now so any pending input app segments can be processed
+    mAppSegmentConsumer->unlockBuffer(inputFrame.appSegmentBuffer);
+    inputFrame.appSegmentBuffer.data = nullptr;
+    mLockedAppSegmentBufferCnt--;
 
     return OK;
 }
@@ -934,8 +977,9 @@
                 mOutputWidth - tileX * mGridWidth : mGridWidth;
         size_t height = (tileY == static_cast<size_t>(mGridRows) - 1) ?
                 mOutputHeight - tileY * mGridHeight : mGridHeight;
-        ALOGV("%s: inputBuffer tileIndex [%zu, %zu], top %zu, left %zu, width %zu, height %zu",
-                __FUNCTION__, tileX, tileY, top, left, width, height);
+        ALOGV("%s: inputBuffer tileIndex [%zu, %zu], top %zu, left %zu, width %zu, height %zu,"
+                " timeUs %" PRId64, __FUNCTION__, tileX, tileY, top, left, width, height,
+                inputBuffer.timeUs);
 
         res = copyOneYuvTile(buffer, inputFrame.yuvBuffer, top, left, width, height);
         if (res != OK) {
@@ -990,6 +1034,9 @@
     }
 
     inputFrame.codecOutputBuffers.erase(inputFrame.codecOutputBuffers.begin());
+
+    ALOGV("%s: [%" PRId64 "]: Output buffer index %d",
+        __FUNCTION__, timestamp, it->index);
     return OK;
 }
 
@@ -1046,7 +1093,9 @@
         return res;
     }
     inputFrame.anb = nullptr;
+    mDequeuedOutputBufferCnt--;
 
+    ALOGV("%s: [%" PRId64 "]", __FUNCTION__, timestamp);
     ATRACE_ASYNC_END("HEIC capture", inputFrame.frameNumber);
     return OK;
 }
@@ -1060,7 +1109,6 @@
     if (inputFrame->appSegmentBuffer.data != nullptr) {
         mAppSegmentConsumer->unlockBuffer(inputFrame->appSegmentBuffer);
         inputFrame->appSegmentBuffer.data = nullptr;
-        mAppSegmentBufferAcquired = false;
     }
 
     while (!inputFrame->codecOutputBuffers.empty()) {
@@ -1098,11 +1146,13 @@
     }
 }
 
-void HeicCompositeStream::releaseInputFramesLocked(int64_t currentTs) {
+void HeicCompositeStream::releaseInputFramesLocked() {
     auto it = mPendingInputFrames.begin();
     while (it != mPendingInputFrames.end()) {
-        if (it->first <= currentTs) {
-            releaseInputFrameLocked(&it->second);
+        auto& inputFrame = it->second;
+        if (inputFrame.error ||
+            (inputFrame.appSegmentWritten && inputFrame.pendingOutputTiles == 0)) {
+            releaseInputFrameLocked(&inputFrame);
             it = mPendingInputFrames.erase(it);
         } else {
             it++;
@@ -1506,7 +1556,7 @@
             // In case we landed in error state, return any pending buffers and
             // halt all further processing.
             compilePendingInputLocked();
-            releaseInputFramesLocked(currentTs);
+            releaseInputFramesLocked();
             return false;
         }
 
@@ -1548,11 +1598,7 @@
         mPendingInputFrames[currentTs].error = true;
     }
 
-    if (mPendingInputFrames[currentTs].error ||
-            (mPendingInputFrames[currentTs].appSegmentWritten &&
-            mPendingInputFrames[currentTs].pendingOutputTiles == 0)) {
-        releaseInputFramesLocked(currentTs);
-    }
+    releaseInputFramesLocked();
 
     return true;
 }
@@ -1671,8 +1717,13 @@
                          ALOGE("CB_OUTPUT_FORMAT_CHANGED: format is expected.");
                          break;
                      }
-
-                     parent->onHeicFormatChanged(format);
+                     // Here format is MediaCodec's internal copy of output format.
+                     // Make a copy since onHeicFormatChanged() might modify it.
+                     sp<AMessage> formatCopy;
+                     if (format != nullptr) {
+                         formatCopy = format->dup();
+                     }
+                     parent->onHeicFormatChanged(formatCopy);
                      break;
                  }
 
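The reworked readiness check in compilePendingInputLocked()/processInputFrame() above is the core of this change: a pending frame is picked up only when its JPEG APP segment can be written (which now requires the muxer to exist already), when a codec output tile has arrived and an output buffer can be dequeued (or the muxer already holds one), or when YUV input can be fed to the codec. Below is a minimal standalone sketch of that predicate; FrameState and its members are assumed names for illustration, not the actual InputFrame layout.

    // Illustrative sketch, not part of the patch.
    #include <cstddef>

    struct FrameState {
        bool appSegmentLocked;      // APP segment buffer locked from the CpuConsumer
        bool appSegmentWritten;     // already written to the muxer
        bool hasCaptureResult;      // capture result (orientation/quality) arrived
        bool muxerStarted;          // MediaMuxer created for this frame
        size_t codecOutputBuffers;  // HEVC tiles received from MediaCodec so far
        bool codecInputReady;       // YUV buffer plus a free codec input buffer
    };

    static bool frameReady(const FrameState& f, int dequeuedOutputBufferCnt,
                           int maxOutputSurfaceProducerCount) {
        bool hasOutputBuffer = f.muxerStarted ||
                (dequeuedOutputBufferCnt < maxOutputSurfaceProducerCount);
        bool appSegmentReady = f.appSegmentLocked && !f.appSegmentWritten &&
                f.hasCaptureResult && f.muxerStarted;
        bool codecOutputReady = f.codecOutputBuffers > 0;
        return appSegmentReady || (codecOutputReady && hasOutputBuffer) || f.codecInputReady;
    }
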
diff --git a/services/camera/libcameraservice/api2/HeicCompositeStream.h b/services/camera/libcameraservice/api2/HeicCompositeStream.h
index 260c68e..04e7b83 100644
--- a/services/camera/libcameraservice/api2/HeicCompositeStream.h
+++ b/services/camera/libcameraservice/api2/HeicCompositeStream.h
@@ -25,6 +25,7 @@
 #include <media/hardware/VideoAPI.h>
 #include <media/MediaCodecBuffer.h>
 #include <media/stagefright/foundation/ALooper.h>
+#include <media/stagefright/foundation/AMessage.h>
 #include <media/stagefright/MediaCodec.h>
 #include <media/stagefright/MediaMuxer.h>
 
@@ -157,6 +158,7 @@
         bool                      errorNotified;
         int64_t                   frameNumber;
 
+        sp<AMessage>              format;
         sp<MediaMuxer>            muxer;
         int                       fenceFd;
         int                       fileFd;
@@ -187,7 +189,7 @@
     status_t processCompletedInputFrame(nsecs_t timestamp, InputFrame &inputFrame);
 
     void releaseInputFrameLocked(InputFrame *inputFrame /*out*/);
-    void releaseInputFramesLocked(int64_t currentTs);
+    void releaseInputFramesLocked();
 
     size_t findAppSegmentsSize(const uint8_t* appSegmentBuffer, size_t maxSize,
             size_t* app1SegmentSize);
@@ -205,11 +207,13 @@
             static_cast<android_dataspace>(HAL_DATASPACE_JPEG_APP_SEGMENTS);
     static const android_dataspace kHeifDataSpace =
             static_cast<android_dataspace>(HAL_DATASPACE_HEIF);
+    // Use the limit of pipeline depth in the API spec as the maximum number of acquired
+    // app segment buffers.
+    static const uint32_t kMaxAcquiredAppSegment = 8;
 
     int               mAppSegmentStreamId, mAppSegmentSurfaceId;
     sp<CpuConsumer>   mAppSegmentConsumer;
     sp<Surface>       mAppSegmentSurface;
-    bool              mAppSegmentBufferAcquired;
     size_t            mAppSegmentMaxSize;
     CameraMetadata    mStaticInfo;
 
@@ -218,9 +222,10 @@
     sp<CpuConsumer>   mMainImageConsumer; // Only applicable for HEVC codec.
     bool              mYuvBufferAcquired; // Only applicable to HEVC codec
 
+    static const int32_t kMaxOutputSurfaceProducerCount = 1;
     sp<Surface>       mOutputSurface;
     sp<ProducerListener> mProducerListener;
-
+    int32_t           mDequeuedOutputBufferCnt;
 
     // Map from frame number to JPEG setting of orientation+quality
     std::map<int64_t, std::pair<int32_t, int32_t>> mSettingsByFrameNumber;
@@ -229,11 +234,12 @@
 
     // Keep all incoming APP segment Blob buffer pending further processing.
     std::vector<int64_t> mInputAppSegmentBuffers;
+    int32_t           mLockedAppSegmentBufferCnt;
 
     // Keep all incoming HEIC blob buffer pending further processing.
     std::vector<CodecOutputBufferInfo> mCodecOutputBuffers;
     std::queue<int64_t> mCodecOutputBufferTimestamps;
-    size_t mOutputBufferCounter;
+    size_t mCodecOutputCounter;
 
     // Keep all incoming Yuv buffer pending tiling and encoding (for HEVC YUV tiling only)
     std::vector<int64_t> mInputYuvBuffers;
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.cpp b/services/camera/libcameraservice/common/CameraProviderManager.cpp
index 09638d0..fdb5657 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.cpp
+++ b/services/camera/libcameraservice/common/CameraProviderManager.cpp
@@ -29,6 +29,7 @@
 #include <future>
 #include <inttypes.h>
 #include <hardware/camera_common.h>
+#include <android/hidl/manager/1.2/IServiceManager.h>
 #include <hidl/ServiceManagement.h>
 #include <functional>
 #include <camera_metadata_hidden.h>
@@ -47,10 +48,6 @@
 using std::literals::chrono_literals::operator""s;
 
 namespace {
-// Hardcoded name for the passthrough HAL implementation, since it can't be discovered via the
-// service manager
-const std::string kLegacyProviderName("legacy/0");
-const std::string kExternalProviderName("external/0");
 const bool kEnableLazyHal(property_get_bool("ro.camera.enableLazyHal", false));
 } // anonymous namespace
 
@@ -62,6 +59,19 @@
 CameraProviderManager::~CameraProviderManager() {
 }
 
+hardware::hidl_vec<hardware::hidl_string>
+CameraProviderManager::HardwareServiceInteractionProxy::listServices() {
+    hardware::hidl_vec<hardware::hidl_string> ret;
+    auto manager = hardware::defaultServiceManager1_2();
+    if (manager != nullptr) {
+        manager->listManifestByInterface(provider::V2_4::ICameraProvider::descriptor,
+                [&ret](const hardware::hidl_vec<hardware::hidl_string> &registered) {
+                    ret = registered;
+                });
+    }
+    return ret;
+}
+
 status_t CameraProviderManager::initialize(wp<CameraProviderManager::StatusListener> listener,
         ServiceInteractionProxy* proxy) {
     std::lock_guard<std::mutex> lock(mInterfaceMutex);
@@ -84,9 +94,10 @@
         return INVALID_OPERATION;
     }
 
-    // See if there's a passthrough HAL, but let's not complain if there's not
-    addProviderLocked(kLegacyProviderName, /*expected*/ false);
-    addProviderLocked(kExternalProviderName, /*expected*/ false);
+
+    for (const auto& instance : mServiceProxy->listServices()) {
+        this->addProviderLocked(instance);
+    }
 
     IPCThreadState::self()->flushCommands();
 
@@ -97,7 +108,13 @@
     std::lock_guard<std::mutex> lock(mInterfaceMutex);
     int count = 0;
     for (auto& provider : mProviders) {
-        count += provider->mUniqueCameraIds.size();
+        for (auto& id : provider->mUniqueCameraIds) {
+            // Hidden secure camera ids are not to be exposed to camera1 api.
+            if (isPublicallyHiddenSecureCameraLocked(id)) {
+                continue;
+            }
+            count++;
+        }
     }
     return count;
 }
@@ -123,7 +140,11 @@
         // for each camera facing, only take the first id advertised by HAL in
         // all [logical, physical1, physical2, ...] id combos, and filter out the rest.
         filterLogicalCameraIdsLocked(providerDeviceIds);
-
+        // Hidden secure camera ids are not to be exposed to camera1 api.
+        providerDeviceIds.erase(std::remove_if(providerDeviceIds.begin(), providerDeviceIds.end(),
+                [this](const std::string& s) {
+                    return this->isPublicallyHiddenSecureCameraLocked(s);}),
+                providerDeviceIds.end());
         deviceIds.insert(deviceIds.end(), providerDeviceIds.begin(), providerDeviceIds.end());
     }
 
@@ -1035,23 +1056,42 @@
     return deviceInfo->mIsLogicalCamera;
 }
 
-bool CameraProviderManager::isPublicallyHiddenSecureCamera(const std::string& id) {
+bool CameraProviderManager::isPublicallyHiddenSecureCamera(const std::string& id) const {
     std::lock_guard<std::mutex> lock(mInterfaceMutex);
-
-    auto deviceInfo = findDeviceInfoLocked(id);
-    if (deviceInfo == nullptr) {
-        return false;
-    }
-    return deviceInfo->mIsPublicallyHiddenSecureCamera;
+    return isPublicallyHiddenSecureCameraLocked(id);
 }
 
-bool CameraProviderManager::isHiddenPhysicalCamera(const std::string& cameraId) {
+bool CameraProviderManager::isPublicallyHiddenSecureCameraLocked(const std::string& id) const {
+    auto deviceInfo = findDeviceInfoLocked(id);
+    if (deviceInfo != nullptr) {
+        return deviceInfo->mIsPublicallyHiddenSecureCamera;
+    }
+    // If this is a hidden physical camera, we should return what kind of
+    // camera the enclosing logical camera is.
+    auto isHiddenAndParent = isHiddenPhysicalCameraInternal(id);
+    if (isHiddenAndParent.first) {
+        LOG_ALWAYS_FATAL_IF(id == isHiddenAndParent.second->mId,
+                "%s: hidden physical camera id %s and enclosing logical camera id %s are the same",
+                __FUNCTION__, id.c_str(), isHiddenAndParent.second->mId.c_str());
+        return isPublicallyHiddenSecureCameraLocked(isHiddenAndParent.second->mId);
+    }
+    // Invalid camera id
+    return true;
+}
+
+bool CameraProviderManager::isHiddenPhysicalCamera(const std::string& cameraId) const {
+    return isHiddenPhysicalCameraInternal(cameraId).first;
+}
+
+std::pair<bool, CameraProviderManager::ProviderInfo::DeviceInfo *>
+CameraProviderManager::isHiddenPhysicalCameraInternal(const std::string& cameraId) const {
+    auto falseRet = std::make_pair(false, nullptr);
     for (auto& provider : mProviders) {
         for (auto& deviceInfo : provider->mDevices) {
             if (deviceInfo->mId == cameraId) {
                 // cameraId is found in public camera IDs advertised by the
                 // provider.
-                return false;
+                return falseRet;
             }
         }
     }
@@ -1063,7 +1103,7 @@
             if (res != OK) {
                 ALOGE("%s: Failed to getCameraCharacteristics for id %s", __FUNCTION__,
                         deviceInfo->mId.c_str());
-                return false;
+                return falseRet;
             }
 
             std::vector<std::string> physicalIds;
@@ -1075,19 +1115,19 @@
                     if (deviceVersion < CAMERA_DEVICE_API_VERSION_3_5) {
                         ALOGE("%s: Wrong deviceVersion %x for hiddenPhysicalCameraId %s",
                                 __FUNCTION__, deviceVersion, cameraId.c_str());
-                        return false;
+                        return falseRet;
                     } else {
-                        return true;
+                        return std::make_pair(true, deviceInfo.get());
                     }
                 }
             }
         }
     }
 
-    return false;
+    return falseRet;
 }
 
-status_t CameraProviderManager::addProviderLocked(const std::string& newProvider, bool expected) {
+status_t CameraProviderManager::addProviderLocked(const std::string& newProvider) {
     for (const auto& providerInfo : mProviders) {
         if (providerInfo->mProviderName == newProvider) {
             ALOGW("%s: Camera provider HAL with name '%s' already registered", __FUNCTION__,
@@ -1100,13 +1140,9 @@
     interface = mServiceProxy->getService(newProvider);
 
     if (interface == nullptr) {
-        if (expected) {
-            ALOGE("%s: Camera provider HAL '%s' is not actually available", __FUNCTION__,
-                    newProvider.c_str());
-            return BAD_VALUE;
-        } else {
-            return OK;
-        }
+        ALOGE("%s: Camera provider HAL '%s' is not actually available", __FUNCTION__,
+                newProvider.c_str());
+        return BAD_VALUE;
     }
 
     sp<ProviderInfo> providerInfo = new ProviderInfo(newProvider, this);
@@ -2058,6 +2094,13 @@
     return OK;
 }
 bool CameraProviderManager::ProviderInfo::DeviceInfo3::isAPI1Compatible() const {
+    // Do not advertise NIR cameras to API1 camera app.
+    camera_metadata_ro_entry cfa = mCameraCharacteristics.find(
+            ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT);
+    if (cfa.count == 1 && cfa.data.u8[0] == ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_NIR) {
+        return false;
+    }
+
     bool isBackwardCompatible = false;
     camera_metadata_ro_entry_t caps = mCameraCharacteristics.find(
             ANDROID_REQUEST_AVAILABLE_CAPABILITIES);
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.h b/services/camera/libcameraservice/common/CameraProviderManager.h
index a42fb4d..3a4655c 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.h
+++ b/services/camera/libcameraservice/common/CameraProviderManager.h
@@ -78,6 +78,7 @@
                 &notification) = 0;
         virtual sp<hardware::camera::provider::V2_4::ICameraProvider> getService(
                 const std::string &serviceName) = 0;
+        virtual hardware::hidl_vec<hardware::hidl_string> listServices() = 0;
         virtual ~ServiceInteractionProxy() {}
     };
 
@@ -95,6 +96,8 @@
                 const std::string &serviceName) override {
             return hardware::camera::provider::V2_4::ICameraProvider::getService(serviceName);
         }
+
+        virtual hardware::hidl_vec<hardware::hidl_string> listServices() override;
     };
 
     /**
@@ -269,8 +272,8 @@
      */
     bool isLogicalCamera(const std::string& id, std::vector<std::string>* physicalCameraIds);
 
-    bool isPublicallyHiddenSecureCamera(const std::string& id);
-    bool isHiddenPhysicalCamera(const std::string& cameraId);
+    bool isPublicallyHiddenSecureCamera(const std::string& id) const;
+    bool isHiddenPhysicalCamera(const std::string& cameraId) const;
 
     static const float kDepthARTolerance;
 private:
@@ -567,7 +570,7 @@
             hardware::hidl_version minVersion = hardware::hidl_version{0,0},
             hardware::hidl_version maxVersion = hardware::hidl_version{1000,0}) const;
 
-    status_t addProviderLocked(const std::string& newProvider, bool expected = true);
+    status_t addProviderLocked(const std::string& newProvider);
 
     status_t removeProvider(const std::string& provider);
     sp<StatusListener> getStatusListener() const;
@@ -591,7 +594,13 @@
 
     status_t getCameraCharacteristicsLocked(const std::string &id,
             CameraMetadata* characteristics) const;
+
+    bool isPublicallyHiddenSecureCameraLocked(const std::string& id) const;
+
     void filterLogicalCameraIdsLocked(std::vector<std::string>& deviceIds) const;
+
+    std::pair<bool, CameraProviderManager::ProviderInfo::DeviceInfo *>
+            isHiddenPhysicalCameraInternal(const std::string& cameraId) const;
 };
 
 } // namespace android
diff --git a/services/camera/libcameraservice/common/DepthPhotoProcessor.cpp b/services/camera/libcameraservice/common/DepthPhotoProcessor.cpp
index 3c90de0..94541d8 100644
--- a/services/camera/libcameraservice/common/DepthPhotoProcessor.cpp
+++ b/services/camera/libcameraservice/common/DepthPhotoProcessor.cpp
@@ -419,7 +419,7 @@
 
     std::vector<std::unique_ptr<Item>> items;
     std::vector<std::unique_ptr<Camera>> cameraList;
-    auto image = Image::FromDataForPrimaryImage("android/mainimage", &items);
+    auto image = Image::FromDataForPrimaryImage("image/jpeg", &items);
     std::unique_ptr<CameraParams> cameraParams(new CameraParams(std::move(image)));
     if (cameraParams == nullptr) {
         ALOGE("%s: Failed to initialize camera parameters", __FUNCTION__);
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index e347127..93e18cf 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -29,6 +29,9 @@
 #define CLOGE(fmt, ...) ALOGE("Camera %s: %s: " fmt, mId.string(), __FUNCTION__, \
             ##__VA_ARGS__)
 
+#define CLOGW(fmt, ...) ALOGW("Camera %s: %s: " fmt, mId.string(), __FUNCTION__, \
+            ##__VA_ARGS__)
+
 // Convenience macros for transitioning to the error state
 #define SET_ERR(fmt, ...) setErrorState(   \
     "%s: " fmt, __FUNCTION__,              \
@@ -3267,14 +3270,19 @@
         ALOGVV("%s: removed frame %d from InFlightMap", __FUNCTION__, frameNumber);
      }
 
-    // Sanity check - if we have too many in-flight frames, something has
-    // likely gone wrong
-    if (!mIsConstrainedHighSpeedConfiguration && mInFlightMap.size() > kInFlightWarnLimit) {
-        CLOGE("In-flight list too large: %zu", mInFlightMap.size());
-    } else if (mIsConstrainedHighSpeedConfiguration && mInFlightMap.size() >
-            kInFlightWarnLimitHighSpeed) {
-        CLOGE("In-flight list too large for high speed configuration: %zu",
-                mInFlightMap.size());
+    // Sanity check - if we have too many in-flight frames with long total inflight duration,
+    // something has likely gone wrong. This might still be legitimate only if the application
+    // sends in a long burst of long exposure requests.
+    if (mExpectedInflightDuration > kMinWarnInflightDuration) {
+        if (!mIsConstrainedHighSpeedConfiguration && mInFlightMap.size() > kInFlightWarnLimit) {
+            CLOGW("In-flight list too large: %zu, total inflight duration %" PRIu64,
+                    mInFlightMap.size(), mExpectedInflightDuration);
+        } else if (mIsConstrainedHighSpeedConfiguration && mInFlightMap.size() >
+                kInFlightWarnLimitHighSpeed) {
+            CLOGW("In-flight list too large for high speed configuration: %zu,"
+                    " total inflight duration %" PRIu64,
+                    mInFlightMap.size(), mExpectedInflightDuration);
+        }
     }
 }
 
diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h
index 6e8ac84..cae34ce 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.h
+++ b/services/camera/libcameraservice/device3/Camera3Device.h
@@ -227,6 +227,7 @@
     static const size_t        kDumpLockAttempts  = 10;
     static const size_t        kDumpSleepDuration = 100000; // 0.10 sec
     static const nsecs_t       kActiveTimeout     = 500000000;  // 500 ms
+    static const nsecs_t       kMinWarnInflightDuration = 5000000000; // 5 s
     static const size_t        kInFlightWarnLimit = 30;
     static const size_t        kInFlightWarnLimitHighSpeed = 256; // batch size 32 * pipe depth 8
     static const nsecs_t       kDefaultExpectedDuration = 100000000; // 100 ms
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
index acb8b3c..e1d35e8 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
@@ -54,9 +54,8 @@
         mState = STATE_ERROR;
     }
 
-    if (setId > CAMERA3_STREAM_SET_ID_INVALID) {
-        mBufferReleasedListener = new BufferReleasedListener(this);
-    }
+    bool needsReleaseNotify = setId > CAMERA3_STREAM_SET_ID_INVALID;
+    mBufferProducerListener = new BufferProducerListener(this, needsReleaseNotify);
 }
 
 Camera3OutputStream::Camera3OutputStream(int id,
@@ -87,9 +86,8 @@
         mState = STATE_ERROR;
     }
 
-    if (setId > CAMERA3_STREAM_SET_ID_INVALID) {
-        mBufferReleasedListener = new BufferReleasedListener(this);
-    }
+    bool needsReleaseNotify = setId > CAMERA3_STREAM_SET_ID_INVALID;
+    mBufferProducerListener = new BufferProducerListener(this, needsReleaseNotify);
 }
 
 Camera3OutputStream::Camera3OutputStream(int id,
@@ -124,10 +122,8 @@
     }
 
     mConsumerName = String8("Deferred");
-    if (setId > CAMERA3_STREAM_SET_ID_INVALID) {
-        mBufferReleasedListener = new BufferReleasedListener(this);
-    }
-
+    bool needsReleaseNotify = setId > CAMERA3_STREAM_SET_ID_INVALID;
+    mBufferProducerListener = new BufferProducerListener(this, needsReleaseNotify);
 }
 
 Camera3OutputStream::Camera3OutputStream(int id, camera3_stream_type_t type,
@@ -151,9 +147,8 @@
         mDropBuffers(false),
         mDequeueBufferLatency(kDequeueLatencyBinSize) {
 
-    if (setId > CAMERA3_STREAM_SET_ID_INVALID) {
-        mBufferReleasedListener = new BufferReleasedListener(this);
-    }
+    bool needsReleaseNotify = setId > CAMERA3_STREAM_SET_ID_INVALID;
+    mBufferProducerListener = new BufferProducerListener(this, needsReleaseNotify);
 
     // Subclasses expected to initialize mConsumer themselves
 }
@@ -261,7 +256,7 @@
         notifyBufferReleased(anwBuffer);
         if (mUseBufferManager) {
             // Return this buffer back to buffer manager.
-            mBufferReleasedListener->onBufferReleased();
+            mBufferProducerListener->onBufferReleased();
         }
     } else {
         if (mTraceFirstBuffer && (stream_type == CAMERA3_STREAM_OUTPUT)) {
@@ -387,8 +382,8 @@
     // Configure consumer-side ANativeWindow interface. The listener may be used
     // to notify buffer manager (if it is used) of the returned buffers.
     res = mConsumer->connect(NATIVE_WINDOW_API_CAMERA,
-            /*listener*/mBufferReleasedListener,
-            /*reportBufferRemoval*/true);
+            /*reportBufferRemoval*/true,
+            /*listener*/mBufferProducerListener);
     if (res != OK) {
         ALOGE("%s: Unable to connect to native window for stream %d",
                 __FUNCTION__, mId);
@@ -790,7 +785,7 @@
     return INVALID_OPERATION;
 }
 
-void Camera3OutputStream::BufferReleasedListener::onBufferReleased() {
+void Camera3OutputStream::BufferProducerListener::onBufferReleased() {
     sp<Camera3OutputStream> stream = mParent.promote();
     if (stream == nullptr) {
         ALOGV("%s: Parent camera3 output stream was destroyed", __FUNCTION__);
@@ -823,6 +818,25 @@
     }
 }
 
+void Camera3OutputStream::BufferProducerListener::onBuffersDiscarded(
+        const std::vector<sp<GraphicBuffer>>& buffers) {
+    sp<Camera3OutputStream> stream = mParent.promote();
+    if (stream == nullptr) {
+        ALOGV("%s: Parent camera3 output stream was destroyed", __FUNCTION__);
+        return;
+    }
+
+    if (buffers.size() > 0) {
+        Mutex::Autolock l(stream->mLock);
+        stream->onBuffersRemovedLocked(buffers);
+        if (stream->mUseBufferManager) {
+            stream->mBufferManager->onBuffersRemoved(stream->getId(),
+                    stream->getStreamSetId(), buffers.size());
+        }
+        ALOGV("Stream %d: %zu Buffers discarded.", stream->getId(), buffers.size());
+    }
+}
+
 void Camera3OutputStream::onBuffersRemovedLocked(
         const std::vector<sp<GraphicBuffer>>& removedBuffers) {
     sp<Camera3StreamBufferFreedListener> callback = mBufferFreedListener.promote();
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.h b/services/camera/libcameraservice/device3/Camera3OutputStream.h
index 729c655..b4e49f9 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.h
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.h
@@ -146,18 +146,22 @@
      */
     virtual status_t setConsumers(const std::vector<sp<Surface>>& consumers);
 
-    class BufferReleasedListener : public BnProducerListener {
+    class BufferProducerListener : public SurfaceListener {
         public:
-          BufferReleasedListener(wp<Camera3OutputStream> parent) : mParent(parent) {}
+            BufferProducerListener(wp<Camera3OutputStream> parent, bool needsReleaseNotify)
+                    : mParent(parent), mNeedsReleaseNotify(needsReleaseNotify) {}
 
-          /**
-          * Implementation of IProducerListener, used to notify this stream that the consumer
-          * has returned a buffer and it is ready to return to Camera3BufferManager for reuse.
-          */
-          virtual void onBufferReleased();
+            /**
+            * Implementation of IProducerListener, used to notify this stream that the consumer
+            * has returned a buffer and it is ready to return to Camera3BufferManager for reuse.
+            */
+            virtual void onBufferReleased();
+            virtual bool needsReleaseNotify() { return mNeedsReleaseNotify; }
+            virtual void onBuffersDiscarded(const std::vector<sp<GraphicBuffer>>& buffers);
 
         private:
-          wp<Camera3OutputStream> mParent;
+            wp<Camera3OutputStream> mParent;
+            bool mNeedsReleaseNotify;
     };
 
     virtual status_t detachBuffer(sp<GraphicBuffer>* buffer, int* fenceFd);
@@ -262,10 +266,10 @@
     sp<Camera3BufferManager> mBufferManager;
 
     /**
-     * Buffer released listener, used to notify the buffer manager that a buffer is released
-     * from consumer side.
+     * Buffer producer listener, used to handle notification when a buffer is released
+     * from consumer side, or a set of buffers are discarded by the consumer.
      */
-    sp<BufferReleasedListener> mBufferReleasedListener;
+    sp<BufferProducerListener> mBufferProducerListener;
 
     /**
      * Flag indicating if the buffer manager is used to allocate the stream buffers
diff --git a/services/camera/libcameraservice/device3/Camera3StreamSplitter.cpp b/services/camera/libcameraservice/device3/Camera3StreamSplitter.cpp
index 84c2ec7..3089181 100644
--- a/services/camera/libcameraservice/device3/Camera3StreamSplitter.cpp
+++ b/services/camera/libcameraservice/device3/Camera3StreamSplitter.cpp
@@ -139,7 +139,9 @@
     mOutputSlots.clear();
     mConsumerBufferCount.clear();
 
-    mConsumer->consumerDisconnect();
+    if (mConsumer.get() != nullptr) {
+        mConsumer->consumerDisconnect();
+    }
 
     if (mBuffers.size() > 0) {
         SP_LOGW("%zu buffers still being tracked", mBuffers.size());
diff --git a/services/camera/libcameraservice/hidl/AidlCameraServiceListener.h b/services/camera/libcameraservice/hidl/AidlCameraServiceListener.h
index 0f6be79..b9e5857 100644
--- a/services/camera/libcameraservice/hidl/AidlCameraServiceListener.h
+++ b/services/camera/libcameraservice/hidl/AidlCameraServiceListener.h
@@ -54,6 +54,15 @@
         // TODO: no implementation yet.
         return binder::Status::ok();
     }
+    virtual binder::Status onCameraOpened(const ::android::String16& /*cameraId*/,
+            const ::android::String16& /*clientPackageId*/) {
+        // empty implementation
+        return binder::Status::ok();
+    }
+    virtual binder::Status onCameraClosed(const ::android::String16& /*cameraId*/) {
+        // empty implementation
+        return binder::Status::ok();
+    }
 };
 
 } // implementation
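
For reference, a native client that wants the new open/close notifications overrides them on its ICameraServiceListener implementation, much like the stubs above. The fragment below is a hypothetical sketch: the BnCameraServiceListener base, class name, and logging are assumptions for illustration, and the remaining listener callbacks are omitted.

    // Hypothetical sketch, not part of the patch.
    class CameraUsageMonitor : public hardware::BnCameraServiceListener {
      public:
        virtual binder::Status onCameraOpened(const String16& cameraId,
                const String16& clientPackageId) override {
            // Only delivered to clients holding CAMERA_OPEN_CLOSE_LISTENER.
            ALOGI("camera %s opened by %s", String8(cameraId).string(),
                    String8(clientPackageId).string());
            return binder::Status::ok();
        }
        virtual binder::Status onCameraClosed(const String16& cameraId) override {
            ALOGI("camera %s closed", String8(cameraId).string());
            return binder::Status::ok();
        }
        // onStatusChanged(), onTorchStatusChanged(), etc. would also need
        // implementations; they are left out of this sketch.
    };
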
diff --git a/services/camera/libcameraservice/tests/CameraProviderManagerTest.cpp b/services/camera/libcameraservice/tests/CameraProviderManagerTest.cpp
index f47e5a5..78d737d 100644
--- a/services/camera/libcameraservice/tests/CameraProviderManagerTest.cpp
+++ b/services/camera/libcameraservice/tests/CameraProviderManagerTest.cpp
@@ -205,6 +205,11 @@
         return mTestCameraProvider;
     }
 
+    virtual hardware::hidl_vec<hardware::hidl_string> listServices() override {
+        hardware::hidl_vec<hardware::hidl_string> ret = {"test/0"};
+        return ret;
+    }
+
 };
 
 struct TestStatusListener : public CameraProviderManager::StatusListener {
@@ -231,37 +236,24 @@
             vendorSection);
     serviceProxy.setProvider(provider);
 
+    int numProviders = static_cast<int>(serviceProxy.listServices().size());
+
     res = providerManager->initialize(statusListener, &serviceProxy);
     ASSERT_EQ(res, OK) << "Unable to initialize provider manager";
     // Check that both "legacy" and "external" providers (really the same object) are called
     // once for all the init methods
-    EXPECT_EQ(provider->mCalledCounter[TestICameraProvider::SET_CALLBACK], 2) <<
+    EXPECT_EQ(provider->mCalledCounter[TestICameraProvider::SET_CALLBACK], numProviders) <<
             "Only one call to setCallback per provider expected during init";
-    EXPECT_EQ(provider->mCalledCounter[TestICameraProvider::GET_VENDOR_TAGS], 2) <<
+    EXPECT_EQ(provider->mCalledCounter[TestICameraProvider::GET_VENDOR_TAGS], numProviders) <<
             "Only one call to getVendorTags per provider expected during init";
-    EXPECT_EQ(provider->mCalledCounter[TestICameraProvider::IS_SET_TORCH_MODE_SUPPORTED], 2) <<
+    EXPECT_EQ(provider->mCalledCounter[TestICameraProvider::IS_SET_TORCH_MODE_SUPPORTED],
+            numProviders) <<
             "Only one call to isSetTorchModeSupported per provider expected during init";
-    EXPECT_EQ(provider->mCalledCounter[TestICameraProvider::GET_CAMERA_ID_LIST], 2) <<
+    EXPECT_EQ(provider->mCalledCounter[TestICameraProvider::GET_CAMERA_ID_LIST], numProviders) <<
             "Only one call to getCameraIdList per provider expected during init";
-    EXPECT_EQ(provider->mCalledCounter[TestICameraProvider::NOTIFY_DEVICE_STATE], 2) <<
+    EXPECT_EQ(provider->mCalledCounter[TestICameraProvider::NOTIFY_DEVICE_STATE], numProviders) <<
             "Only one call to notifyDeviceState per provider expected during init";
 
-    std::string legacyInstanceName = "legacy/0";
-    std::string externalInstanceName = "external/0";
-    bool gotLegacy = false;
-    bool gotExternal = false;
-    EXPECT_EQ(2u, serviceProxy.mLastRequestedServiceNames.size()) <<
-            "Only two service queries expected to be seen by hardware service manager";
-
-    for (auto& serviceName : serviceProxy.mLastRequestedServiceNames) {
-        if (serviceName == legacyInstanceName) gotLegacy = true;
-        if (serviceName == externalInstanceName) gotExternal = true;
-    }
-    ASSERT_TRUE(gotLegacy) <<
-            "Legacy instance not requested from service manager";
-    ASSERT_TRUE(gotExternal) <<
-            "External instance not requested from service manager";
-
     hardware::hidl_string testProviderFqInterfaceName =
             "android.hardware.camera.provider@2.4::ICameraProvider";
     hardware::hidl_string testProviderInstanceName = "test/0";
diff --git a/services/camera/libcameraservice/utils/ClientManager.h b/services/camera/libcameraservice/utils/ClientManager.h
index ec6f01c..5e41c36 100644
--- a/services/camera/libcameraservice/utils/ClientManager.h
+++ b/services/camera/libcameraservice/utils/ClientManager.h
@@ -497,7 +497,7 @@
                 evictList.push_back(client);
                 return evictList;
             } else if (conflicting || ((totalCost > mMaxCost && curCost > 0) &&
-                    (curPriority >= priority) &&
+                    (curPriority >= priority && priority.getScore() >= 0) &&
                     !(highestPriorityOwner == owner && owner == curOwner))) {
                 // Add a pre-existing client to the eviction list if:
                 // - We are adding a client with higher priority that conflicts with this one.
diff --git a/services/mediacodec/Android.bp b/services/mediacodec/Android.bp
index 99a6d6b..2f3cad9 100644
--- a/services/mediacodec/Android.bp
+++ b/services/mediacodec/Android.bp
@@ -64,5 +64,8 @@
             src: "seccomp_policy/mediacodec-x86.policy",
         },
     },
-    required: ["crash_dump.policy"],
+    required: [
+        "crash_dump.policy",
+        "code_coverage.policy",
+    ],
 }
diff --git a/services/mediacodec/Android.mk b/services/mediacodec/Android.mk
index ecc8408..15bc503 100644
--- a/services/mediacodec/Android.mk
+++ b/services/mediacodec/Android.mk
@@ -72,7 +72,7 @@
 LOCAL_MODULE := mediacodec.policy
 LOCAL_MODULE_CLASS := ETC
 LOCAL_MODULE_PATH := $(TARGET_OUT)/etc/seccomp_policy
-LOCAL_REQUIRED_MODULES := crash_dump.policy
+LOCAL_REQUIRED_MODULES := crash_dump.policy code_coverage.policy
 # mediacodec runs in 32-bit compatibility mode. For 64 bit architectures,
 # use the 32 bit policy
 ifdef TARGET_2ND_ARCH
diff --git a/services/mediacodec/main_codecservice.cpp b/services/mediacodec/main_codecservice.cpp
index f668c33..6a82b1b 100644
--- a/services/mediacodec/main_codecservice.cpp
+++ b/services/mediacodec/main_codecservice.cpp
@@ -21,6 +21,7 @@
 #include "minijail.h"
 
 #include <binder/ProcessState.h>
+#include <cutils/properties.h>
 #include <hidl/HidlTransportSupport.h>
 #include <media/stagefright/omx/1.0/Omx.h>
 #include <media/stagefright/omx/1.0/OmxStore.h>
@@ -57,7 +58,8 @@
     } else {
         LOG(INFO) << "IOmx HAL service created.";
     }
-    sp<IOmxStore> omxStore = new implementation::OmxStore(omx);
+    sp<IOmxStore> omxStore = new implementation::OmxStore(
+            property_get_int64("vendor.media.omx", 1) ? omx : nullptr);
     if (omxStore == nullptr) {
         LOG(ERROR) << "Cannot create IOmxStore HAL service.";
     } else if (omxStore->registerAsService() != OK) {
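
The main_codecservice change above gates the IOmx instance handed to OmxStore behind the vendor.media.omx system property, defaulting to 1 per the property_get_int64 call. A minimal sketch of the same gate, extracted for clarity (the helper name is an assumption; whether a null IOmx leaves the OMX plugin list empty depends on the OmxStore implementation):

    // Sketch only; mirrors the property gate added in main_codecservice.cpp.
    #include <cutils/properties.h>

    static bool omxEnabled() {
        // Defaults to enabled when the property is unset.
        return property_get_int64("vendor.media.omx", 1) != 0;
    }

Toggling the property (for example with setprop on a debuggable build) and restarting the codec service switches OmxStore between exposing and hiding the IOmx-backed components.
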
diff --git a/services/mediacodec/registrant/Android.bp b/services/mediacodec/registrant/Android.bp
index 17c2e02..fa5bc4a 100644
--- a/services/mediacodec/registrant/Android.bp
+++ b/services/mediacodec/registrant/Android.bp
@@ -42,7 +42,8 @@
         "libcodec2_soft_opusenc",
         "libcodec2_soft_vp8dec",
         "libcodec2_soft_vp9dec",
-        "libcodec2_soft_av1dec",
+        // "libcodec2_soft_av1dec_aom",  // replaced by the gav1 implementation
+        "libcodec2_soft_av1dec_gav1",
         "libcodec2_soft_vp8enc",
         "libcodec2_soft_vp9enc",
         "libcodec2_soft_rawdec",
diff --git a/services/mediacodec/seccomp_policy/mediacodec-arm.policy b/services/mediacodec/seccomp_policy/mediacodec-arm.policy
index 3870a11..835f8bb 100644
--- a/services/mediacodec/seccomp_policy/mediacodec-arm.policy
+++ b/services/mediacodec/seccomp_policy/mediacodec-arm.policy
@@ -59,3 +59,5 @@
 getrandom: 1
 
 @include /system/etc/seccomp_policy/crash_dump.arm.policy
+
+@include /system/etc/seccomp_policy/code_coverage.arm.policy
diff --git a/services/mediacodec/seccomp_policy/mediacodec-x86.policy b/services/mediacodec/seccomp_policy/mediacodec-x86.policy
index d9c4045..a9d32d6 100644
--- a/services/mediacodec/seccomp_policy/mediacodec-x86.policy
+++ b/services/mediacodec/seccomp_policy/mediacodec-x86.policy
@@ -69,3 +69,4 @@
 gettid: 1
 
 @include /system/etc/seccomp_policy/crash_dump.x86.policy
+@include /system/etc/seccomp_policy/code_coverage.x86.policy
diff --git a/services/mediacodec/seccomp_policy/mediaswcodec-arm.policy b/services/mediacodec/seccomp_policy/mediaswcodec-arm.policy
index 9042cd7..93b4852 100644
--- a/services/mediacodec/seccomp_policy/mediaswcodec-arm.policy
+++ b/services/mediacodec/seccomp_policy/mediaswcodec-arm.policy
@@ -84,3 +84,5 @@
 getgid32: 1
 getegid32: 1
 getgroups32: 1
+
+@include /system/etc/seccomp_policy/code_coverage.arm.policy
diff --git a/services/mediacodec/seccomp_policy/mediaswcodec-arm64.policy b/services/mediacodec/seccomp_policy/mediaswcodec-arm64.policy
index 4faf8b2..bb05770 100644
--- a/services/mediacodec/seccomp_policy/mediaswcodec-arm64.policy
+++ b/services/mediacodec/seccomp_policy/mediaswcodec-arm64.policy
@@ -79,3 +79,4 @@
 getegid: 1
 getgroups: 1
 
+@include /system/etc/seccomp_policy/code_coverage.arm64.policy
diff --git a/services/mediaextractor/Android.bp b/services/mediaextractor/Android.bp
index b812244..0c701d7 100644
--- a/services/mediaextractor/Android.bp
+++ b/services/mediaextractor/Android.bp
@@ -66,6 +66,9 @@
             src: "seccomp_policy/mediaextractor-x86_64.policy",
         },
     },
-    required: ["crash_dump.policy"],
+    required: [
+        "crash_dump.policy",
+        "code_coverage.policy",
+    ],
 }
 
diff --git a/services/mediaextractor/seccomp_policy/mediaextractor-arm.policy b/services/mediaextractor/seccomp_policy/mediaextractor-arm.policy
index 964acf4..118072e 100644
--- a/services/mediaextractor/seccomp_policy/mediaextractor-arm.policy
+++ b/services/mediaextractor/seccomp_policy/mediaextractor-arm.policy
@@ -41,6 +41,9 @@
 getgroups32: 1
 nanosleep: 1
 getrandom: 1
+timer_create: 1
+timer_settime: 1
+timer_delete: 1
 
 # for dynamically loading extractors
 pread64: 1
@@ -50,3 +53,4 @@
 _llseek: 1
 
 @include /system/etc/seccomp_policy/crash_dump.arm.policy
+@include /system/etc/seccomp_policy/code_coverage.arm.policy
diff --git a/services/mediaextractor/seccomp_policy/mediaextractor-arm64.policy b/services/mediaextractor/seccomp_policy/mediaextractor-arm64.policy
index e6c676c..481e29e 100644
--- a/services/mediaextractor/seccomp_policy/mediaextractor-arm64.policy
+++ b/services/mediaextractor/seccomp_policy/mediaextractor-arm64.policy
@@ -30,6 +30,9 @@
 getrlimit: 1
 nanosleep: 1
 getrandom: 1
+timer_create: 1
+timer_settime: 1
+timer_delete: 1
 
 # for FileSource
 readlinkat: 1
@@ -44,3 +47,4 @@
 sched_yield: 1
 
 @include /system/etc/seccomp_policy/crash_dump.arm64.policy
+@include /system/etc/seccomp_policy/code_coverage.arm64.policy
diff --git a/services/mediaextractor/seccomp_policy/mediaextractor-x86.policy b/services/mediaextractor/seccomp_policy/mediaextractor-x86.policy
index 56ad8df..15fb24e 100644
--- a/services/mediaextractor/seccomp_policy/mediaextractor-x86.policy
+++ b/services/mediaextractor/seccomp_policy/mediaextractor-x86.policy
@@ -39,6 +39,9 @@
 getgroups32: 1
 nanosleep: 1
 getrandom: 1
+timer_create: 1
+timer_settime: 1
+timer_delete: 1
 
 # for dynamically loading extractors
 getdents64: 1
@@ -57,3 +60,4 @@
 gettid: 1
 
 @include /system/etc/seccomp_policy/crash_dump.x86.policy
+@include /system/etc/seccomp_policy/code_coverage.x86.policy
diff --git a/services/mediaextractor/seccomp_policy/mediaextractor-x86_64.policy b/services/mediaextractor/seccomp_policy/mediaextractor-x86_64.policy
index 607a03e..4f2646c 100644
--- a/services/mediaextractor/seccomp_policy/mediaextractor-x86_64.policy
+++ b/services/mediaextractor/seccomp_policy/mediaextractor-x86_64.policy
@@ -34,6 +34,9 @@
 getrlimit: 1
 nanosleep: 1
 getrandom: 1
+timer_create: 1
+timer_settime: 1
+timer_delete: 1
 
 # for dynamically loading extractors
 getdents64: 1
@@ -51,3 +54,4 @@
 gettid: 1
 
 @include /system/etc/seccomp_policy/crash_dump.x86_64.policy
+@include /system/etc/seccomp_policy/code_coverage.x86_64.policy
diff --git a/services/mediaresourcemanager/ResourceManagerService.cpp b/services/mediaresourcemanager/ResourceManagerService.cpp
index 28bfd3f..bdcd5e4 100644
--- a/services/mediaresourcemanager/ResourceManagerService.cpp
+++ b/services/mediaresourcemanager/ResourceManagerService.cpp
@@ -21,8 +21,11 @@
 
 #include <binder/IMediaResourceMonitor.h>
 #include <binder/IServiceManager.h>
+#include <cutils/sched_policy.h>
 #include <dirent.h>
 #include <media/stagefright/ProcessInfo.h>
+#include <mediautils/BatteryNotifier.h>
+#include <mediautils/SchedulingPolicyService.h>
 #include <string.h>
 #include <sys/types.h>
 #include <sys/stat.h>
@@ -31,8 +34,7 @@
 
 #include "ResourceManagerService.h"
 #include "ServiceLog.h"
-#include "mediautils/SchedulingPolicyService.h"
-#include <cutils/sched_policy.h>
+
 namespace android {
 
 namespace {
@@ -69,9 +71,9 @@
     return itemsStr;
 }
 
-static bool hasResourceType(MediaResource::Type type, const Vector<MediaResource>& resources) {
-    for (size_t i = 0; i < resources.size(); ++i) {
-        if (resources[i].mType == type) {
+static bool hasResourceType(MediaResource::Type type, const ResourceList& resources) {
+    for (auto it = resources.begin(); it != resources.end(); it++) {
+        if (it->second.mType == type) {
             return true;
         }
     }
@@ -101,20 +103,22 @@
 }
 
 static ResourceInfo& getResourceInfoForEdit(
+        uid_t uid,
         int64_t clientId,
         const sp<IResourceManagerClient>& client,
         ResourceInfos& infos) {
-    for (size_t i = 0; i < infos.size(); ++i) {
-        if (infos[i].clientId == clientId) {
-            return infos.editItemAt(i);
-        }
+    ssize_t index = infos.indexOfKey(clientId);
+
+    if (index < 0) {
+        ResourceInfo info;
+        info.uid = uid;
+        info.clientId = clientId;
+        info.client = client;
+
+        index = infos.add(clientId, info);
     }
-    ResourceInfo info;
-    info.clientId = clientId;
-    info.client = client;
-    info.cpuBoost = false;
-    infos.push_back(info);
-    return infos.editItemAt(infos.size() - 1);
+
+    return infos.editValueAt(index);
 }
 
 static void notifyResourceGranted(int pid, const Vector<MediaResource> &resources) {
@@ -181,10 +185,10 @@
             snprintf(buffer, SIZE, "        Name: %s\n", infos[j].client->getName().string());
             result.append(buffer);
 
-            Vector<MediaResource> resources = infos[j].resources;
+            const ResourceList &resources = infos[j].resources;
             result.append("        Resources:\n");
-            for (size_t k = 0; k < resources.size(); ++k) {
-                snprintf(buffer, SIZE, "          %s\n", resources[k].toString().string());
+            for (auto it = resources.begin(); it != resources.end(); it++) {
+                snprintf(buffer, SIZE, "          %s\n", it->second.toString().string());
                 result.append(buffer);
             }
         }
@@ -196,15 +200,45 @@
     return OK;
 }
 
-ResourceManagerService::ResourceManagerService()
-    : ResourceManagerService(new ProcessInfo()) {}
+struct SystemCallbackImpl :
+        public ResourceManagerService::SystemCallbackInterface {
+    SystemCallbackImpl() {}
 
-ResourceManagerService::ResourceManagerService(sp<ProcessInfoInterface> processInfo)
+    virtual void noteStartVideo(int uid) override {
+        BatteryNotifier::getInstance().noteStartVideo(uid);
+    }
+    virtual void noteStopVideo(int uid) override {
+        BatteryNotifier::getInstance().noteStopVideo(uid);
+    }
+    virtual void noteResetVideo() override {
+        BatteryNotifier::getInstance().noteResetVideo();
+    }
+    virtual bool requestCpusetBoost(
+            bool enable, const sp<IInterface> &client) override {
+        return android::requestCpusetBoost(enable, client);
+    }
+
+protected:
+    virtual ~SystemCallbackImpl() {}
+
+private:
+    DISALLOW_EVIL_CONSTRUCTORS(SystemCallbackImpl);
+};
+
+ResourceManagerService::ResourceManagerService()
+    : ResourceManagerService(new ProcessInfo(), new SystemCallbackImpl()) {}
+
+ResourceManagerService::ResourceManagerService(
+        const sp<ProcessInfoInterface> &processInfo,
+        const sp<SystemCallbackInterface> &systemResource)
     : mProcessInfo(processInfo),
+      mSystemCB(systemResource),
       mServiceLog(new ServiceLog()),
       mSupportsMultipleSecureCodecs(true),
       mSupportsSecureWithNonSecureCodec(true),
-      mCpuBoostCount(0) {}
+      mCpuBoostCount(0) {
+    mSystemCB->noteResetVideo();
+}
 
 ResourceManagerService::~ResourceManagerService() {}
 
@@ -224,8 +258,41 @@
     }
 }
 
+void ResourceManagerService::onFirstAdded(
+        const MediaResource& resource, const ResourceInfo& clientInfo) {
+    // first time added
+    if (resource.mType == MediaResource::kCpuBoost
+     && resource.mSubType == MediaResource::kUnspecifiedSubType) {
+        // Request it on every new instance of kCpuBoost, as the media.codec
+        // process could have died; if we only requested it the first time,
+        // subsequent instances would never get the boost.
+        if (mSystemCB->requestCpusetBoost(true, this) != OK) {
+            ALOGW("couldn't request cpuset boost");
+        }
+        mCpuBoostCount++;
+    } else if (resource.mType == MediaResource::kBattery
+            && resource.mSubType == MediaResource::kVideoCodec) {
+        mSystemCB->noteStartVideo(clientInfo.uid);
+    }
+}
+
+void ResourceManagerService::onLastRemoved(
+        const MediaResource& resource, const ResourceInfo& clientInfo) {
+    if (resource.mType == MediaResource::kCpuBoost
+            && resource.mSubType == MediaResource::kUnspecifiedSubType
+            && mCpuBoostCount > 0) {
+        if (--mCpuBoostCount == 0) {
+            mSystemCB->requestCpusetBoost(false, this);
+        }
+    } else if (resource.mType == MediaResource::kBattery
+            && resource.mSubType == MediaResource::kVideoCodec) {
+        mSystemCB->noteStopVideo(clientInfo.uid);
+    }
+}
+
 void ResourceManagerService::addResource(
         int pid,
+        int uid,
         int64_t clientId,
         const sp<IResourceManagerClient> client,
         const Vector<MediaResource> &resources) {
@@ -239,20 +306,15 @@
         return;
     }
     ResourceInfos& infos = getResourceInfosForEdit(pid, mMap);
-    ResourceInfo& info = getResourceInfoForEdit(clientId, client, infos);
-    // TODO: do the merge instead of append.
-    info.resources.appendVector(resources);
+    ResourceInfo& info = getResourceInfoForEdit(uid, clientId, client, infos);
 
     for (size_t i = 0; i < resources.size(); ++i) {
-        if (resources[i].mType == MediaResource::kCpuBoost && !info.cpuBoost) {
-            info.cpuBoost = true;
-            // Request it on every new instance of kCpuBoost, as the media.codec
-            // could have died, if we only do it the first time subsequent instances
-            // never gets the boost.
-            if (requestCpusetBoost(true, this) != OK) {
-                ALOGW("couldn't request cpuset boost");
-            }
-            mCpuBoostCount++;
+        const auto resType = std::make_pair(resources[i].mType, resources[i].mSubType);
+        if (info.resources.find(resType) == info.resources.end()) {
+            onFirstAdded(resources[i], info);
+            info.resources[resType] = resources[i];
+        } else {
+            info.resources[resType].mValue += resources[i].mValue;
         }
     }
     if (info.deathNotifier == nullptr) {
@@ -262,7 +324,48 @@
     notifyResourceGranted(pid, resources);
 }
 
-void ResourceManagerService::removeResource(int pid, int64_t clientId) {
+void ResourceManagerService::removeResource(int pid, int64_t clientId,
+        const Vector<MediaResource> &resources) {
+    String8 log = String8::format("removeResource(pid %d, clientId %lld, resources %s)",
+            pid, (long long) clientId, getString(resources).string());
+    mServiceLog->add(log);
+
+    Mutex::Autolock lock(mLock);
+    if (!mProcessInfo->isValidPid(pid)) {
+        ALOGE("Rejected removeResource call with invalid pid.");
+        return;
+    }
+    ssize_t index = mMap.indexOfKey(pid);
+    if (index < 0) {
+        ALOGV("removeResource: didn't find pid %d for clientId %lld", pid, (long long) clientId);
+        return;
+    }
+    ResourceInfos &infos = mMap.editValueAt(index);
+
+    index = infos.indexOfKey(clientId);
+    if (index < 0) {
+        ALOGV("removeResource: didn't find clientId %lld", (long long) clientId);
+        return;
+    }
+
+    ResourceInfo &info = infos.editValueAt(index);
+
+    for (size_t i = 0; i < resources.size(); ++i) {
+        const auto resType = std::make_pair(resources[i].mType, resources[i].mSubType);
+        // ignore if we don't have it
+        if (info.resources.find(resType) != info.resources.end()) {
+            MediaResource &resource = info.resources[resType];
+            if (resource.mValue > resources[i].mValue) {
+                resource.mValue -= resources[i].mValue;
+            } else {
+                onLastRemoved(resources[i], info);
+                info.resources.erase(resType);
+            }
+        }
+    }
+}
+
+void ResourceManagerService::removeClient(int pid, int64_t clientId) {
     removeResource(pid, clientId, true);
 }
 
@@ -282,24 +385,22 @@
         ALOGV("removeResource: didn't find pid %d for clientId %lld", pid, (long long) clientId);
         return;
     }
-    bool found = false;
     ResourceInfos &infos = mMap.editValueAt(index);
-    for (size_t j = 0; j < infos.size(); ++j) {
-        if (infos[j].clientId == clientId) {
-            if (infos[j].cpuBoost && mCpuBoostCount > 0) {
-                if (--mCpuBoostCount == 0) {
-                    requestCpusetBoost(false, this);
-                }
-            }
-            IInterface::asBinder(infos[j].client)->unlinkToDeath(infos[j].deathNotifier);
-            j = infos.removeAt(j);
-            found = true;
-            break;
-        }
+
+    index = infos.indexOfKey(clientId);
+    if (index < 0) {
+        ALOGV("removeResource: didn't find clientId %lld", (long long) clientId);
+        return;
     }
-    if (!found) {
-        ALOGV("didn't find client");
+
+    const ResourceInfo &info = infos[index];
+    for (auto it = info.resources.begin(); it != info.resources.end(); it++) {
+        onLastRemoved(it->second, info);
     }
+
+    IInterface::asBinder(info.client)->unlinkToDeath(info.deathNotifier);
+
+    infos.removeItemsAt(index);
 }
 
 void ResourceManagerService::getClientForResource_l(
@@ -410,7 +511,7 @@
             ResourceInfos &infos = mMap.editValueAt(i);
             for (size_t j = 0; j < infos.size();) {
                 if (infos[j].client == failedClient) {
-                    j = infos.removeAt(j);
+                    j = infos.removeItemsAt(j);
                     found = true;
                 } else {
                     ++j;
@@ -538,11 +639,12 @@
     uint64_t largestValue = 0;
     const ResourceInfos &infos = mMap.valueAt(index);
     for (size_t i = 0; i < infos.size(); ++i) {
-        Vector<MediaResource> resources = infos[i].resources;
-        for (size_t j = 0; j < resources.size(); ++j) {
-            if (resources[j].mType == type) {
-                if (resources[j].mValue > largestValue) {
-                    largestValue = resources[j].mValue;
+        const ResourceList &resources = infos[i].resources;
+        for (auto it = resources.begin(); it != resources.end(); it++) {
+            const MediaResource &resource = it->second;
+            if (resource.mType == type) {
+                if (resource.mValue > largestValue) {
+                    largestValue = resource.mValue;
                     clientTemp = infos[i].client;
                 }
             }
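Taken together, the ResourceManagerService.cpp changes replace each client's Vector<MediaResource> with a ResourceList map keyed by (type, subType): addResource() now merges values into an existing entry and calls onFirstAdded() only for the first instance of a key, and the new partial removeResource() decrements the stored value, calling onLastRemoved() and erasing the entry once the last instance (or an overshooting amount) is removed. A minimal standalone model of that bookkeeping, with plain integers standing in for the real MediaResource type and subtype enums:

    #include <cstdint>
    #include <iostream>
    #include <map>
    #include <utility>

    using Key = std::pair<int /*type*/, int /*subType*/>;
    struct Res { uint64_t value = 0; };
    using ResourceList = std::map<Key, Res>;

    void addRes(ResourceList& list, const Key& key, uint64_t value) {
        auto it = list.find(key);
        if (it == list.end()) {
            list[key].value = value;    // first instance of this key -> "onFirstAdded"
        } else {
            it->second.value += value;  // merge with the existing entry
        }
    }

    void removeRes(ResourceList& list, const Key& key, uint64_t value) {
        auto it = list.find(key);
        if (it == list.end()) {
            return;                     // ignore resources the client does not hold
        }
        if (it->second.value > value) {
            it->second.value -= value;  // partial removal
        } else {
            list.erase(it);             // last instance gone -> "onLastRemoved"
        }
    }

    int main() {
        ResourceList list;
        addRes(list, {1, 0}, 200);
        addRes(list, {1, 0}, 100);         // merged: value is now 300
        removeRes(list, {1, 0}, 100);      // back to 200
        removeRes(list, {1, 0}, 1000);     // overshoot erases the entry entirely
        std::cout << list.size() << '\n';  // prints 0
        return 0;
    }

This mirrors the behaviour exercised by testCombineResource() and testRemoveResource() in the test changes below.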
diff --git a/services/mediaresourcemanager/ResourceManagerService.h b/services/mediaresourcemanager/ResourceManagerService.h
index 82d2a0b..f086dc3 100644
--- a/services/mediaresourcemanager/ResourceManagerService.h
+++ b/services/mediaresourcemanager/ResourceManagerService.h
@@ -33,15 +33,17 @@
 class ServiceLog;
 struct ProcessInfoInterface;
 
+typedef std::map<std::pair<MediaResource::Type, MediaResource::SubType>, MediaResource> ResourceList;
 struct ResourceInfo {
     int64_t clientId;
+    uid_t uid;
     sp<IResourceManagerClient> client;
     sp<IBinder::DeathRecipient> deathNotifier;
-    Vector<MediaResource> resources;
-    bool cpuBoost;
+    ResourceList resources;
 };
 
-typedef Vector<ResourceInfo> ResourceInfos;
+// TODO: convert these to std::map
+typedef KeyedVector<int64_t, ResourceInfo> ResourceInfos;
 typedef KeyedVector<int, ResourceInfos> PidResourceInfosMap;
 
 class ResourceManagerService
@@ -49,23 +51,37 @@
       public BnResourceManagerService
 {
 public:
+    struct SystemCallbackInterface : public RefBase {
+        virtual void noteStartVideo(int uid) = 0;
+        virtual void noteStopVideo(int uid) = 0;
+        virtual void noteResetVideo() = 0;
+        virtual bool requestCpusetBoost(
+                bool enable, const sp<IInterface> &client) = 0;
+    };
+
     static char const *getServiceName() { return "media.resource_manager"; }
 
     virtual status_t dump(int fd, const Vector<String16>& args);
 
     ResourceManagerService();
-    explicit ResourceManagerService(sp<ProcessInfoInterface> processInfo);
+    explicit ResourceManagerService(
+            const sp<ProcessInfoInterface> &processInfo,
+            const sp<SystemCallbackInterface> &systemResource);
 
     // IResourceManagerService interface
     virtual void config(const Vector<MediaResourcePolicy> &policies);
 
     virtual void addResource(
             int pid,
+            int uid,
             int64_t clientId,
             const sp<IResourceManagerClient> client,
             const Vector<MediaResource> &resources);
 
-    virtual void removeResource(int pid, int64_t clientId);
+    virtual void removeResource(int pid, int64_t clientId,
+            const Vector<MediaResource> &resources);
+
+    virtual void removeClient(int pid, int64_t clientId);
 
     // Tries to reclaim resource from processes with lower priority than the calling process
     // according to the requested resources.
@@ -107,8 +123,12 @@
     void getClientForResource_l(
         int callingPid, const MediaResource *res, Vector<sp<IResourceManagerClient>> *clients);
 
+    void onFirstAdded(const MediaResource& res, const ResourceInfo& clientInfo);
+    void onLastRemoved(const MediaResource& res, const ResourceInfo& clientInfo);
+
     mutable Mutex mLock;
     sp<ProcessInfoInterface> mProcessInfo;
+    sp<SystemCallbackInterface> mSystemCB;
     sp<ServiceLog> mServiceLog;
     PidResourceInfosMap mMap;
     bool mSupportsMultipleSecureCodecs;
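Design note on the header changes: every battery and cpuset side effect now goes through the SystemCallbackInterface member rather than being invoked directly, with the production implementation (SystemCallbackImpl above) delegating to BatteryNotifier and SchedulingPolicyService. The two-argument constructor lets callers inject a different implementation, which is exactly what the TestSystemCallback in the test changes below does, making the new testBatteryStats and testCpusetBoost cases possible without touching real system services. The constructor also calls noteResetVideo() once, so a restarted resource manager clears any stale video-on accounting.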
diff --git a/services/mediaresourcemanager/TEST_MAPPING b/services/mediaresourcemanager/TEST_MAPPING
new file mode 100644
index 0000000..418b159
--- /dev/null
+++ b/services/mediaresourcemanager/TEST_MAPPING
@@ -0,0 +1,10 @@
+{
+  "presubmit": [
+    {
+       "name": "ResourceManagerService_test"
+    },
+    {
+       "name": "ServiceLog_test"
+    }
+  ]
+}
diff --git a/services/mediaresourcemanager/test/Android.bp b/services/mediaresourcemanager/test/Android.bp
index 70e8833..543c87c 100644
--- a/services/mediaresourcemanager/test/Android.bp
+++ b/services/mediaresourcemanager/test/Android.bp
@@ -2,6 +2,7 @@
 cc_test {
     name: "ResourceManagerService_test",
     srcs: ["ResourceManagerService_test.cpp"],
+    test_suites: ["device-tests"],
     shared_libs: [
         "libbinder",
         "liblog",
@@ -23,6 +24,7 @@
 cc_test {
     name: "ServiceLog_test",
     srcs: ["ServiceLog_test.cpp"],
+    test_suites: ["device-tests"],
     shared_libs: [
         "liblog",
         "libmedia",
diff --git a/services/mediaresourcemanager/test/ResourceManagerService_test.cpp b/services/mediaresourcemanager/test/ResourceManagerService_test.cpp
index ed0b0ef..ae97ec8 100644
--- a/services/mediaresourcemanager/test/ResourceManagerService_test.cpp
+++ b/services/mediaresourcemanager/test/ResourceManagerService_test.cpp
@@ -52,13 +52,69 @@
     DISALLOW_EVIL_CONSTRUCTORS(TestProcessInfo);
 };
 
+struct TestSystemCallback :
+        public ResourceManagerService::SystemCallbackInterface {
+    TestSystemCallback() :
+        mLastEvent({EventType::INVALID, 0}), mEventCount(0) {}
+
+    enum EventType {
+        INVALID          = -1,
+        VIDEO_ON         = 0,
+        VIDEO_OFF        = 1,
+        VIDEO_RESET      = 2,
+        CPUSET_ENABLE    = 3,
+        CPUSET_DISABLE   = 4,
+    };
+
+    struct EventEntry {
+        EventType type;
+        int arg;
+    };
+
+    virtual void noteStartVideo(int uid) override {
+        mLastEvent = {EventType::VIDEO_ON, uid};
+        mEventCount++;
+    }
+
+    virtual void noteStopVideo(int uid) override {
+        mLastEvent = {EventType::VIDEO_OFF, uid};
+        mEventCount++;
+    }
+
+    virtual void noteResetVideo() override {
+        mLastEvent = {EventType::VIDEO_RESET, 0};
+        mEventCount++;
+    }
+
+    virtual bool requestCpusetBoost(
+            bool enable, const sp<IInterface> &/*client*/) override {
+        mLastEvent = {enable ? EventType::CPUSET_ENABLE : EventType::CPUSET_DISABLE, 0};
+        mEventCount++;
+        return true;
+    }
+
+    size_t eventCount() { return mEventCount; }
+    EventType lastEventType() { return mLastEvent.type; }
+    EventEntry lastEvent() { return mLastEvent; }
+
+protected:
+    virtual ~TestSystemCallback() {}
+
+private:
+    EventEntry mLastEvent;
+    size_t mEventCount;
+
+    DISALLOW_EVIL_CONSTRUCTORS(TestSystemCallback);
+};
+
+
 struct TestClient : public BnResourceManagerClient {
     TestClient(int pid, sp<ResourceManagerService> service)
         : mReclaimed(false), mPid(pid), mService(service) {}
 
     virtual bool reclaimResource() {
         sp<IResourceManagerClient> client(this);
-        mService->removeResource(mPid, (int64_t) client.get());
+        mService->removeClient(mPid, (int64_t) client.get());
         mReclaimed = true;
         return true;
     }
@@ -86,16 +142,26 @@
 };
 
 static const int kTestPid1 = 30;
+static const int kTestUid1 = 1010;
+
 static const int kTestPid2 = 20;
+static const int kTestUid2 = 1011;
 
 static const int kLowPriorityPid = 40;
 static const int kMidPriorityPid = 25;
 static const int kHighPriorityPid = 10;
 
+using EventType = TestSystemCallback::EventType;
+using EventEntry = TestSystemCallback::EventEntry;
+bool operator== (const EventEntry& lhs, const EventEntry& rhs) {
+    return lhs.type == rhs.type && lhs.arg == rhs.arg;
+}
+
 class ResourceManagerServiceTest : public ::testing::Test {
 public:
     ResourceManagerServiceTest()
-        : mService(new ResourceManagerService(new TestProcessInfo)),
+        : mSystemCB(new TestSystemCallback()),
+          mService(new ResourceManagerService(new TestProcessInfo, mSystemCB)),
           mTestClient1(new TestClient(kTestPid1, mService)),
           mTestClient2(new TestClient(kTestPid2, mService)),
           mTestClient3(new TestClient(kTestPid2, mService)) {
@@ -103,20 +169,21 @@
 
 protected:
     static bool isEqualResources(const Vector<MediaResource> &resources1,
-            const Vector<MediaResource> &resources2) {
-        if (resources1.size() != resources2.size()) {
-            return false;
-        }
+            const ResourceList &resources2) {
+        // convert resource1 to ResourceList
+        ResourceList r1;
         for (size_t i = 0; i < resources1.size(); ++i) {
-            if (resources1[i] != resources2[i]) {
-                return false;
-            }
+            const auto resType = std::make_pair(resources1[i].mType, resources1[i].mSubType);
+            r1[resType] = resources1[i];
         }
-        return true;
+        return r1 == resources2;
     }
 
-    static void expectEqResourceInfo(const ResourceInfo &info, sp<IResourceManagerClient> client,
+    static void expectEqResourceInfo(const ResourceInfo &info,
+            int uid,
+            sp<IResourceManagerClient> client,
             const Vector<MediaResource> &resources) {
+        EXPECT_EQ(uid, info.uid);
         EXPECT_EQ(client, info.client);
         EXPECT_TRUE(isEqualResources(resources, info.resources));
     }
@@ -153,24 +220,24 @@
         // kTestPid1 mTestClient1
         Vector<MediaResource> resources1;
         resources1.push_back(MediaResource(MediaResource::kSecureCodec, 1));
-        mService->addResource(kTestPid1, getId(mTestClient1), mTestClient1, resources1);
+        mService->addResource(kTestPid1, kTestUid1, getId(mTestClient1), mTestClient1, resources1);
         resources1.push_back(MediaResource(MediaResource::kGraphicMemory, 200));
         Vector<MediaResource> resources11;
         resources11.push_back(MediaResource(MediaResource::kGraphicMemory, 200));
-        mService->addResource(kTestPid1, getId(mTestClient1), mTestClient1, resources11);
+        mService->addResource(kTestPid1, kTestUid1, getId(mTestClient1), mTestClient1, resources11);
 
         // kTestPid2 mTestClient2
         Vector<MediaResource> resources2;
         resources2.push_back(MediaResource(MediaResource::kNonSecureCodec, 1));
         resources2.push_back(MediaResource(MediaResource::kGraphicMemory, 300));
-        mService->addResource(kTestPid2, getId(mTestClient2), mTestClient2, resources2);
+        mService->addResource(kTestPid2, kTestUid2, getId(mTestClient2), mTestClient2, resources2);
 
         // kTestPid2 mTestClient3
         Vector<MediaResource> resources3;
-        mService->addResource(kTestPid2, getId(mTestClient3), mTestClient3, resources3);
+        mService->addResource(kTestPid2, kTestUid2, getId(mTestClient3), mTestClient3, resources3);
         resources3.push_back(MediaResource(MediaResource::kSecureCodec, 1));
         resources3.push_back(MediaResource(MediaResource::kGraphicMemory, 100));
-        mService->addResource(kTestPid2, getId(mTestClient3), mTestClient3, resources3);
+        mService->addResource(kTestPid2, kTestUid2, getId(mTestClient3), mTestClient3, resources3);
 
         const PidResourceInfosMap &map = mService->mMap;
         EXPECT_EQ(2u, map.size());
@@ -178,14 +245,14 @@
         ASSERT_GE(index1, 0);
         const ResourceInfos &infos1 = map[index1];
         EXPECT_EQ(1u, infos1.size());
-        expectEqResourceInfo(infos1[0], mTestClient1, resources1);
+        expectEqResourceInfo(infos1.valueFor(getId(mTestClient1)), kTestUid1, mTestClient1, resources1);
 
         ssize_t index2 = map.indexOfKey(kTestPid2);
         ASSERT_GE(index2, 0);
         const ResourceInfos &infos2 = map[index2];
         EXPECT_EQ(2u, infos2.size());
-        expectEqResourceInfo(infos2[0], mTestClient2, resources2);
-        expectEqResourceInfo(infos2[1], mTestClient3, resources3);
+        expectEqResourceInfo(infos2.valueFor(getId(mTestClient2)), kTestUid2, mTestClient2, resources2);
+        expectEqResourceInfo(infos2.valueFor(getId(mTestClient3)), kTestUid2, mTestClient3, resources3);
     }
 
     void testConfig() {
@@ -219,10 +286,84 @@
         EXPECT_TRUE(mService->mSupportsSecureWithNonSecureCodec);
     }
 
+    void testCombineResource() {
+        // kTestPid1 mTestClient1
+        Vector<MediaResource> resources1;
+        resources1.push_back(MediaResource(MediaResource::kSecureCodec, 1));
+        mService->addResource(kTestPid1, kTestUid1, getId(mTestClient1), mTestClient1, resources1);
+
+        Vector<MediaResource> resources11;
+        resources11.push_back(MediaResource(MediaResource::kGraphicMemory, 200));
+        mService->addResource(kTestPid1, kTestUid1, getId(mTestClient1), mTestClient1, resources11);
+
+        const PidResourceInfosMap &map = mService->mMap;
+        EXPECT_EQ(1u, map.size());
+        ssize_t index1 = map.indexOfKey(kTestPid1);
+        ASSERT_GE(index1, 0);
+        const ResourceInfos &infos1 = map[index1];
+        EXPECT_EQ(1u, infos1.size());
+
+        // test adding existing types to combine values
+        resources1.push_back(MediaResource(MediaResource::kGraphicMemory, 100));
+        mService->addResource(kTestPid1, kTestUid1, getId(mTestClient1), mTestClient1, resources1);
+
+        Vector<MediaResource> expected;
+        expected.push_back(MediaResource(MediaResource::kSecureCodec, 2));
+        expected.push_back(MediaResource(MediaResource::kGraphicMemory, 300));
+        expectEqResourceInfo(infos1.valueFor(getId(mTestClient1)), kTestUid1, mTestClient1, expected);
+
+        // test adding new types (including types that differs only in subType)
+        resources11.push_back(MediaResource(MediaResource::kNonSecureCodec, 1));
+        resources11.push_back(MediaResource(MediaResource::kSecureCodec, MediaResource::kVideoCodec, 1));
+        mService->addResource(kTestPid1, kTestUid1, getId(mTestClient1), mTestClient1, resources11);
+
+        expected.clear();
+        expected.push_back(MediaResource(MediaResource::kSecureCodec, 2));
+        expected.push_back(MediaResource(MediaResource::kNonSecureCodec, 1));
+        expected.push_back(MediaResource(MediaResource::kSecureCodec, MediaResource::kVideoCodec, 1));
+        expected.push_back(MediaResource(MediaResource::kGraphicMemory, 500));
+        expectEqResourceInfo(infos1.valueFor(getId(mTestClient1)), kTestUid1, mTestClient1, expected);
+    }
+
     void testRemoveResource() {
+        // kTestPid1 mTestClient1
+        Vector<MediaResource> resources1;
+        resources1.push_back(MediaResource(MediaResource::kSecureCodec, 1));
+        mService->addResource(kTestPid1, kTestUid1, getId(mTestClient1), mTestClient1, resources1);
+
+        Vector<MediaResource> resources11;
+        resources11.push_back(MediaResource(MediaResource::kGraphicMemory, 200));
+        mService->addResource(kTestPid1, kTestUid1, getId(mTestClient1), mTestClient1, resources11);
+
+        const PidResourceInfosMap &map = mService->mMap;
+        EXPECT_EQ(1u, map.size());
+        ssize_t index1 = map.indexOfKey(kTestPid1);
+        ASSERT_GE(index1, 0);
+        const ResourceInfos &infos1 = map[index1];
+        EXPECT_EQ(1u, infos1.size());
+
+        // test partial removal
+        resources11.editItemAt(0).mValue = 100;
+        mService->removeResource(kTestPid1, getId(mTestClient1), resources11);
+
+        Vector<MediaResource> expected;
+        expected.push_back(MediaResource(MediaResource::kSecureCodec, 1));
+        expected.push_back(MediaResource(MediaResource::kGraphicMemory, 100));
+        expectEqResourceInfo(infos1.valueFor(getId(mTestClient1)), kTestUid1, mTestClient1, expected);
+
+        // test complete removal with overshoot value
+        resources11.editItemAt(0).mValue = 1000;
+        mService->removeResource(kTestPid1, getId(mTestClient1), resources11);
+
+        expected.clear();
+        expected.push_back(MediaResource(MediaResource::kSecureCodec, 1));
+        expectEqResourceInfo(infos1.valueFor(getId(mTestClient1)), kTestUid1, mTestClient1, expected);
+    }
+
+    void testRemoveClient() {
         addResource();
 
-        mService->removeResource(kTestPid2, getId(mTestClient2));
+        mService->removeClient(kTestPid2, getId(mTestClient2));
 
         const PidResourceInfosMap &map = mService->mMap;
         EXPECT_EQ(2u, map.size());
@@ -231,6 +372,7 @@
         EXPECT_EQ(1u, infos1.size());
         EXPECT_EQ(1u, infos2.size());
         // mTestClient2 has been removed.
+        // (OK to use infos2[0] as there is only 1 entry)
         EXPECT_EQ(mTestClient3, infos2[0].client);
     }
 
@@ -246,6 +388,7 @@
         EXPECT_TRUE(mService->getAllClients_l(kHighPriorityPid, type, &clients));
 
         EXPECT_EQ(2u, clients.size());
+        // (OK to require ordering in clients[], as the pid map is sorted)
         EXPECT_EQ(mTestClient3, clients[0]);
         EXPECT_EQ(mTestClient1, clients[1]);
     }
@@ -438,7 +581,7 @@
             verifyClients(true /* c1 */, false /* c2 */, false /* c3 */);
 
             // clean up client 3 which still left
-            mService->removeResource(kTestPid2, getId(mTestClient3));
+            mService->removeClient(kTestPid2, getId(mTestClient3));
         }
     }
 
@@ -498,6 +641,84 @@
         EXPECT_TRUE(mService->isCallingPriorityHigher_l(99, 100));
     }
 
+    void testBatteryStats() {
+        // reset should always be called when ResourceManagerService is created (restarted)
+        EXPECT_EQ(1u, mSystemCB->eventCount());
+        EXPECT_EQ(EventType::VIDEO_RESET, mSystemCB->lastEventType());
+
+        // new client request should cause VIDEO_ON
+        Vector<MediaResource> resources1;
+        resources1.push_back(MediaResource(MediaResource::kBattery, MediaResource::kVideoCodec, 1));
+        mService->addResource(kTestPid1, kTestUid1, getId(mTestClient1), mTestClient1, resources1);
+        EXPECT_EQ(2u, mSystemCB->eventCount());
+        EXPECT_EQ(EventEntry({EventType::VIDEO_ON, kTestUid1}), mSystemCB->lastEvent());
+
+        // each client should only cause 1 VIDEO_ON
+        mService->addResource(kTestPid1, kTestUid1, getId(mTestClient1), mTestClient1, resources1);
+        EXPECT_EQ(2u, mSystemCB->eventCount());
+
+        // new client request should cause VIDEO_ON
+        Vector<MediaResource> resources2;
+        resources2.push_back(MediaResource(MediaResource::kBattery, MediaResource::kVideoCodec, 2));
+        mService->addResource(kTestPid2, kTestUid2, getId(mTestClient2), mTestClient2, resources2);
+        EXPECT_EQ(3u, mSystemCB->eventCount());
+        EXPECT_EQ(EventEntry({EventType::VIDEO_ON, kTestUid2}), mSystemCB->lastEvent());
+
+        // partially remove mTestClient1's request, shouldn't be any VIDEO_OFF
+        mService->removeResource(kTestPid1, getId(mTestClient1), resources1);
+        EXPECT_EQ(3u, mSystemCB->eventCount());
+
+        // remove mTestClient1's request, should be VIDEO_OFF for kTestUid1
+        // (use resources2 to test removing more instances than previously requested)
+        mService->removeResource(kTestPid1, getId(mTestClient1), resources2);
+        EXPECT_EQ(4u, mSystemCB->eventCount());
+        EXPECT_EQ(EventEntry({EventType::VIDEO_OFF, kTestUid1}), mSystemCB->lastEvent());
+
+        // remove mTestClient2, should be VIDEO_OFF for kTestUid2
+        mService->removeClient(kTestPid2, getId(mTestClient2));
+        EXPECT_EQ(5u, mSystemCB->eventCount());
+        EXPECT_EQ(EventEntry({EventType::VIDEO_OFF, kTestUid2}), mSystemCB->lastEvent());
+    }
+
+    void testCpusetBoost() {
+        // reset should always be called when ResourceManagerService is created (restarted)
+        EXPECT_EQ(1u, mSystemCB->eventCount());
+        EXPECT_EQ(EventType::VIDEO_RESET, mSystemCB->lastEventType());
+
+        // new client request should cause CPUSET_ENABLE
+        Vector<MediaResource> resources1;
+        resources1.push_back(MediaResource(MediaResource::kCpuBoost, 1));
+        mService->addResource(kTestPid1, kTestUid1, getId(mTestClient1), mTestClient1, resources1);
+        EXPECT_EQ(2u, mSystemCB->eventCount());
+        EXPECT_EQ(EventType::CPUSET_ENABLE, mSystemCB->lastEventType());
+
+        // each client should only cause 1 CPUSET_ENABLE
+        mService->addResource(kTestPid1, kTestUid1, getId(mTestClient1), mTestClient1, resources1);
+        EXPECT_EQ(2u, mSystemCB->eventCount());
+
+        // new client request should cause CPUSET_ENABLE
+        Vector<MediaResource> resources2;
+        resources2.push_back(MediaResource(MediaResource::kCpuBoost, 2));
+        mService->addResource(kTestPid2, kTestUid2, getId(mTestClient2), mTestClient2, resources2);
+        EXPECT_EQ(3u, mSystemCB->eventCount());
+        EXPECT_EQ(EventType::CPUSET_ENABLE, mSystemCB->lastEventType());
+
+        // remove mTestClient2 should not cause CPUSET_DISABLE, mTestClient1 still active
+        mService->removeClient(kTestPid2, getId(mTestClient2));
+        EXPECT_EQ(3u, mSystemCB->eventCount());
+
+        // remove 1 cpuboost from mTestClient1, should not be CPUSET_DISABLE (still 1 left)
+        mService->removeResource(kTestPid1, getId(mTestClient1), resources1);
+        EXPECT_EQ(3u, mSystemCB->eventCount());
+
+        // remove 2 cpuboost from mTestClient1, should be CPUSET_DISABLE
+        // (use resources2 to test removing more than previously requested)
+        mService->removeResource(kTestPid1, getId(mTestClient1), resources2);
+        EXPECT_EQ(4u, mSystemCB->eventCount());
+        EXPECT_EQ(EventType::CPUSET_DISABLE, mSystemCB->lastEventType());
+    }
+
+    sp<TestSystemCallback> mSystemCB;
     sp<ResourceManagerService> mService;
     sp<IResourceManagerClient> mTestClient1;
     sp<IResourceManagerClient> mTestClient2;
@@ -512,10 +733,18 @@
     addResource();
 }
 
+TEST_F(ResourceManagerServiceTest, combineResource) {
+    testCombineResource();
+}
+
 TEST_F(ResourceManagerServiceTest, removeResource) {
     testRemoveResource();
 }
 
+TEST_F(ResourceManagerServiceTest, removeClient) {
+    testRemoveClient();
+}
+
 TEST_F(ResourceManagerServiceTest, reclaimResource) {
     testReclaimResourceSecure();
     testReclaimResourceNonSecure();
@@ -541,4 +770,12 @@
     testIsCallingPriorityHigher();
 }
 
+TEST_F(ResourceManagerServiceTest, testBatteryStats) {
+    testBatteryStats();
+}
+
+TEST_F(ResourceManagerServiceTest, testCpusetBoost) {
+    testCpusetBoost();
+}
+
 } // namespace android
diff --git a/services/soundtrigger/SoundTriggerHwService.cpp b/services/soundtrigger/SoundTriggerHwService.cpp
index 377d30b..51afdcd 100644
--- a/services/soundtrigger/SoundTriggerHwService.cpp
+++ b/services/soundtrigger/SoundTriggerHwService.cpp
@@ -40,6 +40,32 @@
 #define HW_MODULE_PREFIX "primary"
 namespace android {
 
+namespace {
+
+// Given an IMemory, returns a copy of its content along with its size.
+// Returns {nullptr, 0} on failure or if the input is nullptr.
+std::pair<std::unique_ptr<uint8_t[]>,
+          size_t> CopyToArray(const sp<IMemory>& mem) {
+    if (mem == nullptr) {
+        return std::make_pair(nullptr, 0);
+    }
+
+    const size_t size = mem->size();
+    if (size == 0) {
+        return std::make_pair(nullptr, 0);
+    }
+
+    std::unique_ptr<uint8_t[]> ar = std::make_unique<uint8_t[]>(size);
+    if (ar == nullptr) {
+        return std::make_pair(nullptr, 0);
+    }
+
+    memcpy(ar.get(), mem->pointer(), size);
+    return std::make_pair(std::move(ar), size);
+}
+
+}
+
 SoundTriggerHwService::SoundTriggerHwService()
     : BnSoundTriggerHwService(),
       mNextUniqueId(1),
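The CopyToArray() helper added above snapshots the client-supplied IMemory into a heap buffer owned by the service before anything is validated. The hunks below then run the existing offset/size checks against that private copy (immutableMemory) rather than the shared mapping, so a client cannot rewrite the header fields between the bounds check and their use (a double fetch), and an empty or unmappable buffer now fails early with NO_MEMORY.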
@@ -557,8 +583,13 @@
         return NO_INIT;
     }
 
-    struct sound_trigger_sound_model *sound_model =
-            (struct sound_trigger_sound_model *)modelMemory->pointer();
+    auto immutableMemory = CopyToArray(modelMemory);
+    if (immutableMemory.first == nullptr) {
+        return NO_MEMORY;
+    }
+
+    struct sound_trigger_sound_model* sound_model =
+        (struct sound_trigger_sound_model*) immutableMemory.first.get();
 
     size_t structSize;
     if (sound_model->type == SOUND_MODEL_TYPE_KEYPHRASE) {
@@ -568,9 +599,10 @@
     }
 
     if (sound_model->data_offset < structSize ||
-           sound_model->data_size > (UINT_MAX - sound_model->data_offset) ||
-           modelMemory->size() < sound_model->data_offset ||
-           sound_model->data_size > (modelMemory->size() - sound_model->data_offset)) {
+        sound_model->data_size > (UINT_MAX - sound_model->data_offset) ||
+        immutableMemory.second < sound_model->data_offset ||
+            sound_model->data_size >
+            (immutableMemory.second - sound_model->data_offset)) {
         android_errorWriteLog(0x534e4554, "30148546");
         ALOGE("loadSoundModel() data_size is too big");
         return BAD_VALUE;
@@ -651,13 +683,19 @@
         return NO_INIT;
     }
 
-    struct sound_trigger_recognition_config *config =
-            (struct sound_trigger_recognition_config *)dataMemory->pointer();
+    auto immutableMemory = CopyToArray(dataMemory);
+    if (immutableMemory.first == nullptr) {
+        return NO_MEMORY;
+    }
+
+    struct sound_trigger_recognition_config* config =
+        (struct sound_trigger_recognition_config*) immutableMemory.first.get();
 
     if (config->data_offset < sizeof(struct sound_trigger_recognition_config) ||
-            config->data_size > (UINT_MAX - config->data_offset) ||
-            dataMemory->size() < config->data_offset ||
-            config->data_size > (dataMemory->size() - config->data_offset)) {
+        config->data_size > (UINT_MAX - config->data_offset) ||
+        immutableMemory.second < config->data_offset ||
+            config->data_size >
+            (immutableMemory.second - config->data_offset)) {
         ALOGE("startRecognition() data_size is too big");
         return BAD_VALUE;
     }