Merge "Remove timing jitter during startup of audio" into jb-mr2-dev
diff --git a/include/media/stagefright/MediaCodec.h b/include/media/stagefright/MediaCodec.h
index a06a8e1..76aa503 100644
--- a/include/media/stagefright/MediaCodec.h
+++ b/include/media/stagefright/MediaCodec.h
@@ -177,6 +177,8 @@
         kFlagDequeueOutputPending       = 32,
         kFlagIsSecure                   = 64,
         kFlagSawMediaServerDie          = 128,
+        kFlagIsEncoder                  = 256,
+        kFlagGatherCodecSpecificData    = 512,
     };
 
     struct BufferInfo {
@@ -244,6 +246,8 @@
 
     status_t onSetParameters(const sp<AMessage> &params);
 
+    status_t amendOutputFormatWithCodecSpecificData(const sp<ABuffer> &buffer);
+
     DISALLOW_EVIL_CONSTRUCTORS(MediaCodec);
 };
 
diff --git a/media/libeffects/lvm/wrapper/Reverb/EffectReverb.cpp b/media/libeffects/lvm/wrapper/Reverb/EffectReverb.cpp
index 87e2c85..8a96212 100644
--- a/media/libeffects/lvm/wrapper/Reverb/EffectReverb.cpp
+++ b/media/libeffects/lvm/wrapper/Reverb/EffectReverb.cpp
@@ -616,10 +616,6 @@
               || pConfig->outputCfg.accessMode == EFFECT_BUFFER_ACCESS_ACCUMULATE);
     CHECK_ARG(pConfig->inputCfg.format == AUDIO_FORMAT_PCM_16_BIT);
 
-    if(pConfig->inputCfg.samplingRate != 44100){
-        return -EINVAL;
-    }
-
     //ALOGV("\tReverb_setConfig calling memcpy");
     pContext->config = *pConfig;
 
@@ -648,7 +644,7 @@
         return -EINVAL;
     }
 
-    if(pContext->SampleRate != SampleRate){
+    if (pContext->SampleRate != SampleRate) {
 
         LVREV_ControlParams_st    ActiveParams;
         LVREV_ReturnStatus_en     LvmStatus = LVREV_SUCCESS;
@@ -662,11 +658,14 @@
         LVM_ERROR_CHECK(LvmStatus, "LVREV_GetControlParameters", "Reverb_setConfig")
         if(LvmStatus != LVREV_SUCCESS) return -EINVAL;
 
+        ActiveParams.SampleRate = SampleRate;
+
         LvmStatus = LVREV_SetControlParameters(pContext->hInstance, &ActiveParams);
 
         LVM_ERROR_CHECK(LvmStatus, "LVREV_SetControlParameters", "Reverb_setConfig")
+        if(LvmStatus != LVREV_SUCCESS) return -EINVAL;
         //ALOGV("\tReverb_setConfig Succesfully called LVREV_SetControlParameters\n");
-
+        pContext->SampleRate = SampleRate;
     }else{
         //ALOGV("\tReverb_setConfig keep sampling rate at %d", SampleRate);
     }
@@ -818,6 +817,7 @@
     /* General parameters */
     params.OperatingMode  = LVM_MODE_ON;
     params.SampleRate     = LVM_FS_44100;
+    pContext->SampleRate  = LVM_FS_44100;
 
     if(pContext->config.inputCfg.channels == AUDIO_CHANNEL_OUT_MONO){
         params.SourceFormat   = LVM_MONO;
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
index 607ec6a..b89b1c8 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
@@ -1257,6 +1257,13 @@
     switch (what) {
         case Source::kWhatPrepared:
         {
+            if (mSource == NULL) {
+                // This is a stale notification from a source that was
+                // asynchronously preparing when the client called reset().
+                // We already handled the reset; the source is gone.
+                break;
+            }
+
             int32_t err;
             CHECK(msg->findInt32("err", &err));
 
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
index bdafb29..68b9623 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
@@ -333,6 +333,14 @@
         case STATE_RESET_IN_PROGRESS:
             return INVALID_OPERATION;
 
+        case STATE_PREPARING:
+        {
+            CHECK(mIsAsyncPrepare);
+
+            notifyListener(MEDIA_PREPARED);
+            break;
+        }
+
         default:
             break;
     }
@@ -503,6 +511,14 @@
 void NuPlayerDriver::notifyPrepareCompleted(status_t err) {
     Mutex::Autolock autoLock(mLock);
 
+    if (mState != STATE_PREPARING) {
+        // We were preparing asynchronously when the client called
+        // reset(); we sent a premature "prepared" notification and
+        // then initiated the reset. This notification is stale.
+        CHECK(mState == STATE_RESET_IN_PROGRESS || mState == STATE_IDLE);
+        return;
+    }
+
     CHECK_EQ(mState, STATE_PREPARING);
 
     mAsyncResult = err;
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp
index 6d952c3..058852e 100644
--- a/media/libstagefright/ACodec.cpp
+++ b/media/libstagefright/ACodec.cpp
@@ -1470,24 +1470,47 @@
             &format, sizeof(format));
 }
 
+static const struct VideoCodingMapEntry {
+    const char *mMime;
+    OMX_VIDEO_CODINGTYPE mVideoCodingType;
+} kVideoCodingMapEntry[] = {
+    { MEDIA_MIMETYPE_VIDEO_AVC, OMX_VIDEO_CodingAVC },
+    { MEDIA_MIMETYPE_VIDEO_MPEG4, OMX_VIDEO_CodingMPEG4 },
+    { MEDIA_MIMETYPE_VIDEO_H263, OMX_VIDEO_CodingH263 },
+    { MEDIA_MIMETYPE_VIDEO_MPEG2, OMX_VIDEO_CodingMPEG2 },
+    { MEDIA_MIMETYPE_VIDEO_VPX, OMX_VIDEO_CodingVPX },
+};
+
 static status_t GetVideoCodingTypeFromMime(
         const char *mime, OMX_VIDEO_CODINGTYPE *codingType) {
-    if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_AVC, mime)) {
-        *codingType = OMX_VIDEO_CodingAVC;
-    } else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_MPEG4, mime)) {
-        *codingType = OMX_VIDEO_CodingMPEG4;
-    } else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_H263, mime)) {
-        *codingType = OMX_VIDEO_CodingH263;
-    } else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_MPEG2, mime)) {
-        *codingType = OMX_VIDEO_CodingMPEG2;
-    } else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_VPX, mime)) {
-        *codingType = OMX_VIDEO_CodingVPX;
-    } else {
-        *codingType = OMX_VIDEO_CodingUnused;
-        return ERROR_UNSUPPORTED;
+    for (size_t i = 0;
+         i < sizeof(kVideoCodingMapEntry) / sizeof(kVideoCodingMapEntry[0]);
+         ++i) {
+        if (!strcasecmp(mime, kVideoCodingMapEntry[i].mMime)) {
+            *codingType = kVideoCodingMapEntry[i].mVideoCodingType;
+            return OK;
+        }
     }
 
-    return OK;
+    *codingType = OMX_VIDEO_CodingUnused;
+
+    return ERROR_UNSUPPORTED;
+}
+
+static status_t GetMimeTypeForVideoCoding(
+        OMX_VIDEO_CODINGTYPE codingType, AString *mime) {
+    for (size_t i = 0;
+         i < sizeof(kVideoCodingMapEntry) / sizeof(kVideoCodingMapEntry[0]);
+         ++i) {
+        if (codingType == kVideoCodingMapEntry[i].mVideoCodingType) {
+            *mime = kVideoCodingMapEntry[i].mMime;
+            return OK;
+        }
+    }
+
+    mime->clear();
+
+    return ERROR_UNSUPPORTED;
 }
 
 status_t ACodec::setupVideoDecoder(
@@ -2227,49 +2250,61 @@
         {
             OMX_VIDEO_PORTDEFINITIONTYPE *videoDef = &def.format.video;
 
-            notify->setString("mime", MEDIA_MIMETYPE_VIDEO_RAW);
-            notify->setInt32("width", videoDef->nFrameWidth);
-            notify->setInt32("height", videoDef->nFrameHeight);
-            notify->setInt32("stride", videoDef->nStride);
-            notify->setInt32("slice-height", videoDef->nSliceHeight);
-            notify->setInt32("color-format", videoDef->eColorFormat);
-
-            OMX_CONFIG_RECTTYPE rect;
-            InitOMXParams(&rect);
-            rect.nPortIndex = kPortIndexOutput;
-
-            if (mOMX->getConfig(
-                        mNode, OMX_IndexConfigCommonOutputCrop,
-                        &rect, sizeof(rect)) != OK) {
-                rect.nLeft = 0;
-                rect.nTop = 0;
-                rect.nWidth = videoDef->nFrameWidth;
-                rect.nHeight = videoDef->nFrameHeight;
+            AString mime;
+            if (!mIsEncoder) {
+                notify->setString("mime", MEDIA_MIMETYPE_VIDEO_RAW);
+            } else if (GetMimeTypeForVideoCoding(
+                        videoDef->eCompressionFormat, &mime) != OK) {
+                notify->setString("mime", "application/octet-stream");
+            } else {
+                notify->setString("mime", mime.c_str());
             }
 
-            CHECK_GE(rect.nLeft, 0);
-            CHECK_GE(rect.nTop, 0);
-            CHECK_GE(rect.nWidth, 0u);
-            CHECK_GE(rect.nHeight, 0u);
-            CHECK_LE(rect.nLeft + rect.nWidth - 1, videoDef->nFrameWidth);
-            CHECK_LE(rect.nTop + rect.nHeight - 1, videoDef->nFrameHeight);
+            notify->setInt32("width", videoDef->nFrameWidth);
+            notify->setInt32("height", videoDef->nFrameHeight);
 
-            notify->setRect(
-                    "crop",
-                    rect.nLeft,
-                    rect.nTop,
-                    rect.nLeft + rect.nWidth - 1,
-                    rect.nTop + rect.nHeight - 1);
+            if (!mIsEncoder) {
+                notify->setInt32("stride", videoDef->nStride);
+                notify->setInt32("slice-height", videoDef->nSliceHeight);
+                notify->setInt32("color-format", videoDef->eColorFormat);
 
-            if (mNativeWindow != NULL) {
-                android_native_rect_t crop;
-                crop.left = rect.nLeft;
-                crop.top = rect.nTop;
-                crop.right = rect.nLeft + rect.nWidth;
-                crop.bottom = rect.nTop + rect.nHeight;
+                OMX_CONFIG_RECTTYPE rect;
+                InitOMXParams(&rect);
+                rect.nPortIndex = kPortIndexOutput;
 
-                CHECK_EQ(0, native_window_set_crop(
-                            mNativeWindow.get(), &crop));
+                if (mOMX->getConfig(
+                            mNode, OMX_IndexConfigCommonOutputCrop,
+                            &rect, sizeof(rect)) != OK) {
+                    rect.nLeft = 0;
+                    rect.nTop = 0;
+                    rect.nWidth = videoDef->nFrameWidth;
+                    rect.nHeight = videoDef->nFrameHeight;
+                }
+
+                CHECK_GE(rect.nLeft, 0);
+                CHECK_GE(rect.nTop, 0);
+                CHECK_GE(rect.nWidth, 0u);
+                CHECK_GE(rect.nHeight, 0u);
+                CHECK_LE(rect.nLeft + rect.nWidth - 1, videoDef->nFrameWidth);
+                CHECK_LE(rect.nTop + rect.nHeight - 1, videoDef->nFrameHeight);
+
+                notify->setRect(
+                        "crop",
+                        rect.nLeft,
+                        rect.nTop,
+                        rect.nLeft + rect.nWidth - 1,
+                        rect.nTop + rect.nHeight - 1);
+
+                if (mNativeWindow != NULL) {
+                    android_native_rect_t crop;
+                    crop.left = rect.nLeft;
+                    crop.top = rect.nTop;
+                    crop.right = rect.nLeft + rect.nWidth;
+                    crop.bottom = rect.nTop + rect.nHeight;
+
+                    CHECK_EQ(0, native_window_set_crop(
+                                mNativeWindow.get(), &crop));
+                }
             }
             break;
         }
@@ -2277,41 +2312,108 @@
         case OMX_PortDomainAudio:
         {
             OMX_AUDIO_PORTDEFINITIONTYPE *audioDef = &def.format.audio;
-            CHECK_EQ((int)audioDef->eEncoding, (int)OMX_AUDIO_CodingPCM);
 
-            OMX_AUDIO_PARAM_PCMMODETYPE params;
-            InitOMXParams(&params);
-            params.nPortIndex = kPortIndexOutput;
+            switch (audioDef->eEncoding) {
+                case OMX_AUDIO_CodingPCM:
+                {
+                    OMX_AUDIO_PARAM_PCMMODETYPE params;
+                    InitOMXParams(&params);
+                    params.nPortIndex = kPortIndexOutput;
 
-            CHECK_EQ(mOMX->getParameter(
-                        mNode, OMX_IndexParamAudioPcm,
-                        &params, sizeof(params)),
-                     (status_t)OK);
+                    CHECK_EQ(mOMX->getParameter(
+                                mNode, OMX_IndexParamAudioPcm,
+                                &params, sizeof(params)),
+                             (status_t)OK);
 
-            CHECK(params.nChannels == 1 || params.bInterleaved);
-            CHECK_EQ(params.nBitPerSample, 16u);
-            CHECK_EQ((int)params.eNumData, (int)OMX_NumericalDataSigned);
-            CHECK_EQ((int)params.ePCMMode, (int)OMX_AUDIO_PCMModeLinear);
+                    CHECK(params.nChannels == 1 || params.bInterleaved);
+                    CHECK_EQ(params.nBitPerSample, 16u);
+                    CHECK_EQ((int)params.eNumData, (int)OMX_NumericalDataSigned);
+                    CHECK_EQ((int)params.ePCMMode, (int)OMX_AUDIO_PCMModeLinear);
 
-            notify->setString("mime", MEDIA_MIMETYPE_AUDIO_RAW);
-            notify->setInt32("channel-count", params.nChannels);
-            notify->setInt32("sample-rate", params.nSamplingRate);
-            if (mEncoderDelay + mEncoderPadding) {
-                size_t frameSize = params.nChannels * sizeof(int16_t);
-                if (mSkipCutBuffer != NULL) {
-                    size_t prevbufsize = mSkipCutBuffer->size();
-                    if (prevbufsize != 0) {
-                        ALOGW("Replacing SkipCutBuffer holding %d bytes", prevbufsize);
+                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_RAW);
+                    notify->setInt32("channel-count", params.nChannels);
+                    notify->setInt32("sample-rate", params.nSamplingRate);
+                    if (mEncoderDelay + mEncoderPadding) {
+                        size_t frameSize = params.nChannels * sizeof(int16_t);
+                        if (mSkipCutBuffer != NULL) {
+                            size_t prevbufsize = mSkipCutBuffer->size();
+                            if (prevbufsize != 0) {
+                                ALOGW("Replacing SkipCutBuffer holding %d bytes", prevbufsize);
+                            }
+                        }
+                        mSkipCutBuffer = new SkipCutBuffer(mEncoderDelay * frameSize,
+                                                           mEncoderPadding * frameSize);
                     }
+
+                    if (mChannelMaskPresent) {
+                        notify->setInt32("channel-mask", mChannelMask);
+                    }
+                    break;
                 }
-                mSkipCutBuffer = new SkipCutBuffer(mEncoderDelay * frameSize,
-                                                   mEncoderPadding * frameSize);
-            }
 
-            if (mChannelMaskPresent) {
-                notify->setInt32("channel-mask", mChannelMask);
-            }
+                case OMX_AUDIO_CodingAAC:
+                {
+                    OMX_AUDIO_PARAM_AACPROFILETYPE params;
+                    InitOMXParams(&params);
+                    params.nPortIndex = kPortIndexOutput;
 
+                    CHECK_EQ(mOMX->getParameter(
+                                mNode, OMX_IndexParamAudioAac,
+                                &params, sizeof(params)),
+                             (status_t)OK);
+
+                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AAC);
+                    notify->setInt32("channel-count", params.nChannels);
+                    notify->setInt32("sample-rate", params.nSampleRate);
+                    break;
+                }
+
+                case OMX_AUDIO_CodingAMR:
+                {
+                    OMX_AUDIO_PARAM_AMRTYPE params;
+                    InitOMXParams(&params);
+                    params.nPortIndex = kPortIndexOutput;
+
+                    CHECK_EQ(mOMX->getParameter(
+                                mNode, OMX_IndexParamAudioAmr,
+                                &params, sizeof(params)),
+                             (status_t)OK);
+
+                    notify->setInt32("channel-count", 1);
+                    if (params.eAMRBandMode >= OMX_AUDIO_AMRBandModeWB0) {
+                        notify->setString(
+                                "mime", MEDIA_MIMETYPE_AUDIO_AMR_WB);
+
+                        notify->setInt32("sample-rate", 16000);
+                    } else {
+                        notify->setString(
+                                "mime", MEDIA_MIMETYPE_AUDIO_AMR_NB);
+
+                        notify->setInt32("sample-rate", 8000);
+                    }
+                    break;
+                }
+
+                case OMX_AUDIO_CodingFLAC:
+                {
+                    OMX_AUDIO_PARAM_FLACTYPE params;
+                    InitOMXParams(&params);
+                    params.nPortIndex = kPortIndexOutput;
+
+                    CHECK_EQ(mOMX->getParameter(
+                                mNode, OMX_IndexParamAudioFlac,
+                                &params, sizeof(params)),
+                             (status_t)OK);
+
+                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_FLAC);
+                    notify->setInt32("channel-count", params.nChannels);
+                    notify->setInt32("sample-rate", params.nSampleRate);
+                    break;
+                }
+
+                default:
+                    TRESPASS();
+            }
             break;
         }
 
@@ -2957,7 +3059,7 @@
                 break;
             }
 
-            if (!mCodec->mIsEncoder && !mCodec->mSentFormat) {
+            if (!mCodec->mSentFormat) {
                 mCodec->sendFormatChange();
             }
 
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index ae7bb17..714da55 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -31,10 +31,13 @@
 #include <media/stagefright/foundation/hexdump.h>
 #include <media/stagefright/ACodec.h>
 #include <media/stagefright/BufferProducerWrapper.h>
+#include <media/stagefright/MediaDefs.h>
 #include <media/stagefright/MediaErrors.h>
 #include <media/stagefright/MetaData.h>
 #include <media/stagefright/NativeWindowWrapper.h>
 
+#include "include/avc_utils.h"
+
 namespace android {
 
 // static
@@ -741,8 +744,16 @@
                     }
 
                     mOutputFormat = msg;
-                    mFlags |= kFlagOutputFormatChanged;
-                    postActivityNotificationIfPossible();
+
+                    if (mFlags & kFlagIsEncoder) {
+                        // Before we announce the format change, we should
+                        // collect codec specific data and amend the output
+                        // format as necessary.
+                        mFlags |= kFlagGatherCodecSpecificData;
+                    } else {
+                        mFlags |= kFlagOutputFormatChanged;
+                        postActivityNotificationIfPossible();
+                    }
                     break;
                 }
 
@@ -812,6 +823,25 @@
 
                     buffer->meta()->setInt32("omxFlags", omxFlags);
 
+                    if (mFlags & kFlagGatherCodecSpecificData) {
+                        // This is the very first output buffer after a
+                        // format change was signalled; it'll either contain
+                        // the one piece of codec specific data we can expect
+                        // or no codec specific data at all.
+                        if (omxFlags & OMX_BUFFERFLAG_CODECCONFIG) {
+                            status_t err =
+                                amendOutputFormatWithCodecSpecificData(buffer);
+
+                            if (err != OK) {
+                                ALOGE("Codec spit out malformed codec "
+                                      "specific data!");
+                            }
+                        }
+
+                        mFlags &= ~kFlagGatherCodecSpecificData;
+                        mFlags |= kFlagOutputFormatChanged;
+                    }
+
                     if (mFlags & kFlagDequeueOutputPending) {
                         CHECK(handleDequeueOutputBuffer(mDequeueOutputReplyID));
 
@@ -955,6 +985,7 @@
 
             if (flags & CONFIGURE_FLAG_ENCODE) {
                 format->setInt32("encoder", true);
+                mFlags |= kFlagIsEncoder;
             }
 
             extractCSD(format);
@@ -1413,6 +1444,8 @@
         mFlags &= ~kFlagOutputFormatChanged;
         mFlags &= ~kFlagOutputBuffersChanged;
         mFlags &= ~kFlagStickyError;
+        mFlags &= ~kFlagIsEncoder;
+        mFlags &= ~kFlagGatherCodecSpecificData;
 
         mActivityNotify.clear();
     }
@@ -1720,4 +1753,45 @@
     return OK;
 }
 
+status_t MediaCodec::amendOutputFormatWithCodecSpecificData(
+        const sp<ABuffer> &buffer) {
+    AString mime;
+    CHECK(mOutputFormat->findString("mime", &mime));
+
+    if (!strcasecmp(mime.c_str(), MEDIA_MIMETYPE_VIDEO_AVC)) {
+        // Codec specific data should be SPS and PPS in a single buffer,
+        // each prefixed by a startcode (0x00 0x00 0x00 0x01).
+        // We separate the two and put them into the output format
+        // under the keys "csd-0" and "csd-1".
+
+        unsigned csdIndex = 0;
+
+        const uint8_t *data = buffer->data();
+        size_t size = buffer->size();
+
+        const uint8_t *nalStart;
+        size_t nalSize;
+        while (getNextNALUnit(&data, &size, &nalStart, &nalSize, true) == OK) {
+            sp<ABuffer> csd = new ABuffer(nalSize + 4);
+            memcpy(csd->data(), "\x00\x00\x00\x01", 4);
+            memcpy(csd->data() + 4, nalStart, nalSize);
+
+            mOutputFormat->setBuffer(
+                    StringPrintf("csd-%u", csdIndex).c_str(), csd);
+
+            ++csdIndex;
+        }
+
+        if (csdIndex != 2) {
+            return ERROR_MALFORMED;
+        }
+    } else {
+        // For everything else we just stash the codec specific data into
+        // the output format as a single piece of csd under "csd-0".
+        mOutputFormat->setBuffer("csd-0", buffer);
+    }
+
+    return OK;
+}
+
 }  // namespace android
diff --git a/media/libstagefright/codecs/flac/enc/SoftFlacEncoder.cpp b/media/libstagefright/codecs/flac/enc/SoftFlacEncoder.cpp
index 233aed3..e64fe72 100644
--- a/media/libstagefright/codecs/flac/enc/SoftFlacEncoder.cpp
+++ b/media/libstagefright/codecs/flac/enc/SoftFlacEncoder.cpp
@@ -109,7 +109,7 @@
     def.eDir = OMX_DirInput;
     def.nBufferCountMin = kNumBuffers;// TODO verify that 1 is enough
     def.nBufferCountActual = def.nBufferCountMin;
-    def.nBufferSize = kMaxNumSamplesPerFrame * sizeof(int16_t) * 2;
+    def.nBufferSize = kMaxInputBufferSize;
     def.bEnabled = OMX_TRUE;
     def.bPopulated = OMX_FALSE;
     def.eDomain = OMX_PortDomainAudio;
@@ -234,6 +234,22 @@
             return OMX_ErrorNone;
         }
 
+        case OMX_IndexParamPortDefinition:
+        {
+            OMX_PARAM_PORTDEFINITIONTYPE *defParams =
+                (OMX_PARAM_PORTDEFINITIONTYPE *)params;
+
+            if (defParams->nPortIndex == 0) {
+                if (defParams->nBufferSize > kMaxInputBufferSize) {
+                    ALOGE("Input buffer size must be at most %zu bytes",
+                        kMaxInputBufferSize);
+                    return OMX_ErrorUnsupportedSetting;
+                }
+            }
+
+            // fall through
+        }
+
         default:
             ALOGV("SoftFlacEncoder::internalSetParameter(default)");
             return SimpleSoftOMXComponent::internalSetParameter(index, params);
@@ -273,7 +289,7 @@
             return;
         }
 
-        if (inHeader->nFilledLen > kMaxNumSamplesPerFrame * sizeof(FLAC__int32) * 2) {
+        if (inHeader->nFilledLen > kMaxInputBufferSize) {
             ALOGE("input buffer too large (%ld).", inHeader->nFilledLen);
             mSignalledError = true;
             notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
@@ -290,6 +306,7 @@
         const unsigned nbInputSamples = inHeader->nFilledLen / 2;
         const OMX_S16 * const pcm16 = reinterpret_cast<OMX_S16 *>(inHeader->pBuffer);
 
+        CHECK_LE(nbInputSamples, 2 * kMaxNumSamplesPerFrame);
         for (unsigned i=0 ; i < nbInputSamples ; i++) {
             mInputBufferPcm32[i] = (FLAC__int32) pcm16[i];
         }
diff --git a/media/libstagefright/codecs/flac/enc/SoftFlacEncoder.h b/media/libstagefright/codecs/flac/enc/SoftFlacEncoder.h
index 1e0148a..97361fa 100644
--- a/media/libstagefright/codecs/flac/enc/SoftFlacEncoder.h
+++ b/media/libstagefright/codecs/flac/enc/SoftFlacEncoder.h
@@ -52,6 +52,7 @@
     enum {
         kNumBuffers = 2,
         kMaxNumSamplesPerFrame = 1152,
+        kMaxInputBufferSize = kMaxNumSamplesPerFrame * sizeof(int16_t) * 2,
         kMaxOutputBufferSize = 65536,    //TODO check if this can be reduced
     };
 
diff --git a/media/libstagefright/mpeg2ts/ATSParser.cpp b/media/libstagefright/mpeg2ts/ATSParser.cpp
index c12572f..9850a46 100644
--- a/media/libstagefright/mpeg2ts/ATSParser.cpp
+++ b/media/libstagefright/mpeg2ts/ATSParser.cpp
@@ -1059,7 +1059,7 @@
     ssize_t sectionIndex = mPSISections.indexOfKey(PID);
 
     if (sectionIndex >= 0) {
-        const sp<PSISection> &section = mPSISections.valueAt(sectionIndex);
+        sp<PSISection> section = mPSISections.valueAt(sectionIndex);
 
         if (payload_unit_start_indicator) {
             CHECK(section->isEmpty());
@@ -1068,7 +1068,6 @@
             br->skipBits(skip * 8);
         }
 
-
         CHECK((br->numBitsLeft() % 8) == 0);
         status_t err = section->append(br->data(), br->numBitsLeft() / 8);
 
@@ -1103,10 +1102,13 @@
 
             if (!handled) {
                 mPSISections.removeItem(PID);
+                section.clear();
             }
         }
 
-        section->clear();
+        if (section != NULL) {
+            section->clear();
+        }
 
         return OK;
     }
diff --git a/media/libstagefright/wifi-display/Android.mk b/media/libstagefright/wifi-display/Android.mk
index f99ef60..061ae89 100644
--- a/media/libstagefright/wifi-display/Android.mk
+++ b/media/libstagefright/wifi-display/Android.mk
@@ -4,17 +4,10 @@
 
 LOCAL_SRC_FILES:= \
         ANetworkSession.cpp             \
-        MediaReceiver.cpp               \
         MediaSender.cpp                 \
         Parameters.cpp                  \
         ParsedMessage.cpp               \
-        rtp/RTPAssembler.cpp            \
-        rtp/RTPReceiver.cpp             \
         rtp/RTPSender.cpp               \
-        sink/DirectRenderer.cpp         \
-        sink/WifiDisplaySink.cpp        \
-        SNTPClient.cpp                  \
-        TimeSyncer.cpp                  \
         source/Converter.cpp            \
         source/MediaPuller.cpp          \
         source/PlaybackSession.cpp      \
@@ -67,72 +60,3 @@
 LOCAL_MODULE_TAGS := debug
 
 include $(BUILD_EXECUTABLE)
-
-################################################################################
-
-include $(CLEAR_VARS)
-
-LOCAL_SRC_FILES:= \
-        udptest.cpp                 \
-
-LOCAL_SHARED_LIBRARIES:= \
-        libbinder                       \
-        libgui                          \
-        libmedia                        \
-        libstagefright                  \
-        libstagefright_foundation       \
-        libstagefright_wfd              \
-        libutils                        \
-        liblog                          \
-
-LOCAL_MODULE:= udptest
-
-LOCAL_MODULE_TAGS := debug
-
-include $(BUILD_EXECUTABLE)
-
-################################################################################
-
-include $(CLEAR_VARS)
-
-LOCAL_SRC_FILES:= \
-        rtptest.cpp                 \
-
-LOCAL_SHARED_LIBRARIES:= \
-        libbinder                       \
-        libgui                          \
-        libmedia                        \
-        libstagefright                  \
-        libstagefright_foundation       \
-        libstagefright_wfd              \
-        libutils                        \
-        liblog                          \
-
-LOCAL_MODULE:= rtptest
-
-LOCAL_MODULE_TAGS := debug
-
-include $(BUILD_EXECUTABLE)
-
-################################################################################
-
-include $(CLEAR_VARS)
-
-LOCAL_SRC_FILES:= \
-        nettest.cpp                     \
-
-LOCAL_SHARED_LIBRARIES:= \
-        libbinder                       \
-        libgui                          \
-        libmedia                        \
-        libstagefright                  \
-        libstagefright_foundation       \
-        libstagefright_wfd              \
-        libutils                        \
-        liblog                          \
-
-LOCAL_MODULE:= nettest
-
-LOCAL_MODULE_TAGS := debug
-
-include $(BUILD_EXECUTABLE)
diff --git a/media/libstagefright/wifi-display/MediaReceiver.cpp b/media/libstagefright/wifi-display/MediaReceiver.cpp
deleted file mode 100644
index 364acb9..0000000
--- a/media/libstagefright/wifi-display/MediaReceiver.cpp
+++ /dev/null
@@ -1,328 +0,0 @@
-/*
- * Copyright 2013, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "MediaReceiver"
-#include <utils/Log.h>
-
-#include "MediaReceiver.h"
-
-#include "ANetworkSession.h"
-#include "AnotherPacketSource.h"
-#include "rtp/RTPReceiver.h"
-
-#include <media/stagefright/foundation/ABuffer.h>
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/foundation/AMessage.h>
-#include <media/stagefright/MetaData.h>
-#include <media/stagefright/Utils.h>
-
-namespace android {
-
-MediaReceiver::MediaReceiver(
-        const sp<ANetworkSession> &netSession,
-        const sp<AMessage> &notify)
-    : mNetSession(netSession),
-      mNotify(notify),
-      mMode(MODE_UNDEFINED),
-      mGeneration(0),
-      mInitStatus(OK),
-      mInitDoneCount(0) {
-}
-
-MediaReceiver::~MediaReceiver() {
-}
-
-ssize_t MediaReceiver::addTrack(
-        RTPReceiver::TransportMode rtpMode,
-        RTPReceiver::TransportMode rtcpMode,
-        int32_t *localRTPPort) {
-    if (mMode != MODE_UNDEFINED) {
-        return INVALID_OPERATION;
-    }
-
-    size_t trackIndex = mTrackInfos.size();
-
-    TrackInfo info;
-
-    sp<AMessage> notify = new AMessage(kWhatReceiverNotify, id());
-    notify->setInt32("generation", mGeneration);
-    notify->setSize("trackIndex", trackIndex);
-
-    info.mReceiver = new RTPReceiver(mNetSession, notify);
-    looper()->registerHandler(info.mReceiver);
-
-    info.mReceiver->registerPacketType(
-            33, RTPReceiver::PACKETIZATION_TRANSPORT_STREAM);
-
-    info.mReceiver->registerPacketType(
-            96, RTPReceiver::PACKETIZATION_AAC);
-
-    info.mReceiver->registerPacketType(
-            97, RTPReceiver::PACKETIZATION_H264);
-
-    status_t err = info.mReceiver->initAsync(
-            rtpMode,
-            rtcpMode,
-            localRTPPort);
-
-    if (err != OK) {
-        looper()->unregisterHandler(info.mReceiver->id());
-        info.mReceiver.clear();
-
-        return err;
-    }
-
-    mTrackInfos.push_back(info);
-
-    return trackIndex;
-}
-
-status_t MediaReceiver::connectTrack(
-        size_t trackIndex,
-        const char *remoteHost,
-        int32_t remoteRTPPort,
-        int32_t remoteRTCPPort) {
-    if (trackIndex >= mTrackInfos.size()) {
-        return -ERANGE;
-    }
-
-    TrackInfo *info = &mTrackInfos.editItemAt(trackIndex);
-    return info->mReceiver->connect(remoteHost, remoteRTPPort, remoteRTCPPort);
-}
-
-status_t MediaReceiver::initAsync(Mode mode) {
-    if ((mode == MODE_TRANSPORT_STREAM || mode == MODE_TRANSPORT_STREAM_RAW)
-            && mTrackInfos.size() > 1) {
-        return INVALID_OPERATION;
-    }
-
-    sp<AMessage> msg = new AMessage(kWhatInit, id());
-    msg->setInt32("mode", mode);
-    msg->post();
-
-    return OK;
-}
-
-void MediaReceiver::onMessageReceived(const sp<AMessage> &msg) {
-    switch (msg->what()) {
-        case kWhatInit:
-        {
-            int32_t mode;
-            CHECK(msg->findInt32("mode", &mode));
-
-            CHECK_EQ(mMode, MODE_UNDEFINED);
-            mMode = (Mode)mode;
-
-            if (mInitStatus != OK || mInitDoneCount == mTrackInfos.size()) {
-                notifyInitDone(mInitStatus);
-            }
-
-            mTSParser = new ATSParser(
-                    ATSParser::ALIGNED_VIDEO_DATA
-                        | ATSParser::TS_TIMESTAMPS_ARE_ABSOLUTE);
-
-            mFormatKnownMask = 0;
-            break;
-        }
-
-        case kWhatReceiverNotify:
-        {
-            int32_t generation;
-            CHECK(msg->findInt32("generation", &generation));
-            if (generation != mGeneration) {
-                break;
-            }
-
-            onReceiverNotify(msg);
-            break;
-        }
-
-        default:
-            TRESPASS();
-    }
-}
-
-void MediaReceiver::onReceiverNotify(const sp<AMessage> &msg) {
-    int32_t what;
-    CHECK(msg->findInt32("what", &what));
-
-    switch (what) {
-        case RTPReceiver::kWhatInitDone:
-        {
-            ++mInitDoneCount;
-
-            int32_t err;
-            CHECK(msg->findInt32("err", &err));
-
-            if (err != OK) {
-                mInitStatus = err;
-                ++mGeneration;
-            }
-
-            if (mMode != MODE_UNDEFINED) {
-                if (mInitStatus != OK || mInitDoneCount == mTrackInfos.size()) {
-                    notifyInitDone(mInitStatus);
-                }
-            }
-            break;
-        }
-
-        case RTPReceiver::kWhatError:
-        {
-            int32_t err;
-            CHECK(msg->findInt32("err", &err));
-
-            notifyError(err);
-            break;
-        }
-
-        case RTPReceiver::kWhatAccessUnit:
-        {
-            size_t trackIndex;
-            CHECK(msg->findSize("trackIndex", &trackIndex));
-
-            sp<ABuffer> accessUnit;
-            CHECK(msg->findBuffer("accessUnit", &accessUnit));
-
-            int32_t followsDiscontinuity;
-            if (!msg->findInt32(
-                        "followsDiscontinuity", &followsDiscontinuity)) {
-                followsDiscontinuity = 0;
-            }
-
-            if (mMode == MODE_TRANSPORT_STREAM) {
-                if (followsDiscontinuity) {
-                    mTSParser->signalDiscontinuity(
-                            ATSParser::DISCONTINUITY_TIME, NULL /* extra */);
-                }
-
-                for (size_t offset = 0;
-                        offset < accessUnit->size(); offset += 188) {
-                    status_t err = mTSParser->feedTSPacket(
-                             accessUnit->data() + offset, 188);
-
-                    if (err != OK) {
-                        notifyError(err);
-                        break;
-                    }
-                }
-
-                drainPackets(0 /* trackIndex */, ATSParser::VIDEO);
-                drainPackets(1 /* trackIndex */, ATSParser::AUDIO);
-            } else {
-                postAccessUnit(trackIndex, accessUnit, NULL);
-            }
-            break;
-        }
-
-        case RTPReceiver::kWhatPacketLost:
-        {
-            notifyPacketLost();
-            break;
-        }
-
-        default:
-            TRESPASS();
-    }
-}
-
-void MediaReceiver::drainPackets(
-        size_t trackIndex, ATSParser::SourceType type) {
-    sp<AnotherPacketSource> source =
-        static_cast<AnotherPacketSource *>(
-                mTSParser->getSource(type).get());
-
-    if (source == NULL) {
-        return;
-    }
-
-    sp<AMessage> format;
-    if (!(mFormatKnownMask & (1ul << trackIndex))) {
-        sp<MetaData> meta = source->getFormat();
-        CHECK(meta != NULL);
-
-        CHECK_EQ((status_t)OK, convertMetaDataToMessage(meta, &format));
-
-        mFormatKnownMask |= 1ul << trackIndex;
-    }
-
-    status_t finalResult;
-    while (source->hasBufferAvailable(&finalResult)) {
-        sp<ABuffer> accessUnit;
-        status_t err = source->dequeueAccessUnit(&accessUnit);
-        if (err == OK) {
-            postAccessUnit(trackIndex, accessUnit, format);
-            format.clear();
-        } else if (err != INFO_DISCONTINUITY) {
-            notifyError(err);
-        }
-    }
-
-    if (finalResult != OK) {
-        notifyError(finalResult);
-    }
-}
-
-void MediaReceiver::notifyInitDone(status_t err) {
-    sp<AMessage> notify = mNotify->dup();
-    notify->setInt32("what", kWhatInitDone);
-    notify->setInt32("err", err);
-    notify->post();
-}
-
-void MediaReceiver::notifyError(status_t err) {
-    sp<AMessage> notify = mNotify->dup();
-    notify->setInt32("what", kWhatError);
-    notify->setInt32("err", err);
-    notify->post();
-}
-
-void MediaReceiver::notifyPacketLost() {
-    sp<AMessage> notify = mNotify->dup();
-    notify->setInt32("what", kWhatPacketLost);
-    notify->post();
-}
-
-void MediaReceiver::postAccessUnit(
-        size_t trackIndex,
-        const sp<ABuffer> &accessUnit,
-        const sp<AMessage> &format) {
-    sp<AMessage> notify = mNotify->dup();
-    notify->setInt32("what", kWhatAccessUnit);
-    notify->setSize("trackIndex", trackIndex);
-    notify->setBuffer("accessUnit", accessUnit);
-
-    if (format != NULL) {
-        notify->setMessage("format", format);
-    }
-
-    notify->post();
-}
-
-status_t MediaReceiver::informSender(
-        size_t trackIndex, const sp<AMessage> &params) {
-    if (trackIndex >= mTrackInfos.size()) {
-        return -ERANGE;
-    }
-
-    TrackInfo *info = &mTrackInfos.editItemAt(trackIndex);
-    return info->mReceiver->informSender(params);
-}
-
-}  // namespace android
-
-
diff --git a/media/libstagefright/wifi-display/MediaReceiver.h b/media/libstagefright/wifi-display/MediaReceiver.h
deleted file mode 100644
index afbb407..0000000
--- a/media/libstagefright/wifi-display/MediaReceiver.h
+++ /dev/null
@@ -1,111 +0,0 @@
-/*
- * Copyright 2013, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include <media/stagefright/foundation/AHandler.h>
-
-#include "ATSParser.h"
-#include "rtp/RTPReceiver.h"
-
-namespace android {
-
-struct ABuffer;
-struct ANetworkSession;
-struct AMessage;
-struct ATSParser;
-
-// This class facilitates receiving of media data for one or more tracks
-// over RTP. Either a 1:1 track to RTP channel mapping is used or a single
-// RTP channel provides the data for a transport stream that is consequently
-// demuxed and its track's data provided to the observer.
-struct MediaReceiver : public AHandler {
-    enum {
-        kWhatInitDone,
-        kWhatError,
-        kWhatAccessUnit,
-        kWhatPacketLost,
-    };
-
-    MediaReceiver(
-            const sp<ANetworkSession> &netSession,
-            const sp<AMessage> &notify);
-
-    ssize_t addTrack(
-            RTPReceiver::TransportMode rtpMode,
-            RTPReceiver::TransportMode rtcpMode,
-            int32_t *localRTPPort);
-
-    status_t connectTrack(
-            size_t trackIndex,
-            const char *remoteHost,
-            int32_t remoteRTPPort,
-            int32_t remoteRTCPPort);
-
-    enum Mode {
-        MODE_UNDEFINED,
-        MODE_TRANSPORT_STREAM,
-        MODE_TRANSPORT_STREAM_RAW,
-        MODE_ELEMENTARY_STREAMS,
-    };
-    status_t initAsync(Mode mode);
-
-    status_t informSender(size_t trackIndex, const sp<AMessage> &params);
-
-protected:
-    virtual void onMessageReceived(const sp<AMessage> &msg);
-    virtual ~MediaReceiver();
-
-private:
-    enum {
-        kWhatInit,
-        kWhatReceiverNotify,
-    };
-
-    struct TrackInfo {
-        sp<RTPReceiver> mReceiver;
-    };
-
-    sp<ANetworkSession> mNetSession;
-    sp<AMessage> mNotify;
-
-    Mode mMode;
-    int32_t mGeneration;
-
-    Vector<TrackInfo> mTrackInfos;
-
-    status_t mInitStatus;
-    size_t mInitDoneCount;
-
-    sp<ATSParser> mTSParser;
-    uint32_t mFormatKnownMask;
-
-    void onReceiverNotify(const sp<AMessage> &msg);
-
-    void drainPackets(size_t trackIndex, ATSParser::SourceType type);
-
-    void notifyInitDone(status_t err);
-    void notifyError(status_t err);
-    void notifyPacketLost();
-
-    void postAccessUnit(
-            size_t trackIndex,
-            const sp<ABuffer> &accessUnit,
-            const sp<AMessage> &format);
-
-    DISALLOW_EVIL_CONSTRUCTORS(MediaReceiver);
-};
-
-}  // namespace android
-
diff --git a/media/libstagefright/wifi-display/MediaSender.cpp b/media/libstagefright/wifi-display/MediaSender.cpp
index 33af66d..8a3566f 100644
--- a/media/libstagefright/wifi-display/MediaSender.cpp
+++ b/media/libstagefright/wifi-display/MediaSender.cpp
@@ -341,22 +341,6 @@
             break;
         }
 
-        case kWhatInformSender:
-        {
-            int64_t avgLatencyUs;
-            CHECK(msg->findInt64("avgLatencyUs", &avgLatencyUs));
-
-            int64_t maxLatencyUs;
-            CHECK(msg->findInt64("maxLatencyUs", &maxLatencyUs));
-
-            sp<AMessage> notify = mNotify->dup();
-            notify->setInt32("what", kWhatInformSender);
-            notify->setInt64("avgLatencyUs", avgLatencyUs);
-            notify->setInt64("maxLatencyUs", maxLatencyUs);
-            notify->post();
-            break;
-        }
-
         default:
             TRESPASS();
     }
diff --git a/media/libstagefright/wifi-display/MediaSender.h b/media/libstagefright/wifi-display/MediaSender.h
index 04538ea..64722c5 100644
--- a/media/libstagefright/wifi-display/MediaSender.h
+++ b/media/libstagefright/wifi-display/MediaSender.h
@@ -43,7 +43,6 @@
         kWhatInitDone,
         kWhatError,
         kWhatNetworkStall,
-        kWhatInformSender,
     };
 
     MediaSender(
diff --git a/media/libstagefright/wifi-display/SNTPClient.cpp b/media/libstagefright/wifi-display/SNTPClient.cpp
deleted file mode 100644
index 5c0af6a..0000000
--- a/media/libstagefright/wifi-display/SNTPClient.cpp
+++ /dev/null
@@ -1,174 +0,0 @@
-/*
- * Copyright 2013, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "SNTPClient.h"
-
-#include <media/stagefright/foundation/ALooper.h>
-#include <media/stagefright/Utils.h>
-
-#include <arpa/inet.h>
-#include <netdb.h>
-#include <netinet/in.h>
-#include <sys/socket.h>
-#include <unistd.h>
-
-namespace android {
-
-SNTPClient::SNTPClient() {
-}
-
-status_t SNTPClient::requestTime(const char *host) {
-    struct hostent *ent;
-    int64_t requestTimeNTP, requestTimeUs;
-    ssize_t n;
-    int64_t responseTimeUs, responseTimeNTP;
-    int64_t originateTimeNTP, receiveTimeNTP, transmitTimeNTP;
-    int64_t roundTripTimeNTP, clockOffsetNTP;
-
-    status_t err = UNKNOWN_ERROR;
-
-    int s = socket(AF_INET, SOCK_DGRAM, 0);
-
-    if (s < 0) {
-        err = -errno;
-
-        goto bail;
-    }
-
-    ent = gethostbyname(host);
-
-    if (ent == NULL) {
-        err = -ENOENT;
-        goto bail2;
-    }
-
-    struct sockaddr_in hostAddr;
-    memset(hostAddr.sin_zero, 0, sizeof(hostAddr.sin_zero));
-    hostAddr.sin_family = AF_INET;
-    hostAddr.sin_port = htons(kNTPPort);
-    hostAddr.sin_addr.s_addr = *(in_addr_t *)ent->h_addr;
-
-    uint8_t packet[kNTPPacketSize];
-    memset(packet, 0, sizeof(packet));
-
-    packet[0] = kNTPModeClient | (kNTPVersion << 3);
-
-    requestTimeNTP = getNowNTP();
-    requestTimeUs = ALooper::GetNowUs();
-    writeTimeStamp(&packet[kNTPTransmitTimeOffset], requestTimeNTP);
-
-    n = sendto(
-            s, packet, sizeof(packet), 0,
-            (const struct sockaddr *)&hostAddr, sizeof(hostAddr));
-
-    if (n < 0) {
-        err = -errno;
-        goto bail2;
-    }
-
-    memset(packet, 0, sizeof(packet));
-
-    do {
-        n = recv(s, packet, sizeof(packet), 0);
-    } while (n < 0 && errno == EINTR);
-
-    if (n < 0) {
-        err = -errno;
-        goto bail2;
-    }
-
-    responseTimeUs = ALooper::GetNowUs();
-
-    responseTimeNTP = requestTimeNTP + makeNTP(responseTimeUs - requestTimeUs);
-
-    originateTimeNTP = readTimeStamp(&packet[kNTPOriginateTimeOffset]);
-    receiveTimeNTP = readTimeStamp(&packet[kNTPReceiveTimeOffset]);
-    transmitTimeNTP = readTimeStamp(&packet[kNTPTransmitTimeOffset]);
-
-    roundTripTimeNTP =
-        makeNTP(responseTimeUs - requestTimeUs)
-            - (transmitTimeNTP - receiveTimeNTP);
-
-    clockOffsetNTP =
-        ((receiveTimeNTP - originateTimeNTP)
-            + (transmitTimeNTP - responseTimeNTP)) / 2;
-
-    mTimeReferenceNTP = responseTimeNTP + clockOffsetNTP;
-    mTimeReferenceUs = responseTimeUs;
-    mRoundTripTimeNTP = roundTripTimeNTP;
-
-    err = OK;
-
-bail2:
-    close(s);
-    s = -1;
-
-bail:
-    return err;
-}
-
-int64_t SNTPClient::adjustTimeUs(int64_t timeUs) const {
-    uint64_t nowNTP =
-        mTimeReferenceNTP + makeNTP(timeUs - mTimeReferenceUs);
-
-    int64_t nowUs =
-        (nowNTP >> 32) * 1000000ll
-        + ((nowNTP & 0xffffffff) * 1000000ll) / (1ll << 32);
-
-    nowUs -= ((70ll * 365 + 17) * 24) * 60 * 60 * 1000000ll;
-
-    return nowUs;
-}
-
-// static
-void SNTPClient::writeTimeStamp(uint8_t *dst, uint64_t ntpTime) {
-    *dst++ = (ntpTime >> 56) & 0xff;
-    *dst++ = (ntpTime >> 48) & 0xff;
-    *dst++ = (ntpTime >> 40) & 0xff;
-    *dst++ = (ntpTime >> 32) & 0xff;
-    *dst++ = (ntpTime >> 24) & 0xff;
-    *dst++ = (ntpTime >> 16) & 0xff;
-    *dst++ = (ntpTime >> 8) & 0xff;
-    *dst++ = ntpTime & 0xff;
-}
-
-// static
-uint64_t SNTPClient::readTimeStamp(const uint8_t *dst) {
-    return U64_AT(dst);
-}
-
-// static
-uint64_t SNTPClient::getNowNTP() {
-    struct timeval tv;
-    gettimeofday(&tv, NULL /* time zone */);
-
-    uint64_t nowUs = tv.tv_sec * 1000000ll + tv.tv_usec;
-
-    nowUs += ((70ll * 365 + 17) * 24) * 60 * 60 * 1000000ll;
-
-    return makeNTP(nowUs);
-}
-
-// static
-uint64_t SNTPClient::makeNTP(uint64_t deltaUs) {
-    uint64_t hi = deltaUs / 1000000ll;
-    uint64_t lo = ((1ll << 32) * (deltaUs % 1000000ll)) / 1000000ll;
-
-    return (hi << 32) | lo;
-}
-
-}  // namespace android
-
diff --git a/media/libstagefright/wifi-display/SNTPClient.h b/media/libstagefright/wifi-display/SNTPClient.h
deleted file mode 100644
index 967d1fc..0000000
--- a/media/libstagefright/wifi-display/SNTPClient.h
+++ /dev/null
@@ -1,62 +0,0 @@
-/*
- * Copyright 2013, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef SNTP_CLIENT_H_
-
-#define SNTP_CLIENT_H_
-
-#include <media/stagefright/foundation/ABase.h>
-#include <utils/Errors.h>
-
-namespace android {
-
-// Implementation of the SNTP (Simple Network Time Protocol)
-struct SNTPClient {
-    SNTPClient();
-
-    status_t requestTime(const char *host);
-
-    // given a time obtained from ALooper::GetNowUs()
-    // return the number of us elapsed since Jan 1 1970 00:00:00 (UTC).
-    int64_t adjustTimeUs(int64_t timeUs) const;
-
-private:
-    enum {
-        kNTPPort = 123,
-        kNTPPacketSize = 48,
-        kNTPModeClient = 3,
-        kNTPVersion = 3,
-        kNTPTransmitTimeOffset = 40,
-        kNTPOriginateTimeOffset = 24,
-        kNTPReceiveTimeOffset = 32,
-    };
-
-    uint64_t mTimeReferenceNTP;
-    int64_t mTimeReferenceUs;
-    int64_t mRoundTripTimeNTP;
-
-    static void writeTimeStamp(uint8_t *dst, uint64_t ntpTime);
-    static uint64_t readTimeStamp(const uint8_t *dst);
-
-    static uint64_t getNowNTP();
-    static uint64_t makeNTP(uint64_t deltaUs);
-
-    DISALLOW_EVIL_CONSTRUCTORS(SNTPClient);
-};
-
-}  // namespace android
-
-#endif  // SNTP_CLIENT_H_
diff --git a/media/libstagefright/wifi-display/TimeSyncer.cpp b/media/libstagefright/wifi-display/TimeSyncer.cpp
deleted file mode 100644
index cb429bc..0000000
--- a/media/libstagefright/wifi-display/TimeSyncer.cpp
+++ /dev/null
@@ -1,338 +0,0 @@
-/*
- * Copyright 2013, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NEBUG 0
-#define LOG_TAG "TimeSyncer"
-#include <utils/Log.h>
-
-#include "TimeSyncer.h"
-
-#include "ANetworkSession.h"
-
-#include <media/stagefright/foundation/ABuffer.h>
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/foundation/AHandler.h>
-#include <media/stagefright/foundation/ALooper.h>
-#include <media/stagefright/foundation/AMessage.h>
-#include <media/stagefright/Utils.h>
-
-namespace android {
-
-TimeSyncer::TimeSyncer(
-        const sp<ANetworkSession> &netSession, const sp<AMessage> &notify)
-    : mNetSession(netSession),
-      mNotify(notify),
-      mIsServer(false),
-      mConnected(false),
-      mUDPSession(0),
-      mSeqNo(0),
-      mTotalTimeUs(0.0),
-      mPendingT1(0ll),
-      mTimeoutGeneration(0) {
-}
-
-TimeSyncer::~TimeSyncer() {
-}
-
-void TimeSyncer::startServer(unsigned localPort) {
-    sp<AMessage> msg = new AMessage(kWhatStartServer, id());
-    msg->setInt32("localPort", localPort);
-    msg->post();
-}
-
-void TimeSyncer::startClient(const char *remoteHost, unsigned remotePort) {
-    sp<AMessage> msg = new AMessage(kWhatStartClient, id());
-    msg->setString("remoteHost", remoteHost);
-    msg->setInt32("remotePort", remotePort);
-    msg->post();
-}
-
-void TimeSyncer::onMessageReceived(const sp<AMessage> &msg) {
-    switch (msg->what()) {
-        case kWhatStartClient:
-        {
-            AString remoteHost;
-            CHECK(msg->findString("remoteHost", &remoteHost));
-
-            int32_t remotePort;
-            CHECK(msg->findInt32("remotePort", &remotePort));
-
-            sp<AMessage> notify = new AMessage(kWhatUDPNotify, id());
-
-            CHECK_EQ((status_t)OK,
-                     mNetSession->createUDPSession(
-                         0 /* localPort */,
-                         remoteHost.c_str(),
-                         remotePort,
-                         notify,
-                         &mUDPSession));
-
-            postSendPacket();
-            break;
-        }
-
-        case kWhatStartServer:
-        {
-            mIsServer = true;
-
-            int32_t localPort;
-            CHECK(msg->findInt32("localPort", &localPort));
-
-            sp<AMessage> notify = new AMessage(kWhatUDPNotify, id());
-
-            CHECK_EQ((status_t)OK,
-                     mNetSession->createUDPSession(
-                         localPort, notify, &mUDPSession));
-
-            break;
-        }
-
-        case kWhatSendPacket:
-        {
-            if (mHistory.size() == 0) {
-                ALOGI("starting batch");
-            }
-
-            TimeInfo ti;
-            memset(&ti, 0, sizeof(ti));
-
-            ti.mT1 = ALooper::GetNowUs();
-
-            CHECK_EQ((status_t)OK,
-                     mNetSession->sendRequest(
-                         mUDPSession, &ti, sizeof(ti)));
-
-            mPendingT1 = ti.mT1;
-            postTimeout();
-            break;
-        }
-
-        case kWhatTimedOut:
-        {
-            int32_t generation;
-            CHECK(msg->findInt32("generation", &generation));
-
-            if (generation != mTimeoutGeneration) {
-                break;
-            }
-
-            ALOGI("timed out, sending another request");
-            postSendPacket();
-            break;
-        }
-
-        case kWhatUDPNotify:
-        {
-            int32_t reason;
-            CHECK(msg->findInt32("reason", &reason));
-
-            switch (reason) {
-                case ANetworkSession::kWhatError:
-                {
-                    int32_t sessionID;
-                    CHECK(msg->findInt32("sessionID", &sessionID));
-
-                    int32_t err;
-                    CHECK(msg->findInt32("err", &err));
-
-                    AString detail;
-                    CHECK(msg->findString("detail", &detail));
-
-                    ALOGE("An error occurred in session %d (%d, '%s/%s').",
-                          sessionID,
-                          err,
-                          detail.c_str(),
-                          strerror(-err));
-
-                    mNetSession->destroySession(sessionID);
-
-                    cancelTimeout();
-
-                    notifyError(err);
-                    break;
-                }
-
-                case ANetworkSession::kWhatDatagram:
-                {
-                    int32_t sessionID;
-                    CHECK(msg->findInt32("sessionID", &sessionID));
-
-                    sp<ABuffer> packet;
-                    CHECK(msg->findBuffer("data", &packet));
-
-                    int64_t arrivalTimeUs;
-                    CHECK(packet->meta()->findInt64(
-                                "arrivalTimeUs", &arrivalTimeUs));
-
-                    CHECK_EQ(packet->size(), sizeof(TimeInfo));
-
-                    TimeInfo *ti = (TimeInfo *)packet->data();
-
-                    if (mIsServer) {
-                        if (!mConnected) {
-                            AString fromAddr;
-                            CHECK(msg->findString("fromAddr", &fromAddr));
-
-                            int32_t fromPort;
-                            CHECK(msg->findInt32("fromPort", &fromPort));
-
-                            CHECK_EQ((status_t)OK,
-                                     mNetSession->connectUDPSession(
-                                         mUDPSession, fromAddr.c_str(), fromPort));
-
-                            mConnected = true;
-                        }
-
-                        ti->mT2 = arrivalTimeUs;
-                        ti->mT3 = ALooper::GetNowUs();
-
-                        CHECK_EQ((status_t)OK,
-                                 mNetSession->sendRequest(
-                                     mUDPSession, ti, sizeof(*ti)));
-                    } else {
-                        if (ti->mT1 != mPendingT1) {
-                            break;
-                        }
-
-                        cancelTimeout();
-                        mPendingT1 = 0;
-
-                        ti->mT4 = arrivalTimeUs;
-
-                        // One way delay for a packet to travel from client
-                        // to server or back (assumed to be the same either way).
-                        int64_t delay =
-                            (ti->mT2 - ti->mT1 + ti->mT4 - ti->mT3) / 2;
-
-                        // Offset between the client clock (T1, T4) and the
-                        // server clock (T2, T3) timestamps.
-                        int64_t offset =
-                            (ti->mT2 - ti->mT1 - ti->mT4 + ti->mT3) / 2;
-
-                        mHistory.push_back(*ti);
-
-                        ALOGV("delay = %lld us,\toffset %lld us",
-                               delay,
-                               offset);
-
-                        if (mHistory.size() < kNumPacketsPerBatch) {
-                            postSendPacket(1000000ll / 30);
-                        } else {
-                            notifyOffset();
-
-                            ALOGI("batch done");
-
-                            mHistory.clear();
-                            postSendPacket(kBatchDelayUs);
-                        }
-                    }
-                    break;
-                }
-
-                default:
-                    TRESPASS();
-            }
-
-            break;
-        }
-
-        default:
-            TRESPASS();
-    }
-}
-
-void TimeSyncer::postSendPacket(int64_t delayUs) {
-    (new AMessage(kWhatSendPacket, id()))->post(delayUs);
-}
-
-void TimeSyncer::postTimeout() {
-    sp<AMessage> msg = new AMessage(kWhatTimedOut, id());
-    msg->setInt32("generation", mTimeoutGeneration);
-    msg->post(kTimeoutDelayUs);
-}
-
-void TimeSyncer::cancelTimeout() {
-    ++mTimeoutGeneration;
-}
-
-void TimeSyncer::notifyError(status_t err) {
-    if (mNotify == NULL) {
-        looper()->stop();
-        return;
-    }
-
-    sp<AMessage> notify = mNotify->dup();
-    notify->setInt32("what", kWhatError);
-    notify->setInt32("err", err);
-    notify->post();
-}
-
-// static
-int TimeSyncer::CompareRountripTime(const TimeInfo *ti1, const TimeInfo *ti2) {
-    int64_t rt1 = ti1->mT4 - ti1->mT1;
-    int64_t rt2 = ti2->mT4 - ti2->mT1;
-
-    if (rt1 < rt2) {
-        return -1;
-    } else if (rt1 > rt2) {
-        return 1;
-    }
-
-    return 0;
-}
-
-void TimeSyncer::notifyOffset() {
-    mHistory.sort(CompareRountripTime);
-
-    int64_t sum = 0ll;
-    size_t count = 0;
-
-    // Only consider the third of the samples with the best (smallest)
-    // roundtrip times.
-    for (size_t i = 0; i < mHistory.size() / 3; ++i) {
-        const TimeInfo *ti = &mHistory[i];
-
-#if 0
-        // One way delay for a packet to travel from client
-        // to server or back (assumed to be the same either way).
-        int64_t delay =
-            (ti->mT2 - ti->mT1 + ti->mT4 - ti->mT3) / 2;
-#endif
-
-        // Offset between the client clock (T1, T4) and the
-        // server clock (T2, T3) timestamps.
-        int64_t offset =
-            (ti->mT2 - ti->mT1 - ti->mT4 + ti->mT3) / 2;
-
-        ALOGV("(%d) RT: %lld us, offset: %lld us",
-              i, ti->mT4 - ti->mT1, offset);
-
-        sum += offset;
-        ++count;
-    }
-
-    if (mNotify == NULL) {
-        ALOGI("avg. offset is %lld", sum / count);
-        return;
-    }
-
-    sp<AMessage> notify = mNotify->dup();
-    notify->setInt32("what", kWhatTimeOffset);
-    notify->setInt64("offset", sum / count);
-    notify->post();
-}
-
-}  // namespace android
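
The removed TimeSyncer.cpp reduces each request/response exchange to four timestamps (client send, server receive, server send, client receive) and derives from them a one-way delay and a client/server clock offset; a batch of samples is then reduced to the average offset of the third with the smallest roundtrip times. The following is a minimal standalone sketch of that arithmetic only, assuming plain int64_t microsecond timestamps; the RoundTrip struct and helper names are illustrative and not part of the removed class.

    #include <algorithm>
    #include <cstdint>
    #include <vector>

    // Four timestamps per exchange, in microseconds (the same roles as the
    // TimeInfo fields mT1..mT4 used by TimeSyncer).
    struct RoundTrip {
        int64_t t1;  // client timestamp at send
        int64_t t2;  // server timestamp at receive
        int64_t t3;  // server timestamp at send
        int64_t t4;  // client timestamp at receive
    };

    // One-way delay, assuming both legs of the exchange take roughly the
    // same time.
    static int64_t oneWayDelayUs(const RoundTrip &rt) {
        return ((rt.t2 - rt.t1) + (rt.t4 - rt.t3)) / 2;
    }

    // Offset of the server clock (t2, t3) relative to the client clock (t1, t4).
    static int64_t clockOffsetUs(const RoundTrip &rt) {
        return ((rt.t2 - rt.t1) - (rt.t4 - rt.t3)) / 2;
    }

    // Batch step: keep the third of the samples with the smallest roundtrip
    // times and average their offsets.
    static int64_t averageOffsetUs(std::vector<RoundTrip> samples) {
        std::sort(samples.begin(), samples.end(),
                  [](const RoundTrip &a, const RoundTrip &b) {
                      return (a.t4 - a.t1) < (b.t4 - b.t1);
                  });

        int64_t sum = 0;
        size_t count = samples.size() / 3;
        for (size_t i = 0; i < count; ++i) {
            sum += clockOffsetUs(samples[i]);
        }
        return count > 0 ? sum / (int64_t)count : 0;
    }

With kNumPacketsPerBatch == 30 this corresponds to the "30% of packets with the lowest roundtrip times" rule described in the header below.
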
diff --git a/media/libstagefright/wifi-display/TimeSyncer.h b/media/libstagefright/wifi-display/TimeSyncer.h
deleted file mode 100644
index 4e7571f..0000000
--- a/media/libstagefright/wifi-display/TimeSyncer.h
+++ /dev/null
@@ -1,109 +0,0 @@
-/*
- * Copyright 2013, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef TIME_SYNCER_H_
-
-#define TIME_SYNCER_H_
-
-#include <media/stagefright/foundation/AHandler.h>
-
-namespace android {
-
-struct ANetworkSession;
-
-/*
-   TimeSyncer allows us to synchronize time between a client and a server.
-   The client sends a UDP packet containing its send-time to the server;
-   the server sends the packet back to the client, amended with the time it
-   was received and the time the reply was sent back.
-   The client then receives the reply and has enough information to compute
-   the clock offset between client and server, assuming the packet exchange
-   is symmetric, i.e. a packet takes roughly the same time client->server as
-   server->client.
-   This exchange is repeated a number of times and the average offset is
-   computed over the 30% of packets that had the lowest roundtrip times.
-   The offset is re-determined once per batch interval (kBatchDelayUs, i.e.
-   every minute) to account for slight differences in clock frequency.
-*/
-struct TimeSyncer : public AHandler {
-    enum {
-        kWhatError,
-        kWhatTimeOffset,
-    };
-    TimeSyncer(
-            const sp<ANetworkSession> &netSession,
-            const sp<AMessage> &notify);
-
-    void startServer(unsigned localPort);
-    void startClient(const char *remoteHost, unsigned remotePort);
-
-protected:
-    virtual ~TimeSyncer();
-
-    virtual void onMessageReceived(const sp<AMessage> &msg);
-
-private:
-    enum {
-        kWhatStartServer,
-        kWhatStartClient,
-        kWhatUDPNotify,
-        kWhatSendPacket,
-        kWhatTimedOut,
-    };
-
-    struct TimeInfo {
-        int64_t mT1;  // client timestamp at send
-        int64_t mT2;  // server timestamp at receive
-        int64_t mT3;  // server timestamp at send
-        int64_t mT4;  // client timestamp at receive
-    };
-
-    enum {
-        kNumPacketsPerBatch = 30,
-    };
-    static const int64_t kTimeoutDelayUs = 500000ll;
-    static const int64_t kBatchDelayUs = 60000000ll;  // every minute
-
-    sp<ANetworkSession> mNetSession;
-    sp<AMessage> mNotify;
-
-    bool mIsServer;
-    bool mConnected;
-    int32_t mUDPSession;
-    uint32_t mSeqNo;
-    double mTotalTimeUs;
-
-    Vector<TimeInfo> mHistory;
-
-    int64_t mPendingT1;
-    int32_t mTimeoutGeneration;
-
-    void postSendPacket(int64_t delayUs = 0ll);
-
-    void postTimeout();
-    void cancelTimeout();
-
-    void notifyError(status_t err);
-    void notifyOffset();
-
-    static int CompareRountripTime(const TimeInfo *ti1, const TimeInfo *ti2);
-
-    DISALLOW_EVIL_CONSTRUCTORS(TimeSyncer);
-};
-
-}  // namespace android
-
-#endif  // TIME_SYNCER_H_
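
The postTimeout()/cancelTimeout() pair declared above cancels a pending timeout without withdrawing the posted message: the timeout carries the generation it was armed with, and bumping mTimeoutGeneration makes any in-flight delivery stale, which is exactly the check kWhatTimedOut performs in TimeSyncer.cpp. A minimal sketch of that idiom, with hypothetical names:

    #include <cstdint>

    // A timeout that is "cancelled" by invalidation rather than removal:
    // a delivery whose tag no longer matches the current generation is stale.
    struct TimeoutGate {
        int32_t generation = 0;

        int32_t arm() const { return generation; }  // tag the message being posted
        void cancel() { ++generation; }             // invalidate anything in flight
        bool isStale(int32_t tag) const { return tag != generation; }
    };
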
diff --git a/media/libstagefright/wifi-display/VideoFormats.cpp b/media/libstagefright/wifi-display/VideoFormats.cpp
index d171c6f..da557f7 100644
--- a/media/libstagefright/wifi-display/VideoFormats.cpp
+++ b/media/libstagefright/wifi-display/VideoFormats.cpp
@@ -249,11 +249,20 @@
     mNativeIndex = native >> 3;
     mNativeType = (ResolutionType)(native & 7);
 
+    bool success;
     if (mNativeType >= kNumResolutionTypes) {
-        return false;
+        success = false;
+    } else {
+        success = GetConfiguration(
+                mNativeType, mNativeIndex, NULL, NULL, NULL, NULL);
     }
 
-    return GetConfiguration(mNativeType, mNativeIndex, NULL, NULL, NULL, NULL);
+    if (!success) {
+        ALOGW("sink advertised an illegal native resolution, fortunately "
+              "this value is ignored for the time being...");
+    }
+
+    return true;
 }
 
 AString VideoFormats::getFormatSpec(bool forM4Message) const {
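
The VideoFormats change above relaxes the handling of an out-of-range native resolution advertised by the sink: the value is still validated against kNumResolutionTypes and the resolution table, but a failure now only logs a warning and the parse succeeds, since the native value is currently ignored anyway. The two assignments at the top of the hunk show the encoding being validated; a small sketch of that decode, with a hypothetical helper name:

    #include <cstdint>

    // Split the "native" resolution byte: the low 3 bits select one of the
    // kNumResolutionTypes resolution tables, the remaining bits are the
    // index within that table.
    static void decodeNativeByte(uint8_t native, unsigned *type, unsigned *index) {
        *type  = native & 7;
        *index = native >> 3;
    }
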
diff --git a/media/libstagefright/wifi-display/nettest.cpp b/media/libstagefright/wifi-display/nettest.cpp
deleted file mode 100644
index 0779bf5..0000000
--- a/media/libstagefright/wifi-display/nettest.cpp
+++ /dev/null
@@ -1,400 +0,0 @@
-/*
- * Copyright 2013, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "nettest"
-#include <utils/Log.h>
-
-#include "ANetworkSession.h"
-#include "TimeSyncer.h"
-
-#include <binder/ProcessState.h>
-#include <media/stagefright/foundation/ABuffer.h>
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/foundation/AHandler.h>
-#include <media/stagefright/foundation/ALooper.h>
-#include <media/stagefright/foundation/AMessage.h>
-#include <media/stagefright/foundation/hexdump.h>
-#include <media/stagefright/DataSource.h>
-#include <media/stagefright/MediaDefs.h>
-#include <media/stagefright/NuMediaExtractor.h>
-#include <media/stagefright/Utils.h>
-
-namespace android {
-
-struct TestHandler : public AHandler {
-    TestHandler(const sp<ANetworkSession> &netSession);
-
-    void listen(int32_t port);
-    void connect(const char *host, int32_t port);
-
-protected:
-    virtual ~TestHandler();
-    virtual void onMessageReceived(const sp<AMessage> &msg);
-
-private:
-    enum {
-        kTimeSyncerPort = 8123,
-    };
-
-    enum {
-        kWhatListen,
-        kWhatConnect,
-        kWhatTimeSyncerNotify,
-        kWhatNetNotify,
-        kWhatSendMore,
-        kWhatStop,
-    };
-
-    sp<ANetworkSession> mNetSession;
-    sp<TimeSyncer> mTimeSyncer;
-
-    int32_t mServerSessionID;
-    int32_t mSessionID;
-
-    int64_t mTimeOffsetUs;
-    bool mTimeOffsetValid;
-
-    int32_t mCounter;
-
-    int64_t mMaxDelayMs;
-
-    void dumpDelay(int32_t counter, int64_t delayMs);
-
-    DISALLOW_EVIL_CONSTRUCTORS(TestHandler);
-};
-
-TestHandler::TestHandler(const sp<ANetworkSession> &netSession)
-    : mNetSession(netSession),
-      mServerSessionID(0),
-      mSessionID(0),
-      mTimeOffsetUs(-1ll),
-      mTimeOffsetValid(false),
-      mCounter(0),
-      mMaxDelayMs(-1ll) {
-}
-
-TestHandler::~TestHandler() {
-}
-
-void TestHandler::listen(int32_t port) {
-    sp<AMessage> msg = new AMessage(kWhatListen, id());
-    msg->setInt32("port", port);
-    msg->post();
-}
-
-void TestHandler::connect(const char *host, int32_t port) {
-    sp<AMessage> msg = new AMessage(kWhatConnect, id());
-    msg->setString("host", host);
-    msg->setInt32("port", port);
-    msg->post();
-}
-
-void TestHandler::dumpDelay(int32_t counter, int64_t delayMs) {
-    static const int64_t kMinDelayMs = 0;
-    static const int64_t kMaxDelayMs = 300;
-
-    const char *kPattern = "########################################";
-    size_t kPatternSize = strlen(kPattern);
-
-    int n = (kPatternSize * (delayMs - kMinDelayMs))
-                / (kMaxDelayMs - kMinDelayMs);
-
-    if (n < 0) {
-        n = 0;
-    } else if ((size_t)n > kPatternSize) {
-        n = kPatternSize;
-    }
-
-    if (delayMs > mMaxDelayMs) {
-        mMaxDelayMs = delayMs;
-    }
-
-    ALOGI("[%d] (%4lld ms / %4lld ms) %s",
-          counter,
-          delayMs,
-          mMaxDelayMs,
-          kPattern + kPatternSize - n);
-}
-
-void TestHandler::onMessageReceived(const sp<AMessage> &msg) {
-    switch (msg->what()) {
-        case kWhatListen:
-        {
-            sp<AMessage> notify = new AMessage(kWhatTimeSyncerNotify, id());
-            mTimeSyncer = new TimeSyncer(mNetSession, notify);
-            looper()->registerHandler(mTimeSyncer);
-
-            notify = new AMessage(kWhatNetNotify, id());
-
-            int32_t port;
-            CHECK(msg->findInt32("port", &port));
-
-            struct in_addr ifaceAddr;
-            ifaceAddr.s_addr = INADDR_ANY;
-
-            CHECK_EQ((status_t)OK,
-                     mNetSession->createTCPDatagramSession(
-                         ifaceAddr,
-                         port,
-                         notify,
-                         &mServerSessionID));
-            break;
-        }
-
-        case kWhatConnect:
-        {
-            sp<AMessage> notify = new AMessage(kWhatTimeSyncerNotify, id());
-            mTimeSyncer = new TimeSyncer(mNetSession, notify);
-            looper()->registerHandler(mTimeSyncer);
-            mTimeSyncer->startServer(kTimeSyncerPort);
-
-            AString host;
-            CHECK(msg->findString("host", &host));
-
-            int32_t port;
-            CHECK(msg->findInt32("port", &port));
-
-            notify = new AMessage(kWhatNetNotify, id());
-
-            CHECK_EQ((status_t)OK,
-                     mNetSession->createTCPDatagramSession(
-                         0 /* localPort */,
-                         host.c_str(),
-                         port,
-                         notify,
-                         &mSessionID));
-            break;
-        }
-
-        case kWhatNetNotify:
-        {
-            int32_t reason;
-            CHECK(msg->findInt32("reason", &reason));
-
-            switch (reason) {
-                case ANetworkSession::kWhatConnected:
-                {
-                    ALOGI("kWhatConnected");
-
-                    (new AMessage(kWhatSendMore, id()))->post();
-                    break;
-                }
-
-                case ANetworkSession::kWhatClientConnected:
-                {
-                    ALOGI("kWhatClientConnected");
-
-                    CHECK_EQ(mSessionID, 0);
-                    CHECK(msg->findInt32("sessionID", &mSessionID));
-
-                    AString clientIP;
-                    CHECK(msg->findString("client-ip", &clientIP));
-
-                    mTimeSyncer->startClient(clientIP.c_str(), kTimeSyncerPort);
-                    break;
-                }
-
-                case ANetworkSession::kWhatDatagram:
-                {
-                    sp<ABuffer> packet;
-                    CHECK(msg->findBuffer("data", &packet));
-
-                    CHECK_EQ(packet->size(), 12u);
-
-                    int32_t counter = U32_AT(packet->data());
-                    int64_t timeUs = U64_AT(packet->data() + 4);
-
-                    if (mTimeOffsetValid) {
-                        timeUs -= mTimeOffsetUs;
-                        int64_t nowUs = ALooper::GetNowUs();
-                        int64_t delayMs = (nowUs - timeUs) / 1000ll;
-
-                        dumpDelay(counter, delayMs);
-                    } else {
-                        ALOGI("received %d", counter);
-                    }
-                    break;
-                }
-
-                case ANetworkSession::kWhatError:
-                {
-                    ALOGE("kWhatError");
-                    break;
-                }
-
-                default:
-                    TRESPASS();
-            }
-            break;
-        }
-
-        case kWhatTimeSyncerNotify:
-        {
-            CHECK(msg->findInt64("offset", &mTimeOffsetUs));
-            mTimeOffsetValid = true;
-            break;
-        }
-
-        case kWhatSendMore:
-        {
-            uint8_t buffer[4 + 8];
-            buffer[0] = mCounter >> 24;
-            buffer[1] = (mCounter >> 16) & 0xff;
-            buffer[2] = (mCounter >> 8) & 0xff;
-            buffer[3] = mCounter & 0xff;
-
-            int64_t nowUs = ALooper::GetNowUs();
-
-            buffer[4] = nowUs >> 56;
-            buffer[5] = (nowUs >> 48) & 0xff;
-            buffer[6] = (nowUs >> 40) & 0xff;
-            buffer[7] = (nowUs >> 32) & 0xff;
-            buffer[8] = (nowUs >> 24) & 0xff;
-            buffer[9] = (nowUs >> 16) & 0xff;
-            buffer[10] = (nowUs >> 8) & 0xff;
-            buffer[11] = nowUs & 0xff;
-
-            ++mCounter;
-
-            CHECK_EQ((status_t)OK,
-                     mNetSession->sendRequest(
-                         mSessionID,
-                         buffer,
-                         sizeof(buffer),
-                         true /* timeValid */,
-                         nowUs));
-
-            msg->post(100000ll);
-            break;
-        }
-
-        case kWhatStop:
-        {
-            if (mSessionID != 0) {
-                mNetSession->destroySession(mSessionID);
-                mSessionID = 0;
-            }
-
-            if (mServerSessionID != 0) {
-                mNetSession->destroySession(mServerSessionID);
-                mServerSessionID = 0;
-            }
-
-            looper()->stop();
-            break;
-        }
-
-        default:
-            TRESPASS();
-    }
-}
-
-}  // namespace android
-
-static void usage(const char *me) {
-    fprintf(stderr,
-            "usage: %s -c host:port\tconnect to remote host\n"
-            "               -l port   \tlisten\n",
-            me);
-}
-
-int main(int argc, char **argv) {
-    using namespace android;
-
-    // srand(time(NULL));
-
-    ProcessState::self()->startThreadPool();
-
-    DataSource::RegisterDefaultSniffers();
-
-    int32_t connectToPort = -1;
-    AString connectToHost;
-
-    int32_t listenOnPort = -1;
-
-    int res;
-    while ((res = getopt(argc, argv, "hc:l:")) >= 0) {
-        switch (res) {
-            case 'c':
-            {
-                const char *colonPos = strrchr(optarg, ':');
-
-                if (colonPos == NULL) {
-                    usage(argv[0]);
-                    exit(1);
-                }
-
-                connectToHost.setTo(optarg, colonPos - optarg);
-
-                char *end;
-                connectToPort = strtol(colonPos + 1, &end, 10);
-
-                if (*end != '\0' || end == colonPos + 1
-                        || connectToPort < 0 || connectToPort > 65535) {
-                    fprintf(stderr, "Illegal port specified.\n");
-                    exit(1);
-                }
-                break;
-            }
-
-            case 'l':
-            {
-                char *end;
-                listenOnPort = strtol(optarg, &end, 10);
-
-                if (*end != '\0' || end == optarg
-                        || listenOnPort < 0 || listenOnPort > 65535) {
-                    fprintf(stderr, "Illegal port specified.\n");
-                    exit(1);
-                }
-                break;
-            }
-
-            case '?':
-            case 'h':
-                usage(argv[0]);
-                exit(1);
-        }
-    }
-
-    if ((listenOnPort < 0 && connectToPort < 0)
-            || (listenOnPort >= 0 && connectToPort >= 0)) {
-        fprintf(stderr,
-                "You need to select either client or server mode.\n");
-        exit(1);
-    }
-
-    sp<ANetworkSession> netSession = new ANetworkSession;
-    netSession->start();
-
-    sp<ALooper> looper = new ALooper;
-
-    sp<TestHandler> handler = new TestHandler(netSession);
-    looper->registerHandler(handler);
-
-    if (listenOnPort >= 0) {
-        handler->listen(listenOnPort);
-    }
-
-    if (connectToPort >= 0) {
-        handler->connect(connectToHost.c_str(), connectToPort);
-    }
-
-    looper->start(true /* runOnCallingThread */);
-
-    return 0;
-}
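
The removed nettest tool exchanges 12-byte datagrams: a 4-byte big-endian counter followed by an 8-byte big-endian send timestamp in microseconds. The receiving side maps the sender's timestamp into its own clock by subtracting the offset reported by TimeSyncer and prints the resulting delay in milliseconds. A self-contained sketch of the packing and unpacking, with hypothetical helper names:

    #include <cstdint>

    // Serialize counter and timestamp in network (big-endian) byte order,
    // mirroring the byte shuffling in kWhatSendMore above.
    static void packTestPacket(uint8_t out[12], uint32_t counter, int64_t nowUs) {
        for (int i = 0; i < 4; ++i) {
            out[i] = (counter >> (8 * (3 - i))) & 0xff;
        }
        for (int i = 0; i < 8; ++i) {
            out[4 + i] = ((uint64_t)nowUs >> (8 * (7 - i))) & 0xff;
        }
    }

    // Recover the counter and the sender's timestamp from a received packet.
    static void unpackTestPacket(
            const uint8_t in[12], uint32_t *counter, int64_t *sentUs) {
        *counter = 0;
        for (int i = 0; i < 4; ++i) {
            *counter = (*counter << 8) | in[i];
        }
        uint64_t t = 0;
        for (int i = 0; i < 8; ++i) {
            t = (t << 8) | in[4 + i];
        }
        *sentUs = (int64_t)t;
    }
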
diff --git a/media/libstagefright/wifi-display/rtp/RTPAssembler.cpp b/media/libstagefright/wifi-display/rtp/RTPAssembler.cpp
deleted file mode 100644
index 7a96081..0000000
--- a/media/libstagefright/wifi-display/rtp/RTPAssembler.cpp
+++ /dev/null
@@ -1,328 +0,0 @@
-/*
- * Copyright 2013, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "RTPAssembler"
-#include <utils/Log.h>
-
-#include "RTPAssembler.h"
-
-#include <media/stagefright/foundation/ABuffer.h>
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/foundation/AMessage.h>
-#include <media/stagefright/foundation/hexdump.h>
-#include <media/stagefright/MediaErrors.h>
-
-namespace android {
-
-RTPReceiver::Assembler::Assembler(const sp<AMessage> &notify)
-    : mNotify(notify) {
-}
-
-void RTPReceiver::Assembler::postAccessUnit(
-        const sp<ABuffer> &accessUnit, bool followsDiscontinuity) {
-    sp<AMessage> notify = mNotify->dup();
-    notify->setInt32("what", RTPReceiver::kWhatAccessUnit);
-    notify->setBuffer("accessUnit", accessUnit);
-    notify->setInt32("followsDiscontinuity", followsDiscontinuity);
-    notify->post();
-}
-////////////////////////////////////////////////////////////////////////////////
-
-RTPReceiver::TSAssembler::TSAssembler(const sp<AMessage> &notify)
-    : Assembler(notify),
-      mSawDiscontinuity(false) {
-}
-
-void RTPReceiver::TSAssembler::signalDiscontinuity() {
-    mSawDiscontinuity = true;
-}
-
-status_t RTPReceiver::TSAssembler::processPacket(const sp<ABuffer> &packet) {
-    int32_t rtpTime;
-    CHECK(packet->meta()->findInt32("rtp-time", &rtpTime));
-
-    packet->meta()->setInt64("timeUs", (rtpTime * 100ll) / 9);
-
-    postAccessUnit(packet, mSawDiscontinuity);
-
-    if (mSawDiscontinuity) {
-        mSawDiscontinuity = false;
-    }
-
-    return OK;
-}
-
-////////////////////////////////////////////////////////////////////////////////
-
-RTPReceiver::H264Assembler::H264Assembler(const sp<AMessage> &notify)
-    : Assembler(notify),
-      mState(0),
-      mIndicator(0),
-      mNALType(0),
-      mAccessUnitRTPTime(0) {
-}
-
-void RTPReceiver::H264Assembler::signalDiscontinuity() {
-    reset();
-}
-
-status_t RTPReceiver::H264Assembler::processPacket(const sp<ABuffer> &packet) {
-    status_t err = internalProcessPacket(packet);
-
-    if (err != OK) {
-        reset();
-    }
-
-    return err;
-}
-
-status_t RTPReceiver::H264Assembler::internalProcessPacket(
-        const sp<ABuffer> &packet) {
-    const uint8_t *data = packet->data();
-    size_t size = packet->size();
-
-    switch (mState) {
-        case 0:
-        {
-            if (size < 1 || (data[0] & 0x80)) {
-                ALOGV("Malformed H264 RTP packet (empty or F-bit set)");
-                return ERROR_MALFORMED;
-            }
-
-            unsigned nalType = data[0] & 0x1f;
-            if (nalType >= 1 && nalType <= 23) {
-                addSingleNALUnit(packet);
-                ALOGV("added single NAL packet");
-            } else if (nalType == 28) {
-                // FU-A
-                unsigned indicator = data[0];
-                CHECK((indicator & 0x1f) == 28);
-
-                if (size < 2) {
-                    ALOGV("Malformed H264 FU-A packet (single byte)");
-                    return ERROR_MALFORMED;
-                }
-
-                if (!(data[1] & 0x80)) {
-                    ALOGV("Malformed H264 FU-A packet (no start bit)");
-                    return ERROR_MALFORMED;
-                }
-
-                mIndicator = data[0];
-                mNALType = data[1] & 0x1f;
-                uint32_t nri = (data[0] >> 5) & 3;
-
-                clearAccumulator();
-
-                uint8_t byte = mNALType | (nri << 5);
-                appendToAccumulator(&byte, 1);
-                appendToAccumulator(data + 2, size - 2);
-
-                int32_t rtpTime;
-                CHECK(packet->meta()->findInt32("rtp-time", &rtpTime));
-                mAccumulator->meta()->setInt32("rtp-time", rtpTime);
-
-                if (data[1] & 0x40) {
-                    // Huh? End bit also set on the first buffer.
-                    addSingleNALUnit(mAccumulator);
-                    clearAccumulator();
-
-                    ALOGV("added FU-A");
-                    break;
-                }
-
-                mState = 1;
-            } else if (nalType == 24) {
-                // STAP-A
-
-                status_t err = addSingleTimeAggregationPacket(packet);
-                if (err != OK) {
-                    return err;
-                }
-            } else {
-                ALOGV("Malformed H264 packet (unknown type %d)", nalType);
-                return ERROR_UNSUPPORTED;
-            }
-            break;
-        }
-
-        case 1:
-        {
-            if (size < 2
-                    || data[0] != mIndicator
-                    || (data[1] & 0x1f) != mNALType
-                    || (data[1] & 0x80)) {
-                ALOGV("Malformed H264 FU-A packet (indicator, "
-                      "type or start bit mismatch)");
-
-                return ERROR_MALFORMED;
-            }
-
-            appendToAccumulator(data + 2, size - 2);
-
-            if (data[1] & 0x40) {
-                addSingleNALUnit(mAccumulator);
-
-                clearAccumulator();
-                mState = 0;
-
-                ALOGV("added FU-A");
-            }
-            break;
-        }
-
-        default:
-            TRESPASS();
-    }
-
-    int32_t marker;
-    CHECK(packet->meta()->findInt32("M", &marker));
-
-    if (marker) {
-        flushAccessUnit();
-    }
-
-    return OK;
-}
-
-void RTPReceiver::H264Assembler::reset() {
-    mNALUnits.clear();
-
-    clearAccumulator();
-    mState = 0;
-}
-
-void RTPReceiver::H264Assembler::clearAccumulator() {
-    if (mAccumulator != NULL) {
-        // XXX Too expensive.
-        mAccumulator.clear();
-    }
-}
-
-void RTPReceiver::H264Assembler::appendToAccumulator(
-        const void *data, size_t size) {
-    if (mAccumulator == NULL) {
-        mAccumulator = new ABuffer(size);
-        memcpy(mAccumulator->data(), data, size);
-        return;
-    }
-
-    if (mAccumulator->size() + size > mAccumulator->capacity()) {
-        sp<ABuffer> buf = new ABuffer(mAccumulator->size() + size);
-        memcpy(buf->data(), mAccumulator->data(), mAccumulator->size());
-        buf->setRange(0, mAccumulator->size());
-
-        int32_t rtpTime;
-        if (mAccumulator->meta()->findInt32("rtp-time", &rtpTime)) {
-            buf->meta()->setInt32("rtp-time", rtpTime);
-        }
-
-        mAccumulator = buf;
-    }
-
-    memcpy(mAccumulator->data() + mAccumulator->size(), data, size);
-    mAccumulator->setRange(0, mAccumulator->size() + size);
-}
-
-void RTPReceiver::H264Assembler::addSingleNALUnit(const sp<ABuffer> &packet) {
-    if (mNALUnits.empty()) {
-        int32_t rtpTime;
-        CHECK(packet->meta()->findInt32("rtp-time", &rtpTime));
-
-        mAccessUnitRTPTime = rtpTime;
-    }
-
-    mNALUnits.push_back(packet);
-}
-
-void RTPReceiver::H264Assembler::flushAccessUnit() {
-    if (mNALUnits.empty()) {
-        return;
-    }
-
-    size_t totalSize = 0;
-    for (List<sp<ABuffer> >::iterator it = mNALUnits.begin();
-            it != mNALUnits.end(); ++it) {
-        totalSize += 4 + (*it)->size();
-    }
-
-    sp<ABuffer> accessUnit = new ABuffer(totalSize);
-    size_t offset = 0;
-    for (List<sp<ABuffer> >::iterator it = mNALUnits.begin();
-            it != mNALUnits.end(); ++it) {
-        const sp<ABuffer> nalUnit = *it;
-
-        memcpy(accessUnit->data() + offset, "\x00\x00\x00\x01", 4);
-
-        memcpy(accessUnit->data() + offset + 4,
-               nalUnit->data(),
-               nalUnit->size());
-
-        offset += 4 + nalUnit->size();
-    }
-
-    mNALUnits.clear();
-
-    accessUnit->meta()->setInt64("timeUs", mAccessUnitRTPTime * 100ll / 9ll);
-    postAccessUnit(accessUnit, false /* followsDiscontinuity */);
-}
-
-status_t RTPReceiver::H264Assembler::addSingleTimeAggregationPacket(
-        const sp<ABuffer> &packet) {
-    const uint8_t *data = packet->data();
-    size_t size = packet->size();
-
-    if (size < 3) {
-        ALOGV("Malformed H264 STAP-A packet (too small)");
-        return ERROR_MALFORMED;
-    }
-
-    int32_t rtpTime;
-    CHECK(packet->meta()->findInt32("rtp-time", &rtpTime));
-
-    ++data;
-    --size;
-    while (size >= 2) {
-        size_t nalSize = (data[0] << 8) | data[1];
-
-        if (size < nalSize + 2) {
-            ALOGV("Malformed H264 STAP-A packet (incomplete NAL unit)");
-            return ERROR_MALFORMED;
-        }
-
-        sp<ABuffer> unit = new ABuffer(nalSize);
-        memcpy(unit->data(), &data[2], nalSize);
-
-        unit->meta()->setInt32("rtp-time", rtpTime);
-
-        addSingleNALUnit(unit);
-
-        data += 2 + nalSize;
-        size -= 2 + nalSize;
-    }
-
-    if (size != 0) {
-        ALOGV("Unexpected padding at end of STAP-A packet.");
-    }
-
-    ALOGV("added STAP-A");
-
-    return OK;
-}
-
-}  // namespace android
-
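
Of the removed assemblers, the H.264 one does the real work: single NAL units (types 1-23) pass straight through, STAP-A payloads (type 24) are split at their 16-bit length prefixes, and FU-A fragments (type 28) are accumulated until the end bit is seen, with the original NAL header rebuilt from the FU indicator and FU header. A sketch of that header reconstruction and the start/end flags, matching the bit operations in the code above:

    #include <cstdint>

    // Rebuild the NAL header of a fragmented unit (FU-A): the type comes
    // from the FU header, the NRI bits from the FU indicator, and the
    // forbidden_zero_bit stays 0.
    static uint8_t rebuildNALHeader(uint8_t indicator, uint8_t fuHeader) {
        uint8_t nalType = fuHeader & 0x1f;
        uint8_t nri = (indicator >> 5) & 3;
        return nalType | (nri << 5);
    }

    // Start/end of the fragment run, as signalled in the FU header.
    static bool isFuStart(uint8_t fuHeader) { return (fuHeader & 0x80) != 0; }
    static bool isFuEnd(uint8_t fuHeader)   { return (fuHeader & 0x40) != 0; }
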
diff --git a/media/libstagefright/wifi-display/rtp/RTPAssembler.h b/media/libstagefright/wifi-display/rtp/RTPAssembler.h
deleted file mode 100644
index e456d32..0000000
--- a/media/libstagefright/wifi-display/rtp/RTPAssembler.h
+++ /dev/null
@@ -1,92 +0,0 @@
-/*
- * Copyright 2013, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef RTP_ASSEMBLER_H_
-
-#define RTP_ASSEMBLER_H_
-
-#include "RTPReceiver.h"
-
-namespace android {
-
-// A helper class to reassemble the payload of RTP packets into access
-// units depending on the packetization scheme.
-struct RTPReceiver::Assembler : public RefBase {
-    Assembler(const sp<AMessage> &notify);
-
-    virtual void signalDiscontinuity() = 0;
-    virtual status_t processPacket(const sp<ABuffer> &packet) = 0;
-
-protected:
-    virtual ~Assembler() {}
-
-    void postAccessUnit(
-            const sp<ABuffer> &accessUnit, bool followsDiscontinuity);
-
-private:
-    sp<AMessage> mNotify;
-
-    DISALLOW_EVIL_CONSTRUCTORS(Assembler);
-};
-
-struct RTPReceiver::TSAssembler : public RTPReceiver::Assembler {
-    TSAssembler(const sp<AMessage> &notify);
-
-    virtual void signalDiscontinuity();
-    virtual status_t processPacket(const sp<ABuffer> &packet);
-
-private:
-    bool mSawDiscontinuity;
-
-    DISALLOW_EVIL_CONSTRUCTORS(TSAssembler);
-};
-
-struct RTPReceiver::H264Assembler : public RTPReceiver::Assembler {
-    H264Assembler(const sp<AMessage> &notify);
-
-    virtual void signalDiscontinuity();
-    virtual status_t processPacket(const sp<ABuffer> &packet);
-
-private:
-    int32_t mState;
-
-    uint8_t mIndicator;
-    uint8_t mNALType;
-
-    sp<ABuffer> mAccumulator;
-
-    List<sp<ABuffer> > mNALUnits;
-    int32_t mAccessUnitRTPTime;
-
-    status_t internalProcessPacket(const sp<ABuffer> &packet);
-
-    void addSingleNALUnit(const sp<ABuffer> &packet);
-    status_t addSingleTimeAggregationPacket(const sp<ABuffer> &packet);
-
-    void flushAccessUnit();
-
-    void clearAccumulator();
-    void appendToAccumulator(const void *data, size_t size);
-
-    void reset();
-
-    DISALLOW_EVIL_CONSTRUCTORS(H264Assembler);
-};
-
-}  // namespace android
-
-#endif  // RTP_ASSEMBLER_H_
-
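
When a marker bit closes an access unit, the H264Assembler declared above concatenates the gathered NAL units with 4-byte Annex B start codes (flushAccessUnit) and timestamps the unit from the 90 kHz RTP clock. A standalone sketch of those two steps, with hypothetical names:

    #include <cstdint>
    #include <vector>

    // Join NAL units into one access unit, prefixing each with the Annex B
    // start code 00 00 00 01.
    static std::vector<uint8_t> buildAccessUnit(
            const std::vector<std::vector<uint8_t> > &nalUnits) {
        static const uint8_t kStartCode[4] = { 0x00, 0x00, 0x00, 0x01 };

        std::vector<uint8_t> accessUnit;
        for (size_t i = 0; i < nalUnits.size(); ++i) {
            accessUnit.insert(accessUnit.end(), kStartCode, kStartCode + 4);
            accessUnit.insert(
                    accessUnit.end(), nalUnits[i].begin(), nalUnits[i].end());
        }
        return accessUnit;
    }

    // 90 kHz RTP ticks to microseconds: 100/9 == 1000000/90000.
    static int64_t rtpToTimeUs(uint32_t rtpTime) {
        return ((int64_t)rtpTime * 100ll) / 9ll;
    }
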
diff --git a/media/libstagefright/wifi-display/rtp/RTPReceiver.cpp b/media/libstagefright/wifi-display/rtp/RTPReceiver.cpp
deleted file mode 100644
index 8fa1dae..0000000
--- a/media/libstagefright/wifi-display/rtp/RTPReceiver.cpp
+++ /dev/null
@@ -1,1153 +0,0 @@
-/*
- * Copyright 2013, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "RTPReceiver"
-#include <utils/Log.h>
-
-#include "RTPAssembler.h"
-#include "RTPReceiver.h"
-
-#include "ANetworkSession.h"
-
-#include <media/stagefright/foundation/ABuffer.h>
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/foundation/AMessage.h>
-#include <media/stagefright/foundation/hexdump.h>
-#include <media/stagefright/MediaErrors.h>
-#include <media/stagefright/Utils.h>
-
-#define TRACK_PACKET_LOSS       0
-
-namespace android {
-
-////////////////////////////////////////////////////////////////////////////////
-
-struct RTPReceiver::Source : public AHandler {
-    Source(RTPReceiver *receiver, uint32_t ssrc);
-
-    void onPacketReceived(uint16_t seq, const sp<ABuffer> &buffer);
-
-    void addReportBlock(uint32_t ssrc, const sp<ABuffer> &buf);
-
-protected:
-    virtual ~Source();
-
-    virtual void onMessageReceived(const sp<AMessage> &msg);
-
-private:
-    enum {
-        kWhatRetransmit,
-        kWhatDeclareLost,
-    };
-
-    static const uint32_t kMinSequential = 2;
-    static const uint32_t kMaxDropout = 3000;
-    static const uint32_t kMaxMisorder = 100;
-    static const uint32_t kRTPSeqMod = 1u << 16;
-    static const int64_t kReportIntervalUs = 10000000ll;
-
-    RTPReceiver *mReceiver;
-    uint32_t mSSRC;
-    bool mFirst;
-    uint16_t mMaxSeq;
-    uint32_t mCycles;
-    uint32_t mBaseSeq;
-    uint32_t mReceived;
-    uint32_t mExpectedPrior;
-    uint32_t mReceivedPrior;
-
-    int64_t mFirstArrivalTimeUs;
-    int64_t mFirstRTPTimeUs;
-
-    // Ordered by extended seq number.
-    List<sp<ABuffer> > mPackets;
-
-    enum StatusBits {
-        STATUS_DECLARED_LOST            = 1,
-        STATUS_REQUESTED_RETRANSMISSION = 2,
-        STATUS_ARRIVED_LATE             = 4,
-    };
-#if TRACK_PACKET_LOSS
-    KeyedVector<int32_t, uint32_t> mLostPackets;
-#endif
-
-    void modifyPacketStatus(int32_t extSeqNo, uint32_t mask);
-
-    int32_t mAwaitingExtSeqNo;
-    bool mRequestedRetransmission;
-
-    int32_t mActivePacketType;
-    sp<Assembler> mActiveAssembler;
-
-    int64_t mNextReportTimeUs;
-
-    int32_t mNumDeclaredLost;
-    int32_t mNumDeclaredLostPrior;
-
-    int32_t mRetransmitGeneration;
-    int32_t mDeclareLostGeneration;
-    bool mDeclareLostTimerPending;
-
-    void queuePacket(const sp<ABuffer> &packet);
-    void dequeueMore();
-
-    sp<ABuffer> getNextPacket();
-    void resync();
-
-    void postRetransmitTimer(int64_t delayUs);
-    void postDeclareLostTimer(int64_t delayUs);
-    void cancelTimers();
-
-    DISALLOW_EVIL_CONSTRUCTORS(Source);
-};
-
-////////////////////////////////////////////////////////////////////////////////
-
-RTPReceiver::Source::Source(RTPReceiver *receiver, uint32_t ssrc)
-    : mReceiver(receiver),
-      mSSRC(ssrc),
-      mFirst(true),
-      mMaxSeq(0),
-      mCycles(0),
-      mBaseSeq(0),
-      mReceived(0),
-      mExpectedPrior(0),
-      mReceivedPrior(0),
-      mFirstArrivalTimeUs(-1ll),
-      mFirstRTPTimeUs(-1ll),
-      mAwaitingExtSeqNo(-1),
-      mRequestedRetransmission(false),
-      mActivePacketType(-1),
-      mNextReportTimeUs(-1ll),
-      mNumDeclaredLost(0),
-      mNumDeclaredLostPrior(0),
-      mRetransmitGeneration(0),
-      mDeclareLostGeneration(0),
-      mDeclareLostTimerPending(false) {
-}
-
-RTPReceiver::Source::~Source() {
-}
-
-void RTPReceiver::Source::onMessageReceived(const sp<AMessage> &msg) {
-    switch (msg->what()) {
-        case kWhatRetransmit:
-        {
-            int32_t generation;
-            CHECK(msg->findInt32("generation", &generation));
-
-            if (generation != mRetransmitGeneration) {
-                break;
-            }
-
-            mRequestedRetransmission = true;
-            mReceiver->requestRetransmission(mSSRC, mAwaitingExtSeqNo);
-
-            modifyPacketStatus(
-                    mAwaitingExtSeqNo, STATUS_REQUESTED_RETRANSMISSION);
-            break;
-        }
-
-        case kWhatDeclareLost:
-        {
-            int32_t generation;
-            CHECK(msg->findInt32("generation", &generation));
-
-            if (generation != mDeclareLostGeneration) {
-                break;
-            }
-
-            cancelTimers();
-
-            ALOGV("Lost packet extSeqNo %d %s",
-                  mAwaitingExtSeqNo,
-                  mRequestedRetransmission ? "*" : "");
-
-            mRequestedRetransmission = false;
-            if (mActiveAssembler != NULL) {
-                mActiveAssembler->signalDiscontinuity();
-            }
-
-            modifyPacketStatus(mAwaitingExtSeqNo, STATUS_DECLARED_LOST);
-
-            // resync();
-            ++mAwaitingExtSeqNo;
-            ++mNumDeclaredLost;
-
-            mReceiver->notifyPacketLost();
-
-            dequeueMore();
-            break;
-        }
-
-        default:
-            TRESPASS();
-    }
-}
-
-void RTPReceiver::Source::onPacketReceived(
-        uint16_t seq, const sp<ABuffer> &buffer) {
-    if (mFirst) {
-        buffer->setInt32Data(mCycles | seq);
-        queuePacket(buffer);
-
-        mFirst = false;
-        mBaseSeq = seq;
-        mMaxSeq = seq;
-        ++mReceived;
-        return;
-    }
-
-    uint16_t udelta = seq - mMaxSeq;
-
-    if (udelta < kMaxDropout) {
-        // In order, with permissible gap.
-
-        if (seq < mMaxSeq) {
-            // Sequence number wrapped - count another 64K cycle
-            mCycles += kRTPSeqMod;
-        }
-
-        mMaxSeq = seq;
-
-        ++mReceived;
-    } else if (udelta <= kRTPSeqMod - kMaxMisorder) {
-        // The sequence number made a very large jump
-        return;
-    } else {
-        // Duplicate or reordered packet.
-    }
-
-    buffer->setInt32Data(mCycles | seq);
-    queuePacket(buffer);
-}
-
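
onPacketReceived() above extends the 16-bit RTP sequence number into a 32-bit value by counting wrap-arounds, essentially the scheme from RFC 3550's example code: small forward deltas are accepted (wrapping when the number decreases), very large jumps are ignored, and everything else is treated as a duplicate or reordered packet. A condensed sketch of that state machine with hypothetical names; the first-packet initialisation is omitted:

    #include <cstdint>

    struct SeqTracker {
        static const uint32_t kMaxDropout  = 3000;
        static const uint32_t kMaxMisorder = 100;
        static const uint32_t kRTPSeqMod   = 1u << 16;

        uint16_t maxSeq = 0;
        uint32_t cycles = 0;   // counts 64K wrap-arounds, kept in the high bits

        // Returns the extended sequence number, or -1 for a packet that
        // jumped too far ahead to be trusted.
        int64_t extend(uint16_t seq) {
            uint16_t udelta = seq - maxSeq;   // modulo-64K distance

            if (udelta < kMaxDropout) {
                if (seq < maxSeq) {
                    cycles += kRTPSeqMod;     // sequence number wrapped
                }
                maxSeq = seq;
            } else if (udelta <= kRTPSeqMod - kMaxMisorder) {
                return -1;                    // very large jump, ignore
            }
            // else: duplicate or reordered packet; keep the current maxSeq.

            return (int64_t)(cycles | seq);
        }
    };
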
-void RTPReceiver::Source::queuePacket(const sp<ABuffer> &packet) {
-    int32_t newExtendedSeqNo = packet->int32Data();
-
-    if (mFirstArrivalTimeUs < 0ll) {
-        mFirstArrivalTimeUs = ALooper::GetNowUs();
-
-        uint32_t rtpTime;
-        CHECK(packet->meta()->findInt32("rtp-time", (int32_t *)&rtpTime));
-
-        mFirstRTPTimeUs = (rtpTime * 100ll) / 9ll;
-    }
-
-    if (mAwaitingExtSeqNo >= 0 && newExtendedSeqNo < mAwaitingExtSeqNo) {
-        // We're no longer interested in these. They're old.
-        ALOGV("dropping stale extSeqNo %d", newExtendedSeqNo);
-
-        modifyPacketStatus(newExtendedSeqNo, STATUS_ARRIVED_LATE);
-        return;
-    }
-
-    if (mPackets.empty()) {
-        mPackets.push_back(packet);
-        dequeueMore();
-        return;
-    }
-
-    List<sp<ABuffer> >::iterator firstIt = mPackets.begin();
-    List<sp<ABuffer> >::iterator it = --mPackets.end();
-    for (;;) {
-        int32_t extendedSeqNo = (*it)->int32Data();
-
-        if (extendedSeqNo == newExtendedSeqNo) {
-            // Duplicate packet.
-            return;
-        }
-
-        if (extendedSeqNo < newExtendedSeqNo) {
-            // Insert new packet after the one at "it".
-            mPackets.insert(++it, packet);
-            break;
-        }
-
-        if (it == firstIt) {
-            // Insert new packet before the first existing one.
-            mPackets.insert(it, packet);
-            break;
-        }
-
-        --it;
-    }
-
-    dequeueMore();
-}
-
-void RTPReceiver::Source::dequeueMore() {
-    int64_t nowUs = ALooper::GetNowUs();
-    if (mNextReportTimeUs < 0ll || nowUs >= mNextReportTimeUs) {
-        if (mNextReportTimeUs >= 0ll) {
-            uint32_t expected = (mMaxSeq | mCycles) - mBaseSeq + 1;
-
-            uint32_t expectedInterval = expected - mExpectedPrior;
-            mExpectedPrior = expected;
-
-            uint32_t receivedInterval = mReceived - mReceivedPrior;
-            mReceivedPrior = mReceived;
-
-            int64_t lostInterval =
-                (int64_t)expectedInterval - (int64_t)receivedInterval;
-
-            int32_t declaredLostInterval =
-                mNumDeclaredLost - mNumDeclaredLostPrior;
-
-            mNumDeclaredLostPrior = mNumDeclaredLost;
-
-            if (declaredLostInterval > 0) {
-                ALOGI("lost %lld packets (%.2f %%), declared %d lost\n",
-                      lostInterval,
-                      100.0f * lostInterval / expectedInterval,
-                      declaredLostInterval);
-            }
-        }
-
-        mNextReportTimeUs = nowUs + kReportIntervalUs;
-
-#if TRACK_PACKET_LOSS
-        for (size_t i = 0; i < mLostPackets.size(); ++i) {
-            int32_t key = mLostPackets.keyAt(i);
-            uint32_t value = mLostPackets.valueAt(i);
-
-            AString status;
-            if (value & STATUS_REQUESTED_RETRANSMISSION) {
-                status.append("retrans ");
-            }
-            if (value & STATUS_ARRIVED_LATE) {
-                status.append("arrived-late ");
-            }
-            ALOGI("Packet %d declared lost %s", key, status.c_str());
-        }
-#endif
-    }
-
-    sp<ABuffer> packet;
-    while ((packet = getNextPacket()) != NULL) {
-        if (mDeclareLostTimerPending) {
-            cancelTimers();
-        }
-
-        CHECK_GE(mAwaitingExtSeqNo, 0);
-#if TRACK_PACKET_LOSS
-        mLostPackets.removeItem(mAwaitingExtSeqNo);
-#endif
-
-        int32_t packetType;
-        CHECK(packet->meta()->findInt32("PT", &packetType));
-
-        if (packetType != mActivePacketType) {
-            mActiveAssembler = mReceiver->makeAssembler(packetType);
-            mActivePacketType = packetType;
-        }
-
-        if (mActiveAssembler != NULL) {
-            status_t err = mActiveAssembler->processPacket(packet);
-            if (err != OK) {
-                ALOGV("assembler returned error %d", err);
-            }
-        }
-
-        ++mAwaitingExtSeqNo;
-    }
-
-    if (mDeclareLostTimerPending) {
-        return;
-    }
-
-    if (mPackets.empty()) {
-        return;
-    }
-
-    CHECK_GE(mAwaitingExtSeqNo, 0);
-
-    const sp<ABuffer> &firstPacket = *mPackets.begin();
-
-    uint32_t rtpTime;
-    CHECK(firstPacket->meta()->findInt32(
-                "rtp-time", (int32_t *)&rtpTime));
-
-
-    int64_t rtpUs = (rtpTime * 100ll) / 9ll;
-
-    int64_t maxArrivalTimeUs =
-        mFirstArrivalTimeUs + rtpUs - mFirstRTPTimeUs;
-
-    nowUs = ALooper::GetNowUs();
-
-    CHECK_LT(mAwaitingExtSeqNo, firstPacket->int32Data());
-
-    ALOGV("waiting for %d, comparing against %d, %lld us left",
-          mAwaitingExtSeqNo,
-          firstPacket->int32Data(),
-          maxArrivalTimeUs - nowUs);
-
-    postDeclareLostTimer(maxArrivalTimeUs + kPacketLostAfterUs);
-
-    if (kRequestRetransmissionAfterUs > 0ll) {
-        postRetransmitTimer(
-                maxArrivalTimeUs + kRequestRetransmissionAfterUs);
-    }
-}
-
-sp<ABuffer> RTPReceiver::Source::getNextPacket() {
-    if (mPackets.empty()) {
-        return NULL;
-    }
-
-    int32_t extSeqNo = (*mPackets.begin())->int32Data();
-
-    if (mAwaitingExtSeqNo < 0) {
-        mAwaitingExtSeqNo = extSeqNo;
-    } else if (extSeqNo != mAwaitingExtSeqNo) {
-        return NULL;
-    }
-
-    sp<ABuffer> packet = *mPackets.begin();
-    mPackets.erase(mPackets.begin());
-
-    return packet;
-}
-
-void RTPReceiver::Source::resync() {
-    mAwaitingExtSeqNo = -1;
-}
-
-void RTPReceiver::Source::addReportBlock(
-        uint32_t ssrc, const sp<ABuffer> &buf) {
-    uint32_t extMaxSeq = mMaxSeq | mCycles;
-    uint32_t expected = extMaxSeq - mBaseSeq + 1;
-
-    int64_t lost = (int64_t)expected - (int64_t)mReceived;
-    if (lost > 0x7fffff) {
-        lost = 0x7fffff;
-    } else if (lost < -0x800000) {
-        lost = -0x800000;
-    }
-
-    uint32_t expectedInterval = expected - mExpectedPrior;
-    mExpectedPrior = expected;
-
-    uint32_t receivedInterval = mReceived - mReceivedPrior;
-    mReceivedPrior = mReceived;
-
-    int64_t lostInterval = expectedInterval - receivedInterval;
-
-    uint8_t fractionLost;
-    if (expectedInterval == 0 || lostInterval <= 0) {
-        fractionLost = 0;
-    } else {
-        fractionLost = (lostInterval << 8) / expectedInterval;
-    }
-
-    uint8_t *ptr = buf->data() + buf->size();
-
-    ptr[0] = ssrc >> 24;
-    ptr[1] = (ssrc >> 16) & 0xff;
-    ptr[2] = (ssrc >> 8) & 0xff;
-    ptr[3] = ssrc & 0xff;
-
-    ptr[4] = fractionLost;
-
-    ptr[5] = (lost >> 16) & 0xff;
-    ptr[6] = (lost >> 8) & 0xff;
-    ptr[7] = lost & 0xff;
-
-    ptr[8] = extMaxSeq >> 24;
-    ptr[9] = (extMaxSeq >> 16) & 0xff;
-    ptr[10] = (extMaxSeq >> 8) & 0xff;
-    ptr[11] = extMaxSeq & 0xff;
-
-    // XXX TODO:
-
-    ptr[12] = 0x00;  // interarrival jitter
-    ptr[13] = 0x00;
-    ptr[14] = 0x00;
-    ptr[15] = 0x00;
-
-    ptr[16] = 0x00;  // last SR
-    ptr[17] = 0x00;
-    ptr[18] = 0x00;
-    ptr[19] = 0x00;
-
-    ptr[20] = 0x00;  // delay since last SR
-    ptr[21] = 0x00;
-    ptr[22] = 0x00;
-    ptr[23] = 0x00;
-}
-
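
addReportBlock() above derives the two loss figures an RTCP receiver report block carries: the cumulative number of packets lost, clamped to the 24-bit field, and the fraction lost since the previous report as an 8-bit fixed-point value. A standalone sketch of just that bookkeeping, with hypothetical names:

    #include <cstdint>

    struct LossStats {
        int32_t cumulativeLost;  // clamped to the 24-bit signed RR field
        uint8_t fractionLost;    // lost/expected since last report, times 256
    };

    static LossStats computeLossStats(
            uint32_t extMaxSeq, uint32_t baseSeq, uint32_t received,
            uint32_t *expectedPrior, uint32_t *receivedPrior) {
        uint32_t expected = extMaxSeq - baseSeq + 1;

        int64_t lost = (int64_t)expected - (int64_t)received;
        if (lost > 0x7fffff) {
            lost = 0x7fffff;
        } else if (lost < -0x800000) {
            lost = -0x800000;
        }

        uint32_t expectedInterval = expected - *expectedPrior;
        *expectedPrior = expected;

        uint32_t receivedInterval = received - *receivedPrior;
        *receivedPrior = received;

        int64_t lostInterval =
            (int64_t)expectedInterval - (int64_t)receivedInterval;

        LossStats stats;
        stats.cumulativeLost = (int32_t)lost;
        stats.fractionLost =
            (expectedInterval == 0 || lostInterval <= 0)
                ? 0
                : (uint8_t)((lostInterval << 8) / expectedInterval);
        return stats;
    }
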
-////////////////////////////////////////////////////////////////////////////////
-
-RTPReceiver::RTPReceiver(
-        const sp<ANetworkSession> &netSession,
-        const sp<AMessage> &notify,
-        uint32_t flags)
-    : mNetSession(netSession),
-      mNotify(notify),
-      mFlags(flags),
-      mRTPMode(TRANSPORT_UNDEFINED),
-      mRTCPMode(TRANSPORT_UNDEFINED),
-      mRTPSessionID(0),
-      mRTCPSessionID(0),
-      mRTPConnected(false),
-      mRTCPConnected(false),
-      mRTPClientSessionID(0),
-      mRTCPClientSessionID(0) {
-}
-
-RTPReceiver::~RTPReceiver() {
-    if (mRTCPClientSessionID != 0) {
-        mNetSession->destroySession(mRTCPClientSessionID);
-        mRTCPClientSessionID = 0;
-    }
-
-    if (mRTPClientSessionID != 0) {
-        mNetSession->destroySession(mRTPClientSessionID);
-        mRTPClientSessionID = 0;
-    }
-
-    if (mRTCPSessionID != 0) {
-        mNetSession->destroySession(mRTCPSessionID);
-        mRTCPSessionID = 0;
-    }
-
-    if (mRTPSessionID != 0) {
-        mNetSession->destroySession(mRTPSessionID);
-        mRTPSessionID = 0;
-    }
-}
-
-status_t RTPReceiver::initAsync(
-        TransportMode rtpMode,
-        TransportMode rtcpMode,
-        int32_t *outLocalRTPPort) {
-    if (mRTPMode != TRANSPORT_UNDEFINED
-            || rtpMode == TRANSPORT_UNDEFINED
-            || rtpMode == TRANSPORT_NONE
-            || rtcpMode == TRANSPORT_UNDEFINED) {
-        return INVALID_OPERATION;
-    }
-
-    CHECK_NE(rtpMode, TRANSPORT_TCP_INTERLEAVED);
-    CHECK_NE(rtcpMode, TRANSPORT_TCP_INTERLEAVED);
-
-    sp<AMessage> rtpNotify = new AMessage(kWhatRTPNotify, id());
-
-    sp<AMessage> rtcpNotify;
-    if (rtcpMode != TRANSPORT_NONE) {
-        rtcpNotify = new AMessage(kWhatRTCPNotify, id());
-    }
-
-    CHECK_EQ(mRTPSessionID, 0);
-    CHECK_EQ(mRTCPSessionID, 0);
-
-    int32_t localRTPPort;
-
-    struct in_addr ifaceAddr;
-    ifaceAddr.s_addr = INADDR_ANY;
-
-    for (;;) {
-        localRTPPort = PickRandomRTPPort();
-
-        status_t err;
-        if (rtpMode == TRANSPORT_UDP) {
-            err = mNetSession->createUDPSession(
-                    localRTPPort,
-                    rtpNotify,
-                    &mRTPSessionID);
-        } else {
-            CHECK_EQ(rtpMode, TRANSPORT_TCP);
-            err = mNetSession->createTCPDatagramSession(
-                    ifaceAddr,
-                    localRTPPort,
-                    rtpNotify,
-                    &mRTPSessionID);
-        }
-
-        if (err != OK) {
-            continue;
-        }
-
-        if (rtcpMode == TRANSPORT_NONE) {
-            break;
-        } else if (rtcpMode == TRANSPORT_UDP) {
-            err = mNetSession->createUDPSession(
-                    localRTPPort + 1,
-                    rtcpNotify,
-                    &mRTCPSessionID);
-        } else {
-            CHECK_EQ(rtpMode, TRANSPORT_TCP);
-            err = mNetSession->createTCPDatagramSession(
-                    ifaceAddr,
-                    localRTPPort + 1,
-                    rtcpNotify,
-                    &mRTCPSessionID);
-        }
-
-        if (err == OK) {
-            break;
-        }
-
-        mNetSession->destroySession(mRTPSessionID);
-        mRTPSessionID = 0;
-    }
-
-    mRTPMode = rtpMode;
-    mRTCPMode = rtcpMode;
-    *outLocalRTPPort = localRTPPort;
-
-    return OK;
-}
-
-status_t RTPReceiver::connect(
-        const char *remoteHost, int32_t remoteRTPPort, int32_t remoteRTCPPort) {
-    status_t err;
-
-    if (mRTPMode == TRANSPORT_UDP) {
-        CHECK(!mRTPConnected);
-
-        err = mNetSession->connectUDPSession(
-                mRTPSessionID, remoteHost, remoteRTPPort);
-
-        if (err != OK) {
-            notifyInitDone(err);
-            return err;
-        }
-
-        ALOGI("connectUDPSession RTP successful.");
-
-        mRTPConnected = true;
-    }
-
-    if (mRTCPMode == TRANSPORT_UDP) {
-        CHECK(!mRTCPConnected);
-
-        err = mNetSession->connectUDPSession(
-                mRTCPSessionID, remoteHost, remoteRTCPPort);
-
-        if (err != OK) {
-            notifyInitDone(err);
-            return err;
-        }
-
-        scheduleSendRR();
-
-        ALOGI("connectUDPSession RTCP successful.");
-
-        mRTCPConnected = true;
-    }
-
-    if (mRTPConnected
-            && (mRTCPConnected || mRTCPMode == TRANSPORT_NONE)) {
-        notifyInitDone(OK);
-    }
-
-    return OK;
-}
-
-status_t RTPReceiver::informSender(const sp<AMessage> &params) {
-    if (!mRTCPConnected) {
-        return INVALID_OPERATION;
-    }
-
-    int64_t avgLatencyUs;
-    CHECK(params->findInt64("avgLatencyUs", &avgLatencyUs));
-
-    int64_t maxLatencyUs;
-    CHECK(params->findInt64("maxLatencyUs", &maxLatencyUs));
-
-    sp<ABuffer> buf = new ABuffer(28);
-
-    uint8_t *ptr = buf->data();
-    ptr[0] = 0x80 | 0;
-    ptr[1] = 204;  // APP
-    ptr[2] = 0;
-
-    CHECK((buf->size() % 4) == 0u);
-    ptr[3] = (buf->size() / 4) - 1;
-
-    ptr[4] = kSourceID >> 24;  // SSRC
-    ptr[5] = (kSourceID >> 16) & 0xff;
-    ptr[6] = (kSourceID >> 8) & 0xff;
-    ptr[7] = kSourceID & 0xff;
-    ptr[8] = 'l';
-    ptr[9] = 'a';
-    ptr[10] = 't';
-    ptr[11] = 'e';
-
-    ptr[12] = avgLatencyUs >> 56;
-    ptr[13] = (avgLatencyUs >> 48) & 0xff;
-    ptr[14] = (avgLatencyUs >> 40) & 0xff;
-    ptr[15] = (avgLatencyUs >> 32) & 0xff;
-    ptr[16] = (avgLatencyUs >> 24) & 0xff;
-    ptr[17] = (avgLatencyUs >> 16) & 0xff;
-    ptr[18] = (avgLatencyUs >> 8) & 0xff;
-    ptr[19] = avgLatencyUs & 0xff;
-
-    ptr[20] = maxLatencyUs >> 56;
-    ptr[21] = (maxLatencyUs >> 48) & 0xff;
-    ptr[22] = (maxLatencyUs >> 40) & 0xff;
-    ptr[23] = (maxLatencyUs >> 32) & 0xff;
-    ptr[24] = (maxLatencyUs >> 24) & 0xff;
-    ptr[25] = (maxLatencyUs >> 16) & 0xff;
-    ptr[26] = (maxLatencyUs >> 8) & 0xff;
-    ptr[27] = maxLatencyUs & 0xff;
-
-    mNetSession->sendRequest(mRTCPSessionID, buf->data(), buf->size());
-
-    return OK;
-}
-
-void RTPReceiver::onMessageReceived(const sp<AMessage> &msg) {
-    switch (msg->what()) {
-        case kWhatRTPNotify:
-        case kWhatRTCPNotify:
-            onNetNotify(msg->what() == kWhatRTPNotify, msg);
-            break;
-
-        case kWhatSendRR:
-        {
-            onSendRR();
-            break;
-        }
-
-        default:
-            TRESPASS();
-    }
-}
-
-void RTPReceiver::onNetNotify(bool isRTP, const sp<AMessage> &msg) {
-    int32_t reason;
-    CHECK(msg->findInt32("reason", &reason));
-
-    switch (reason) {
-        case ANetworkSession::kWhatError:
-        {
-            int32_t sessionID;
-            CHECK(msg->findInt32("sessionID", &sessionID));
-
-            int32_t err;
-            CHECK(msg->findInt32("err", &err));
-
-            int32_t errorOccuredDuringSend;
-            CHECK(msg->findInt32("send", &errorOccuredDuringSend));
-
-            AString detail;
-            CHECK(msg->findString("detail", &detail));
-
-            ALOGE("An error occurred during %s in session %d "
-                  "(%d, '%s' (%s)).",
-                  errorOccuredDuringSend ? "send" : "receive",
-                  sessionID,
-                  err,
-                  detail.c_str(),
-                  strerror(-err));
-
-            mNetSession->destroySession(sessionID);
-
-            if (sessionID == mRTPSessionID) {
-                mRTPSessionID = 0;
-            } else if (sessionID == mRTCPSessionID) {
-                mRTCPSessionID = 0;
-            } else if (sessionID == mRTPClientSessionID) {
-                mRTPClientSessionID = 0;
-            } else if (sessionID == mRTCPClientSessionID) {
-                mRTCPClientSessionID = 0;
-            }
-
-            if (!mRTPConnected
-                    || (mRTCPMode != TRANSPORT_NONE && !mRTCPConnected)) {
-                notifyInitDone(err);
-                break;
-            }
-
-            notifyError(err);
-            break;
-        }
-
-        case ANetworkSession::kWhatDatagram:
-        {
-            sp<ABuffer> data;
-            CHECK(msg->findBuffer("data", &data));
-
-            if (isRTP) {
-                if (mFlags & FLAG_AUTO_CONNECT) {
-                    AString fromAddr;
-                    CHECK(msg->findString("fromAddr", &fromAddr));
-
-                    int32_t fromPort;
-                    CHECK(msg->findInt32("fromPort", &fromPort));
-
-                    CHECK_EQ((status_t)OK,
-                             connect(
-                                 fromAddr.c_str(), fromPort, fromPort + 1));
-
-                    mFlags &= ~FLAG_AUTO_CONNECT;
-                }
-
-                onRTPData(data);
-            } else {
-                onRTCPData(data);
-            }
-            break;
-        }
-
-        case ANetworkSession::kWhatClientConnected:
-        {
-            int32_t sessionID;
-            CHECK(msg->findInt32("sessionID", &sessionID));
-
-            if (isRTP) {
-                CHECK_EQ(mRTPMode, TRANSPORT_TCP);
-
-                if (mRTPClientSessionID != 0) {
-                    // We only allow a single client connection.
-                    mNetSession->destroySession(sessionID);
-                    sessionID = 0;
-                    break;
-                }
-
-                mRTPClientSessionID = sessionID;
-                mRTPConnected = true;
-            } else {
-                CHECK_EQ(mRTCPMode, TRANSPORT_TCP);
-
-                if (mRTCPClientSessionID != 0) {
-                    // We only allow a single client connection.
-                    mNetSession->destroySession(sessionID);
-                    sessionID = 0;
-                    break;
-                }
-
-                mRTCPClientSessionID = sessionID;
-                mRTCPConnected = true;
-            }
-
-            if (mRTPConnected
-                    && (mRTCPConnected || mRTCPMode == TRANSPORT_NONE)) {
-                notifyInitDone(OK);
-            }
-            break;
-        }
-    }
-}
-
-void RTPReceiver::notifyInitDone(status_t err) {
-    sp<AMessage> notify = mNotify->dup();
-    notify->setInt32("what", kWhatInitDone);
-    notify->setInt32("err", err);
-    notify->post();
-}
-
-void RTPReceiver::notifyError(status_t err) {
-    sp<AMessage> notify = mNotify->dup();
-    notify->setInt32("what", kWhatError);
-    notify->setInt32("err", err);
-    notify->post();
-}
-
-void RTPReceiver::notifyPacketLost() {
-    sp<AMessage> notify = mNotify->dup();
-    notify->setInt32("what", kWhatPacketLost);
-    notify->post();
-}
-
-status_t RTPReceiver::onRTPData(const sp<ABuffer> &buffer) {
-    size_t size = buffer->size();
-    if (size < 12) {
-        // Too short to be a valid RTP header.
-        return ERROR_MALFORMED;
-    }
-
-    const uint8_t *data = buffer->data();
-
-    if ((data[0] >> 6) != 2) {
-        // Unsupported version.
-        return ERROR_UNSUPPORTED;
-    }
-
-    if (data[0] & 0x20) {
-        // Padding present.
-
-        size_t paddingLength = data[size - 1];
-
-        if (paddingLength + 12 > size) {
-            // If we removed this much padding we'd end up with something
-            // that's too short to be a valid RTP header.
-            return ERROR_MALFORMED;
-        }
-
-        size -= paddingLength;
-    }
-
-    int numCSRCs = data[0] & 0x0f;
-
-    size_t payloadOffset = 12 + 4 * numCSRCs;
-
-    if (size < payloadOffset) {
-        // Not enough data to fit the basic header and all the CSRC entries.
-        return ERROR_MALFORMED;
-    }
-
-    if (data[0] & 0x10) {
-        // Header eXtension present.
-
-        if (size < payloadOffset + 4) {
-            // Not enough data to fit the basic header, all CSRC entries
-            // and the first 4 bytes of the extension header.
-
-            return ERROR_MALFORMED;
-        }
-
-        const uint8_t *extensionData = &data[payloadOffset];
-
-        size_t extensionLength =
-            4 * (extensionData[2] << 8 | extensionData[3]);
-
-        if (size < payloadOffset + 4 + extensionLength) {
-            return ERROR_MALFORMED;
-        }
-
-        payloadOffset += 4 + extensionLength;
-    }
-
-    uint32_t srcId = U32_AT(&data[8]);
-    uint32_t rtpTime = U32_AT(&data[4]);
-    uint16_t seqNo = U16_AT(&data[2]);
-
-    sp<AMessage> meta = buffer->meta();
-    meta->setInt32("ssrc", srcId);
-    meta->setInt32("rtp-time", rtpTime);
-    meta->setInt32("PT", data[1] & 0x7f);
-    meta->setInt32("M", data[1] >> 7);
-
-    buffer->setRange(payloadOffset, size - payloadOffset);
-
-    ssize_t index = mSources.indexOfKey(srcId);
-    sp<Source> source;
-    if (index < 0) {
-        source = new Source(this, srcId);
-        looper()->registerHandler(source);
-
-        mSources.add(srcId, source);
-    } else {
-        source = mSources.valueAt(index);
-    }
-
-    source->onPacketReceived(seqNo, buffer);
-
-    return OK;
-}
-
-status_t RTPReceiver::onRTCPData(const sp<ABuffer> &data) {
-    ALOGI("onRTCPData");
-    return OK;
-}
-
-void RTPReceiver::addSDES(const sp<ABuffer> &buffer) {
-    uint8_t *data = buffer->data() + buffer->size();
-    data[0] = 0x80 | 1;
-    data[1] = 202;  // SDES
-    data[4] = kSourceID >> 24;  // SSRC
-    data[5] = (kSourceID >> 16) & 0xff;
-    data[6] = (kSourceID >> 8) & 0xff;
-    data[7] = kSourceID & 0xff;
-
-    size_t offset = 8;
-
-    data[offset++] = 1;  // CNAME
-
-    AString cname = "stagefright@somewhere";
-    data[offset++] = cname.size();
-
-    memcpy(&data[offset], cname.c_str(), cname.size());
-    offset += cname.size();
-
-    data[offset++] = 6;  // TOOL
-
-    AString tool = "stagefright/1.0";
-    data[offset++] = tool.size();
-
-    memcpy(&data[offset], tool.c_str(), tool.size());
-    offset += tool.size();
-
-    data[offset++] = 0;
-
-    if ((offset % 4) > 0) {
-        size_t count = 4 - (offset % 4);
-        switch (count) {
-            case 3:
-                data[offset++] = 0;
-            case 2:
-                data[offset++] = 0;
-            case 1:
-                data[offset++] = 0;
-        }
-    }
-
-    size_t numWords = (offset / 4) - 1;
-    data[2] = numWords >> 8;
-    data[3] = numWords & 0xff;
-
-    buffer->setRange(buffer->offset(), buffer->size() + offset);
-}
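
For reference, a worked layout of the SDES block that addSDES() appends; the
field positions follow RFC 3550, section 6.5, and the byte counts assume the
hard-coded cname and tool strings above:

    // [0]      0x81        V=2, P=0, SC=1 (one chunk)
    // [1]      202         PT = SDES
    // [2..3]   length      packet length in 32-bit words, minus one
    // [4..7]   kSourceID   SSRC of the chunk
    // [8]      1           item type CNAME
    // [9]      21          strlen("stagefright@somewhere")
    // [10..30] cname text
    // [31]     6           item type TOOL
    // [32]     15          strlen("stagefright/1.0")
    // [33..47] tool text
    // [48]     0           end-of-items marker, then 3 bytes of padding
    //
    // offset ends up at 49 and is padded to 52 bytes, so
    // numWords = 52 / 4 - 1 = 12.
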
-
-void RTPReceiver::scheduleSendRR() {
-    (new AMessage(kWhatSendRR, id()))->post(5000000ll);
-}
-
-void RTPReceiver::onSendRR() {
-#if 0
-    sp<ABuffer> buf = new ABuffer(kMaxUDPPacketSize);
-    buf->setRange(0, 0);
-
-    uint8_t *ptr = buf->data();
-    ptr[0] = 0x80 | 0;
-    ptr[1] = 201;  // RR
-    ptr[2] = 0;
-    ptr[3] = 1;
-    ptr[4] = kSourceID >> 24;  // SSRC
-    ptr[5] = (kSourceID >> 16) & 0xff;
-    ptr[6] = (kSourceID >> 8) & 0xff;
-    ptr[7] = kSourceID & 0xff;
-
-    buf->setRange(0, 8);
-
-    size_t numReportBlocks = 0;
-    for (size_t i = 0; i < mSources.size(); ++i) {
-        uint32_t ssrc = mSources.keyAt(i);
-        sp<Source> source = mSources.valueAt(i);
-
-        if (numReportBlocks > 31 || buf->size() + 24 > buf->capacity()) {
-            // Cannot fit another report block.
-            break;
-        }
-
-        source->addReportBlock(ssrc, buf);
-        ++numReportBlocks;
-    }
-
-    ptr[0] |= numReportBlocks;  // 5 bit
-
-    size_t sizeInWordsMinus1 = 1 + 6 * numReportBlocks;
-    ptr[2] = sizeInWordsMinus1 >> 8;
-    ptr[3] = sizeInWordsMinus1 & 0xff;
-
-    buf->setRange(0, (sizeInWordsMinus1 + 1) * 4);
-
-    addSDES(buf);
-
-    mNetSession->sendRequest(mRTCPSessionID, buf->data(), buf->size());
-#endif
-
-    scheduleSendRR();
-}
-
-status_t RTPReceiver::registerPacketType(
-        uint8_t packetType, PacketizationMode mode) {
-    mPacketTypes.add(packetType, mode);
-
-    return OK;
-}
-
-sp<RTPReceiver::Assembler> RTPReceiver::makeAssembler(uint8_t packetType) {
-    ssize_t index = mPacketTypes.indexOfKey(packetType);
-    if (index < 0) {
-        return NULL;
-    }
-
-    PacketizationMode mode = mPacketTypes.valueAt(index);
-
-    switch (mode) {
-        case PACKETIZATION_NONE:
-        case PACKETIZATION_TRANSPORT_STREAM:
-            return new TSAssembler(mNotify);
-
-        case PACKETIZATION_H264:
-            return new H264Assembler(mNotify);
-
-        default:
-            return NULL;
-    }
-}
-
-void RTPReceiver::requestRetransmission(uint32_t senderSSRC, int32_t extSeqNo) {
-    int32_t blp = 0;
-
-    sp<ABuffer> buf = new ABuffer(16);
-    buf->setRange(0, 0);
-
-    uint8_t *ptr = buf->data();
-    ptr[0] = 0x80 | 1;  // generic NACK
-    ptr[1] = 205;  // TSFB
-    ptr[2] = 0;
-    ptr[3] = 3;
-    // RFC 4585: bytes 4-7 carry the SSRC of the packet sender (us),
-    // bytes 8-11 the SSRC of the media source being NACKed.
-    ptr[4] = (kSourceID >> 24) & 0xff;
-    ptr[5] = (kSourceID >> 16) & 0xff;
-    ptr[6] = (kSourceID >> 8) & 0xff;
-    ptr[7] = (kSourceID & 0xff);
-    ptr[8] = (senderSSRC >> 24) & 0xff;
-    ptr[9] = (senderSSRC >> 16) & 0xff;
-    ptr[10] = (senderSSRC >> 8) & 0xff;
-    ptr[11] = (senderSSRC & 0xff);
-    ptr[12] = (extSeqNo >> 8) & 0xff;
-    ptr[13] = (extSeqNo & 0xff);
-    ptr[14] = (blp >> 8) & 0xff;
-    ptr[15] = (blp & 0xff);
-
-    buf->setRange(0, 16);
-
-    mNetSession->sendRequest(mRTCPSessionID, buf->data(), buf->size());
-}
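
requestRetransmission() above emits a 16-byte RTCP transport-layer feedback
message. Assuming the standard RFC 4585 "Generic NACK" layout (PT = 205,
FMT = 1), the bytes break down as:

    // [0]      0x81        V=2, P=0, FMT=1 (Generic NACK)
    // [1]      205         PT = RTPFB (transport-layer feedback)
    // [2..3]   3           length in 32-bit words, minus one
    // [4..7]   kSourceID   SSRC of the packet sender (this receiver)
    // [8..11]  senderSSRC  SSRC of the media source being NACKed
    // [12..13] PID         extSeqNo, the first lost sequence number
    // [14..15] BLP         bitmask of following lost packets (0 here)
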
-
-void RTPReceiver::Source::modifyPacketStatus(int32_t extSeqNo, uint32_t mask) {
-#if TRACK_PACKET_LOSS
-    ssize_t index = mLostPackets.indexOfKey(extSeqNo);
-    if (index < 0) {
-        mLostPackets.add(extSeqNo, mask);
-    } else {
-        mLostPackets.editValueAt(index) |= mask;
-    }
-#endif
-}
-
-void RTPReceiver::Source::postRetransmitTimer(int64_t timeUs) {
-    int64_t delayUs = timeUs - ALooper::GetNowUs();
-    sp<AMessage> msg = new AMessage(kWhatRetransmit, id());
-    msg->setInt32("generation", mRetransmitGeneration);
-    msg->post(delayUs);
-}
-
-void RTPReceiver::Source::postDeclareLostTimer(int64_t timeUs) {
-    CHECK(!mDeclareLostTimerPending);
-    mDeclareLostTimerPending = true;
-
-    int64_t delayUs = timeUs - ALooper::GetNowUs();
-    sp<AMessage> msg = new AMessage(kWhatDeclareLost, id());
-    msg->setInt32("generation", mDeclareLostGeneration);
-    msg->post(delayUs);
-}
-
-void RTPReceiver::Source::cancelTimers() {
-    ++mRetransmitGeneration;
-    ++mDeclareLostGeneration;
-    mDeclareLostTimerPending = false;
-}
-
-}  // namespace android
-
diff --git a/media/libstagefright/wifi-display/rtp/RTPReceiver.h b/media/libstagefright/wifi-display/rtp/RTPReceiver.h
deleted file mode 100644
index 240ab2e..0000000
--- a/media/libstagefright/wifi-display/rtp/RTPReceiver.h
+++ /dev/null
@@ -1,125 +0,0 @@
-/*
- * Copyright 2013, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef RTP_RECEIVER_H_
-
-#define RTP_RECEIVER_H_
-
-#include "RTPBase.h"
-
-#include <media/stagefright/foundation/AHandler.h>
-
-namespace android {
-
-struct ABuffer;
-struct ANetworkSession;
-
-// An object of this class facilitates receiving of media data on an RTP
-// channel. The channel is established over a UDP or TCP connection depending
-// on which "TransportMode" was chosen. In addition different RTP packetization
-// schemes are supported such as "Transport Stream Packets over RTP",
-// or "AVC/H.264 encapsulation as specified in RFC 3984 (non-interleaved mode)"
-struct RTPReceiver : public RTPBase, public AHandler {
-    enum {
-        kWhatInitDone,
-        kWhatError,
-        kWhatAccessUnit,
-        kWhatPacketLost,
-    };
-
-    enum Flags {
-        FLAG_AUTO_CONNECT = 1,
-    };
-    RTPReceiver(
-            const sp<ANetworkSession> &netSession,
-            const sp<AMessage> &notify,
-            uint32_t flags = 0);
-
-    status_t registerPacketType(
-            uint8_t packetType, PacketizationMode mode);
-
-    status_t initAsync(
-            TransportMode rtpMode,
-            TransportMode rtcpMode,
-            int32_t *outLocalRTPPort);
-
-    status_t connect(
-            const char *remoteHost,
-            int32_t remoteRTPPort,
-            int32_t remoteRTCPPort);
-
-    status_t informSender(const sp<AMessage> &params);
-
-protected:
-    virtual ~RTPReceiver();
-    virtual void onMessageReceived(const sp<AMessage> &msg);
-
-private:
-    enum {
-        kWhatRTPNotify,
-        kWhatRTCPNotify,
-        kWhatSendRR,
-    };
-
-    enum {
-        kSourceID                       = 0xdeadbeef,
-        kPacketLostAfterUs              = 100000,
-        kRequestRetransmissionAfterUs   = -1,
-    };
-
-    struct Assembler;
-    struct H264Assembler;
-    struct Source;
-    struct TSAssembler;
-
-    sp<ANetworkSession> mNetSession;
-    sp<AMessage> mNotify;
-    uint32_t mFlags;
-    TransportMode mRTPMode;
-    TransportMode mRTCPMode;
-    int32_t mRTPSessionID;
-    int32_t mRTCPSessionID;
-    bool mRTPConnected;
-    bool mRTCPConnected;
-
-    int32_t mRTPClientSessionID;  // in TRANSPORT_TCP mode.
-    int32_t mRTCPClientSessionID;  // in TRANSPORT_TCP mode.
-
-    KeyedVector<uint8_t, PacketizationMode> mPacketTypes;
-    KeyedVector<uint32_t, sp<Source> > mSources;
-
-    void onNetNotify(bool isRTP, const sp<AMessage> &msg);
-    status_t onRTPData(const sp<ABuffer> &data);
-    status_t onRTCPData(const sp<ABuffer> &data);
-    void onSendRR();
-
-    void scheduleSendRR();
-    void addSDES(const sp<ABuffer> &buffer);
-
-    void notifyInitDone(status_t err);
-    void notifyError(status_t err);
-    void notifyPacketLost();
-
-    sp<Assembler> makeAssembler(uint8_t packetType);
-
-    void requestRetransmission(uint32_t senderSSRC, int32_t extSeqNo);
-
-    DISALLOW_EVIL_CONSTRUCTORS(RTPReceiver);
-};
-
-}  // namespace android
-
-#endif  // RTP_RECEIVER_H_
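
For context, a minimal sketch of how this receiver API was driven; it mirrors
the listen path of the rtptest harness removed below, and assumes the caller
is an AHandler on a running ALooper with a started ANetworkSession:

    sp<AMessage> notify = new AMessage(kWhatReceiverNotify, id());
    sp<RTPReceiver> receiver =
        new RTPReceiver(netSession, notify, RTPReceiver::FLAG_AUTO_CONNECT);
    looper()->registerHandler(receiver);

    // Payload type 33 (MP2T), carried as transport stream packets over RTP.
    CHECK_EQ((status_t)OK,
             receiver->registerPacketType(
                 33, RTPBase::PACKETIZATION_TRANSPORT_STREAM));

    int32_t localRTPPort;
    CHECK_EQ((status_t)OK,
             receiver->initAsync(
                 RTPBase::TRANSPORT_UDP,   // RTP
                 RTPBase::TRANSPORT_UDP,   // RTCP
                 &localRTPPort));

    // A kWhatInitDone notification follows; reassembled buffers then arrive
    // through kWhatAccessUnit on the notify message.
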
diff --git a/media/libstagefright/wifi-display/rtp/RTPSender.cpp b/media/libstagefright/wifi-display/rtp/RTPSender.cpp
index 6bbe650..095fd97 100644
--- a/media/libstagefright/wifi-display/rtp/RTPSender.cpp
+++ b/media/libstagefright/wifi-display/rtp/RTPSender.cpp
@@ -767,17 +767,6 @@
 }
 
 status_t RTPSender::parseAPP(const uint8_t *data, size_t size) {
-    if (!memcmp("late", &data[8], 4)) {
-        int64_t avgLatencyUs = (int64_t)U64_AT(&data[12]);
-        int64_t maxLatencyUs = (int64_t)U64_AT(&data[20]);
-
-        sp<AMessage> notify = mNotify->dup();
-        notify->setInt32("what", kWhatInformSender);
-        notify->setInt64("avgLatencyUs", avgLatencyUs);
-        notify->setInt64("maxLatencyUs", maxLatencyUs);
-        notify->post();
-    }
-
     return OK;
 }
 
diff --git a/media/libstagefright/wifi-display/rtp/RTPSender.h b/media/libstagefright/wifi-display/rtp/RTPSender.h
index fefcab7..7dc138a 100644
--- a/media/libstagefright/wifi-display/rtp/RTPSender.h
+++ b/media/libstagefright/wifi-display/rtp/RTPSender.h
@@ -37,7 +37,6 @@
         kWhatInitDone,
         kWhatError,
         kWhatNetworkStall,
-        kWhatInformSender,
     };
     RTPSender(
             const sp<ANetworkSession> &netSession,
diff --git a/media/libstagefright/wifi-display/rtptest.cpp b/media/libstagefright/wifi-display/rtptest.cpp
deleted file mode 100644
index 764a38b..0000000
--- a/media/libstagefright/wifi-display/rtptest.cpp
+++ /dev/null
@@ -1,565 +0,0 @@
-/*
- * Copyright 2013, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "rtptest"
-#include <utils/Log.h>
-
-#include "ANetworkSession.h"
-#include "rtp/RTPSender.h"
-#include "rtp/RTPReceiver.h"
-#include "TimeSyncer.h"
-
-#include <binder/ProcessState.h>
-#include <media/stagefright/foundation/ABuffer.h>
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/foundation/AHandler.h>
-#include <media/stagefright/foundation/ALooper.h>
-#include <media/stagefright/foundation/AMessage.h>
-#include <media/stagefright/foundation/hexdump.h>
-#include <media/stagefright/DataSource.h>
-#include <media/stagefright/MediaDefs.h>
-#include <media/stagefright/NuMediaExtractor.h>
-#include <media/stagefright/Utils.h>
-
-#define MEDIA_FILENAME "/sdcard/Frame Counter HD 30FPS_1080p.mp4"
-
-namespace android {
-
-struct PacketSource : public RefBase {
-    PacketSource() {}
-
-    virtual sp<ABuffer> getNextAccessUnit() = 0;
-
-protected:
-    virtual ~PacketSource() {}
-
-private:
-    DISALLOW_EVIL_CONSTRUCTORS(PacketSource);
-};
-
-struct MediaPacketSource : public PacketSource {
-    MediaPacketSource()
-        : mMaxSampleSize(1024 * 1024) {
-        mExtractor = new NuMediaExtractor;
-        CHECK_EQ((status_t)OK,
-                 mExtractor->setDataSource(MEDIA_FILENAME));
-
-        bool haveVideo = false;
-        for (size_t i = 0; i < mExtractor->countTracks(); ++i) {
-            sp<AMessage> format;
-            CHECK_EQ((status_t)OK, mExtractor->getTrackFormat(i, &format));
-
-            AString mime;
-            CHECK(format->findString("mime", &mime));
-
-            if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_AVC, mime.c_str())) {
-                mExtractor->selectTrack(i);
-                haveVideo = true;
-                break;
-            }
-        }
-
-        CHECK(haveVideo);
-    }
-
-    virtual sp<ABuffer> getNextAccessUnit() {
-        int64_t timeUs;
-        status_t err = mExtractor->getSampleTime(&timeUs);
-
-        if (err != OK) {
-            return NULL;
-        }
-
-        sp<ABuffer> accessUnit = new ABuffer(mMaxSampleSize);
-        CHECK_EQ((status_t)OK, mExtractor->readSampleData(accessUnit));
-
-        accessUnit->meta()->setInt64("timeUs", timeUs);
-
-        CHECK_EQ((status_t)OK, mExtractor->advance());
-
-        return accessUnit;
-    }
-
-protected:
-    virtual ~MediaPacketSource() {
-    }
-
-private:
-    sp<NuMediaExtractor> mExtractor;
-    size_t mMaxSampleSize;
-
-    DISALLOW_EVIL_CONSTRUCTORS(MediaPacketSource);
-};
-
-struct SimplePacketSource : public PacketSource {
-    SimplePacketSource()
-        : mCounter(0) {
-    }
-
-    virtual sp<ABuffer> getNextAccessUnit() {
-        sp<ABuffer> buffer = new ABuffer(4);
-        uint8_t *dst = buffer->data();
-        dst[0] = mCounter >> 24;
-        dst[1] = (mCounter >> 16) & 0xff;
-        dst[2] = (mCounter >> 8) & 0xff;
-        dst[3] = mCounter & 0xff;
-
-        buffer->meta()->setInt64("timeUs", mCounter * 1000000ll / kFrameRate);
-
-        ++mCounter;
-
-        return buffer;
-    }
-
-protected:
-    virtual ~SimplePacketSource() {
-    }
-
-private:
-    enum {
-        kFrameRate = 30
-    };
-
-    uint32_t mCounter;
-
-    DISALLOW_EVIL_CONSTRUCTORS(SimplePacketSource);
-};
-
-struct TestHandler : public AHandler {
-    TestHandler(const sp<ANetworkSession> &netSession);
-
-    void listen();
-    void connect(const char *host, int32_t port);
-
-protected:
-    virtual ~TestHandler();
-    virtual void onMessageReceived(const sp<AMessage> &msg);
-
-private:
-    enum {
-        kWhatListen,
-        kWhatConnect,
-        kWhatReceiverNotify,
-        kWhatSenderNotify,
-        kWhatSendMore,
-        kWhatStop,
-        kWhatTimeSyncerNotify,
-    };
-
-#if 1
-    static const RTPBase::TransportMode kRTPMode = RTPBase::TRANSPORT_UDP;
-    static const RTPBase::TransportMode kRTCPMode = RTPBase::TRANSPORT_UDP;
-#else
-    static const RTPBase::TransportMode kRTPMode = RTPBase::TRANSPORT_TCP;
-    static const RTPBase::TransportMode kRTCPMode = RTPBase::TRANSPORT_NONE;
-#endif
-
-#if 1
-    static const RTPBase::PacketizationMode kPacketizationMode
-        = RTPBase::PACKETIZATION_H264;
-#else
-    static const RTPBase::PacketizationMode kPacketizationMode
-        = RTPBase::PACKETIZATION_NONE;
-#endif
-
-    sp<ANetworkSession> mNetSession;
-    sp<PacketSource> mSource;
-    sp<RTPSender> mSender;
-    sp<RTPReceiver> mReceiver;
-
-    sp<TimeSyncer> mTimeSyncer;
-    bool mTimeSyncerStarted;
-
-    int64_t mFirstTimeRealUs;
-    int64_t mFirstTimeMediaUs;
-
-    int64_t mTimeOffsetUs;
-    bool mTimeOffsetValid;
-
-    status_t readMore();
-
-    DISALLOW_EVIL_CONSTRUCTORS(TestHandler);
-};
-
-TestHandler::TestHandler(const sp<ANetworkSession> &netSession)
-    : mNetSession(netSession),
-      mTimeSyncerStarted(false),
-      mFirstTimeRealUs(-1ll),
-      mFirstTimeMediaUs(-1ll),
-      mTimeOffsetUs(-1ll),
-      mTimeOffsetValid(false) {
-}
-
-TestHandler::~TestHandler() {
-}
-
-void TestHandler::listen() {
-    sp<AMessage> msg = new AMessage(kWhatListen, id());
-    msg->post();
-}
-
-void TestHandler::connect(const char *host, int32_t port) {
-    sp<AMessage> msg = new AMessage(kWhatConnect, id());
-    msg->setString("host", host);
-    msg->setInt32("port", port);
-    msg->post();
-}
-
-static void dumpDelay(int64_t delayMs) {
-    static const int64_t kMinDelayMs = 0;
-    static const int64_t kMaxDelayMs = 300;
-
-    const char *kPattern = "########################################";
-    size_t kPatternSize = strlen(kPattern);
-
-    int n = (kPatternSize * (delayMs - kMinDelayMs))
-                / (kMaxDelayMs - kMinDelayMs);
-
-    if (n < 0) {
-        n = 0;
-    } else if ((size_t)n > kPatternSize) {
-        n = kPatternSize;
-    }
-
-    ALOGI("(%4lld ms) %s\n",
-          delayMs,
-          kPattern + kPatternSize - n);
-}
-
-void TestHandler::onMessageReceived(const sp<AMessage> &msg) {
-    switch (msg->what()) {
-        case kWhatListen:
-        {
-            sp<AMessage> notify = new AMessage(kWhatTimeSyncerNotify, id());
-            mTimeSyncer = new TimeSyncer(mNetSession, notify);
-            looper()->registerHandler(mTimeSyncer);
-
-            notify = new AMessage(kWhatReceiverNotify, id());
-            mReceiver = new RTPReceiver(
-                    mNetSession, notify, RTPReceiver::FLAG_AUTO_CONNECT);
-            looper()->registerHandler(mReceiver);
-
-            CHECK_EQ((status_t)OK,
-                     mReceiver->registerPacketType(33, kPacketizationMode));
-
-            int32_t receiverRTPPort;
-            CHECK_EQ((status_t)OK,
-                     mReceiver->initAsync(
-                         kRTPMode,
-                         kRTCPMode,
-                         &receiverRTPPort));
-
-            printf("picked receiverRTPPort %d\n", receiverRTPPort);
-
-#if 0
-            CHECK_EQ((status_t)OK,
-                     mReceiver->connect(
-                         "127.0.0.1", senderRTPPort, senderRTPPort + 1));
-#endif
-            break;
-        }
-
-        case kWhatConnect:
-        {
-            AString host;
-            CHECK(msg->findString("host", &host));
-
-            sp<AMessage> notify = new AMessage(kWhatTimeSyncerNotify, id());
-            mTimeSyncer = new TimeSyncer(mNetSession, notify);
-            looper()->registerHandler(mTimeSyncer);
-            mTimeSyncer->startServer(8123);
-
-            int32_t receiverRTPPort;
-            CHECK(msg->findInt32("port", &receiverRTPPort));
-
-#if 1
-            mSource = new MediaPacketSource;
-#else
-            mSource = new SimplePacketSource;
-#endif
-
-            notify = new AMessage(kWhatSenderNotify, id());
-            mSender = new RTPSender(mNetSession, notify);
-
-            looper()->registerHandler(mSender);
-
-            int32_t senderRTPPort;
-            CHECK_EQ((status_t)OK,
-                     mSender->initAsync(
-                         host.c_str(),
-                         receiverRTPPort,
-                         kRTPMode,
-                         kRTCPMode == RTPBase::TRANSPORT_NONE
-                            ? -1 : receiverRTPPort + 1,
-                         kRTCPMode,
-                         &senderRTPPort));
-
-            printf("picked senderRTPPort %d\n", senderRTPPort);
-            break;
-        }
-
-        case kWhatSenderNotify:
-        {
-            ALOGI("kWhatSenderNotify");
-
-            int32_t what;
-            CHECK(msg->findInt32("what", &what));
-
-            switch (what) {
-                case RTPSender::kWhatInitDone:
-                {
-                    int32_t err;
-                    CHECK(msg->findInt32("err", &err));
-
-                    ALOGI("RTPSender::initAsync completed w/ err %d", err);
-
-                    if (err == OK) {
-                        err = readMore();
-
-                        if (err != OK) {
-                            (new AMessage(kWhatStop, id()))->post();
-                        }
-                    }
-                    break;
-                }
-
-                case RTPSender::kWhatError:
-                    break;
-            }
-            break;
-        }
-
-        case kWhatReceiverNotify:
-        {
-            ALOGV("kWhatReceiverNotify");
-
-            int32_t what;
-            CHECK(msg->findInt32("what", &what));
-
-            switch (what) {
-                case RTPReceiver::kWhatInitDone:
-                {
-                    int32_t err;
-                    CHECK(msg->findInt32("err", &err));
-
-                    ALOGI("RTPReceiver::initAsync completed w/ err %d", err);
-                    break;
-                }
-
-                case RTPReceiver::kWhatError:
-                    break;
-
-                case RTPReceiver::kWhatAccessUnit:
-                {
-#if 0
-                    if (!mTimeSyncerStarted) {
-                        mTimeSyncer->startClient("172.18.41.216", 8123);
-                        mTimeSyncerStarted = true;
-                    }
-
-                    sp<ABuffer> accessUnit;
-                    CHECK(msg->findBuffer("accessUnit", &accessUnit));
-
-                    int64_t timeUs;
-                    CHECK(accessUnit->meta()->findInt64("timeUs", &timeUs));
-
-                    if (mTimeOffsetValid) {
-                        timeUs -= mTimeOffsetUs;
-                        int64_t nowUs = ALooper::GetNowUs();
-                        int64_t delayMs = (nowUs - timeUs) / 1000ll;
-
-                        dumpDelay(delayMs);
-                    }
-#endif
-                    break;
-                }
-
-                case RTPReceiver::kWhatPacketLost:
-                    ALOGV("kWhatPacketLost");
-                    break;
-
-                default:
-                    TRESPASS();
-            }
-            break;
-        }
-
-        case kWhatSendMore:
-        {
-            sp<ABuffer> accessUnit;
-            CHECK(msg->findBuffer("accessUnit", &accessUnit));
-
-            CHECK_EQ((status_t)OK,
-                     mSender->queueBuffer(
-                         accessUnit,
-                         33,
-                         kPacketizationMode));
-
-            status_t err = readMore();
-
-            if (err != OK) {
-                (new AMessage(kWhatStop, id()))->post();
-            }
-            break;
-        }
-
-        case kWhatStop:
-        {
-            if (mReceiver != NULL) {
-                looper()->unregisterHandler(mReceiver->id());
-                mReceiver.clear();
-            }
-
-            if (mSender != NULL) {
-                looper()->unregisterHandler(mSender->id());
-                mSender.clear();
-            }
-
-            mSource.clear();
-
-            looper()->stop();
-            break;
-        }
-
-        case kWhatTimeSyncerNotify:
-        {
-            CHECK(msg->findInt64("offset", &mTimeOffsetUs));
-            mTimeOffsetValid = true;
-            break;
-        }
-
-        default:
-            TRESPASS();
-    }
-}
-
-status_t TestHandler::readMore() {
-    sp<ABuffer> accessUnit = mSource->getNextAccessUnit();
-
-    if (accessUnit == NULL) {
-        return ERROR_END_OF_STREAM;
-    }
-
-    int64_t timeUs;
-    CHECK(accessUnit->meta()->findInt64("timeUs", &timeUs));
-
-    int64_t nowUs = ALooper::GetNowUs();
-    int64_t whenUs;
-
-    if (mFirstTimeRealUs < 0ll) {
-        mFirstTimeRealUs = whenUs = nowUs;
-        mFirstTimeMediaUs = timeUs;
-    } else {
-        whenUs = mFirstTimeRealUs + timeUs - mFirstTimeMediaUs;
-    }
-
-    accessUnit->meta()->setInt64("timeUs", whenUs);
-
-    sp<AMessage> msg = new AMessage(kWhatSendMore, id());
-    msg->setBuffer("accessUnit", accessUnit);
-    msg->post(whenUs - nowUs);
-
-    return OK;
-}
-
-}  // namespace android
-
-static void usage(const char *me) {
-    fprintf(stderr,
-            "usage: %s -c host:port\tconnect to remote host\n"
-            "               -l       \tlisten\n",
-            me);
-}
-
-int main(int argc, char **argv) {
-    using namespace android;
-
-    // srand(time(NULL));
-
-    ProcessState::self()->startThreadPool();
-
-    DataSource::RegisterDefaultSniffers();
-
-    bool listen = false;
-    int32_t connectToPort = -1;
-    AString connectToHost;
-
-    int res;
-    while ((res = getopt(argc, argv, "hc:l")) >= 0) {
-        switch (res) {
-            case 'c':
-            {
-                const char *colonPos = strrchr(optarg, ':');
-
-                if (colonPos == NULL) {
-                    usage(argv[0]);
-                    exit(1);
-                }
-
-                connectToHost.setTo(optarg, colonPos - optarg);
-
-                char *end;
-                connectToPort = strtol(colonPos + 1, &end, 10);
-
-                if (*end != '\0' || end == colonPos + 1
-                        || connectToPort < 1 || connectToPort > 65535) {
-                    fprintf(stderr, "Illegal port specified.\n");
-                    exit(1);
-                }
-                break;
-            }
-
-            case 'l':
-            {
-                listen = true;
-                break;
-            }
-
-            case '?':
-            case 'h':
-                usage(argv[0]);
-                exit(1);
-        }
-    }
-
-    if (!listen && connectToPort < 0) {
-        fprintf(stderr,
-                "You need to select either client or server mode.\n");
-        exit(1);
-    }
-
-    sp<ANetworkSession> netSession = new ANetworkSession;
-    netSession->start();
-
-    sp<ALooper> looper = new ALooper;
-
-    sp<TestHandler> handler = new TestHandler(netSession);
-    looper->registerHandler(handler);
-
-    if (listen) {
-        handler->listen();
-    }
-
-    if (connectToPort >= 0) {
-        handler->connect(connectToHost.c_str(), connectToPort);
-    }
-
-    looper->start(true /* runOnCallingThread */);
-
-    return 0;
-}
-
diff --git a/media/libstagefright/wifi-display/sink/DirectRenderer.cpp b/media/libstagefright/wifi-display/sink/DirectRenderer.cpp
deleted file mode 100644
index 15f9c88..0000000
--- a/media/libstagefright/wifi-display/sink/DirectRenderer.cpp
+++ /dev/null
@@ -1,625 +0,0 @@
-/*
- * Copyright 2012, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "DirectRenderer"
-#include <utils/Log.h>
-
-#include "DirectRenderer.h"
-
-#include <gui/SurfaceComposerClient.h>
-#include <gui/Surface.h>
-#include <media/AudioTrack.h>
-#include <media/ICrypto.h>
-#include <media/stagefright/foundation/ABuffer.h>
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/foundation/AMessage.h>
-#include <media/stagefright/foundation/hexdump.h>
-#include <media/stagefright/MediaCodec.h>
-#include <media/stagefright/MediaErrors.h>
-#include <media/stagefright/MetaData.h>
-#include <media/stagefright/Utils.h>
-
-namespace android {
-
-/*
-   Drives the decoding process using a MediaCodec instance. Input buffers
-   queued by calls to "queueInputBuffer" are fed to the decoder as soon
-   as the decoder is ready for them; the client is notified about output
-   buffers as the decoder emits them.
-*/
-struct DirectRenderer::DecoderContext : public AHandler {
-    enum {
-        kWhatOutputBufferReady,
-    };
-    DecoderContext(const sp<AMessage> &notify);
-
-    status_t init(
-            const sp<AMessage> &format,
-            const sp<IGraphicBufferProducer> &surfaceTex);
-
-    void queueInputBuffer(const sp<ABuffer> &accessUnit);
-
-    status_t renderOutputBufferAndRelease(size_t index);
-    status_t releaseOutputBuffer(size_t index);
-
-protected:
-    virtual ~DecoderContext();
-
-    virtual void onMessageReceived(const sp<AMessage> &msg);
-
-private:
-    enum {
-        kWhatDecoderNotify,
-    };
-
-    sp<AMessage> mNotify;
-    sp<ALooper> mDecoderLooper;
-    sp<MediaCodec> mDecoder;
-    Vector<sp<ABuffer> > mDecoderInputBuffers;
-    Vector<sp<ABuffer> > mDecoderOutputBuffers;
-    List<size_t> mDecoderInputBuffersAvailable;
-    bool mDecoderNotificationPending;
-
-    List<sp<ABuffer> > mAccessUnits;
-
-    void onDecoderNotify();
-    void scheduleDecoderNotification();
-    void queueDecoderInputBuffers();
-
-    void queueOutputBuffer(
-            size_t index, int64_t timeUs, const sp<ABuffer> &buffer);
-
-    DISALLOW_EVIL_CONSTRUCTORS(DecoderContext);
-};
-
-////////////////////////////////////////////////////////////////////////////////
-
-/*
-   A "push" audio renderer. The primary function of this renderer is to use
-   an AudioTrack in push mode while making sure not to block the event loop,
-   by ensuring that calls to AudioTrack::write never block. This is done by
-   estimating an upper bound on the amount of data that can be written to the
-   AudioTrack buffer without delay.
-*/
-struct DirectRenderer::AudioRenderer : public AHandler {
-    AudioRenderer(const sp<DecoderContext> &decoderContext);
-
-    void queueInputBuffer(
-            size_t index, int64_t timeUs, const sp<ABuffer> &buffer);
-
-protected:
-    virtual ~AudioRenderer();
-    virtual void onMessageReceived(const sp<AMessage> &msg);
-
-private:
-    enum {
-        kWhatPushAudio,
-    };
-
-    struct BufferInfo {
-        size_t mIndex;
-        int64_t mTimeUs;
-        sp<ABuffer> mBuffer;
-    };
-
-    sp<DecoderContext> mDecoderContext;
-    sp<AudioTrack> mAudioTrack;
-
-    List<BufferInfo> mInputBuffers;
-    bool mPushPending;
-
-    size_t mNumFramesWritten;
-
-    void schedulePushIfNecessary();
-    void onPushAudio();
-
-    ssize_t writeNonBlocking(const uint8_t *data, size_t size);
-
-    DISALLOW_EVIL_CONSTRUCTORS(AudioRenderer);
-};
-
-////////////////////////////////////////////////////////////////////////////////
-
-DirectRenderer::DecoderContext::DecoderContext(const sp<AMessage> &notify)
-    : mNotify(notify),
-      mDecoderNotificationPending(false) {
-}
-
-DirectRenderer::DecoderContext::~DecoderContext() {
-    if (mDecoder != NULL) {
-        mDecoder->release();
-        mDecoder.clear();
-
-        mDecoderLooper->stop();
-        mDecoderLooper.clear();
-    }
-}
-
-status_t DirectRenderer::DecoderContext::init(
-        const sp<AMessage> &format,
-        const sp<IGraphicBufferProducer> &surfaceTex) {
-    CHECK(mDecoder == NULL);
-
-    AString mime;
-    CHECK(format->findString("mime", &mime));
-
-    mDecoderLooper = new ALooper;
-    mDecoderLooper->setName("video codec looper");
-
-    mDecoderLooper->start(
-            false /* runOnCallingThread */,
-            false /* canCallJava */,
-            PRIORITY_DEFAULT);
-
-    mDecoder = MediaCodec::CreateByType(
-            mDecoderLooper, mime.c_str(), false /* encoder */);
-
-    CHECK(mDecoder != NULL);
-
-    status_t err = mDecoder->configure(
-            format,
-            surfaceTex == NULL
-                ? NULL : new Surface(surfaceTex),
-            NULL /* crypto */,
-            0 /* flags */);
-    CHECK_EQ(err, (status_t)OK);
-
-    err = mDecoder->start();
-    CHECK_EQ(err, (status_t)OK);
-
-    err = mDecoder->getInputBuffers(
-            &mDecoderInputBuffers);
-    CHECK_EQ(err, (status_t)OK);
-
-    err = mDecoder->getOutputBuffers(
-            &mDecoderOutputBuffers);
-    CHECK_EQ(err, (status_t)OK);
-
-    scheduleDecoderNotification();
-
-    return OK;
-}
-
-void DirectRenderer::DecoderContext::queueInputBuffer(
-        const sp<ABuffer> &accessUnit) {
-    CHECK(mDecoder != NULL);
-
-    mAccessUnits.push_back(accessUnit);
-    queueDecoderInputBuffers();
-}
-
-status_t DirectRenderer::DecoderContext::renderOutputBufferAndRelease(
-        size_t index) {
-    return mDecoder->renderOutputBufferAndRelease(index);
-}
-
-status_t DirectRenderer::DecoderContext::releaseOutputBuffer(size_t index) {
-    return mDecoder->releaseOutputBuffer(index);
-}
-
-void DirectRenderer::DecoderContext::queueDecoderInputBuffers() {
-    if (mDecoder == NULL) {
-        return;
-    }
-
-    bool submittedMore = false;
-
-    while (!mAccessUnits.empty()
-            && !mDecoderInputBuffersAvailable.empty()) {
-        size_t index = *mDecoderInputBuffersAvailable.begin();
-
-        mDecoderInputBuffersAvailable.erase(
-                mDecoderInputBuffersAvailable.begin());
-
-        sp<ABuffer> srcBuffer = *mAccessUnits.begin();
-        mAccessUnits.erase(mAccessUnits.begin());
-
-        const sp<ABuffer> &dstBuffer =
-            mDecoderInputBuffers.itemAt(index);
-
-        memcpy(dstBuffer->data(), srcBuffer->data(), srcBuffer->size());
-
-        int64_t timeUs;
-        CHECK(srcBuffer->meta()->findInt64("timeUs", &timeUs));
-
-        status_t err = mDecoder->queueInputBuffer(
-                index,
-                0 /* offset */,
-                srcBuffer->size(),
-                timeUs,
-                0 /* flags */);
-        CHECK_EQ(err, (status_t)OK);
-
-        submittedMore = true;
-    }
-
-    if (submittedMore) {
-        scheduleDecoderNotification();
-    }
-}
-
-void DirectRenderer::DecoderContext::onMessageReceived(
-        const sp<AMessage> &msg) {
-    switch (msg->what()) {
-        case kWhatDecoderNotify:
-        {
-            onDecoderNotify();
-            break;
-        }
-
-        default:
-            TRESPASS();
-    }
-}
-
-void DirectRenderer::DecoderContext::onDecoderNotify() {
-    mDecoderNotificationPending = false;
-
-    for (;;) {
-        size_t index;
-        status_t err = mDecoder->dequeueInputBuffer(&index);
-
-        if (err == OK) {
-            mDecoderInputBuffersAvailable.push_back(index);
-        } else if (err == -EAGAIN) {
-            break;
-        } else {
-            TRESPASS();
-        }
-    }
-
-    queueDecoderInputBuffers();
-
-    for (;;) {
-        size_t index;
-        size_t offset;
-        size_t size;
-        int64_t timeUs;
-        uint32_t flags;
-        status_t err = mDecoder->dequeueOutputBuffer(
-                &index,
-                &offset,
-                &size,
-                &timeUs,
-                &flags);
-
-        if (err == OK) {
-            queueOutputBuffer(
-                    index, timeUs, mDecoderOutputBuffers.itemAt(index));
-        } else if (err == INFO_OUTPUT_BUFFERS_CHANGED) {
-            err = mDecoder->getOutputBuffers(
-                    &mDecoderOutputBuffers);
-            CHECK_EQ(err, (status_t)OK);
-        } else if (err == INFO_FORMAT_CHANGED) {
-            // We don't care.
-        } else if (err == -EAGAIN) {
-            break;
-        } else {
-            TRESPASS();
-        }
-    }
-
-    scheduleDecoderNotification();
-}
-
-void DirectRenderer::DecoderContext::scheduleDecoderNotification() {
-    if (mDecoderNotificationPending) {
-        return;
-    }
-
-    sp<AMessage> notify =
-        new AMessage(kWhatDecoderNotify, id());
-
-    mDecoder->requestActivityNotification(notify);
-    mDecoderNotificationPending = true;
-}
-
-void DirectRenderer::DecoderContext::queueOutputBuffer(
-        size_t index, int64_t timeUs, const sp<ABuffer> &buffer) {
-    sp<AMessage> msg = mNotify->dup();
-    msg->setInt32("what", kWhatOutputBufferReady);
-    msg->setSize("index", index);
-    msg->setInt64("timeUs", timeUs);
-    msg->setBuffer("buffer", buffer);
-    msg->post();
-}
-
-////////////////////////////////////////////////////////////////////////////////
-
-DirectRenderer::AudioRenderer::AudioRenderer(
-        const sp<DecoderContext> &decoderContext)
-    : mDecoderContext(decoderContext),
-      mPushPending(false),
-      mNumFramesWritten(0) {
-    mAudioTrack = new AudioTrack(
-            AUDIO_STREAM_DEFAULT,
-            48000.0f,
-            AUDIO_FORMAT_PCM,
-            AUDIO_CHANNEL_OUT_STEREO,
-            (int)0 /* frameCount */);
-
-    CHECK_EQ((status_t)OK, mAudioTrack->initCheck());
-
-    mAudioTrack->start();
-}
-
-DirectRenderer::AudioRenderer::~AudioRenderer() {
-}
-
-void DirectRenderer::AudioRenderer::queueInputBuffer(
-        size_t index, int64_t timeUs, const sp<ABuffer> &buffer) {
-    BufferInfo info;
-    info.mIndex = index;
-    info.mTimeUs = timeUs;
-    info.mBuffer = buffer;
-
-    mInputBuffers.push_back(info);
-    schedulePushIfNecessary();
-}
-
-void DirectRenderer::AudioRenderer::onMessageReceived(
-        const sp<AMessage> &msg) {
-    switch (msg->what()) {
-        case kWhatPushAudio:
-        {
-            onPushAudio();
-            break;
-        }
-
-        default:
-            break;
-    }
-}
-
-void DirectRenderer::AudioRenderer::schedulePushIfNecessary() {
-    if (mPushPending || mInputBuffers.empty()) {
-        return;
-    }
-
-    mPushPending = true;
-
-    uint32_t numFramesPlayed;
-    CHECK_EQ(mAudioTrack->getPosition(&numFramesPlayed),
-             (status_t)OK);
-
-    uint32_t numFramesPendingPlayout = mNumFramesWritten - numFramesPlayed;
-
-    // This is how long the audio sink will have data to
-    // play back.
-    const float msecsPerFrame = 1000.0f / mAudioTrack->getSampleRate();
-
-    int64_t delayUs =
-        msecsPerFrame * numFramesPendingPlayout * 1000ll;
-
-    // Let's give it more data after about half that time
-    // has elapsed.
-    (new AMessage(kWhatPushAudio, id()))->post(delayUs / 2);
-}
-
-void DirectRenderer::AudioRenderer::onPushAudio() {
-    mPushPending = false;
-
-    while (!mInputBuffers.empty()) {
-        const BufferInfo &info = *mInputBuffers.begin();
-
-        ssize_t n = writeNonBlocking(
-                info.mBuffer->data(), info.mBuffer->size());
-
-        if (n < (ssize_t)info.mBuffer->size()) {
-            CHECK_GE(n, 0);
-
-            info.mBuffer->setRange(
-                    info.mBuffer->offset() + n, info.mBuffer->size() - n);
-            break;
-        }
-
-        mDecoderContext->releaseOutputBuffer(info.mIndex);
-
-        mInputBuffers.erase(mInputBuffers.begin());
-    }
-
-    schedulePushIfNecessary();
-}
-
-ssize_t DirectRenderer::AudioRenderer::writeNonBlocking(
-        const uint8_t *data, size_t size) {
-    uint32_t numFramesPlayed;
-    status_t err = mAudioTrack->getPosition(&numFramesPlayed);
-    if (err != OK) {
-        return err;
-    }
-
-    ssize_t numFramesAvailableToWrite =
-        mAudioTrack->frameCount() - (mNumFramesWritten - numFramesPlayed);
-
-    size_t numBytesAvailableToWrite =
-        numFramesAvailableToWrite * mAudioTrack->frameSize();
-
-    if (size > numBytesAvailableToWrite) {
-        size = numBytesAvailableToWrite;
-    }
-
-    CHECK_EQ(mAudioTrack->write(data, size), (ssize_t)size);
-
-    size_t numFramesWritten = size / mAudioTrack->frameSize();
-    mNumFramesWritten += numFramesWritten;
-
-    return size;
-}
-
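
The write budget computed above can be made concrete with a small worked
example (the numbers are illustrative only):

    // Say frameCount() == 4800 and frameSize() == 4 (16-bit stereo).
    // If 48000 frames have been written so far and getPosition() reports
    // 45600 frames played:
    //   pending   = 48000 - 45600 = 2400 frames still queued
    //   writable  = 4800  - 2400  = 2400 frames of free space
    //   byteLimit = 2400  * 4     = 9600 bytes
    // Any larger write request is clamped to 9600 bytes, so the call to
    // AudioTrack::write() always fits in the buffer and never blocks.
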
-////////////////////////////////////////////////////////////////////////////////
-
-DirectRenderer::DirectRenderer(
-        const sp<IGraphicBufferProducer> &bufferProducer)
-    : mSurfaceTex(bufferProducer),
-      mVideoRenderPending(false),
-      mNumFramesLate(0),
-      mNumFrames(0) {
-}
-
-DirectRenderer::~DirectRenderer() {
-}
-
-void DirectRenderer::onMessageReceived(const sp<AMessage> &msg) {
-    switch (msg->what()) {
-        case kWhatDecoderNotify:
-        {
-            onDecoderNotify(msg);
-            break;
-        }
-
-        case kWhatRenderVideo:
-        {
-            onRenderVideo();
-            break;
-        }
-
-        default:
-            TRESPASS();
-    }
-}
-
-void DirectRenderer::setFormat(size_t trackIndex, const sp<AMessage> &format) {
-    CHECK_LT(trackIndex, 2u);
-
-    CHECK(mDecoderContext[trackIndex] == NULL);
-
-    sp<AMessage> notify = new AMessage(kWhatDecoderNotify, id());
-    notify->setSize("trackIndex", trackIndex);
-
-    mDecoderContext[trackIndex] = new DecoderContext(notify);
-    looper()->registerHandler(mDecoderContext[trackIndex]);
-
-    CHECK_EQ((status_t)OK,
-             mDecoderContext[trackIndex]->init(
-                 format, trackIndex == 0 ? mSurfaceTex : NULL));
-
-    if (trackIndex == 1) {
-        // Audio
-        mAudioRenderer = new AudioRenderer(mDecoderContext[1]);
-        looper()->registerHandler(mAudioRenderer);
-    }
-}
-
-void DirectRenderer::queueAccessUnit(
-        size_t trackIndex, const sp<ABuffer> &accessUnit) {
-    CHECK_LT(trackIndex, 2u);
-
-    if (mDecoderContext[trackIndex] == NULL) {
-        CHECK_EQ(trackIndex, 0u);
-
-        sp<AMessage> format = new AMessage;
-        format->setString("mime", "video/avc");
-        format->setInt32("width", 640);
-        format->setInt32("height", 360);
-
-        setFormat(trackIndex, format);
-    }
-
-    mDecoderContext[trackIndex]->queueInputBuffer(accessUnit);
-}
-
-void DirectRenderer::onDecoderNotify(const sp<AMessage> &msg) {
-    size_t trackIndex;
-    CHECK(msg->findSize("trackIndex", &trackIndex));
-
-    int32_t what;
-    CHECK(msg->findInt32("what", &what));
-
-    switch (what) {
-        case DecoderContext::kWhatOutputBufferReady:
-        {
-            size_t index;
-            CHECK(msg->findSize("index", &index));
-
-            int64_t timeUs;
-            CHECK(msg->findInt64("timeUs", &timeUs));
-
-            sp<ABuffer> buffer;
-            CHECK(msg->findBuffer("buffer", &buffer));
-
-            queueOutputBuffer(trackIndex, index, timeUs, buffer);
-            break;
-        }
-
-        default:
-            TRESPASS();
-    }
-}
-
-void DirectRenderer::queueOutputBuffer(
-        size_t trackIndex,
-        size_t index, int64_t timeUs, const sp<ABuffer> &buffer) {
-    if (trackIndex == 1) {
-        // Audio
-        mAudioRenderer->queueInputBuffer(index, timeUs, buffer);
-        return;
-    }
-
-    OutputInfo info;
-    info.mIndex = index;
-    info.mTimeUs = timeUs;
-    info.mBuffer = buffer;
-    mVideoOutputBuffers.push_back(info);
-
-    scheduleVideoRenderIfNecessary();
-}
-
-void DirectRenderer::scheduleVideoRenderIfNecessary() {
-    if (mVideoRenderPending || mVideoOutputBuffers.empty()) {
-        return;
-    }
-
-    mVideoRenderPending = true;
-
-    int64_t timeUs = (*mVideoOutputBuffers.begin()).mTimeUs;
-    int64_t nowUs = ALooper::GetNowUs();
-
-    int64_t delayUs = timeUs - nowUs;
-
-    (new AMessage(kWhatRenderVideo, id()))->post(delayUs);
-}
-
-void DirectRenderer::onRenderVideo() {
-    mVideoRenderPending = false;
-
-    int64_t nowUs = ALooper::GetNowUs();
-
-    while (!mVideoOutputBuffers.empty()) {
-        const OutputInfo &info = *mVideoOutputBuffers.begin();
-
-        if (info.mTimeUs > nowUs) {
-            break;
-        }
-
-        if (info.mTimeUs + 15000ll < nowUs) {
-            ++mNumFramesLate;
-        }
-        ++mNumFrames;
-
-        status_t err =
-            mDecoderContext[0]->renderOutputBufferAndRelease(info.mIndex);
-        CHECK_EQ(err, (status_t)OK);
-
-        mVideoOutputBuffers.erase(mVideoOutputBuffers.begin());
-    }
-
-    scheduleVideoRenderIfNecessary();
-}
-
-}  // namespace android
-
diff --git a/media/libstagefright/wifi-display/sink/DirectRenderer.h b/media/libstagefright/wifi-display/sink/DirectRenderer.h
deleted file mode 100644
index c5a4a83..0000000
--- a/media/libstagefright/wifi-display/sink/DirectRenderer.h
+++ /dev/null
@@ -1,82 +0,0 @@
-/*
- * Copyright 2012, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef DIRECT_RENDERER_H_
-
-#define DIRECT_RENDERER_H_
-
-#include <media/stagefright/foundation/AHandler.h>
-
-namespace android {
-
-struct ABuffer;
-struct AudioTrack;
-struct IGraphicBufferProducer;
-struct MediaCodec;
-
-// Renders audio and video data queued by calls to "queueAccessUnit".
-struct DirectRenderer : public AHandler {
-    DirectRenderer(const sp<IGraphicBufferProducer> &bufferProducer);
-
-    void setFormat(size_t trackIndex, const sp<AMessage> &format);
-    void queueAccessUnit(size_t trackIndex, const sp<ABuffer> &accessUnit);
-
-protected:
-    virtual void onMessageReceived(const sp<AMessage> &msg);
-    virtual ~DirectRenderer();
-
-private:
-    struct DecoderContext;
-    struct AudioRenderer;
-
-    enum {
-        kWhatDecoderNotify,
-        kWhatRenderVideo,
-    };
-
-    struct OutputInfo {
-        size_t mIndex;
-        int64_t mTimeUs;
-        sp<ABuffer> mBuffer;
-    };
-
-    sp<IGraphicBufferProducer> mSurfaceTex;
-
-    sp<DecoderContext> mDecoderContext[2];
-    List<OutputInfo> mVideoOutputBuffers;
-
-    bool mVideoRenderPending;
-
-    sp<AudioRenderer> mAudioRenderer;
-
-    int32_t mNumFramesLate;
-    int32_t mNumFrames;
-
-    void onDecoderNotify(const sp<AMessage> &msg);
-
-    void queueOutputBuffer(
-            size_t trackIndex,
-            size_t index, int64_t timeUs, const sp<ABuffer> &buffer);
-
-    void scheduleVideoRenderIfNecessary();
-    void onRenderVideo();
-
-    DISALLOW_EVIL_CONSTRUCTORS(DirectRenderer);
-};
-
-}  // namespace android
-
-#endif  // DIRECT_RENDERER_H_
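
For context, a minimal sketch of how the renderer was used; it mirrors the
WifiDisplaySink code removed below, and names such as bufferProducer,
videoFormat, audioFormat, accessUnit and whenUs are placeholders:

    sp<DirectRenderer> renderer = new DirectRenderer(bufferProducer);
    looper()->registerHandler(renderer);

    // Track 0 decodes to the provided surface, track 1 is routed to the
    // internal AudioRenderer.
    renderer->setFormat(0 /* video */, videoFormat);
    renderer->setFormat(1 /* audio */, audioFormat);

    // Access units must carry a "timeUs" entry in their meta data; each one
    // is decoded and rendered once that time comes up.
    accessUnit->meta()->setInt64("timeUs", whenUs);
    renderer->queueAccessUnit(0 /* trackIndex */, accessUnit);
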
diff --git a/media/libstagefright/wifi-display/sink/WifiDisplaySink.cpp b/media/libstagefright/wifi-display/sink/WifiDisplaySink.cpp
deleted file mode 100644
index 5db2099..0000000
--- a/media/libstagefright/wifi-display/sink/WifiDisplaySink.cpp
+++ /dev/null
@@ -1,917 +0,0 @@
-/*
- * Copyright 2012, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "WifiDisplaySink"
-#include <utils/Log.h>
-
-#include "WifiDisplaySink.h"
-
-#include "DirectRenderer.h"
-#include "MediaReceiver.h"
-#include "ParsedMessage.h"
-#include "TimeSyncer.h"
-
-#include <cutils/properties.h>
-#include <media/stagefright/foundation/ABuffer.h>
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/foundation/AMessage.h>
-#include <media/stagefright/MediaErrors.h>
-#include <media/stagefright/Utils.h>
-
-namespace android {
-
-// static
-const AString WifiDisplaySink::sUserAgent = MakeUserAgent();
-
-WifiDisplaySink::WifiDisplaySink(
-        uint32_t flags,
-        const sp<ANetworkSession> &netSession,
-        const sp<IGraphicBufferProducer> &bufferProducer,
-        const sp<AMessage> &notify)
-    : mState(UNDEFINED),
-      mFlags(flags),
-      mNetSession(netSession),
-      mSurfaceTex(bufferProducer),
-      mNotify(notify),
-      mUsingTCPTransport(false),
-      mUsingTCPInterleaving(false),
-      mSessionID(0),
-      mNextCSeq(1),
-      mIDRFrameRequestPending(false),
-      mTimeOffsetUs(0ll),
-      mTimeOffsetValid(false),
-      mSetupDeferred(false),
-      mLatencyCount(0),
-      mLatencySumUs(0ll),
-      mLatencyMaxUs(0ll),
-      mMaxDelayMs(-1ll) {
-    // We support any and all resolutions, but prefer 720p30
-    mSinkSupportedVideoFormats.setNativeResolution(
-            VideoFormats::RESOLUTION_CEA, 5);  // 1280 x 720 p30
-
-    mSinkSupportedVideoFormats.enableAll();
-}
-
-WifiDisplaySink::~WifiDisplaySink() {
-}
-
-void WifiDisplaySink::start(const char *sourceHost, int32_t sourcePort) {
-    sp<AMessage> msg = new AMessage(kWhatStart, id());
-    msg->setString("sourceHost", sourceHost);
-    msg->setInt32("sourcePort", sourcePort);
-    msg->post();
-}
-
-void WifiDisplaySink::start(const char *uri) {
-    sp<AMessage> msg = new AMessage(kWhatStart, id());
-    msg->setString("setupURI", uri);
-    msg->post();
-}
-
-// static
-bool WifiDisplaySink::ParseURL(
-        const char *url, AString *host, int32_t *port, AString *path,
-        AString *user, AString *pass) {
-    host->clear();
-    *port = 0;
-    path->clear();
-    user->clear();
-    pass->clear();
-
-    if (strncasecmp("rtsp://", url, 7)) {
-        return false;
-    }
-
-    const char *slashPos = strchr(&url[7], '/');
-
-    if (slashPos == NULL) {
-        host->setTo(&url[7]);
-        path->setTo("/");
-    } else {
-        host->setTo(&url[7], slashPos - &url[7]);
-        path->setTo(slashPos);
-    }
-
-    ssize_t atPos = host->find("@");
-
-    if (atPos >= 0) {
-        // Split off user:pass@ from the hostname.
-
-        AString userPass(*host, 0, atPos);
-        host->erase(0, atPos + 1);
-
-        ssize_t colonPos = userPass.find(":");
-
-        if (colonPos < 0) {
-            *user = userPass;
-        } else {
-            user->setTo(userPass, 0, colonPos);
-            pass->setTo(userPass, colonPos + 1, userPass.size() - colonPos - 1);
-        }
-    }
-
-    const char *colonPos = strchr(host->c_str(), ':');
-
-    if (colonPos != NULL) {
-        char *end;
-        unsigned long x = strtoul(colonPos + 1, &end, 10);
-
-        if (end == colonPos + 1 || *end != '\0' || x >= 65536) {
-            return false;
-        }
-
-        *port = x;
-
-        size_t colonOffset = colonPos - host->c_str();
-        size_t trailing = host->size() - colonOffset;
-        host->erase(colonOffset, trailing);
-    } else {
-        *port = 554;
-    }
-
-    return true;
-}
-
-void WifiDisplaySink::onMessageReceived(const sp<AMessage> &msg) {
-    switch (msg->what()) {
-        case kWhatStart:
-        {
-            sleep(2);  // XXX
-
-            int32_t sourcePort;
-            CHECK(msg->findString("sourceHost", &mRTSPHost));
-            CHECK(msg->findInt32("sourcePort", &sourcePort));
-
-            sp<AMessage> notify = new AMessage(kWhatRTSPNotify, id());
-
-            status_t err = mNetSession->createRTSPClient(
-                    mRTSPHost.c_str(), sourcePort, notify, &mSessionID);
-            CHECK_EQ(err, (status_t)OK);
-
-            mState = CONNECTING;
-            break;
-        }
-
-        case kWhatRTSPNotify:
-        {
-            int32_t reason;
-            CHECK(msg->findInt32("reason", &reason));
-
-            switch (reason) {
-                case ANetworkSession::kWhatError:
-                {
-                    int32_t sessionID;
-                    CHECK(msg->findInt32("sessionID", &sessionID));
-
-                    int32_t err;
-                    CHECK(msg->findInt32("err", &err));
-
-                    AString detail;
-                    CHECK(msg->findString("detail", &detail));
-
-                    ALOGE("An error occurred in session %d (%d, '%s/%s').",
-                          sessionID,
-                          err,
-                          detail.c_str(),
-                          strerror(-err));
-
-                    if (sessionID == mSessionID) {
-                        ALOGI("Lost control connection.");
-
-                        // The control connection is dead now.
-                        mNetSession->destroySession(mSessionID);
-                        mSessionID = 0;
-
-                        if (mNotify == NULL) {
-                            looper()->stop();
-                        } else {
-                            sp<AMessage> notify = mNotify->dup();
-                            notify->setInt32("what", kWhatDisconnected);
-                            notify->post();
-                        }
-                    }
-                    break;
-                }
-
-                case ANetworkSession::kWhatConnected:
-                {
-                    ALOGI("We're now connected.");
-                    mState = CONNECTED;
-
-                    if (mFlags & FLAG_SPECIAL_MODE) {
-                        sp<AMessage> notify = new AMessage(
-                                kWhatTimeSyncerNotify, id());
-
-                        mTimeSyncer = new TimeSyncer(mNetSession, notify);
-                        looper()->registerHandler(mTimeSyncer);
-
-                        mTimeSyncer->startClient(mRTSPHost.c_str(), 8123);
-                    }
-                    break;
-                }
-
-                case ANetworkSession::kWhatData:
-                {
-                    onReceiveClientData(msg);
-                    break;
-                }
-
-                default:
-                    TRESPASS();
-            }
-            break;
-        }
-
-        case kWhatStop:
-        {
-            looper()->stop();
-            break;
-        }
-
-        case kWhatMediaReceiverNotify:
-        {
-            onMediaReceiverNotify(msg);
-            break;
-        }
-
-        case kWhatTimeSyncerNotify:
-        {
-            int32_t what;
-            CHECK(msg->findInt32("what", &what));
-
-            if (what == TimeSyncer::kWhatTimeOffset) {
-                CHECK(msg->findInt64("offset", &mTimeOffsetUs));
-                mTimeOffsetValid = true;
-
-                if (mSetupDeferred) {
-                    CHECK_EQ((status_t)OK,
-                             sendSetup(
-                                mSessionID,
-                                "rtsp://x.x.x.x:x/wfd1.0/streamid=0"));
-
-                    mSetupDeferred = false;
-                }
-            }
-            break;
-        }
-
-        case kWhatReportLateness:
-        {
-            if (mLatencyCount > 0) {
-                int64_t avgLatencyUs = mLatencySumUs / mLatencyCount;
-
-                ALOGV("avg. latency = %lld ms (max %lld ms)",
-                      avgLatencyUs / 1000ll,
-                      mLatencyMaxUs / 1000ll);
-
-                sp<AMessage> params = new AMessage;
-                params->setInt64("avgLatencyUs", avgLatencyUs);
-                params->setInt64("maxLatencyUs", mLatencyMaxUs);
-                mMediaReceiver->informSender(0 /* trackIndex */, params);
-            }
-
-            mLatencyCount = 0;
-            mLatencySumUs = 0ll;
-            mLatencyMaxUs = 0ll;
-
-            msg->post(kReportLatenessEveryUs);
-            break;
-        }
-
-        default:
-            TRESPASS();
-    }
-}
-
-void WifiDisplaySink::dumpDelay(size_t trackIndex, int64_t timeUs) {
-    int64_t delayMs = (ALooper::GetNowUs() - timeUs) / 1000ll;
-
-    if (delayMs > mMaxDelayMs) {
-        mMaxDelayMs = delayMs;
-    }
-
-    static const int64_t kMinDelayMs = 0;
-    static const int64_t kMaxDelayMs = 300;
-
-    const char *kPattern = "########################################";
-    size_t kPatternSize = strlen(kPattern);
-
-    int n = (kPatternSize * (delayMs - kMinDelayMs))
-                / (kMaxDelayMs - kMinDelayMs);
-
-    if (n < 0) {
-        n = 0;
-    } else if ((size_t)n > kPatternSize) {
-        n = kPatternSize;
-    }
-
-    ALOGI("[%lld]: (%4lld ms / %4lld ms) %s",
-          timeUs / 1000,
-          delayMs,
-          mMaxDelayMs,
-          kPattern + kPatternSize - n);
-}
-
-void WifiDisplaySink::onMediaReceiverNotify(const sp<AMessage> &msg) {
-    int32_t what;
-    CHECK(msg->findInt32("what", &what));
-
-    switch (what) {
-        case MediaReceiver::kWhatInitDone:
-        {
-            status_t err;
-            CHECK(msg->findInt32("err", &err));
-
-            ALOGI("MediaReceiver initialization completed w/ err %d", err);
-            break;
-        }
-
-        case MediaReceiver::kWhatError:
-        {
-            status_t err;
-            CHECK(msg->findInt32("err", &err));
-
-            ALOGE("MediaReceiver signaled error %d", err);
-            break;
-        }
-
-        case MediaReceiver::kWhatAccessUnit:
-        {
-            if (mRenderer == NULL) {
-                mRenderer = new DirectRenderer(mSurfaceTex);
-                looper()->registerHandler(mRenderer);
-            }
-
-            sp<ABuffer> accessUnit;
-            CHECK(msg->findBuffer("accessUnit", &accessUnit));
-
-            int64_t timeUs;
-            CHECK(accessUnit->meta()->findInt64("timeUs", &timeUs));
-
-            if (!mTimeOffsetValid && !(mFlags & FLAG_SPECIAL_MODE)) {
-                mTimeOffsetUs = timeUs - ALooper::GetNowUs();
-                mTimeOffsetValid = true;
-            }
-
-            CHECK(mTimeOffsetValid);
-
-            // We are the timesync _client_,
-            // client time = server time - time offset.
-            timeUs -= mTimeOffsetUs;
-
-            size_t trackIndex;
-            CHECK(msg->findSize("trackIndex", &trackIndex));
-
-            int64_t nowUs = ALooper::GetNowUs();
-            int64_t delayUs = nowUs - timeUs;
-
-            mLatencySumUs += delayUs;
-            if (mLatencyCount == 0 || delayUs > mLatencyMaxUs) {
-                mLatencyMaxUs = delayUs;
-            }
-            ++mLatencyCount;
-
-            // dumpDelay(trackIndex, timeUs);
-
-            timeUs += 220000ll;  // Assume 220 ms of latency
-            accessUnit->meta()->setInt64("timeUs", timeUs);
-
-            sp<AMessage> format;
-            if (msg->findMessage("format", &format)) {
-                mRenderer->setFormat(trackIndex, format);
-            }
-
-            mRenderer->queueAccessUnit(trackIndex, accessUnit);
-            break;
-        }
-
-        case MediaReceiver::kWhatPacketLost:
-        {
-#if 0
-            if (!mIDRFrameRequestPending) {
-                ALOGI("requesting IDR frame");
-
-                sendIDRFrameRequest(mSessionID);
-            }
-#endif
-            break;
-        }
-
-        default:
-            TRESPASS();
-    }
-}
-
-void WifiDisplaySink::registerResponseHandler(
-        int32_t sessionID, int32_t cseq, HandleRTSPResponseFunc func) {
-    ResponseID id;
-    id.mSessionID = sessionID;
-    id.mCSeq = cseq;
-    mResponseHandlers.add(id, func);
-}
-
-status_t WifiDisplaySink::sendM2(int32_t sessionID) {
-    AString request = "OPTIONS * RTSP/1.0\r\n";
-    AppendCommonResponse(&request, mNextCSeq);
-
-    request.append(
-            "Require: org.wfa.wfd1.0\r\n"
-            "\r\n");
-
-    status_t err =
-        mNetSession->sendRequest(sessionID, request.c_str(), request.size());
-
-    if (err != OK) {
-        return err;
-    }
-
-    registerResponseHandler(
-            sessionID, mNextCSeq, &WifiDisplaySink::onReceiveM2Response);
-
-    ++mNextCSeq;
-
-    return OK;
-}
-
-status_t WifiDisplaySink::onReceiveM2Response(
-        int32_t sessionID, const sp<ParsedMessage> &msg) {
-    int32_t statusCode;
-    if (!msg->getStatusCode(&statusCode)) {
-        return ERROR_MALFORMED;
-    }
-
-    if (statusCode != 200) {
-        return ERROR_UNSUPPORTED;
-    }
-
-    return OK;
-}
-
-status_t WifiDisplaySink::onReceiveSetupResponse(
-        int32_t sessionID, const sp<ParsedMessage> &msg) {
-    int32_t statusCode;
-    if (!msg->getStatusCode(&statusCode)) {
-        return ERROR_MALFORMED;
-    }
-
-    if (statusCode != 200) {
-        return ERROR_UNSUPPORTED;
-    }
-
-    if (!msg->findString("session", &mPlaybackSessionID)) {
-        return ERROR_MALFORMED;
-    }
-
-    if (!ParsedMessage::GetInt32Attribute(
-                mPlaybackSessionID.c_str(),
-                "timeout",
-                &mPlaybackSessionTimeoutSecs)) {
-        mPlaybackSessionTimeoutSecs = -1;
-    }
-
-    ssize_t colonPos = mPlaybackSessionID.find(";");
-    if (colonPos >= 0) {
-        // Strip any options from the returned session id.
-        mPlaybackSessionID.erase(
-                colonPos, mPlaybackSessionID.size() - colonPos);
-    }
-
-    status_t err = configureTransport(msg);
-
-    if (err != OK) {
-        return err;
-    }
-
-    mState = PAUSED;
-
-    return sendPlay(
-            sessionID,
-            "rtsp://x.x.x.x:x/wfd1.0/streamid=0");
-}
-
-status_t WifiDisplaySink::configureTransport(const sp<ParsedMessage> &msg) {
-    if (mUsingTCPTransport && !(mFlags & FLAG_SPECIAL_MODE)) {
-        // In "special" mode we still use a UDP RTCP back-channel that
-        // needs connecting.
-        return OK;
-    }
-
-    AString transport;
-    if (!msg->findString("transport", &transport)) {
-        ALOGE("Missing 'transport' field in SETUP response.");
-        return ERROR_MALFORMED;
-    }
-
-    AString sourceHost;
-    if (!ParsedMessage::GetAttribute(
-                transport.c_str(), "source", &sourceHost)) {
-        sourceHost = mRTSPHost;
-    }
-
-    AString serverPortStr;
-    if (!ParsedMessage::GetAttribute(
-                transport.c_str(), "server_port", &serverPortStr)) {
-        ALOGE("Missing 'server_port' in Transport field.");
-        return ERROR_MALFORMED;
-    }
-
-    int rtpPort, rtcpPort;
-    if (sscanf(serverPortStr.c_str(), "%d-%d", &rtpPort, &rtcpPort) != 2
-            || rtpPort <= 0 || rtpPort > 65535
-            || rtcpPort <=0 || rtcpPort > 65535
-            || rtcpPort != rtpPort + 1) {
-        ALOGE("Invalid server_port description '%s'.",
-                serverPortStr.c_str());
-
-        return ERROR_MALFORMED;
-    }
-
-    if (rtpPort & 1) {
-        ALOGW("Server picked an odd numbered RTP port.");
-    }
-
-    return mMediaReceiver->connectTrack(
-            0 /* trackIndex */, sourceHost.c_str(), rtpPort, rtcpPort);
-}
-
-status_t WifiDisplaySink::onReceivePlayResponse(
-        int32_t sessionID, const sp<ParsedMessage> &msg) {
-    int32_t statusCode;
-    if (!msg->getStatusCode(&statusCode)) {
-        return ERROR_MALFORMED;
-    }
-
-    if (statusCode != 200) {
-        return ERROR_UNSUPPORTED;
-    }
-
-    mState = PLAYING;
-
-    (new AMessage(kWhatReportLateness, id()))->post(kReportLatenessEveryUs);
-
-    return OK;
-}
-
-status_t WifiDisplaySink::onReceiveIDRFrameRequestResponse(
-        int32_t sessionID, const sp<ParsedMessage> &msg) {
-    CHECK(mIDRFrameRequestPending);
-    mIDRFrameRequestPending = false;
-
-    return OK;
-}
-
-void WifiDisplaySink::onReceiveClientData(const sp<AMessage> &msg) {
-    int32_t sessionID;
-    CHECK(msg->findInt32("sessionID", &sessionID));
-
-    sp<RefBase> obj;
-    CHECK(msg->findObject("data", &obj));
-
-    sp<ParsedMessage> data =
-        static_cast<ParsedMessage *>(obj.get());
-
-    ALOGV("session %d received '%s'",
-          sessionID, data->debugString().c_str());
-
-    AString method;
-    AString uri;
-    data->getRequestField(0, &method);
-
-    int32_t cseq;
-    if (!data->findInt32("cseq", &cseq)) {
-        sendErrorResponse(sessionID, "400 Bad Request", -1 /* cseq */);
-        return;
-    }
-
-    if (method.startsWith("RTSP/")) {
-        // This is a response.
-
-        ResponseID id;
-        id.mSessionID = sessionID;
-        id.mCSeq = cseq;
-
-        ssize_t index = mResponseHandlers.indexOfKey(id);
-
-        if (index < 0) {
-            ALOGW("Received unsolicited server response, cseq %d", cseq);
-            return;
-        }
-
-        HandleRTSPResponseFunc func = mResponseHandlers.valueAt(index);
-        mResponseHandlers.removeItemsAt(index);
-
-        status_t err = (this->*func)(sessionID, data);
-        CHECK_EQ(err, (status_t)OK);
-    } else {
-        AString version;
-        data->getRequestField(2, &version);
-        if (!(version == AString("RTSP/1.0"))) {
-            sendErrorResponse(sessionID, "505 RTSP Version not supported", cseq);
-            return;
-        }
-
-        if (method == "OPTIONS") {
-            onOptionsRequest(sessionID, cseq, data);
-        } else if (method == "GET_PARAMETER") {
-            onGetParameterRequest(sessionID, cseq, data);
-        } else if (method == "SET_PARAMETER") {
-            onSetParameterRequest(sessionID, cseq, data);
-        } else {
-            sendErrorResponse(sessionID, "405 Method Not Allowed", cseq);
-        }
-    }
-}
-
-void WifiDisplaySink::onOptionsRequest(
-        int32_t sessionID,
-        int32_t cseq,
-        const sp<ParsedMessage> &data) {
-    AString response = "RTSP/1.0 200 OK\r\n";
-    AppendCommonResponse(&response, cseq);
-    response.append("Public: org.wfa.wfd1.0, GET_PARAMETER, SET_PARAMETER\r\n");
-    response.append("\r\n");
-
-    status_t err = mNetSession->sendRequest(sessionID, response.c_str());
-    CHECK_EQ(err, (status_t)OK);
-
-    err = sendM2(sessionID);
-    CHECK_EQ(err, (status_t)OK);
-}
-
-void WifiDisplaySink::onGetParameterRequest(
-        int32_t sessionID,
-        int32_t cseq,
-        const sp<ParsedMessage> &data) {
-    AString body;
-
-    if (mState == CONNECTED) {
-        mUsingTCPTransport = false;
-        mUsingTCPInterleaving = false;
-
-        char val[PROPERTY_VALUE_MAX];
-        if (property_get("media.wfd-sink.tcp-mode", val, NULL)) {
-            if (!strcasecmp("true", val) || !strcmp("1", val)) {
-                ALOGI("Using TCP unicast transport.");
-                mUsingTCPTransport = true;
-                mUsingTCPInterleaving = false;
-            } else if (!strcasecmp("interleaved", val)) {
-                ALOGI("Using TCP interleaved transport.");
-                mUsingTCPTransport = true;
-                mUsingTCPInterleaving = true;
-            }
-        } else if (mFlags & FLAG_SPECIAL_MODE) {
-            mUsingTCPTransport = true;
-        }
-
-        body = "wfd_video_formats: ";
-        body.append(mSinkSupportedVideoFormats.getFormatSpec());
-
-        body.append(
-                "\r\nwfd_audio_codecs: AAC 0000000F 00\r\n"
-                "wfd_client_rtp_ports: RTP/AVP/");
-
-        if (mUsingTCPTransport) {
-            body.append("TCP;");
-            if (mUsingTCPInterleaving) {
-                body.append("interleaved");
-            } else {
-                body.append("unicast 19000 0");
-            }
-        } else {
-            body.append("UDP;unicast 19000 0");
-        }
-
-        body.append(" mode=play\r\n");
-    }
-
-    AString response = "RTSP/1.0 200 OK\r\n";
-    AppendCommonResponse(&response, cseq);
-    response.append("Content-Type: text/parameters\r\n");
-    response.append(StringPrintf("Content-Length: %d\r\n", body.size()));
-    response.append("\r\n");
-    response.append(body);
-
-    status_t err = mNetSession->sendRequest(sessionID, response.c_str());
-    CHECK_EQ(err, (status_t)OK);
-}
-
-status_t WifiDisplaySink::sendSetup(int32_t sessionID, const char *uri) {
-    sp<AMessage> notify = new AMessage(kWhatMediaReceiverNotify, id());
-
-    mMediaReceiverLooper = new ALooper;
-    mMediaReceiverLooper->setName("media_receiver");
-
-    mMediaReceiverLooper->start(
-            false /* runOnCallingThread */,
-            false /* canCallJava */,
-            PRIORITY_AUDIO);
-
-    mMediaReceiver = new MediaReceiver(mNetSession, notify);
-    mMediaReceiverLooper->registerHandler(mMediaReceiver);
-
-    RTPReceiver::TransportMode rtpMode = RTPReceiver::TRANSPORT_UDP;
-    if (mUsingTCPTransport) {
-        if (mUsingTCPInterleaving) {
-            rtpMode = RTPReceiver::TRANSPORT_TCP_INTERLEAVED;
-        } else {
-            rtpMode = RTPReceiver::TRANSPORT_TCP;
-        }
-    }
-
-    int32_t localRTPPort;
-    status_t err = mMediaReceiver->addTrack(
-            rtpMode, RTPReceiver::TRANSPORT_UDP /* rtcpMode */, &localRTPPort);
-
-    if (err == OK) {
-        err = mMediaReceiver->initAsync(MediaReceiver::MODE_TRANSPORT_STREAM);
-    }
-
-    if (err != OK) {
-        mMediaReceiverLooper->unregisterHandler(mMediaReceiver->id());
-        mMediaReceiver.clear();
-
-        mMediaReceiverLooper->stop();
-        mMediaReceiverLooper.clear();
-
-        return err;
-    }
-
-    AString request = StringPrintf("SETUP %s RTSP/1.0\r\n", uri);
-
-    AppendCommonResponse(&request, mNextCSeq);
-
-    if (rtpMode == RTPReceiver::TRANSPORT_TCP_INTERLEAVED) {
-        request.append("Transport: RTP/AVP/TCP;interleaved=0-1\r\n");
-    } else if (rtpMode == RTPReceiver::TRANSPORT_TCP) {
-        if (mFlags & FLAG_SPECIAL_MODE) {
-            // This isn't quite true, since the RTP connection is through TCP
-            // and the RTCP connection through UDP...
-            request.append(
-                    StringPrintf(
-                        "Transport: RTP/AVP/TCP;unicast;client_port=%d-%d\r\n",
-                        localRTPPort, localRTPPort + 1));
-        } else {
-            request.append(
-                    StringPrintf(
-                        "Transport: RTP/AVP/TCP;unicast;client_port=%d\r\n",
-                        localRTPPort));
-        }
-    } else {
-        request.append(
-                StringPrintf(
-                    "Transport: RTP/AVP/UDP;unicast;client_port=%d-%d\r\n",
-                    localRTPPort,
-                    localRTPPort + 1));
-    }
-
-    request.append("\r\n");
-
-    ALOGV("request = '%s'", request.c_str());
-
-    err = mNetSession->sendRequest(sessionID, request.c_str(), request.size());
-
-    if (err != OK) {
-        return err;
-    }
-
-    registerResponseHandler(
-            sessionID, mNextCSeq, &WifiDisplaySink::onReceiveSetupResponse);
-
-    ++mNextCSeq;
-
-    return OK;
-}
-
-status_t WifiDisplaySink::sendPlay(int32_t sessionID, const char *uri) {
-    AString request = StringPrintf("PLAY %s RTSP/1.0\r\n", uri);
-
-    AppendCommonResponse(&request, mNextCSeq);
-
-    request.append(StringPrintf("Session: %s\r\n", mPlaybackSessionID.c_str()));
-    request.append("\r\n");
-
-    status_t err =
-        mNetSession->sendRequest(sessionID, request.c_str(), request.size());
-
-    if (err != OK) {
-        return err;
-    }
-
-    registerResponseHandler(
-            sessionID, mNextCSeq, &WifiDisplaySink::onReceivePlayResponse);
-
-    ++mNextCSeq;
-
-    return OK;
-}
-
-status_t WifiDisplaySink::sendIDRFrameRequest(int32_t sessionID) {
-    CHECK(!mIDRFrameRequestPending);
-
-    AString request = "SET_PARAMETER rtsp://localhost/wfd1.0 RTSP/1.0\r\n";
-
-    AppendCommonResponse(&request, mNextCSeq);
-
-    AString content = "wfd_idr_request\r\n";
-
-    request.append(StringPrintf("Session: %s\r\n", mPlaybackSessionID.c_str()));
-    request.append(StringPrintf("Content-Length: %d\r\n", content.size()));
-    request.append("\r\n");
-    request.append(content);
-
-    status_t err =
-        mNetSession->sendRequest(sessionID, request.c_str(), request.size());
-
-    if (err != OK) {
-        return err;
-    }
-
-    registerResponseHandler(
-            sessionID,
-            mNextCSeq,
-            &WifiDisplaySink::onReceiveIDRFrameRequestResponse);
-
-    ++mNextCSeq;
-
-    mIDRFrameRequestPending = true;
-
-    return OK;
-}
-
-void WifiDisplaySink::onSetParameterRequest(
-        int32_t sessionID,
-        int32_t cseq,
-        const sp<ParsedMessage> &data) {
-    const char *content = data->getContent();
-
-    if (strstr(content, "wfd_trigger_method: SETUP\r\n") != NULL) {
-        if ((mFlags & FLAG_SPECIAL_MODE) && !mTimeOffsetValid) {
-            mSetupDeferred = true;
-        } else {
-            status_t err =
-                sendSetup(
-                        sessionID,
-                        "rtsp://x.x.x.x:x/wfd1.0/streamid=0");
-
-            CHECK_EQ(err, (status_t)OK);
-        }
-    }
-
-    AString response = "RTSP/1.0 200 OK\r\n";
-    AppendCommonResponse(&response, cseq);
-    response.append("\r\n");
-
-    status_t err = mNetSession->sendRequest(sessionID, response.c_str());
-    CHECK_EQ(err, (status_t)OK);
-}
-
-void WifiDisplaySink::sendErrorResponse(
-        int32_t sessionID,
-        const char *errorDetail,
-        int32_t cseq) {
-    AString response;
-    response.append("RTSP/1.0 ");
-    response.append(errorDetail);
-    response.append("\r\n");
-
-    AppendCommonResponse(&response, cseq);
-
-    response.append("\r\n");
-
-    status_t err = mNetSession->sendRequest(sessionID, response.c_str());
-    CHECK_EQ(err, (status_t)OK);
-}
-
-// static
-void WifiDisplaySink::AppendCommonResponse(AString *response, int32_t cseq) {
-    time_t now = time(NULL);
-    struct tm *now2 = gmtime(&now);
-    char buf[128];
-    strftime(buf, sizeof(buf), "%a, %d %b %Y %H:%M:%S %z", now2);
-
-    response->append("Date: ");
-    response->append(buf);
-    response->append("\r\n");
-
-    response->append(StringPrintf("User-Agent: %s\r\n", sUserAgent.c_str()));
-
-    if (cseq >= 0) {
-        response->append(StringPrintf("CSeq: %d\r\n", cseq));
-    }
-}
-
-}  // namespace android
diff --git a/media/libstagefright/wifi-display/sink/WifiDisplaySink.h b/media/libstagefright/wifi-display/sink/WifiDisplaySink.h
deleted file mode 100644
index adb9d89..0000000
--- a/media/libstagefright/wifi-display/sink/WifiDisplaySink.h
+++ /dev/null
@@ -1,196 +0,0 @@
-/*
- * Copyright 2012, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef WIFI_DISPLAY_SINK_H_
-
-#define WIFI_DISPLAY_SINK_H_
-
-#include "ANetworkSession.h"
-
-#include "VideoFormats.h"
-
-#include <gui/Surface.h>
-#include <media/stagefright/foundation/AHandler.h>
-
-namespace android {
-
-struct AMessage;
-struct DirectRenderer;
-struct MediaReceiver;
-struct ParsedMessage;
-struct TimeSyncer;
-
-// Represents the RTSP client acting as a wifi display sink.
-// Connects to a wifi display source and renders the incoming
-// transport stream using a MediaPlayer instance.
-struct WifiDisplaySink : public AHandler {
-    enum {
-        kWhatDisconnected,
-    };
-
-    enum Flags {
-        FLAG_SPECIAL_MODE = 1,
-    };
-
-    // If no notification message is specified (notify == NULL)
-    // the sink will stop its looper() once the session ends,
-    // otherwise it will post an appropriate notification but leave
-    // the looper() running.
-    WifiDisplaySink(
-            uint32_t flags,
-            const sp<ANetworkSession> &netSession,
-            const sp<IGraphicBufferProducer> &bufferProducer = NULL,
-            const sp<AMessage> &notify = NULL);
-
-    void start(const char *sourceHost, int32_t sourcePort);
-    void start(const char *uri);
-
-protected:
-    virtual ~WifiDisplaySink();
-    virtual void onMessageReceived(const sp<AMessage> &msg);
-
-private:
-    enum State {
-        UNDEFINED,
-        CONNECTING,
-        CONNECTED,
-        PAUSED,
-        PLAYING,
-    };
-
-    enum {
-        kWhatStart,
-        kWhatRTSPNotify,
-        kWhatStop,
-        kWhatMediaReceiverNotify,
-        kWhatTimeSyncerNotify,
-        kWhatReportLateness,
-    };
-
-    struct ResponseID {
-        int32_t mSessionID;
-        int32_t mCSeq;
-
-        bool operator<(const ResponseID &other) const {
-            return mSessionID < other.mSessionID
-                || (mSessionID == other.mSessionID
-                        && mCSeq < other.mCSeq);
-        }
-    };
-
-    typedef status_t (WifiDisplaySink::*HandleRTSPResponseFunc)(
-            int32_t sessionID, const sp<ParsedMessage> &msg);
-
-    static const int64_t kReportLatenessEveryUs = 1000000ll;
-
-    static const AString sUserAgent;
-
-    State mState;
-    uint32_t mFlags;
-    VideoFormats mSinkSupportedVideoFormats;
-    sp<ANetworkSession> mNetSession;
-    sp<IGraphicBufferProducer> mSurfaceTex;
-    sp<AMessage> mNotify;
-    sp<TimeSyncer> mTimeSyncer;
-    bool mUsingTCPTransport;
-    bool mUsingTCPInterleaving;
-    AString mRTSPHost;
-    int32_t mSessionID;
-
-    int32_t mNextCSeq;
-
-    KeyedVector<ResponseID, HandleRTSPResponseFunc> mResponseHandlers;
-
-    sp<ALooper> mMediaReceiverLooper;
-    sp<MediaReceiver> mMediaReceiver;
-    sp<DirectRenderer> mRenderer;
-
-    AString mPlaybackSessionID;
-    int32_t mPlaybackSessionTimeoutSecs;
-
-    bool mIDRFrameRequestPending;
-
-    int64_t mTimeOffsetUs;
-    bool mTimeOffsetValid;
-
-    bool mSetupDeferred;
-
-    size_t mLatencyCount;
-    int64_t mLatencySumUs;
-    int64_t mLatencyMaxUs;
-
-    int64_t mMaxDelayMs;
-
-    status_t sendM2(int32_t sessionID);
-    status_t sendSetup(int32_t sessionID, const char *uri);
-    status_t sendPlay(int32_t sessionID, const char *uri);
-    status_t sendIDRFrameRequest(int32_t sessionID);
-
-    status_t onReceiveM2Response(
-            int32_t sessionID, const sp<ParsedMessage> &msg);
-
-    status_t onReceiveSetupResponse(
-            int32_t sessionID, const sp<ParsedMessage> &msg);
-
-    status_t configureTransport(const sp<ParsedMessage> &msg);
-
-    status_t onReceivePlayResponse(
-            int32_t sessionID, const sp<ParsedMessage> &msg);
-
-    status_t onReceiveIDRFrameRequestResponse(
-            int32_t sessionID, const sp<ParsedMessage> &msg);
-
-    void registerResponseHandler(
-            int32_t sessionID, int32_t cseq, HandleRTSPResponseFunc func);
-
-    void onReceiveClientData(const sp<AMessage> &msg);
-
-    void onOptionsRequest(
-            int32_t sessionID,
-            int32_t cseq,
-            const sp<ParsedMessage> &data);
-
-    void onGetParameterRequest(
-            int32_t sessionID,
-            int32_t cseq,
-            const sp<ParsedMessage> &data);
-
-    void onSetParameterRequest(
-            int32_t sessionID,
-            int32_t cseq,
-            const sp<ParsedMessage> &data);
-
-    void onMediaReceiverNotify(const sp<AMessage> &msg);
-
-    void sendErrorResponse(
-            int32_t sessionID,
-            const char *errorDetail,
-            int32_t cseq);
-
-    static void AppendCommonResponse(AString *response, int32_t cseq);
-
-    bool ParseURL(
-            const char *url, AString *host, int32_t *port, AString *path,
-            AString *user, AString *pass);
-
-    void dumpDelay(size_t trackIndex, int64_t timeUs);
-
-    DISALLOW_EVIL_CONSTRUCTORS(WifiDisplaySink);
-};
-
-}  // namespace android
-
-#endif  // WIFI_DISPLAY_SINK_H_
diff --git a/media/libstagefright/wifi-display/source/Converter.cpp b/media/libstagefright/wifi-display/source/Converter.cpp
index 0a8462c..5344623 100644
--- a/media/libstagefright/wifi-display/source/Converter.cpp
+++ b/media/libstagefright/wifi-display/source/Converter.cpp
@@ -649,6 +649,13 @@
                 &bufferIndex, &offset, &size, &timeUs, &flags);
 
         if (err != OK) {
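+            // INFO_FORMAT_CHANGED and INFO_OUTPUT_BUFFERS_CHANGED from
+            // dequeueOutputBuffer() are informational, not fatal; the latter
+            // means the encoder's output buffer array must be re-fetched
+            // before dequeueing again.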
+            if (err == INFO_FORMAT_CHANGED) {
+                continue;
+            } else if (err == INFO_OUTPUT_BUFFERS_CHANGED) {
+                mEncoder->getOutputBuffers(&mEncoderOutputBuffers);
+                continue;
+            }
+
             if (err == -EAGAIN) {
                 err = OK;
             }
diff --git a/media/libstagefright/wifi-display/source/PlaybackSession.cpp b/media/libstagefright/wifi-display/source/PlaybackSession.cpp
index cacfcca..3d7b865 100644
--- a/media/libstagefright/wifi-display/source/PlaybackSession.cpp
+++ b/media/libstagefright/wifi-display/source/PlaybackSession.cpp
@@ -559,8 +559,6 @@
                         converter->dropAFrame();
                     }
                 }
-            } else if (what == MediaSender::kWhatInformSender) {
-                onSinkFeedback(msg);
             } else {
                 TRESPASS();
             }
@@ -656,89 +654,6 @@
     }
 }
 
-void WifiDisplaySource::PlaybackSession::onSinkFeedback(const sp<AMessage> &msg) {
-    int64_t avgLatencyUs;
-    CHECK(msg->findInt64("avgLatencyUs", &avgLatencyUs));
-
-    int64_t maxLatencyUs;
-    CHECK(msg->findInt64("maxLatencyUs", &maxLatencyUs));
-
-    ALOGI("sink reports avg. latency of %lld ms (max %lld ms)",
-          avgLatencyUs / 1000ll,
-          maxLatencyUs / 1000ll);
-
-    if (mVideoTrackIndex >= 0) {
-        const sp<Track> &videoTrack = mTracks.valueFor(mVideoTrackIndex);
-        sp<Converter> converter = videoTrack->converter();
-
-        if (converter != NULL) {
-            int32_t videoBitrate =
-                Converter::GetInt32Property("media.wfd.video-bitrate", -1);
-
-            char val[PROPERTY_VALUE_MAX];
-            if (videoBitrate < 0
-                    && property_get("media.wfd.video-bitrate", val, NULL)
-                    && !strcasecmp("adaptive", val)) {
-                videoBitrate = converter->getVideoBitrate();
-
-                if (avgLatencyUs > 300000ll) {
-                    videoBitrate *= 0.6;
-                } else if (avgLatencyUs < 100000ll) {
-                    videoBitrate *= 1.1;
-                }
-            }
-
-            if (videoBitrate > 0) {
-                if (videoBitrate < 500000) {
-                    videoBitrate = 500000;
-                } else if (videoBitrate > 10000000) {
-                    videoBitrate = 10000000;
-                }
-
-                if (videoBitrate != converter->getVideoBitrate()) {
-                    ALOGI("setting video bitrate to %d bps", videoBitrate);
-
-                    converter->setVideoBitrate(videoBitrate);
-                }
-            }
-        }
-
-        sp<RepeaterSource> repeaterSource = videoTrack->repeaterSource();
-        if (repeaterSource != NULL) {
-            double rateHz =
-                Converter::GetInt32Property(
-                        "media.wfd.video-framerate", -1);
-
-            char val[PROPERTY_VALUE_MAX];
-            if (rateHz < 0.0
-                    && property_get("media.wfd.video-framerate", val, NULL)
-                    && !strcasecmp("adaptive", val)) {
-                 rateHz = repeaterSource->getFrameRate();
-
-                if (avgLatencyUs > 300000ll) {
-                    rateHz *= 0.9;
-                } else if (avgLatencyUs < 200000ll) {
-                    rateHz *= 1.1;
-                }
-            }
-
-            if (rateHz > 0) {
-                if (rateHz < 5.0) {
-                    rateHz = 5.0;
-                } else if (rateHz > 30.0) {
-                    rateHz = 30.0;
-                }
-
-                if (rateHz != repeaterSource->getFrameRate()) {
-                    ALOGI("setting frame rate to %.2f Hz", rateHz);
-
-                    repeaterSource->setFrameRate(rateHz);
-                }
-            }
-        }
-    }
-}
-
 status_t WifiDisplaySource::PlaybackSession::setupMediaPacketizer(
         bool enableAudio, bool enableVideo) {
     DataSource::RegisterDefaultSniffers();
diff --git a/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp b/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp
index 4a49811..2b5bee9 100644
--- a/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp
+++ b/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp
@@ -23,7 +23,6 @@
 #include "Parameters.h"
 #include "ParsedMessage.h"
 #include "rtp/RTPSender.h"
-#include "TimeSyncer.h"
 
 #include <binder/IServiceManager.h>
 #include <gui/IGraphicBufferProducer.h>
@@ -165,14 +164,6 @@
                 } else {
                     err = -EINVAL;
                 }
-            }
-
-            if (err == OK) {
-                sp<AMessage> notify = new AMessage(kWhatTimeSyncerNotify, id());
-                mTimeSyncer = new TimeSyncer(mNetSession, notify);
-                looper()->registerHandler(mTimeSyncer);
-
-                mTimeSyncer->startServer(8123);
 
                 mState = AWAITING_CLIENT_CONNECTION;
             }
@@ -548,11 +539,6 @@
             break;
         }
 
-        case kWhatTimeSyncerNotify:
-        {
-            break;
-        }
-
         default:
             TRESPASS();
     }
diff --git a/media/libstagefright/wifi-display/source/WifiDisplaySource.h b/media/libstagefright/wifi-display/source/WifiDisplaySource.h
index 3efa0b4..44d3e4d 100644
--- a/media/libstagefright/wifi-display/source/WifiDisplaySource.h
+++ b/media/libstagefright/wifi-display/source/WifiDisplaySource.h
@@ -30,7 +30,6 @@
 struct IHDCP;
 struct IRemoteDisplayClient;
 struct ParsedMessage;
-struct TimeSyncer;
 
 // Represents the RTSP server acting as a wifi display source.
 // Manages incoming connections, sets up Playback sessions as necessary.
@@ -83,7 +82,6 @@
         kWhatHDCPNotify,
         kWhatFinishStop2,
         kWhatTeardownTriggerTimedOut,
-        kWhatTimeSyncerNotify,
     };
 
     struct ResponseID {
@@ -120,7 +118,6 @@
     sp<ANetworkSession> mNetSession;
     sp<IRemoteDisplayClient> mClient;
     AString mMediaPath;
-    sp<TimeSyncer> mTimeSyncer;
     struct in_addr mInterfaceAddr;
     int32_t mSessionID;
 
diff --git a/media/libstagefright/wifi-display/udptest.cpp b/media/libstagefright/wifi-display/udptest.cpp
deleted file mode 100644
index 111846d..0000000
--- a/media/libstagefright/wifi-display/udptest.cpp
+++ /dev/null
@@ -1,116 +0,0 @@
-/*
- * Copyright 2012, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NEBUG 0
-#define LOG_TAG "udptest"
-#include <utils/Log.h>
-
-#include "ANetworkSession.h"
-#include "TimeSyncer.h"
-
-#include <binder/ProcessState.h>
-#include <media/stagefright/foundation/AMessage.h>
-
-namespace android {
-
-}  // namespace android
-
-static void usage(const char *me) {
-    fprintf(stderr,
-            "usage: %s -c host[:port]\tconnect to test server\n"
-            "           -l            \tcreate a test server\n",
-            me);
-}
-
-int main(int argc, char **argv) {
-    using namespace android;
-
-    ProcessState::self()->startThreadPool();
-
-    int32_t localPort = -1;
-    int32_t connectToPort = -1;
-    AString connectToHost;
-
-    int res;
-    while ((res = getopt(argc, argv, "hc:l:")) >= 0) {
-        switch (res) {
-            case 'c':
-            {
-                const char *colonPos = strrchr(optarg, ':');
-
-                if (colonPos == NULL) {
-                    connectToHost = optarg;
-                    connectToPort = 49152;
-                } else {
-                    connectToHost.setTo(optarg, colonPos - optarg);
-
-                    char *end;
-                    connectToPort = strtol(colonPos + 1, &end, 10);
-
-                    if (*end != '\0' || end == colonPos + 1
-                            || connectToPort < 1 || connectToPort > 65535) {
-                        fprintf(stderr, "Illegal port specified.\n");
-                        exit(1);
-                    }
-                }
-                break;
-            }
-
-            case 'l':
-            {
-                char *end;
-                localPort = strtol(optarg, &end, 10);
-
-                if (*end != '\0' || end == optarg
-                        || localPort < 1 || localPort > 65535) {
-                    fprintf(stderr, "Illegal port specified.\n");
-                    exit(1);
-                }
-                break;
-            }
-
-            case '?':
-            case 'h':
-                usage(argv[0]);
-                exit(1);
-        }
-    }
-
-    if (localPort < 0 && connectToPort < 0) {
-        fprintf(stderr,
-                "You need to select either client or server mode.\n");
-        exit(1);
-    }
-
-    sp<ANetworkSession> netSession = new ANetworkSession;
-    netSession->start();
-
-    sp<ALooper> looper = new ALooper;
-
-    sp<TimeSyncer> handler = new TimeSyncer(netSession, NULL /* notify */);
-    looper->registerHandler(handler);
-
-    if (localPort >= 0) {
-        handler->startServer(localPort);
-    } else {
-        handler->startClient(connectToHost.c_str(), connectToPort);
-    }
-
-    looper->start(true /* runOnCallingThread */);
-
-    return 0;
-}
-
diff --git a/media/libstagefright/wifi-display/wfd.cpp b/media/libstagefright/wifi-display/wfd.cpp
index 9fee4d0..c947765 100644
--- a/media/libstagefright/wifi-display/wfd.cpp
+++ b/media/libstagefright/wifi-display/wfd.cpp
@@ -18,7 +18,6 @@
 #define LOG_TAG "wfd"
 #include <utils/Log.h>
 
-#include "sink/WifiDisplaySink.h"
 #include "source/WifiDisplaySource.h"
 
 #include <binder/ProcessState.h>
@@ -39,12 +38,8 @@
 static void usage(const char *me) {
     fprintf(stderr,
             "usage:\n"
-            "           %s -c host[:port]\tconnect to wifi source\n"
-            "               -u uri        \tconnect to an rtsp uri\n"
-            "               -l ip[:port] \tlisten on the specified port "
-            "               -f(ilename)  \tstream media "
-            "(create a sink)\n"
-            "               -s(pecial)   \trun in 'special' mode\n",
+            "           %s -l iface[:port]\tcreate a wifi display source\n"
+            "               -f(ilename)  \tstream media\n",
             me);
 }
 
@@ -214,48 +209,14 @@
 
     DataSource::RegisterDefaultSniffers();
 
-    AString connectToHost;
-    int32_t connectToPort = -1;
-    AString uri;
-
     AString listenOnAddr;
     int32_t listenOnPort = -1;
 
     AString path;
 
-    bool specialMode = false;
-
     int res;
-    while ((res = getopt(argc, argv, "hc:l:u:f:s")) >= 0) {
+    while ((res = getopt(argc, argv, "hl:f:")) >= 0) {
         switch (res) {
-            case 'c':
-            {
-                const char *colonPos = strrchr(optarg, ':');
-
-                if (colonPos == NULL) {
-                    connectToHost = optarg;
-                    connectToPort = WifiDisplaySource::kWifiDisplayDefaultPort;
-                } else {
-                    connectToHost.setTo(optarg, colonPos - optarg);
-
-                    char *end;
-                    connectToPort = strtol(colonPos + 1, &end, 10);
-
-                    if (*end != '\0' || end == colonPos + 1
-                            || connectToPort < 1 || connectToPort > 65535) {
-                        fprintf(stderr, "Illegal port specified.\n");
-                        exit(1);
-                    }
-                }
-                break;
-            }
-
-            case 'u':
-            {
-                uri = optarg;
-                break;
-            }
-
             case 'f':
             {
                 path = optarg;
@@ -284,12 +245,6 @@
                 break;
             }
 
-            case 's':
-            {
-                specialMode = true;
-                break;
-            }
-
             case '?':
             case 'h':
             default:
@@ -298,13 +253,6 @@
         }
     }
 
-    if (connectToPort >= 0 && listenOnPort >= 0) {
-        fprintf(stderr,
-                "You can connect to a source or create one, "
-                "but not both at the same time.\n");
-        exit(1);
-    }
-
     if (listenOnPort >= 0) {
         if (path.empty()) {
             createSource(listenOnAddr, listenOnPort);
@@ -315,72 +263,7 @@
         exit(0);
     }
 
-    if (connectToPort < 0 && uri.empty()) {
-        fprintf(stderr,
-                "You need to select either source host or uri.\n");
-
-        exit(1);
-    }
-
-    if (connectToPort >= 0 && !uri.empty()) {
-        fprintf(stderr,
-                "You need to either connect to a wfd host or an rtsp url, "
-                "not both.\n");
-        exit(1);
-    }
-
-    sp<SurfaceComposerClient> composerClient = new SurfaceComposerClient;
-    CHECK_EQ(composerClient->initCheck(), (status_t)OK);
-
-    sp<IBinder> display(SurfaceComposerClient::getBuiltInDisplay(
-            ISurfaceComposer::eDisplayIdMain));
-    DisplayInfo info;
-    SurfaceComposerClient::getDisplayInfo(display, &info);
-    ssize_t displayWidth = info.w;
-    ssize_t displayHeight = info.h;
-
-    ALOGV("display is %d x %d\n", displayWidth, displayHeight);
-
-    sp<SurfaceControl> control =
-        composerClient->createSurface(
-                String8("A Surface"),
-                displayWidth,
-                displayHeight,
-                PIXEL_FORMAT_RGB_565,
-                0);
-
-    CHECK(control != NULL);
-    CHECK(control->isValid());
-
-    SurfaceComposerClient::openGlobalTransaction();
-    CHECK_EQ(control->setLayer(INT_MAX), (status_t)OK);
-    CHECK_EQ(control->show(), (status_t)OK);
-    SurfaceComposerClient::closeGlobalTransaction();
-
-    sp<Surface> surface = control->getSurface();
-    CHECK(surface != NULL);
-
-    sp<ANetworkSession> session = new ANetworkSession;
-    session->start();
-
-    sp<ALooper> looper = new ALooper;
-
-    sp<WifiDisplaySink> sink = new WifiDisplaySink(
-            specialMode ? WifiDisplaySink::FLAG_SPECIAL_MODE : 0 /* flags */,
-            session,
-            surface->getIGraphicBufferProducer());
-
-    looper->registerHandler(sink);
-
-    if (connectToPort >= 0) {
-        sink->start(connectToHost.c_str(), connectToPort);
-    } else {
-        sink->start(uri.c_str());
-    }
-
-    looper->start(true /* runOnCallingThread */);
-
-    composerClient->dispose();
+    usage(argv[0]);
 
     return 0;
 }
diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp
index a6ab4f8..5ac3129 100644
--- a/services/audioflinger/Tracks.cpp
+++ b/services/audioflinger/Tracks.cpp
@@ -1415,6 +1415,9 @@
         // since client and server are in the same process,
         // the buffer has the same virtual address on both sides
         mClientProxy = new AudioTrackClientProxy(mCblk, mBuffer, mFrameCount, mFrameSize);
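+        // Give the proxy sane defaults: unity gain (0x1000 == 1.0 in the
+        // 4.12 fixed-point format used for track volumes, with left/right
+        // packed into one 32-bit word), no aux send, and the track's rate.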
+        mClientProxy->setVolumeLR((uint32_t(uint16_t(0x1000)) << 16) | uint16_t(0x1000));
+        mClientProxy->setSendLevel(0.0);
+        mClientProxy->setSampleRate(sampleRate);
     } else {
         ALOGW("Error creating output track on thread %p", playbackThread);
     }
diff --git a/services/camera/libcameraservice/Android.mk b/services/camera/libcameraservice/Android.mk
index 3c84703..3479553 100644
--- a/services/camera/libcameraservice/Android.mk
+++ b/services/camera/libcameraservice/Android.mk
@@ -25,10 +25,12 @@
     camera2/JpegCompressor.cpp \
     camera2/CaptureSequencer.cpp \
     camera2/ProFrameProcessor.cpp \
+    camera2/ZslProcessor3.cpp \
     camera3/Camera3Stream.cpp \
     camera3/Camera3InputStream.cpp \
     camera3/Camera3OutputStream.cpp \
-    camera3/Camera3ZslStream.cpp
+    camera3/Camera3ZslStream.cpp \
+    gui/RingBufferConsumer.cpp \
 
 LOCAL_SHARED_LIBRARIES:= \
     libui \
diff --git a/services/camera/libcameraservice/Camera2Client.cpp b/services/camera/libcameraservice/Camera2Client.cpp
index 9421a77..48f3606 100644
--- a/services/camera/libcameraservice/Camera2Client.cpp
+++ b/services/camera/libcameraservice/Camera2Client.cpp
@@ -28,6 +28,9 @@
 #include "Camera2Device.h"
 #include "Camera3Device.h"
 
+#include "camera2/ZslProcessor.h"
+#include "camera2/ZslProcessor3.h"
+
 #define ALOG1(...) ALOGD_IF(gLogLevel >= 1, __VA_ARGS__);
 #define ALOG2(...) ALOGD_IF(gLogLevel >= 2, __VA_ARGS__);
 
@@ -51,12 +54,13 @@
         int deviceVersion):
         Camera2ClientBase(cameraService, cameraClient, clientPackageName,
                 cameraId, cameraFacing, clientPid, clientUid, servicePid),
-        mParameters(cameraId, cameraFacing)
+        mParameters(cameraId, cameraFacing),
+        mDeviceVersion(deviceVersion)
 {
     ATRACE_CALL();
     ALOGI("Camera %d: Opened", cameraId);
 
-    switch (deviceVersion) {
+    switch (mDeviceVersion) {
         case CAMERA_DEVICE_API_VERSION_2_0:
             mDevice = new Camera2Device(cameraId);
             break;
@@ -65,7 +69,7 @@
             break;
         default:
             ALOGE("Camera %d: Unknown HAL device version %d",
-                    cameraId, deviceVersion);
+                    cameraId, mDeviceVersion);
             mDevice = NULL;
             break;
     }
@@ -114,10 +118,27 @@
             mCameraId);
     mJpegProcessor->run(threadName.string());
 
-    mZslProcessor = new ZslProcessor(this, mCaptureSequencer);
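+    // Pick the ZSL implementation matching the HAL version; both variants
+    // implement ZslProcessorInterface and run as their own thread.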
+    switch (mDeviceVersion) {
+        case CAMERA_DEVICE_API_VERSION_2_0: {
+            sp<ZslProcessor> zslProc =
+                    new ZslProcessor(this, mCaptureSequencer);
+            mZslProcessor = zslProc;
+            mZslProcessorThread = zslProc;
+            break;
+        }
+        case CAMERA_DEVICE_API_VERSION_3_0: {
+            sp<ZslProcessor3> zslProc =
+                    new ZslProcessor3(this, mCaptureSequencer);
+            mZslProcessor = zslProc;
+            mZslProcessorThread = zslProc;
+            break;
+        }
+        default:
+            break;
+    }
     threadName = String8::format("C2-%d-ZslProc",
             mCameraId);
-    mZslProcessor->run(threadName.string());
+    mZslProcessorThread->run(threadName.string());
 
     mCallbackProcessor = new CallbackProcessor(this);
     threadName = String8::format("C2-%d-CallbkProc",
@@ -135,6 +156,7 @@
 
 Camera2Client::~Camera2Client() {
     ATRACE_CALL();
+    ALOGV("~Camera2Client");
 
     mDestructionStarted = true;
 
@@ -369,6 +391,12 @@
 
     ALOGV("Camera %d: Shutting down", mCameraId);
 
+    /**
+     * disconnect() cannot call any methods that might need to promote a
+     * wp<Camera2Client>, since disconnect can be called from the destructor, at
+     * which point all such promotions will fail.
+     */
+
     stopPreviewL();
 
     {
@@ -386,7 +414,7 @@
     mFrameProcessor->requestExit();
     mCaptureSequencer->requestExit();
     mJpegProcessor->requestExit();
-    mZslProcessor->requestExit();
+    mZslProcessorThread->requestExit();
     mCallbackProcessor->requestExit();
 
     ALOGV("Camera %d: Waiting for threads", mCameraId);
@@ -394,7 +422,7 @@
     mFrameProcessor->join();
     mCaptureSequencer->join();
     mJpegProcessor->join();
-    mZslProcessor->join();
+    mZslProcessorThread->join();
     mCallbackProcessor->join();
 
     ALOGV("Camera %d: Disconnecting device", mCameraId);
@@ -538,7 +566,12 @@
             break;
         case Parameters::PREVIEW:
             // Already running preview - need to stop and create a new stream
-            mStreamingProcessor->stopStream();
+            res = stopStream();
+            if (res != OK) {
+                ALOGE("%s: Unable to stop preview to swap windows: %s (%d)",
+                        __FUNCTION__, strerror(-res), res);
+                return res;
+            }
             state = Parameters::WAITING_FOR_PREVIEW_WINDOW;
             break;
     }
@@ -745,7 +778,11 @@
             // no break
         case Parameters::RECORD:
         case Parameters::PREVIEW:
-            mStreamingProcessor->stopStream();
+            res = stopStream();
+            if (res != OK) {
+                ALOGE("%s: Camera %d: Can't stop streaming: %s (%d)",
+                        __FUNCTION__, mCameraId, strerror(-res), res);
+            }
             res = mDevice->waitUntilDrained();
             if (res != OK) {
                 ALOGE("%s: Camera %d: Waiting to stop streaming failed: %s (%d)",
diff --git a/services/camera/libcameraservice/Camera2Client.h b/services/camera/libcameraservice/Camera2Client.h
index 713fab3..af72ab2 100644
--- a/services/camera/libcameraservice/Camera2Client.h
+++ b/services/camera/libcameraservice/Camera2Client.h
@@ -23,7 +23,7 @@
 #include "camera2/FrameProcessor.h"
 #include "camera2/StreamingProcessor.h"
 #include "camera2/JpegProcessor.h"
-#include "camera2/ZslProcessor.h"
+#include "camera2/ZslProcessorInterface.h"
 #include "camera2/CaptureSequencer.h"
 #include "camera2/CallbackProcessor.h"
 #include "Camera2ClientBase.h"
@@ -154,6 +154,7 @@
 
     void     setPreviewCallbackFlagL(Parameters &params, int flag);
     status_t updateRequests(Parameters &params);
+    int mDeviceVersion;
 
     // Used with stream IDs
     static const int NO_STREAM = -1;
@@ -173,7 +174,8 @@
 
     sp<camera2::CaptureSequencer> mCaptureSequencer;
     sp<camera2::JpegProcessor> mJpegProcessor;
-    sp<camera2::ZslProcessor> mZslProcessor;
+    sp<camera2::ZslProcessorInterface> mZslProcessor;
+    sp<Thread> mZslProcessorThread;
 
     /** Notification-related members */
 
diff --git a/services/camera/libcameraservice/Camera2Device.cpp b/services/camera/libcameraservice/Camera2Device.cpp
index 946cdba..77df152 100644
--- a/services/camera/libcameraservice/Camera2Device.cpp
+++ b/services/camera/libcameraservice/Camera2Device.cpp
@@ -1133,7 +1133,8 @@
 status_t Camera2Device::StreamAdapter::release() {
     ATRACE_CALL();
     status_t res;
-    ALOGV("%s: Releasing stream %d", __FUNCTION__, mId);
+    ALOGV("%s: Releasing stream %d (%d x %d, format %d)", __FUNCTION__, mId,
+            mWidth, mHeight, mFormat);
     if (mState >= ALLOCATED) {
         res = mHal2Device->ops->release_stream(mHal2Device, mId);
         if (res != OK) {
diff --git a/services/camera/libcameraservice/Camera3Device.cpp b/services/camera/libcameraservice/Camera3Device.cpp
index 1433108..5f87e8b 100644
--- a/services/camera/libcameraservice/Camera3Device.cpp
+++ b/services/camera/libcameraservice/Camera3Device.cpp
@@ -42,6 +42,7 @@
 #include <utils/Timers.h>
 #include "Camera3Device.h"
 #include "camera3/Camera3OutputStream.h"
+#include "camera3/Camera3InputStream.h"
 
 using namespace android::camera3;
 
@@ -392,6 +393,144 @@
     return mRequestThread->waitUntilRequestProcessed(requestId, timeout);
 }
 
+status_t Camera3Device::createInputStream(
+        uint32_t width, uint32_t height, int format, int *id) {
+    ATRACE_CALL();
+    Mutex::Autolock l(mLock);
+
+    status_t res;
+    bool wasActive = false;
+
+    switch (mStatus) {
+        case STATUS_ERROR:
+            ALOGE("%s: Device has encountered a serious error", __FUNCTION__);
+            return INVALID_OPERATION;
+        case STATUS_UNINITIALIZED:
+            ALOGE("%s: Device not initialized", __FUNCTION__);
+            return INVALID_OPERATION;
+        case STATUS_IDLE:
+            // OK
+            break;
+        case STATUS_ACTIVE:
+            ALOGV("%s: Stopping activity to reconfigure streams", __FUNCTION__);
+            mRequestThread->setPaused(true);
+            res = waitUntilDrainedLocked();
+            if (res != OK) {
+                ALOGE("%s: Can't pause captures to reconfigure streams!",
+                        __FUNCTION__);
+                mStatus = STATUS_ERROR;
+                return res;
+            }
+            wasActive = true;
+            break;
+        default:
+            ALOGE("%s: Unexpected status: %d", __FUNCTION__, mStatus);
+            return INVALID_OPERATION;
+    }
+    assert(mStatus == STATUS_IDLE);
+
+    if (mInputStream != 0) {
+        ALOGE("%s: Cannot create more than 1 input stream", __FUNCTION__);
+        return INVALID_OPERATION;
+    }
+
+    sp<Camera3InputStream> newStream = new Camera3InputStream(mNextStreamId,
+                width, height, format);
+
+    mInputStream = newStream;
+
+    *id = mNextStreamId++;
+
+    // Continue captures if active at start
+    if (wasActive) {
+        ALOGV("%s: Restarting activity to reconfigure streams", __FUNCTION__);
+        res = configureStreamsLocked();
+        if (res != OK) {
+            ALOGE("%s: Can't reconfigure device for new stream %d: %s (%d)",
+                    __FUNCTION__, mNextStreamId, strerror(-res), res);
+            return res;
+        }
+        mRequestThread->setPaused(false);
+    }
+
+    return OK;
+}
+
+
+status_t Camera3Device::createZslStream(
+            uint32_t width, uint32_t height,
+            int depth,
+            /*out*/
+            int *id,
+            sp<Camera3ZslStream>* zslStream) {
+    ATRACE_CALL();
+    Mutex::Autolock l(mLock);
+
+    status_t res;
+    bool wasActive = false;
+
+    switch (mStatus) {
+        case STATUS_ERROR:
+            ALOGE("%s: Device has encountered a serious error", __FUNCTION__);
+            return INVALID_OPERATION;
+        case STATUS_UNINITIALIZED:
+            ALOGE("%s: Device not initialized", __FUNCTION__);
+            return INVALID_OPERATION;
+        case STATUS_IDLE:
+            // OK
+            break;
+        case STATUS_ACTIVE:
+            ALOGV("%s: Stopping activity to reconfigure streams", __FUNCTION__);
+            mRequestThread->setPaused(true);
+            res = waitUntilDrainedLocked();
+            if (res != OK) {
+                ALOGE("%s: Can't pause captures to reconfigure streams!",
+                        __FUNCTION__);
+                mStatus = STATUS_ERROR;
+                return res;
+            }
+            wasActive = true;
+            break;
+        default:
+            ALOGE("%s: Unexpected status: %d", __FUNCTION__, mStatus);
+            return INVALID_OPERATION;
+    }
+    assert(mStatus == STATUS_IDLE);
+
+    if (mInputStream != 0) {
+        ALOGE("%s: Cannot create more than 1 input stream", __FUNCTION__);
+        return INVALID_OPERATION;
+    }
+
+    sp<Camera3ZslStream> newStream = new Camera3ZslStream(mNextStreamId,
+                width, height, depth);
+
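+    // The ZSL stream is bidirectional: add it to the output stream set so it
+    // is configured with the others, and also record it as the device's
+    // single input stream, since ZSL feeds captured buffers back to the HAL.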
+    res = mOutputStreams.add(mNextStreamId, newStream);
+    if (res < 0) {
+        ALOGE("%s: Can't add new stream to set: %s (%d)",
+                __FUNCTION__, strerror(-res), res);
+        return res;
+    }
+    mInputStream = newStream;
+
+    *id = mNextStreamId++;
+    *zslStream = newStream;
+
+    // Continue captures if active at start
+    if (wasActive) {
+        ALOGV("%s: Restarting activity to reconfigure streams", __FUNCTION__);
+        res = configureStreamsLocked();
+        if (res != OK) {
+            ALOGE("%s: Can't reconfigure device for new stream %d: %s (%d)",
+                    __FUNCTION__, mNextStreamId, strerror(-res), res);
+            return res;
+        }
+        mRequestThread->setPaused(false);
+    }
+
+    return OK;
+}
+
 status_t Camera3Device::createStream(sp<ANativeWindow> consumer,
         uint32_t width, uint32_t height, int format, size_t size, int *id) {
     ATRACE_CALL();
@@ -545,7 +684,7 @@
         return INVALID_OPERATION;
     }
 
-    sp<Camera3Stream> deletedStream;
+    sp<Camera3StreamInterface> deletedStream;
     if (mInputStream != NULL && id == mInputStream->getId()) {
         deletedStream = mInputStream;
         mInputStream.clear();
@@ -836,7 +975,8 @@
                     streams.data.u8[i]);
             return NULL;
         }
-        sp<Camera3OutputStream> stream = mOutputStreams.editValueAt(idx);
+        sp<Camera3OutputStreamInterface> stream =
+                mOutputStreams.editValueAt(idx);
 
         // Lazy completion of stream configuration (allocation/registration)
         // on first use
@@ -885,6 +1025,15 @@
     }
 
     for (size_t i = 0; i < mOutputStreams.size(); i++) {
+
+        // Don't configure bidi streams twice, nor add them twice to the list
+        if (mOutputStreams[i].get() ==
+            static_cast<Camera3StreamInterface*>(mInputStream.get())) {
+
+            config.num_streams--;
+            continue;
+        }
+
         camera3_stream_t *outputStream;
         outputStream = mOutputStreams.editValueAt(i)->startConfiguration();
         if (outputStream == NULL) {
@@ -1471,7 +1620,7 @@
 
     if (nextRequest->mInputStream != NULL) {
         request.input_buffer = &inputBuffer;
-        res = nextRequest->mInputStream->getBuffer(&inputBuffer);
+        res = nextRequest->mInputStream->getInputBuffer(&inputBuffer);
         if (res != OK) {
             SET_ERR("RequestThread: Can't get input buffer, skipping request:"
                     " %s (%d)", strerror(-res), res);
@@ -1558,6 +1707,23 @@
         mLatestRequestSignal.signal();
     }
 
+    // Return input buffer back to framework
+    if (request.input_buffer != NULL) {
+        Camera3Stream *stream =
+            Camera3Stream::cast(request.input_buffer->stream);
+        res = stream->returnInputBuffer(*(request.input_buffer));
+        // Note: stream may be deallocated at this point, if this buffer was the
+        // last reference to it.
+        if (res != OK) {
+            ALOGE("%s: RequestThread: Can't return input buffer for frame %d to"
+                    " its stream: %s (%d)", __FUNCTION__,
+                    request.frame_number, strerror(-res), res);
+            // TODO: Report error upstream
+        }
+    }
+
     return true;
 }
 
@@ -1571,7 +1737,7 @@
     }
     if (request.input_buffer != NULL) {
         request.input_buffer->status = CAMERA3_BUFFER_STATUS_ERROR;
-        nextRequest->mInputStream->returnBuffer(*(request.input_buffer), 0);
+        nextRequest->mInputStream->returnInputBuffer(*(request.input_buffer));
     }
     for (size_t i = 0; i < request.num_output_buffers; i++) {
         outputBuffers.editItemAt(i).status = CAMERA3_BUFFER_STATUS_ERROR;
diff --git a/services/camera/libcameraservice/Camera3Device.h b/services/camera/libcameraservice/Camera3Device.h
index 6cad08e..7a8c22a 100644
--- a/services/camera/libcameraservice/Camera3Device.h
+++ b/services/camera/libcameraservice/Camera3Device.h
@@ -26,6 +26,7 @@
 #include "CameraDeviceBase.h"
 #include "camera3/Camera3Stream.h"
 #include "camera3/Camera3OutputStream.h"
+#include "camera3/Camera3ZslStream.h"
 
 #include "hardware/camera3.h"
 
@@ -82,6 +83,15 @@
     virtual status_t createStream(sp<ANativeWindow> consumer,
             uint32_t width, uint32_t height, int format, size_t size,
             int *id);
+    virtual status_t createInputStream(
+            uint32_t width, uint32_t height, int format,
+            int *id);
+    virtual status_t createZslStream(
+            uint32_t width, uint32_t height,
+            int depth,
+            /*out*/
+            int *id,
+            sp<camera3::Camera3ZslStream>* zslStream);
     virtual status_t createReprocessStreamFromStream(int outputId, int *id);
 
     virtual status_t getStreamInfo(int id,
@@ -133,14 +143,15 @@
     String8                    mErrorCause;
 
     // Mapping of stream IDs to stream instances
-    typedef KeyedVector<int, sp<camera3::Camera3OutputStream> > StreamSet;
+    typedef KeyedVector<int, sp<camera3::Camera3OutputStreamInterface> >
+            StreamSet;
 
     StreamSet                  mOutputStreams;
     sp<camera3::Camera3Stream> mInputStream;
     int                        mNextStreamId;
 
     // Need to hold on to stream references until configure completes.
-    Vector<sp<camera3::Camera3Stream> > mDeletedStreams;
+    Vector<sp<camera3::Camera3StreamInterface> > mDeletedStreams;
 
     /**** End scope for mLock ****/
 
@@ -148,7 +159,8 @@
       public:
         CameraMetadata                      mSettings;
         sp<camera3::Camera3Stream>          mInputStream;
-        Vector<sp<camera3::Camera3Stream> > mOutputStreams;
+        Vector<sp<camera3::Camera3OutputStreamInterface> >
+                                            mOutputStreams;
     };
     typedef List<sp<CaptureRequest> > RequestList;
 
diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp
index 2db5224..cdeb92e 100644
--- a/services/camera/libcameraservice/CameraService.cpp
+++ b/services/camera/libcameraservice/CameraService.cpp
@@ -793,6 +793,7 @@
 
 // tear down the client
 CameraService::Client::~Client() {
+    ALOGV("~Client");
     mDestructionStarted = true;
 
     mCameraService->releaseSound();
@@ -820,10 +821,12 @@
 }
 
 CameraService::BasicClient::~BasicClient() {
+    ALOGV("~BasicClient");
     mDestructionStarted = true;
 }
 
 void CameraService::BasicClient::disconnect() {
+    ALOGV("BasicClient::disconnect");
     mCameraService->removeClientByRemote(mRemoteBinder);
     // client shouldn't be able to call into us anymore
     mClientPid = 0;
@@ -922,6 +925,7 @@
 
 // NOTE: function is idempotent
 void CameraService::Client::disconnect() {
+    ALOGV("Client::disconnect");
     BasicClient::disconnect();
     mCameraService->setCameraFree(mCameraId);
     mCameraService->updateStatus(ICameraServiceListener::STATUS_PRESENT,
diff --git a/services/camera/libcameraservice/ProCamera2Client.cpp b/services/camera/libcameraservice/ProCamera2Client.cpp
index 575b075..251fdab 100644
--- a/services/camera/libcameraservice/ProCamera2Client.cpp
+++ b/services/camera/libcameraservice/ProCamera2Client.cpp
@@ -203,6 +203,10 @@
 
     CameraMetadata metadata(request);
 
+    if (!enforceRequestPermissions(metadata)) {
+        return PERMISSION_DENIED;
+    }
+
     if (streaming) {
         return mDevice->setStreamingRequest(metadata);
     } else {
@@ -388,4 +392,55 @@
 
 }
 
+bool ProCamera2Client::enforceRequestPermissions(CameraMetadata& metadata) {
+
+    const int pid = IPCThreadState::self()->getCallingPid();
+    const int selfPid = getpid();
+    camera_metadata_entry_t entry;
+
+    /**
+     * Mix in important security-related default values:
+     * - android.led.transmit defaults to ON
+     */
+    CameraMetadata staticInfo = mDevice->info();
+    entry = staticInfo.find(ANDROID_LED_AVAILABLE_LEDS);
+    for (size_t i = 0; i < entry.count; ++i) {
+        uint8_t led = entry.data.u8[i];
+
+        switch (led) {
+            case ANDROID_LED_AVAILABLE_LEDS_TRANSMIT: {
+                uint8_t transmitDefault = ANDROID_LED_TRANSMIT_ON;
+                if (!metadata.exists(ANDROID_LED_TRANSMIT)) {
+                    metadata.update(ANDROID_LED_TRANSMIT,
+                                    &transmitDefault, 1);
+                }
+                break;
+            }
+        }
+    }
+
+    // Calls from our own process are always allowed.
+    if (pid == selfPid) {
+        return true;
+    }
+
+    /**
+     * Permission-check special fields in the request:
+     * - disabling android.led.transmit requires CAMERA_DISABLE_TRANSMIT_LED
+     */
+    entry = metadata.find(ANDROID_LED_TRANSMIT);
+    if (entry.count > 0 && entry.data.u8[0] != ANDROID_LED_TRANSMIT_ON) {
+        String16 permissionString =
+            String16("android.permission.CAMERA_DISABLE_TRANSMIT_LED");
+        if (!checkCallingPermission(permissionString)) {
+            const int uid = IPCThreadState::self()->getCallingUid();
+            ALOGE("Permission Denial: "
+                  "can't disable transmit LED pid=%d, uid=%d", pid, uid);
+            return false;
+        }
+    }
+
+    return true;
+}
+
 } // namespace android
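
The enforceRequestPermissions() addition above follows a simple pattern: mix a secure default into the request, then demand an extra permission only when the caller overrides that default. The standalone C++ sketch below illustrates the same idea outside the patch; the Settings struct and the hasPermission callback are hypothetical stand-ins for CameraMetadata and checkCallingPermission(), not code from this change.

#include <functional>
#include <optional>

struct Settings {
    std::optional<bool> ledTransmit;   // unset means "use the default"
};

// Returns true if the request may proceed as-is.
bool enforceLedTransmit(Settings &s, const std::function<bool()> &hasPermission) {
    if (!s.ledTransmit.has_value()) {
        s.ledTransmit = true;          // mix in the secure default: LED on
    }
    if (!*s.ledTransmit) {
        return hasPermission();        // turning the LED off needs the permission
    }
    return true;                       // leaving the LED on is always allowed
}

int main() {
    Settings s;                        // caller set nothing explicitly
    // Default is applied, so the request is allowed even without the permission.
    return enforceLedTransmit(s, [] { return false; }) ? 0 : 1;
}
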
diff --git a/services/camera/libcameraservice/ProCamera2Client.h b/services/camera/libcameraservice/ProCamera2Client.h
index 1dec263..faee9f9 100644
--- a/services/camera/libcameraservice/ProCamera2Client.h
+++ b/services/camera/libcameraservice/ProCamera2Client.h
@@ -110,6 +110,7 @@
     static const int32_t FRAME_PROCESSOR_LISTENER_MAX_ID = 0x7fffffffL;
 
     /** Utility members */
+    bool enforceRequestPermissions(CameraMetadata& metadata);
 
     // Whether or not we have an exclusive lock on the device
     // - if no we can't modify the request queue.
diff --git a/services/camera/libcameraservice/camera2/CallbackProcessor.cpp b/services/camera/libcameraservice/camera2/CallbackProcessor.cpp
index 30c14ef..dd37283 100644
--- a/services/camera/libcameraservice/camera2/CallbackProcessor.cpp
+++ b/services/camera/libcameraservice/camera2/CallbackProcessor.cpp
@@ -30,9 +30,11 @@
 namespace android {
 namespace camera2 {
 
-CallbackProcessor::CallbackProcessor(wp<Camera2Client> client):
+CallbackProcessor::CallbackProcessor(sp<Camera2Client> client):
         Thread(false),
         mClient(client),
+        mDevice(client->getCameraDevice()),
+        mId(client->getCameraId()),
         mCallbackAvailable(false),
         mCallbackStreamId(NO_STREAM) {
 }
@@ -56,9 +58,11 @@
 
     Mutex::Autolock l(mInputMutex);
 
-    sp<Camera2Client> client = mClient.promote();
-    if (client == 0) return OK;
-    sp<CameraDeviceBase> device = client->getCameraDevice();
+    sp<CameraDeviceBase> device = mDevice.promote();
+    if (device == 0) {
+        ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId);
+        return INVALID_OPERATION;
+    }
 
     if (mCallbackConsumer == 0) {
         // Create CPU buffer queue endpoint
@@ -76,7 +80,7 @@
                 &currentWidth, &currentHeight, &currentFormat);
         if (res != OK) {
             ALOGE("%s: Camera %d: Error querying callback output stream info: "
-                    "%s (%d)", __FUNCTION__, client->getCameraId(),
+                    "%s (%d)", __FUNCTION__, mId,
                     strerror(-res), res);
             return res;
         }
@@ -87,11 +91,11 @@
             // assuming that all existing use of old callback stream is
             // completed.
             ALOGV("%s: Camera %d: Deleting stream %d since the buffer dimensions changed",
-                __FUNCTION__, client->getCameraId(), mCallbackStreamId);
+                __FUNCTION__, mId, mCallbackStreamId);
             res = device->deleteStream(mCallbackStreamId);
             if (res != OK) {
                 ALOGE("%s: Camera %d: Unable to delete old output stream "
-                        "for callbacks: %s (%d)", __FUNCTION__, client->getCameraId(),
+                        "for callbacks: %s (%d)", __FUNCTION__, mId,
                         strerror(-res), res);
                 return res;
             }
@@ -108,7 +112,7 @@
                 params.previewFormat, 0, &mCallbackStreamId);
         if (res != OK) {
             ALOGE("%s: Camera %d: Can't create output stream for callbacks: "
-                    "%s (%d)", __FUNCTION__, client->getCameraId(),
+                    "%s (%d)", __FUNCTION__, mId,
                     strerror(-res), res);
             return res;
         }
@@ -119,15 +123,24 @@
 
 status_t CallbackProcessor::deleteStream() {
     ATRACE_CALL();
+    sp<CameraDeviceBase> device;
 
-    Mutex::Autolock l(mInputMutex);
+    {
+        Mutex::Autolock l(mInputMutex);
 
-    if (mCallbackStreamId != NO_STREAM) {
-        sp<Camera2Client> client = mClient.promote();
-        if (client == 0) return OK;
-        sp<CameraDeviceBase> device = client->getCameraDevice();
+        if (mCallbackStreamId == NO_STREAM) {
+            return OK;
+        }
+        device = mDevice.promote();
+        if (device == 0) {
+            ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId);
+            return INVALID_OPERATION;
+        }
+    }
+    device->deleteStream(mCallbackStreamId);
 
-        device->deleteStream(mCallbackStreamId);
+    {
+        Mutex::Autolock l(mInputMutex);
 
         mCallbackHeap.clear();
         mCallbackWindow.clear();
@@ -161,13 +174,32 @@
 
     do {
         sp<Camera2Client> client = mClient.promote();
-        if (client == 0) return false;
-        res = processNewCallback(client);
+        if (client == 0) {
+            res = discardNewCallback();
+        } else {
+            res = processNewCallback(client);
+        }
     } while (res == OK);
 
     return true;
 }
 
+status_t CallbackProcessor::discardNewCallback() {
+    ATRACE_CALL();
+    status_t res;
+    CpuConsumer::LockedBuffer imgBuffer;
+    res = mCallbackConsumer->lockNextBuffer(&imgBuffer);
+    if (res != OK) {
+        if (res != BAD_VALUE) {
+            ALOGE("%s: Camera %d: Error receiving next callback buffer: "
+                    "%s (%d)", __FUNCTION__, mId, strerror(-res), res);
+        }
+        return res;
+    }
+    mCallbackConsumer->unlockBuffer(imgBuffer);
+    return OK;
+}
+
 status_t CallbackProcessor::processNewCallback(sp<Camera2Client> &client) {
     ATRACE_CALL();
     status_t res;
@@ -181,12 +213,12 @@
     if (res != OK) {
         if (res != BAD_VALUE) {
             ALOGE("%s: Camera %d: Error receiving next callback buffer: "
-                    "%s (%d)", __FUNCTION__, client->getCameraId(), strerror(-res), res);
+                    "%s (%d)", __FUNCTION__, mId, strerror(-res), res);
         }
         return res;
     }
     ALOGV("%s: Camera %d: Preview callback available", __FUNCTION__,
-            client->getCameraId());
+            mId);
 
     {
         SharedParameters::Lock l(client->getParameters());
@@ -195,7 +227,7 @@
                 && l.mParameters.state != Parameters::RECORD
                 && l.mParameters.state != Parameters::VIDEO_SNAPSHOT) {
             ALOGV("%s: Camera %d: No longer streaming",
-                    __FUNCTION__, client->getCameraId());
+                    __FUNCTION__, mId);
             mCallbackConsumer->unlockBuffer(imgBuffer);
             return OK;
         }
@@ -216,7 +248,7 @@
 
         if (imgBuffer.format != l.mParameters.previewFormat) {
             ALOGE("%s: Camera %d: Unexpected format for callback: "
-                    "%x, expected %x", __FUNCTION__, client->getCameraId(),
+                    "%x, expected %x", __FUNCTION__, mId,
                     imgBuffer.format, l.mParameters.previewFormat);
             mCallbackConsumer->unlockBuffer(imgBuffer);
             return INVALID_OPERATION;
@@ -241,7 +273,7 @@
                 "Camera2Client::CallbackHeap");
         if (mCallbackHeap->mHeap->getSize() == 0) {
             ALOGE("%s: Camera %d: Unable to allocate memory for callbacks",
-                    __FUNCTION__, client->getCameraId());
+                    __FUNCTION__, mId);
             mCallbackConsumer->unlockBuffer(imgBuffer);
             return INVALID_OPERATION;
         }
@@ -252,7 +284,7 @@
 
     if (mCallbackHeapFree == 0) {
         ALOGE("%s: Camera %d: No free callback buffers, dropping frame",
-                __FUNCTION__, client->getCameraId());
+                __FUNCTION__, mId);
         mCallbackConsumer->unlockBuffer(imgBuffer);
         return OK;
     }
@@ -282,7 +314,7 @@
             l(client->mSharedCameraCallbacks);
         if (l.mRemoteCallback != 0) {
             ALOGV("%s: Camera %d: Invoking client data callback",
-                    __FUNCTION__, client->getCameraId());
+                    __FUNCTION__, mId);
             l.mRemoteCallback->dataCallback(CAMERA_MSG_PREVIEW_FRAME,
                     mCallbackHeap->mBuffers[heapIdx], NULL);
         }
diff --git a/services/camera/libcameraservice/camera2/CallbackProcessor.h b/services/camera/libcameraservice/camera2/CallbackProcessor.h
index e68bb75..1c40a03 100644
--- a/services/camera/libcameraservice/camera2/CallbackProcessor.h
+++ b/services/camera/libcameraservice/camera2/CallbackProcessor.h
@@ -30,6 +30,7 @@
 namespace android {
 
 class Camera2Client;
+class CameraDeviceBase;
 
 namespace camera2 {
 
@@ -39,7 +40,7 @@
 class CallbackProcessor:
             public Thread, public CpuConsumer::FrameAvailableListener {
   public:
-    CallbackProcessor(wp<Camera2Client> client);
+    CallbackProcessor(sp<Camera2Client> client);
     ~CallbackProcessor();
 
     void onFrameAvailable();
@@ -52,6 +53,8 @@
   private:
     static const nsecs_t kWaitDuration = 10000000; // 10 ms
     wp<Camera2Client> mClient;
+    wp<CameraDeviceBase> mDevice;
+    int mId;
 
     mutable Mutex mInputMutex;
     bool mCallbackAvailable;
@@ -72,7 +75,8 @@
     virtual bool threadLoop();
 
     status_t processNewCallback(sp<Camera2Client> &client);
-
+    // Used when shutting down
+    status_t discardNewCallback();
 };
 
 
diff --git a/services/camera/libcameraservice/camera2/CaptureSequencer.cpp b/services/camera/libcameraservice/camera2/CaptureSequencer.cpp
index ee03329..266e516 100644
--- a/services/camera/libcameraservice/camera2/CaptureSequencer.cpp
+++ b/services/camera/libcameraservice/camera2/CaptureSequencer.cpp
@@ -27,6 +27,7 @@
 #include "../Camera2Device.h"
 #include "../Camera2Client.h"
 #include "Parameters.h"
+#include "ZslProcessorInterface.h"
 
 namespace android {
 namespace camera2 {
@@ -54,7 +55,7 @@
     ALOGV("%s: Exit", __FUNCTION__);
 }
 
-void CaptureSequencer::setZslProcessor(wp<ZslProcessor> processor) {
+void CaptureSequencer::setZslProcessor(wp<ZslProcessorInterface> processor) {
     Mutex::Autolock l(mInputMutex);
     mZslProcessor = processor;
 }
@@ -265,8 +266,10 @@
                 res = INVALID_OPERATION;
         }
     }
-    sp<ZslProcessor> processor = mZslProcessor.promote();
+    sp<ZslProcessorInterface> processor = mZslProcessor.promote();
     if (processor != 0) {
+        ALOGV("%s: Memory optimization, clearing ZSL queue",
+              __FUNCTION__);
         processor->clearZslQueue();
     }
 
@@ -324,7 +327,7 @@
         sp<Camera2Client> &client) {
     ALOGV("%s", __FUNCTION__);
     status_t res;
-    sp<ZslProcessor> processor = mZslProcessor.promote();
+    sp<ZslProcessorInterface> processor = mZslProcessor.promote();
     if (processor == 0) {
         ALOGE("%s: No ZSL queue to use!", __FUNCTION__);
         return DONE;
diff --git a/services/camera/libcameraservice/camera2/CaptureSequencer.h b/services/camera/libcameraservice/camera2/CaptureSequencer.h
index 7db8007..76750aa 100644
--- a/services/camera/libcameraservice/camera2/CaptureSequencer.h
+++ b/services/camera/libcameraservice/camera2/CaptureSequencer.h
@@ -33,7 +33,7 @@
 
 namespace camera2 {
 
-class ZslProcessor;
+class ZslProcessorInterface;
 class BurstCapture;
 
 /**
@@ -48,7 +48,7 @@
     ~CaptureSequencer();
 
     // Get reference to the ZslProcessor, which holds the ZSL buffers and frames
-    void setZslProcessor(wp<ZslProcessor> processor);
+    void setZslProcessor(wp<ZslProcessorInterface> processor);
 
     // Begin still image capture
     status_t startCapture(int msgType);
@@ -105,7 +105,7 @@
     static const int kMaxTimeoutsForCaptureEnd    = 40;  // 4 sec
 
     wp<Camera2Client> mClient;
-    wp<ZslProcessor> mZslProcessor;
+    wp<ZslProcessorInterface> mZslProcessor;
     sp<BurstCapture> mBurstCapture;
 
     enum CaptureState {
diff --git a/services/camera/libcameraservice/camera2/JpegProcessor.cpp b/services/camera/libcameraservice/camera2/JpegProcessor.cpp
index 286fac4..01d7f9c 100644
--- a/services/camera/libcameraservice/camera2/JpegProcessor.cpp
+++ b/services/camera/libcameraservice/camera2/JpegProcessor.cpp
@@ -35,11 +35,12 @@
 namespace camera2 {
 
 JpegProcessor::JpegProcessor(
-    wp<Camera2Client> client,
+    sp<Camera2Client> client,
     wp<CaptureSequencer> sequencer):
         Thread(false),
-        mClient(client),
+        mDevice(client->getCameraDevice()),
         mSequencer(sequencer),
+        mId(client->getCameraId()),
         mCaptureAvailable(false),
         mCaptureStreamId(NO_STREAM) {
 }
@@ -64,16 +65,18 @@
 
     Mutex::Autolock l(mInputMutex);
 
-    sp<Camera2Client> client = mClient.promote();
-    if (client == 0) return OK;
-    sp<CameraDeviceBase> device = client->getCameraDevice();
+    sp<CameraDeviceBase> device = mDevice.promote();
+    if (device == 0) {
+        ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId);
+        return INVALID_OPERATION;
+    }
 
     // Find out buffer size for JPEG
     camera_metadata_ro_entry_t maxJpegSize =
             params.staticInfo(ANDROID_JPEG_MAX_SIZE);
     if (maxJpegSize.count == 0) {
         ALOGE("%s: Camera %d: Can't find ANDROID_JPEG_MAX_SIZE!",
-                __FUNCTION__, client->getCameraId());
+                __FUNCTION__, mId);
         return INVALID_OPERATION;
     }
 
@@ -89,7 +92,7 @@
                                        "Camera2Client::CaptureHeap");
         if (mCaptureHeap->getSize() == 0) {
             ALOGE("%s: Camera %d: Unable to allocate memory for capture",
-                    __FUNCTION__, client->getCameraId());
+                    __FUNCTION__, mId);
             return NO_MEMORY;
         }
     }
@@ -102,18 +105,18 @@
         if (res != OK) {
             ALOGE("%s: Camera %d: Error querying capture output stream info: "
                     "%s (%d)", __FUNCTION__,
-                    client->getCameraId(), strerror(-res), res);
+                    mId, strerror(-res), res);
             return res;
         }
         if (currentWidth != (uint32_t)params.pictureWidth ||
                 currentHeight != (uint32_t)params.pictureHeight) {
             ALOGV("%s: Camera %d: Deleting stream %d since the buffer dimensions changed",
-                __FUNCTION__, client->getCameraId(), mCaptureStreamId);
+                __FUNCTION__, mId, mCaptureStreamId);
             res = device->deleteStream(mCaptureStreamId);
             if (res != OK) {
                 ALOGE("%s: Camera %d: Unable to delete old output stream "
                         "for capture: %s (%d)", __FUNCTION__,
-                        client->getCameraId(), strerror(-res), res);
+                        mId, strerror(-res), res);
                 return res;
             }
             mCaptureStreamId = NO_STREAM;
@@ -128,7 +131,7 @@
                 &mCaptureStreamId);
         if (res != OK) {
             ALOGE("%s: Camera %d: Can't create output stream for capture: "
-                    "%s (%d)", __FUNCTION__, client->getCameraId(),
+                    "%s (%d)", __FUNCTION__, mId,
                     strerror(-res), res);
             return res;
         }
@@ -143,9 +146,11 @@
     Mutex::Autolock l(mInputMutex);
 
     if (mCaptureStreamId != NO_STREAM) {
-        sp<Camera2Client> client = mClient.promote();
-        if (client == 0) return OK;
-        sp<CameraDeviceBase> device = client->getCameraDevice();
+        sp<CameraDeviceBase> device = mDevice.promote();
+        if (device == 0) {
+            ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId);
+            return INVALID_OPERATION;
+        }
 
         device->deleteStream(mCaptureStreamId);
 
@@ -180,15 +185,13 @@
     }
 
     do {
-        sp<Camera2Client> client = mClient.promote();
-        if (client == 0) return false;
-        res = processNewCapture(client);
+        res = processNewCapture();
     } while (res == OK);
 
     return true;
 }
 
-status_t JpegProcessor::processNewCapture(sp<Camera2Client> &client) {
+status_t JpegProcessor::processNewCapture() {
     ATRACE_CALL();
     status_t res;
     sp<Camera2Heap> captureHeap;
@@ -200,17 +203,17 @@
         if (res != BAD_VALUE) {
             ALOGE("%s: Camera %d: Error receiving still image buffer: "
                     "%s (%d)", __FUNCTION__,
-                    client->getCameraId(), strerror(-res), res);
+                    mId, strerror(-res), res);
         }
         return res;
     }
 
     ALOGV("%s: Camera %d: Still capture available", __FUNCTION__,
-            client->getCameraId());
+            mId);
 
     if (imgBuffer.format != HAL_PIXEL_FORMAT_BLOB) {
         ALOGE("%s: Camera %d: Unexpected format for still image: "
-                "%x, expected %x", __FUNCTION__, client->getCameraId(),
+                "%x, expected %x", __FUNCTION__, mId,
                 imgBuffer.format,
                 HAL_PIXEL_FORMAT_BLOB);
         mCaptureConsumer->unlockBuffer(imgBuffer);
diff --git a/services/camera/libcameraservice/camera2/JpegProcessor.h b/services/camera/libcameraservice/camera2/JpegProcessor.h
index 74f4738..a38611c 100644
--- a/services/camera/libcameraservice/camera2/JpegProcessor.h
+++ b/services/camera/libcameraservice/camera2/JpegProcessor.h
@@ -29,6 +29,7 @@
 namespace android {
 
 class Camera2Client;
+class CameraDeviceBase;
 class MemoryHeapBase;
 
 namespace camera2 {
@@ -41,7 +42,7 @@
 class JpegProcessor:
             public Thread, public CpuConsumer::FrameAvailableListener {
   public:
-    JpegProcessor(wp<Camera2Client> client, wp<CaptureSequencer> sequencer);
+    JpegProcessor(sp<Camera2Client> client, wp<CaptureSequencer> sequencer);
     ~JpegProcessor();
 
     // CpuConsumer listener implementation
@@ -54,8 +55,9 @@
     void dump(int fd, const Vector<String16>& args) const;
   private:
     static const nsecs_t kWaitDuration = 10000000; // 10 ms
-    wp<Camera2Client> mClient;
+    wp<CameraDeviceBase> mDevice;
     wp<CaptureSequencer> mSequencer;
+    int mId;
 
     mutable Mutex mInputMutex;
     bool mCaptureAvailable;
@@ -72,7 +74,7 @@
 
     virtual bool threadLoop();
 
-    status_t processNewCapture(sp<Camera2Client> &client);
+    status_t processNewCapture();
     size_t findJpegSize(uint8_t* jpegBuffer, size_t maxSize);
 
 };
diff --git a/services/camera/libcameraservice/camera2/Parameters.cpp b/services/camera/libcameraservice/camera2/Parameters.cpp
index d13fe8b..b26cd09 100644
--- a/services/camera/libcameraservice/camera2/Parameters.cpp
+++ b/services/camera/libcameraservice/camera2/Parameters.cpp
@@ -659,15 +659,13 @@
     float minFocalLength = availableFocalLengths.data.f[0];
     params.setFloat(CameraParameters::KEY_FOCAL_LENGTH, minFocalLength);
 
-    camera_metadata_ro_entry_t sensorSize =
-        staticInfo(ANDROID_SENSOR_INFO_PHYSICAL_SIZE, 2, 2);
-    if (!sensorSize.count) return NO_INIT;
+    float horizFov, vertFov;
+    res = calculatePictureFovs(&horizFov, &vertFov);
+    if (res != OK) {
+        ALOGE("%s: Can't calculate field of views!", __FUNCTION__);
+        return res;
+    }
 
-    // The fields of view here assume infinity focus, maximum wide angle
-    float horizFov = 180 / M_PI *
-            2 * atanf(sensorSize.data.f[0] / (2 * minFocalLength));
-    float vertFov  = 180 / M_PI *
-            2 * atanf(sensorSize.data.f[1] / (2 * minFocalLength));
     params.setFloat(CameraParameters::KEY_HORIZONTAL_VIEW_ANGLE, horizFov);
     params.setFloat(CameraParameters::KEY_VERTICAL_VIEW_ANGLE, vertFov);
 
@@ -861,6 +859,10 @@
         staticInfo(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE);
     bool fixedLens = (minFocusDistance.data.f[0] == 0);
 
+    camera_metadata_ro_entry_t availableFocalLengths =
+        staticInfo(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS);
+    if (!availableFocalLengths.count) return NO_INIT;
+
     if (sceneModeOverrides.count > 0) {
         // sceneModeOverrides is defined to have 3 entries for each scene mode,
         // which are AE, AWB, and AF override modes the HAL wants for that scene
@@ -928,6 +930,16 @@
     fastInfo.arrayHeight = arrayHeight;
     fastInfo.bestFaceDetectMode = bestFaceDetectMode;
     fastInfo.maxFaces = maxFaces;
+
+    // Find the smallest (widest-angle) focal length to use as the basis of
+    // still picture FOV reporting.
+    fastInfo.minFocalLength = availableFocalLengths.data.f[0];
+    for (size_t i = 1; i < availableFocalLengths.count; i++) {
+        if (fastInfo.minFocalLength > availableFocalLengths.data.f[i]) {
+            fastInfo.minFocalLength = availableFocalLengths.data.f[i];
+        }
+    }
+
     return OK;
 }
 
@@ -952,10 +964,10 @@
 }
 
 camera_metadata_ro_entry_t Parameters::staticInfo(uint32_t tag,
-        size_t minCount, size_t maxCount) const {
+        size_t minCount, size_t maxCount, bool required) const {
     camera_metadata_ro_entry_t entry = info->find(tag);
 
-    if (CC_UNLIKELY( entry.count == 0 )) {
+    if (CC_UNLIKELY( entry.count == 0 ) && required) {
         const char* tagSection = get_camera_metadata_section_name(tag);
         if (tagSection == NULL) tagSection = "<unknown>";
         const char* tagName = get_camera_metadata_tag_name(tag);
@@ -1577,6 +1589,21 @@
 
     *this = validatedParams;
 
+    /** Update external parameters calculated from the internal ones */
+
+    // HORIZONTAL/VERTICAL FIELD OF VIEW
+    float horizFov, vertFov;
+    res = calculatePictureFovs(&horizFov, &vertFov);
+    if (res != OK) {
+        ALOGE("%s: Can't calculate FOVs", __FUNCTION__);
+        // continue so parameters are at least consistent
+    }
+    newParams.setFloat(CameraParameters::KEY_HORIZONTAL_VIEW_ANGLE,
+            horizFov);
+    newParams.setFloat(CameraParameters::KEY_VERTICAL_VIEW_ANGLE,
+            vertFov);
+    ALOGV("Current still picture FOV: %f x %f deg", horizFov, vertFov);
+
     // Need to flatten again in case of overrides
     paramsFlattened = newParams.flatten();
     params = newParams;
@@ -1588,6 +1615,34 @@
     ATRACE_CALL();
     status_t res;
 
+    /**
+     * Mix in important security-related default values:
+     * - android.led.transmit defaults to ON
+     */
+    camera_metadata_ro_entry_t entry = staticInfo(ANDROID_LED_AVAILABLE_LEDS,
+                                                  /*minCount*/0,
+                                                  /*maxCount*/0,
+                                                  /*required*/false);
+    for (size_t i = 0; i < entry.count; ++i) {
+        uint8_t led = entry.data.u8[i];
+
+        switch (led) {
+            // Transmit LED is unconditionally on when using
+            // the android.hardware.Camera API
+            case ANDROID_LED_AVAILABLE_LEDS_TRANSMIT: {
+                uint8_t transmitDefault = ANDROID_LED_TRANSMIT_ON;
+                res = request->update(ANDROID_LED_TRANSMIT,
+                                      &transmitDefault, 1);
+                if (res != OK) return res;
+                break;
+            }
+        }
+    }
+
+    /**
+     * Construct metadata from parameters
+     */
+
     uint8_t metadataMode = ANDROID_REQUEST_METADATA_MODE_FULL;
     res = request->update(ANDROID_REQUEST_METADATA_MODE,
             &metadataMode, 1);
@@ -2218,7 +2273,7 @@
 
     CropRegion previewCrop = calculateCropRegion(CropRegion::OUTPUT_PREVIEW);
     ALOG_ASSERT(x < previewCrop.width, "Crop-relative X coordinate = '%d' "
-                    "is out of bounds (upper = %d)", x, previewCrop.width);
+                    "is out of bounds (upper = %f)", x, previewCrop.width);
 
     int ret = x + previewCrop.left;
 
@@ -2234,7 +2289,7 @@
 
     CropRegion previewCrop = calculateCropRegion(CropRegion::OUTPUT_PREVIEW);
     ALOG_ASSERT(y < previewCrop.height, "Crop-relative Y coordinate = '%d' is "
-                "out of bounds (upper = %d)", y, previewCrop.height);
+                "out of bounds (upper = %f)", y, previewCrop.height);
 
     int ret = y + previewCrop.top;
 
@@ -2440,6 +2495,90 @@
     return crop;
 }
 
+status_t Parameters::calculatePictureFovs(float *horizFov, float *vertFov)
+        const {
+    camera_metadata_ro_entry_t sensorSize =
+            staticInfo(ANDROID_SENSOR_INFO_PHYSICAL_SIZE, 2, 2);
+    if (!sensorSize.count) return NO_INIT;
+
+    camera_metadata_ro_entry_t availableFocalLengths =
+            staticInfo(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS);
+    if (!availableFocalLengths.count) return NO_INIT;
+
+    float arrayAspect = static_cast<float>(fastInfo.arrayWidth) /
+            fastInfo.arrayHeight;
+    float stillAspect = static_cast<float>(pictureWidth) / pictureHeight;
+    ALOGV("Array aspect: %f, still aspect: %f", arrayAspect, stillAspect);
+
+    // The crop factors from the full sensor array to the still picture crop
+    // region
+    float horizCropFactor = 1.f;
+    float vertCropFactor = 1.f;
+
+    /**
+     * Need to calculate the still image field of view based on the total pixel
+     * array field of view, and the relative aspect ratios of the pixel array
+     * and output streams.
+     *
+     * Special treatment for quirky definition of crop region and relative
+     * stream cropping.
+     */
+    if (quirks.meteringCropRegion) {
+        /**
+         * All streams are the same in height, so narrower aspect ratios will
+         * get cropped on the sides.  First find the largest (widest) aspect
+         * ratio, then calculate the crop of the still FOV based on that.
+         */
+        float cropAspect = arrayAspect;
+        float aspects[] = {
+            stillAspect,
+            static_cast<float>(previewWidth) / previewHeight,
+            static_cast<float>(videoWidth) / videoHeight
+        };
+        for (size_t i = 0; i < sizeof(aspects)/sizeof(aspects[0]); i++) {
+            if (cropAspect < aspects[i]) cropAspect = aspects[i];
+        }
+        ALOGV("Widest crop aspect: %f", cropAspect);
+        // Horizontal crop of the still is based on fitting within the
+        // widest aspect ratio
+        horizCropFactor = stillAspect / cropAspect;
+        // Vertical crop is a function of the array aspect ratio and the
+        // widest aspect ratio.
+        vertCropFactor = arrayAspect / cropAspect;
+    } else {
+        /**
+         * The crop factors are a function only of the still/array relative
+         * aspect ratios. Since each stream will maximize its area within the
+         * crop region, and for FOV we assume a full-sensor crop region, we
+         * only ever crop the FOV vertically or horizontally, never both.
+         */
+        horizCropFactor = (arrayAspect > stillAspect) ?
+                (stillAspect / arrayAspect) : 1.f;
+        vertCropFactor = (arrayAspect < stillAspect) ?
+                (arrayAspect / stillAspect) : 1.f;
+    }
+    ALOGV("Horiz crop factor: %f, vert crop fact: %f",
+            horizCropFactor, vertCropFactor);
+    /**
+     * The basic field of view formula is:
+     *   angle of view = 2 * arctan( d / (2 * f) )
+     * where d is the physical sensor dimension of interest and f is the
+     * focal length. This only holds for rectilinear lenses, focused at
+     * distances >> f, etc.
+     */
+    if (horizFov != NULL) {
+        *horizFov = 180 / M_PI * 2 *
+                atanf(horizCropFactor * sensorSize.data.f[0] /
+                        (2 * fastInfo.minFocalLength));
+    }
+    if (vertFov != NULL) {
+        *vertFov = 180 / M_PI * 2 *
+                atanf(vertCropFactor * sensorSize.data.f[1] /
+                        (2 * fastInfo.minFocalLength));
+    }
+    return OK;
+}
+
 int32_t Parameters::fpsFromRange(int32_t /*min*/, int32_t max) const {
     return max;
 }
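
As a sanity check on calculatePictureFovs() above, here is the comment's formula worked through with made-up numbers: a 4.6 mm wide pixel array, a 3.5 mm minimum focal length, and a full-sensor crop (none of these values come from a real HAL).

#include <cmath>
#include <cstdio>

int main() {
    const double d = 4.6;     // physical sensor width in mm (assumed value)
    const double f = 3.5;     // widest-angle focal length in mm (assumed value)
    const double crop = 1.0;  // crop factor from array to still (1.0 = no crop)

    // angle of view = 2 * arctan(crop * d / (2 * f)), converted to degrees
    double fov = 180.0 / M_PI * 2.0 * std::atan(crop * d / (2.0 * f));
    std::printf("Horizontal field of view: %.1f deg\n", fov);  // about 66.6 deg
    return 0;
}

For a 16:9 still on a 4:3 array, the non-quirk path above leaves the horizontal crop factor at 1.0 and shrinks the vertical one to (4/3) / (16/9) = 0.75, so only the vertical FOV narrows.
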
diff --git a/services/camera/libcameraservice/camera2/Parameters.h b/services/camera/libcameraservice/camera2/Parameters.h
index fe3ec1d..6d85037 100644
--- a/services/camera/libcameraservice/camera2/Parameters.h
+++ b/services/camera/libcameraservice/camera2/Parameters.h
@@ -183,6 +183,7 @@
             }
         };
         DefaultKeyedVector<uint8_t, OverrideModes> sceneModeOverrides;
+        float minFocalLength;
     } fastInfo;
 
     // Quirks information; these are short-lived flags to enable workarounds for
@@ -213,7 +214,7 @@
     // max/minCount means to do no bounds check in that direction. In case of
     // error, the entry data pointer is null and the count is 0.
     camera_metadata_ro_entry_t staticInfo(uint32_t tag,
-            size_t minCount=0, size_t maxCount=0) const;
+            size_t minCount=0, size_t maxCount=0, bool required=true) const;
 
     // Validate and update camera parameters based on new settings
     status_t set(const String8 &paramString);
@@ -243,6 +244,9 @@
     };
     CropRegion calculateCropRegion(CropRegion::Outputs outputs) const;
 
+    // Calculate the field of view of the high-resolution JPEG capture
+    status_t calculatePictureFovs(float *horizFov, float *vertFov) const;
+
     // Static methods for debugging and converting between camera1 and camera2
     // parameters
 
diff --git a/services/camera/libcameraservice/camera2/StreamingProcessor.cpp b/services/camera/libcameraservice/camera2/StreamingProcessor.cpp
index fbc5b93..c36cf87 100644
--- a/services/camera/libcameraservice/camera2/StreamingProcessor.cpp
+++ b/services/camera/libcameraservice/camera2/StreamingProcessor.cpp
@@ -31,8 +31,10 @@
 namespace android {
 namespace camera2 {
 
-StreamingProcessor::StreamingProcessor(wp<Camera2Client> client):
+StreamingProcessor::StreamingProcessor(sp<Camera2Client> client):
         mClient(client),
+        mDevice(client->getCameraDevice()),
+        mId(client->getCameraId()),
         mActiveRequest(NONE),
         mPreviewRequestId(Camera2Client::kPreviewRequestIdStart),
         mPreviewStreamId(NO_STREAM),
@@ -40,7 +42,6 @@
         mRecordingStreamId(NO_STREAM),
         mRecordingHeapCount(kDefaultRecordingHeapCount)
 {
-
 }
 
 StreamingProcessor::~StreamingProcessor() {
@@ -70,16 +71,19 @@
 status_t StreamingProcessor::updatePreviewRequest(const Parameters &params) {
     ATRACE_CALL();
     status_t res;
-    sp<Camera2Client> client = mClient.promote();
-    if (client == 0) return INVALID_OPERATION;
+    sp<CameraDeviceBase> device = mDevice.promote();
+    if (device == 0) {
+        ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId);
+        return INVALID_OPERATION;
+    }
 
     Mutex::Autolock m(mMutex);
     if (mPreviewRequest.entryCount() == 0) {
-        res = client->getCameraDevice()->createDefaultRequest(CAMERA2_TEMPLATE_PREVIEW,
+        res = device->createDefaultRequest(CAMERA2_TEMPLATE_PREVIEW,
                 &mPreviewRequest);
         if (res != OK) {
             ALOGE("%s: Camera %d: Unable to create default preview request: "
-                    "%s (%d)", __FUNCTION__, client->getCameraId(), strerror(-res), res);
+                    "%s (%d)", __FUNCTION__, mId, strerror(-res), res);
             return res;
         }
     }
@@ -87,7 +91,7 @@
     res = params.updateRequest(&mPreviewRequest);
     if (res != OK) {
         ALOGE("%s: Camera %d: Unable to update common entries of preview "
-                "request: %s (%d)", __FUNCTION__, client->getCameraId(),
+                "request: %s (%d)", __FUNCTION__, mId,
                 strerror(-res), res);
         return res;
     }
@@ -96,7 +100,7 @@
             &mPreviewRequestId, 1);
     if (res != OK) {
         ALOGE("%s: Camera %d: Unable to update request id for preview: %s (%d)",
-                __FUNCTION__, client->getCameraId(), strerror(-res), res);
+                __FUNCTION__, mId, strerror(-res), res);
         return res;
     }
 
@@ -108,9 +112,11 @@
     Mutex::Autolock m(mMutex);
 
     status_t res;
-    sp<Camera2Client> client = mClient.promote();
-    if (client == 0) return INVALID_OPERATION;
-    sp<CameraDeviceBase> device = client->getCameraDevice();
+    sp<CameraDeviceBase> device = mDevice.promote();
+    if (device == 0) {
+        ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId);
+        return INVALID_OPERATION;
+    }
 
     if (mPreviewStreamId != NO_STREAM) {
         // Check if stream parameters have to change
@@ -119,24 +125,24 @@
                 &currentWidth, &currentHeight, 0);
         if (res != OK) {
             ALOGE("%s: Camera %d: Error querying preview stream info: "
-                    "%s (%d)", __FUNCTION__, client->getCameraId(), strerror(-res), res);
+                    "%s (%d)", __FUNCTION__, mId, strerror(-res), res);
             return res;
         }
         if (currentWidth != (uint32_t)params.previewWidth ||
                 currentHeight != (uint32_t)params.previewHeight) {
             ALOGV("%s: Camera %d: Preview size switch: %d x %d -> %d x %d",
-                    __FUNCTION__, client->getCameraId(), currentWidth, currentHeight,
+                    __FUNCTION__, mId, currentWidth, currentHeight,
                     params.previewWidth, params.previewHeight);
             res = device->waitUntilDrained();
             if (res != OK) {
                 ALOGE("%s: Camera %d: Error waiting for preview to drain: "
-                        "%s (%d)", __FUNCTION__, client->getCameraId(), strerror(-res), res);
+                        "%s (%d)", __FUNCTION__, mId, strerror(-res), res);
                 return res;
             }
             res = device->deleteStream(mPreviewStreamId);
             if (res != OK) {
                 ALOGE("%s: Camera %d: Unable to delete old output stream "
-                        "for preview: %s (%d)", __FUNCTION__, client->getCameraId(),
+                        "for preview: %s (%d)", __FUNCTION__, mId,
                         strerror(-res), res);
                 return res;
             }
@@ -151,7 +157,7 @@
                 &mPreviewStreamId);
         if (res != OK) {
             ALOGE("%s: Camera %d: Unable to create preview stream: %s (%d)",
-                    __FUNCTION__, client->getCameraId(), strerror(-res), res);
+                    __FUNCTION__, mId, strerror(-res), res);
             return res;
         }
     }
@@ -160,7 +166,7 @@
             params.previewTransform);
     if (res != OK) {
         ALOGE("%s: Camera %d: Unable to set preview stream transform: "
-                "%s (%d)", __FUNCTION__, client->getCameraId(), strerror(-res), res);
+                "%s (%d)", __FUNCTION__, mId, strerror(-res), res);
         return res;
     }
 
@@ -174,12 +180,14 @@
     Mutex::Autolock m(mMutex);
 
     if (mPreviewStreamId != NO_STREAM) {
-        sp<Camera2Client> client = mClient.promote();
-        if (client == 0) return INVALID_OPERATION;
-        sp<CameraDeviceBase> device = client->getCameraDevice();
+        sp<CameraDeviceBase> device = mDevice.promote();
+        if (device == 0) {
+            ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId);
+            return INVALID_OPERATION;
+        }
 
         ALOGV("%s: for cameraId %d on streamId %d",
-            __FUNCTION__, client->getCameraId(), mPreviewStreamId);
+            __FUNCTION__, mId, mPreviewStreamId);
 
         res = device->waitUntilDrained();
         if (res != OK) {
@@ -206,11 +214,9 @@
 status_t StreamingProcessor::setRecordingBufferCount(size_t count) {
     ATRACE_CALL();
     // 32 is the current upper limit on the video buffer count for BufferQueue
-    sp<Camera2Client> client = mClient.promote();
-    if (client == 0) return INVALID_OPERATION;
     if (count > 32) {
         ALOGE("%s: Camera %d: Error setting %d as video buffer count value",
-                __FUNCTION__, client->getCameraId(), count);
+                __FUNCTION__, mId, count);
         return BAD_VALUE;
     }
 
@@ -233,15 +239,18 @@
     status_t res;
     Mutex::Autolock m(mMutex);
 
-    sp<Camera2Client> client = mClient.promote();
-    if (client == 0) return INVALID_OPERATION;
+    sp<CameraDeviceBase> device = mDevice.promote();
+    if (device == 0) {
+        ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId);
+        return INVALID_OPERATION;
+    }
 
     if (mRecordingRequest.entryCount() == 0) {
-        res = client->getCameraDevice()->createDefaultRequest(CAMERA2_TEMPLATE_VIDEO_RECORD,
+        res = device->createDefaultRequest(CAMERA2_TEMPLATE_VIDEO_RECORD,
                 &mRecordingRequest);
         if (res != OK) {
             ALOGE("%s: Camera %d: Unable to create default recording request:"
-                    " %s (%d)", __FUNCTION__, client->getCameraId(), strerror(-res), res);
+                    " %s (%d)", __FUNCTION__, mId, strerror(-res), res);
             return res;
         }
     }
@@ -249,7 +258,7 @@
     res = params.updateRequest(&mRecordingRequest);
     if (res != OK) {
         ALOGE("%s: Camera %d: Unable to update common entries of recording "
-                "request: %s (%d)", __FUNCTION__, client->getCameraId(),
+                "request: %s (%d)", __FUNCTION__, mId,
                 strerror(-res), res);
         return res;
     }
@@ -258,7 +267,7 @@
             &mRecordingRequestId, 1);
     if (res != OK) {
         ALOGE("%s: Camera %d: Unable to update request id for request: %s (%d)",
-                __FUNCTION__, client->getCameraId(), strerror(-res), res);
+                __FUNCTION__, mId, strerror(-res), res);
         return res;
     }
 
@@ -270,9 +279,11 @@
     status_t res;
     Mutex::Autolock m(mMutex);
 
-    sp<Camera2Client> client = mClient.promote();
-    if (client == 0) return INVALID_OPERATION;
-    sp<CameraDeviceBase> device = client->getCameraDevice();
+    sp<CameraDeviceBase> device = mDevice.promote();
+    if (device == 0) {
+        ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId);
+        return INVALID_OPERATION;
+    }
 
     if (mRecordingConsumer == 0) {
         // Create CPU buffer queue endpoint. We need one more buffer here so that we can
@@ -296,7 +307,7 @@
                 &currentWidth, &currentHeight, 0);
         if (res != OK) {
             ALOGE("%s: Camera %d: Error querying recording output stream info: "
-                    "%s (%d)", __FUNCTION__, client->getCameraId(),
+                    "%s (%d)", __FUNCTION__, mId,
                     strerror(-res), res);
             return res;
         }
@@ -307,7 +318,7 @@
             if (res != OK) {
                 ALOGE("%s: Camera %d: Unable to delete old output stream "
                         "for recording: %s (%d)", __FUNCTION__,
-                        client->getCameraId(), strerror(-res), res);
+                        mId, strerror(-res), res);
                 return res;
             }
             mRecordingStreamId = NO_STREAM;
@@ -321,7 +332,7 @@
                 CAMERA2_HAL_PIXEL_FORMAT_OPAQUE, 0, &mRecordingStreamId);
         if (res != OK) {
             ALOGE("%s: Camera %d: Can't create output stream for recording: "
-                    "%s (%d)", __FUNCTION__, client->getCameraId(),
+                    "%s (%d)", __FUNCTION__, mId,
                     strerror(-res), res);
             return res;
         }
@@ -337,9 +348,11 @@
     Mutex::Autolock m(mMutex);
 
     if (mRecordingStreamId != NO_STREAM) {
-        sp<Camera2Client> client = mClient.promote();
-        if (client == 0) return INVALID_OPERATION;
-        sp<CameraDeviceBase> device = client->getCameraDevice();
+        sp<CameraDeviceBase> device = mDevice.promote();
+        if (device == 0) {
+            ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId);
+            return INVALID_OPERATION;
+        }
 
         res = device->waitUntilDrained();
         if (res != OK) {
@@ -369,10 +382,13 @@
 
     if (type == NONE) return INVALID_OPERATION;
 
-    sp<Camera2Client> client = mClient.promote();
-    if (client == 0) return INVALID_OPERATION;
+    sp<CameraDeviceBase> device = mDevice.promote();
+    if (device == 0) {
+        ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId);
+        return INVALID_OPERATION;
+    }
 
-    ALOGV("%s: Camera %d: type = %d", __FUNCTION__, client->getCameraId(), type);
+    ALOGV("%s: Camera %d: type = %d", __FUNCTION__, mId, type);
 
     Mutex::Autolock m(mMutex);
 
@@ -384,22 +400,22 @@
         outputStreams);
     if (res != OK) {
         ALOGE("%s: Camera %d: Unable to set up preview request: %s (%d)",
-                __FUNCTION__, client->getCameraId(), strerror(-res), res);
+                __FUNCTION__, mId, strerror(-res), res);
         return res;
     }
 
     res = request.sort();
     if (res != OK) {
         ALOGE("%s: Camera %d: Error sorting preview request: %s (%d)",
-                __FUNCTION__, client->getCameraId(), strerror(-res), res);
+                __FUNCTION__, mId, strerror(-res), res);
         return res;
     }
 
-    res = client->getCameraDevice()->setStreamingRequest(request);
+    res = device->setStreamingRequest(request);
     if (res != OK) {
         ALOGE("%s: Camera %d: Unable to set preview request to start preview: "
                 "%s (%d)",
-                __FUNCTION__, client->getCameraId(), strerror(-res), res);
+                __FUNCTION__, mId, strerror(-res), res);
         return res;
     }
     mActiveRequest = type;
@@ -413,16 +429,19 @@
 
     Mutex::Autolock m(mMutex);
 
-    sp<Camera2Client> client = mClient.promote();
-    if (client == 0) return INVALID_OPERATION;
-    sp<CameraDeviceBase> device = client->getCameraDevice();
+    sp<CameraDeviceBase> device = mDevice.promote();
+    if (device == 0) {
+        ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId);
+        return INVALID_OPERATION;
+    }
 
     res = device->clearStreamingRequest();
     if (res != OK) {
         ALOGE("%s: Camera %d: Can't clear stream request: %s (%d)",
-                __FUNCTION__, client->getCameraId(), strerror(-res), res);
+                __FUNCTION__, mId, strerror(-res), res);
         return res;
     }
+
     mActiveRequest = NONE;
 
     return OK;
@@ -466,7 +485,18 @@
     nsecs_t timestamp;
 
     sp<Camera2Client> client = mClient.promote();
-    if (client == 0) return;
+    if (client == 0) {
+        // Discard frames during shutdown
+        BufferItemConsumer::BufferItem imgBuffer;
+        res = mRecordingConsumer->acquireBuffer(&imgBuffer);
+        if (res != OK) {
+            ALOGE("%s: Camera %d: Error receiving recording buffer: %s (%d)",
+                    __FUNCTION__, mId, strerror(-res), res);
+            return;
+        }
+        mRecordingConsumer->releaseBuffer(imgBuffer);
+        return;
+    }
 
     {
         /* acquire SharedParameters before mMutex so we don't dead lock
@@ -477,7 +507,7 @@
         res = mRecordingConsumer->acquireBuffer(&imgBuffer);
         if (res != OK) {
             ALOGE("%s: Camera %d: Error receiving recording buffer: %s (%d)",
-                    __FUNCTION__, client->getCameraId(), strerror(-res), res);
+                    __FUNCTION__, mId, strerror(-res), res);
             return;
         }
         timestamp = imgBuffer.mTimestamp;
@@ -490,7 +520,7 @@
                 l.mParameters.state != Parameters::VIDEO_SNAPSHOT) {
             ALOGV("%s: Camera %d: Discarding recording image buffers "
                     "received after recording done", __FUNCTION__,
-                    client->getCameraId());
+                    mId);
             mRecordingConsumer->releaseBuffer(imgBuffer);
             return;
         }
@@ -498,14 +528,14 @@
         if (mRecordingHeap == 0) {
             const size_t bufferSize = 4 + sizeof(buffer_handle_t);
             ALOGV("%s: Camera %d: Creating recording heap with %d buffers of "
-                    "size %d bytes", __FUNCTION__, client->getCameraId(),
+                    "size %d bytes", __FUNCTION__, mId,
                     mRecordingHeapCount, bufferSize);
 
             mRecordingHeap = new Camera2Heap(bufferSize, mRecordingHeapCount,
                     "Camera2Client::RecordingHeap");
             if (mRecordingHeap->mHeap->getSize() == 0) {
                 ALOGE("%s: Camera %d: Unable to allocate memory for recording",
-                        __FUNCTION__, client->getCameraId());
+                        __FUNCTION__, mId);
                 mRecordingConsumer->releaseBuffer(imgBuffer);
                 return;
             }
@@ -513,7 +543,7 @@
                 if (mRecordingBuffers[i].mBuf !=
                         BufferItemConsumer::INVALID_BUFFER_SLOT) {
                     ALOGE("%s: Camera %d: Non-empty recording buffers list!",
-                            __FUNCTION__, client->getCameraId());
+                            __FUNCTION__, mId);
                 }
             }
             mRecordingBuffers.clear();
@@ -526,7 +556,7 @@
 
         if ( mRecordingHeapFree == 0) {
             ALOGE("%s: Camera %d: No free recording buffers, dropping frame",
-                    __FUNCTION__, client->getCameraId());
+                    __FUNCTION__, mId);
             mRecordingConsumer->releaseBuffer(imgBuffer);
             return;
         }
@@ -536,7 +566,7 @@
         mRecordingHeapFree--;
 
         ALOGV("%s: Camera %d: Timestamp %lld",
-                __FUNCTION__, client->getCameraId(), timestamp);
+                __FUNCTION__, mId, timestamp);
 
         ssize_t offset;
         size_t size;
@@ -549,7 +579,7 @@
         *((uint32_t*)data) = type;
         *((buffer_handle_t*)(data + 4)) = imgBuffer.mGraphicBuffer->handle;
         ALOGV("%s: Camera %d: Sending out buffer_handle_t %p",
-                __FUNCTION__, client->getCameraId(),
+                __FUNCTION__, mId,
                 imgBuffer.mGraphicBuffer->handle);
         mRecordingBuffers.replaceAt(imgBuffer, heapIdx);
         recordingHeap = mRecordingHeap;
@@ -568,9 +598,6 @@
     ATRACE_CALL();
     status_t res;
 
-    sp<Camera2Client> client = mClient.promote();
-    if (client == 0) return;
-
     Mutex::Autolock m(mMutex);
     // Make sure this is for the current heap
     ssize_t offset;
@@ -578,7 +605,7 @@
     sp<IMemoryHeap> heap = mem->getMemory(&offset, &size);
     if (heap->getHeapID() != mRecordingHeap->mHeap->getHeapID()) {
         ALOGW("%s: Camera %d: Mismatched heap ID, ignoring release "
-                "(got %x, expected %x)", __FUNCTION__, client->getCameraId(),
+                "(got %x, expected %x)", __FUNCTION__, mId,
                 heap->getHeapID(), mRecordingHeap->mHeap->getHeapID());
         return;
     }
@@ -586,7 +613,7 @@
     uint32_t type = *(uint32_t*)data;
     if (type != kMetadataBufferTypeGrallocSource) {
         ALOGE("%s: Camera %d: Recording frame type invalid (got %x, expected %x)",
-                __FUNCTION__, client->getCameraId(), type,
+                __FUNCTION__, mId, type,
                 kMetadataBufferTypeGrallocSource);
         return;
     }
@@ -606,19 +633,19 @@
     }
     if (itemIndex == mRecordingBuffers.size()) {
         ALOGE("%s: Camera %d: Can't find buffer_handle_t %p in list of "
-                "outstanding buffers", __FUNCTION__, client->getCameraId(),
+                "outstanding buffers", __FUNCTION__, mId,
                 imgHandle);
         return;
     }
 
     ALOGV("%s: Camera %d: Freeing buffer_handle_t %p", __FUNCTION__,
-            client->getCameraId(), imgHandle);
+            mId, imgHandle);
 
     res = mRecordingConsumer->releaseBuffer(mRecordingBuffers[itemIndex]);
     if (res != OK) {
         ALOGE("%s: Camera %d: Unable to free recording frame "
                 "(buffer_handle_t: %p): %s (%d)", __FUNCTION__,
-                client->getCameraId(), imgHandle, strerror(-res), res);
+                mId, imgHandle, strerror(-res), res);
         return;
     }
     mRecordingBuffers.replaceAt(itemIndex);
diff --git a/services/camera/libcameraservice/camera2/StreamingProcessor.h b/services/camera/libcameraservice/camera2/StreamingProcessor.h
index e5732ad..643114e 100644
--- a/services/camera/libcameraservice/camera2/StreamingProcessor.h
+++ b/services/camera/libcameraservice/camera2/StreamingProcessor.h
@@ -27,6 +27,7 @@
 namespace android {
 
 class Camera2Client;
+class CameraDeviceBase;
 class IMemory;
 
 namespace camera2 {
@@ -38,7 +39,7 @@
  */
 class StreamingProcessor: public BufferItemConsumer::FrameAvailableListener {
   public:
-    StreamingProcessor(wp<Camera2Client> client);
+    StreamingProcessor(sp<Camera2Client> client);
     ~StreamingProcessor();
 
     status_t setPreviewWindow(sp<ANativeWindow> window);
@@ -86,6 +87,8 @@
     };
 
     wp<Camera2Client> mClient;
+    wp<CameraDeviceBase> mDevice;
+    int mId;
 
     StreamType mActiveRequest;
 
diff --git a/services/camera/libcameraservice/camera2/ZslProcessor.cpp b/services/camera/libcameraservice/camera2/ZslProcessor.cpp
index 769d9bd..94059cd 100644
--- a/services/camera/libcameraservice/camera2/ZslProcessor.cpp
+++ b/services/camera/libcameraservice/camera2/ZslProcessor.cpp
@@ -38,12 +38,14 @@
 namespace camera2 {
 
 ZslProcessor::ZslProcessor(
-    wp<Camera2Client> client,
+    sp<Camera2Client> client,
     wp<CaptureSequencer> sequencer):
         Thread(false),
         mState(RUNNING),
         mClient(client),
+        mDevice(client->getCameraDevice()),
         mSequencer(sequencer),
+        mId(client->getCameraId()),
         mZslBufferAvailable(false),
         mZslStreamId(NO_STREAM),
         mZslReprocessStreamId(NO_STREAM),
@@ -69,7 +71,8 @@
     }
 }
 
-void ZslProcessor::onFrameAvailable(int32_t /*frameId*/, const CameraMetadata &frame) {
+void ZslProcessor::onFrameAvailable(int32_t /*frameId*/,
+        const CameraMetadata &frame) {
     Mutex::Autolock l(mInputMutex);
     camera_metadata_ro_entry_t entry;
     entry = frame.find(ANDROID_SENSOR_TIMESTAMP);
@@ -113,8 +116,15 @@
     Mutex::Autolock l(mInputMutex);
 
     sp<Camera2Client> client = mClient.promote();
-    if (client == 0) return OK;
-    sp<CameraDeviceBase> device = client->getCameraDevice();
+    if (client == 0) {
+        ALOGE("%s: Camera %d: Client does not exist", __FUNCTION__, mId);
+        return INVALID_OPERATION;
+    }
+    sp<CameraDeviceBase> device = mDevice.promote();
+    if (device == 0) {
+        ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId);
+        return INVALID_OPERATION;
+    }
 
     if (mZslConsumer == 0) {
         // Create CPU buffer queue endpoint
@@ -136,7 +146,7 @@
         if (res != OK) {
             ALOGE("%s: Camera %d: Error querying capture output stream info: "
                     "%s (%d)", __FUNCTION__,
-                    client->getCameraId(), strerror(-res), res);
+                    mId, strerror(-res), res);
             return res;
         }
         if (currentWidth != (uint32_t)params.fastInfo.arrayWidth ||
@@ -145,16 +155,16 @@
             if (res != OK) {
                 ALOGE("%s: Camera %d: Unable to delete old reprocess stream "
                         "for ZSL: %s (%d)", __FUNCTION__,
-                        client->getCameraId(), strerror(-res), res);
+                        mId, strerror(-res), res);
                 return res;
             }
             ALOGV("%s: Camera %d: Deleting stream %d since the buffer dimensions changed",
-                __FUNCTION__, client->getCameraId(), mZslStreamId);
+                __FUNCTION__, mId, mZslStreamId);
             res = device->deleteStream(mZslStreamId);
             if (res != OK) {
                 ALOGE("%s: Camera %d: Unable to delete old output stream "
                         "for ZSL: %s (%d)", __FUNCTION__,
-                        client->getCameraId(), strerror(-res), res);
+                        mId, strerror(-res), res);
                 return res;
             }
             mZslStreamId = NO_STREAM;
@@ -173,7 +183,7 @@
                 &mZslStreamId);
         if (res != OK) {
             ALOGE("%s: Camera %d: Can't create output stream for ZSL: "
-                    "%s (%d)", __FUNCTION__, client->getCameraId(),
+                    "%s (%d)", __FUNCTION__, mId,
                     strerror(-res), res);
             return res;
         }
@@ -181,7 +191,7 @@
                 &mZslReprocessStreamId);
         if (res != OK) {
             ALOGE("%s: Camera %d: Can't create reprocess stream for ZSL: "
-                    "%s (%d)", __FUNCTION__, client->getCameraId(),
+                    "%s (%d)", __FUNCTION__, mId,
                     strerror(-res), res);
             return res;
         }
@@ -200,14 +210,18 @@
     Mutex::Autolock l(mInputMutex);
 
     if (mZslStreamId != NO_STREAM) {
-        sp<Camera2Client> client = mClient.promote();
-        if (client == 0) return OK;
-        sp<CameraDeviceBase> device = client->getCameraDevice();
+        sp<CameraDeviceBase> device = mDevice.promote();
+        if (device == 0) {
+            ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId);
+            return INVALID_OPERATION;
+        }
+
+        clearZslQueueLocked();
 
         res = device->deleteReprocessStream(mZslReprocessStreamId);
         if (res != OK) {
             ALOGE("%s: Camera %d: Cannot delete ZSL reprocessing stream %d: "
-                    "%s (%d)", __FUNCTION__, client->getCameraId(),
+                    "%s (%d)", __FUNCTION__, mId,
                     mZslReprocessStreamId, strerror(-res), res);
             return res;
         }
@@ -216,7 +230,7 @@
         res = device->deleteStream(mZslStreamId);
         if (res != OK) {
             ALOGE("%s: Camera %d: Cannot delete ZSL output stream %d: "
-                    "%s (%d)", __FUNCTION__, client->getCameraId(),
+                    "%s (%d)", __FUNCTION__, mId,
                     mZslStreamId, strerror(-res), res);
             return res;
         }
@@ -234,11 +248,6 @@
     return mZslStreamId;
 }
 
-int ZslProcessor::getReprocessStreamId() const {
-    Mutex::Autolock l(mInputMutex);
-    return mZslReprocessStreamId;
-}
-
 status_t ZslProcessor::pushToReprocess(int32_t requestId) {
     ALOGV("%s: Send in reprocess request with id %d",
             __FUNCTION__, requestId);
@@ -246,7 +255,10 @@
     status_t res;
     sp<Camera2Client> client = mClient.promote();
 
-    if (client == 0) return INVALID_OPERATION;
+    if (client == 0) {
+        ALOGE("%s: Camera %d: Client does not exist", __FUNCTION__, mId);
+        return INVALID_OPERATION;
+    }
 
     IF_ALOGV() {
         dumpZslQueue(-1);
@@ -309,7 +321,7 @@
         if (res != OK) {
             ALOGE("%s: Camera %d: Unable to stop preview for ZSL capture: "
                 "%s (%d)",
-                __FUNCTION__, client->getCameraId(), strerror(-res), res);
+                __FUNCTION__, mId, strerror(-res), res);
             return INVALID_OPERATION;
         }
         // TODO: have push-and-clear be atomic
@@ -328,7 +340,7 @@
             if (res != OK) {
                 ALOGE("%s: Camera %d: Unable to update JPEG entries of ZSL "
                         "capture request: %s (%d)", __FUNCTION__,
-                        client->getCameraId(),
+                        mId,
                         strerror(-res), res);
                 return res;
             }
@@ -397,26 +409,29 @@
     }
 
     do {
-        sp<Camera2Client> client = mClient.promote();
-        if (client == 0) return false;
-        res = processNewZslBuffer(client);
+        res = processNewZslBuffer();
     } while (res == OK);
 
     return true;
 }
 
-status_t ZslProcessor::processNewZslBuffer(sp<Camera2Client> &client) {
+status_t ZslProcessor::processNewZslBuffer() {
     ATRACE_CALL();
     status_t res;
-
+    sp<BufferItemConsumer> zslConsumer;
+    {
+        Mutex::Autolock l(mInputMutex);
+        if (mZslConsumer == 0) return OK;
+        zslConsumer = mZslConsumer;
+    }
     ALOGVV("Trying to get next buffer");
     BufferItemConsumer::BufferItem item;
-    res = mZslConsumer->acquireBuffer(&item);
+    res = zslConsumer->acquireBuffer(&item);
     if (res != OK) {
         if (res != BufferItemConsumer::NO_BUFFER_AVAILABLE) {
             ALOGE("%s: Camera %d: Error receiving ZSL image buffer: "
                     "%s (%d)", __FUNCTION__,
-                    client->getCameraId(), strerror(-res), res);
+                    mId, strerror(-res), res);
         } else {
             ALOGVV("  No buffer");
         }
@@ -427,7 +442,7 @@
 
     if (mState == LOCKED) {
         ALOGVV("In capture, discarding new ZSL buffers");
-        mZslConsumer->releaseBuffer(item);
+        zslConsumer->releaseBuffer(item);
         return OK;
     }
 
@@ -435,7 +450,7 @@
 
     if ( (mZslQueueHead + 1) % kZslBufferDepth == mZslQueueTail) {
         ALOGVV("Releasing oldest buffer");
-        mZslConsumer->releaseBuffer(mZslQueue[mZslQueueTail].buffer);
+        zslConsumer->releaseBuffer(mZslQueue[mZslQueueTail].buffer);
         mZslQueue.replaceAt(mZslQueueTail);
         mZslQueueTail = (mZslQueueTail + 1) % kZslBufferDepth;
     }
diff --git a/services/camera/libcameraservice/camera2/ZslProcessor.h b/services/camera/libcameraservice/camera2/ZslProcessor.h
index b2cf5b1..27b597e 100644
--- a/services/camera/libcameraservice/camera2/ZslProcessor.h
+++ b/services/camera/libcameraservice/camera2/ZslProcessor.h
@@ -28,6 +28,7 @@
 #include "camera/CameraMetadata.h"
 #include "Camera2Heap.h"
 #include "../CameraDeviceBase.h"
+#include "ZslProcessorInterface.h"
 
 namespace android {
 
@@ -44,9 +45,10 @@
             virtual public Thread,
             virtual public BufferItemConsumer::FrameAvailableListener,
             virtual public FrameProcessor::FilteredListener,
-            virtual public CameraDeviceBase::BufferReleasedListener {
+            virtual public CameraDeviceBase::BufferReleasedListener,
+                    public ZslProcessorInterface {
   public:
-    ZslProcessor(wp<Camera2Client> client, wp<CaptureSequencer> sequencer);
+    ZslProcessor(sp<Camera2Client> client, wp<CaptureSequencer> sequencer);
     ~ZslProcessor();
 
     // From mZslConsumer
@@ -56,10 +58,15 @@
 
     virtual void onBufferReleased(buffer_handle_t *handle);
 
+    /**
+     ****************************************
+     * ZslProcessorInterface implementation *
+     ****************************************
+     */
+
     status_t updateStream(const Parameters &params);
     status_t deleteStream();
     int getStreamId() const;
-    int getReprocessStreamId() const;
 
     status_t pushToReprocess(int32_t requestId);
     status_t clearZslQueue();
@@ -74,7 +81,9 @@
     } mState;
 
     wp<Camera2Client> mClient;
+    wp<CameraDeviceBase> mDevice;
     wp<CaptureSequencer> mSequencer;
+    int mId;
 
     mutable Mutex mInputMutex;
     bool mZslBufferAvailable;
@@ -109,7 +118,7 @@
 
     virtual bool threadLoop();
 
-    status_t processNewZslBuffer(sp<Camera2Client> &client);
+    status_t processNewZslBuffer();
 
     // Match up entries from frame list to buffers in ZSL queue
     void findMatchesLocked();
diff --git a/services/camera/libcameraservice/camera2/ZslProcessor3.cpp b/services/camera/libcameraservice/camera2/ZslProcessor3.cpp
new file mode 100644
index 0000000..be1ffeb
--- /dev/null
+++ b/services/camera/libcameraservice/camera2/ZslProcessor3.cpp
@@ -0,0 +1,445 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "Camera2-ZslProcessor3"
+#define ATRACE_TAG ATRACE_TAG_CAMERA
+//#define LOG_NDEBUG 0
+//#define LOG_NNDEBUG 0
+
+#ifdef LOG_NNDEBUG
+#define ALOGVV(...) ALOGV(__VA_ARGS__)
+#else
+#define ALOGVV(...) ((void)0)
+#endif
+
+#include <utils/Log.h>
+#include <utils/Trace.h>
+
+#include "ZslProcessor3.h"
+#include <gui/Surface.h>
+#include "../CameraDeviceBase.h"
+#include "../Camera3Device.h"
+#include "../Camera2Client.h"
+
+
+namespace android {
+namespace camera2 {
+
+ZslProcessor3::ZslProcessor3(
+    sp<Camera2Client> client,
+    wp<CaptureSequencer> sequencer):
+        Thread(false),
+        mState(RUNNING),
+        mClient(client),
+        mSequencer(sequencer),
+        mId(client->getCameraId()),
+        mZslStreamId(NO_STREAM),
+        mFrameListHead(0),
+        mZslQueueHead(0),
+        mZslQueueTail(0) {
+    mZslQueue.insertAt(0, kZslBufferDepth);
+    mFrameList.insertAt(0, kFrameListDepth);
+    sp<CaptureSequencer> captureSequencer = mSequencer.promote();
+    if (captureSequencer != 0) captureSequencer->setZslProcessor(this);
+}
+
+ZslProcessor3::~ZslProcessor3() {
+    ALOGV("%s: Exit", __FUNCTION__);
+    deleteStream();
+}
+
+void ZslProcessor3::onFrameAvailable(int32_t /*frameId*/,
+                                     const CameraMetadata &frame) {
+    Mutex::Autolock l(mInputMutex);
+    camera_metadata_ro_entry_t entry;
+    entry = frame.find(ANDROID_SENSOR_TIMESTAMP);
+    nsecs_t timestamp = entry.data.i64[0];
+    (void)timestamp;
+    ALOGVV("Got preview metadata for timestamp %lld", timestamp);
+
+    if (mState != RUNNING) return;
+
+    mFrameList.editItemAt(mFrameListHead) = frame;
+    mFrameListHead = (mFrameListHead + 1) % kFrameListDepth;
+}
+
+status_t ZslProcessor3::updateStream(const Parameters &params) {
+    ATRACE_CALL();
+    ALOGV("%s: Configuring ZSL streams", __FUNCTION__);
+    status_t res;
+
+    Mutex::Autolock l(mInputMutex);
+
+    sp<Camera2Client> client = mClient.promote();
+    if (client == 0) {
+        ALOGE("%s: Camera %d: Client does not exist", __FUNCTION__, mId);
+        return INVALID_OPERATION;
+    }
+    sp<Camera3Device> device =
+        static_cast<Camera3Device*>(client->getCameraDevice().get());
+    if (device == 0) {
+        ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId);
+        return INVALID_OPERATION;
+    }
+
+    if (mZslStreamId != NO_STREAM) {
+        // Check if stream parameters have to change
+        uint32_t currentWidth, currentHeight;
+        res = device->getStreamInfo(mZslStreamId,
+                &currentWidth, &currentHeight, 0);
+        if (res != OK) {
+            ALOGE("%s: Camera %d: Error querying capture output stream info: "
+                    "%s (%d)", __FUNCTION__,
+                    client->getCameraId(), strerror(-res), res);
+            return res;
+        }
+        if (currentWidth != (uint32_t)params.fastInfo.arrayWidth ||
+                currentHeight != (uint32_t)params.fastInfo.arrayHeight) {
+            ALOGV("%s: Camera %d: Deleting stream %d since the buffer "
+                  "dimensions changed",
+                __FUNCTION__, client->getCameraId(), mZslStreamId);
+            res = device->deleteStream(mZslStreamId);
+            if (res != OK) {
+                ALOGE("%s: Camera %d: Unable to delete old output stream "
+                        "for ZSL: %s (%d)", __FUNCTION__,
+                        client->getCameraId(), strerror(-res), res);
+                return res;
+            }
+            mZslStreamId = NO_STREAM;
+        }
+    }
+
+    if (mZslStreamId == NO_STREAM) {
+        // Create stream for HAL production
+        // TODO: Sort out better way to select resolution for ZSL
+
+        // Note that format specified internally in Camera3ZslStream
+        res = device->createZslStream(
+                params.fastInfo.arrayWidth, params.fastInfo.arrayHeight,
+                kZslBufferDepth,
+                &mZslStreamId,
+                &mZslStream);
+        if (res != OK) {
+            ALOGE("%s: Camera %d: Can't create ZSL stream: "
+                    "%s (%d)", __FUNCTION__, client->getCameraId(),
+                    strerror(-res), res);
+            return res;
+        }
+    }
+    client->registerFrameListener(Camera2Client::kPreviewRequestIdStart,
+            Camera2Client::kPreviewRequestIdEnd,
+            this);
+
+    return OK;
+}
+
+status_t ZslProcessor3::deleteStream() {
+    ATRACE_CALL();
+    status_t res;
+
+    Mutex::Autolock l(mInputMutex);
+
+    if (mZslStreamId != NO_STREAM) {
+        sp<Camera2Client> client = mClient.promote();
+        if (client == 0) {
+            ALOGE("%s: Camera %d: Client does not exist", __FUNCTION__, mId);
+            return INVALID_OPERATION;
+        }
+
+        sp<Camera3Device> device =
+            static_cast<Camera3Device*>(client->getCameraDevice().get());
+        if (device == 0) {
+            ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId);
+            return INVALID_OPERATION;
+        }
+
+        res = device->deleteStream(mZslStreamId);
+        if (res != OK) {
+            ALOGE("%s: Camera %d: Cannot delete ZSL output stream %d: "
+                    "%s (%d)", __FUNCTION__, client->getCameraId(),
+                    mZslStreamId, strerror(-res), res);
+            return res;
+        }
+
+        mZslStreamId = NO_STREAM;
+    }
+    return OK;
+}
+
+int ZslProcessor3::getStreamId() const {
+    Mutex::Autolock l(mInputMutex);
+    return mZslStreamId;
+}
+
+status_t ZslProcessor3::pushToReprocess(int32_t requestId) {
+    ALOGV("%s: Send in reprocess request with id %d",
+            __FUNCTION__, requestId);
+    Mutex::Autolock l(mInputMutex);
+    status_t res;
+    sp<Camera2Client> client = mClient.promote();
+
+    if (client == 0) {
+        ALOGE("%s: Camera %d: Client does not exist", __FUNCTION__, mId);
+        return INVALID_OPERATION;
+    }
+
+    IF_ALOGV() {
+        dumpZslQueue(-1);
+    }
+
+    size_t metadataIdx;
+    nsecs_t candidateTimestamp = getCandidateTimestampLocked(&metadataIdx);
+
+    if (candidateTimestamp == -1) {
+        ALOGE("%s: Could not find good candidate for ZSL reprocessing",
+              __FUNCTION__);
+        return NOT_ENOUGH_DATA;
+    }
+
+    res = mZslStream->enqueueInputBufferByTimestamp(candidateTimestamp,
+                                                    /*actualTimestamp*/NULL);
+
+    if (res == mZslStream->NO_BUFFER_AVAILABLE) {
+        ALOGV("%s: No ZSL buffers yet", __FUNCTION__);
+        return NOT_ENOUGH_DATA;
+    } else if (res != OK) {
+        ALOGE("%s: Unable to push buffer for reprocessing: %s (%d)",
+                __FUNCTION__, strerror(-res), res);
+        return res;
+    }
+
+    {
+        CameraMetadata request = mFrameList[metadataIdx];
+
+        // Verify that the frame is reasonable for reprocessing
+
+        camera_metadata_entry_t entry;
+        entry = request.find(ANDROID_CONTROL_AE_STATE);
+        if (entry.count == 0) {
+            ALOGE("%s: ZSL queue frame has no AE state field!",
+                    __FUNCTION__);
+            return BAD_VALUE;
+        }
+        if (entry.data.u8[0] != ANDROID_CONTROL_AE_STATE_CONVERGED &&
+                entry.data.u8[0] != ANDROID_CONTROL_AE_STATE_LOCKED) {
+            ALOGV("%s: ZSL queue frame AE state is %d, need full capture",
+                    __FUNCTION__, entry.data.u8[0]);
+            return NOT_ENOUGH_DATA;
+        }
+
+        uint8_t requestType = ANDROID_REQUEST_TYPE_REPROCESS;
+        res = request.update(ANDROID_REQUEST_TYPE,
+                &requestType, 1);
+        uint8_t inputStreams[1] =
+                { static_cast<uint8_t>(mZslStreamId) };
+        if (res == OK) request.update(ANDROID_REQUEST_INPUT_STREAMS,
+                inputStreams, 1);
+        // TODO: Shouldn't we also update the latest preview frame?
+        uint8_t outputStreams[1] =
+                { static_cast<uint8_t>(client->getCaptureStreamId()) };
+        if (res == OK) request.update(ANDROID_REQUEST_OUTPUT_STREAMS,
+                outputStreams, 1);
+        res = request.update(ANDROID_REQUEST_ID,
+                &requestId, 1);
+
+        if (res != OK ) {
+            ALOGE("%s: Unable to update frame to a reprocess request",
+                  __FUNCTION__);
+            return INVALID_OPERATION;
+        }
+
+        res = client->stopStream();
+        if (res != OK) {
+            ALOGE("%s: Camera %d: Unable to stop preview for ZSL capture: "
+                "%s (%d)",
+                __FUNCTION__, client->getCameraId(), strerror(-res), res);
+            return INVALID_OPERATION;
+        }
+
+        // Update JPEG settings
+        {
+            SharedParameters::Lock l(client->getParameters());
+            res = l.mParameters.updateRequestJpeg(&request);
+            if (res != OK) {
+                ALOGE("%s: Camera %d: Unable to update JPEG entries of ZSL "
+                        "capture request: %s (%d)", __FUNCTION__,
+                        client->getCameraId(),
+                        strerror(-res), res);
+                return res;
+            }
+        }
+
+        mLatestCapturedRequest = request;
+        res = client->getCameraDevice()->capture(request);
+        if (res != OK ) {
+            ALOGE("%s: Unable to send ZSL reprocess request to capture: %s"
+                  " (%d)", __FUNCTION__, strerror(-res), res);
+            return res;
+        }
+
+        mState = LOCKED;
+    }
+
+    return OK;
+}
+
+status_t ZslProcessor3::clearZslQueue() {
+    Mutex::Autolock l(mInputMutex);
+    // If in middle of capture, can't clear out queue
+    if (mState == LOCKED) return OK;
+
+    return clearZslQueueLocked();
+}
+
+status_t ZslProcessor3::clearZslQueueLocked() {
+    if (mZslStream != 0) {
+        return mZslStream->clearInputRingBuffer();
+    }
+    return OK;
+}
+
+void ZslProcessor3::dump(int fd, const Vector<String16>& /*args*/) const {
+    Mutex::Autolock l(mInputMutex);
+    if (!mLatestCapturedRequest.isEmpty()) {
+        String8 result("    Latest ZSL capture request:\n");
+        write(fd, result.string(), result.size());
+        mLatestCapturedRequest.dump(fd, 2, 6);
+    } else {
+        String8 result("    Latest ZSL capture request: none yet\n");
+        write(fd, result.string(), result.size());
+    }
+    dumpZslQueue(fd);
+}
+
+bool ZslProcessor3::threadLoop() {
+    // TODO: remove dependency on thread
+    return true;
+}
+
+void ZslProcessor3::dumpZslQueue(int fd) const {
+    String8 header("ZSL queue contents:");
+    String8 indent("    ");
+    ALOGV("%s", header.string());
+    if (fd != -1) {
+        header = indent + header + "\n";
+        write(fd, header.string(), header.size());
+    }
+    for (size_t i = 0; i < mZslQueue.size(); i++) {
+        const ZslPair &queueEntry = mZslQueue[i];
+        nsecs_t bufferTimestamp = queueEntry.buffer.mTimestamp;
+        camera_metadata_ro_entry_t entry;
+        nsecs_t frameTimestamp = 0;
+        int frameAeState = -1;
+        if (!queueEntry.frame.isEmpty()) {
+            entry = queueEntry.frame.find(ANDROID_SENSOR_TIMESTAMP);
+            if (entry.count > 0) frameTimestamp = entry.data.i64[0];
+            entry = queueEntry.frame.find(ANDROID_CONTROL_AE_STATE);
+            if (entry.count > 0) frameAeState = entry.data.u8[0];
+        }
+        String8 result =
+                String8::format("   %d: b: %lld\tf: %lld, AE state: %d", i,
+                        bufferTimestamp, frameTimestamp, frameAeState);
+        ALOGV("%s", result.string());
+        if (fd != -1) {
+            result = indent + result + "\n";
+            write(fd, result.string(), result.size());
+        }
+
+    }
+}
+
+nsecs_t ZslProcessor3::getCandidateTimestampLocked(size_t* metadataIdx) const {
+    /**
+     * Find the smallest timestamp we know about so far
+     * - ensure that aeState is either converged or locked
+     */
+
+    size_t idx = 0;
+    nsecs_t minTimestamp = -1;
+    for (size_t j = 0; j < mFrameList.size(); j++) {
+        const CameraMetadata &frame = mFrameList[j];
+        if (!frame.isEmpty()) {
+            camera_metadata_ro_entry_t entry;
+            entry = frame.find(ANDROID_SENSOR_TIMESTAMP);
+            if (entry.count == 0) {
+                ALOGE("%s: Can't find timestamp in frame!",
+                        __FUNCTION__);
+                continue;
+            }
+            nsecs_t frameTimestamp = entry.data.i64[0];
+            if (minTimestamp > frameTimestamp || minTimestamp == -1) {
+
+                entry = frame.find(ANDROID_CONTROL_AE_STATE);
+                if (entry.count == 0) {
+                    ALOGW("%s: ZSL queue frame has no AE state field!",
+                            __FUNCTION__);
+                    continue;
+                }
+                if (entry.data.u8[0] != ANDROID_CONTROL_AE_STATE_CONVERGED &&
+                        entry.data.u8[0] != ANDROID_CONTROL_AE_STATE_LOCKED) {
+                    ALOGVV("%s: ZSL queue frame AE state is %d, need "
+                           "full capture",  __FUNCTION__, entry.data.u8[0]);
+                    continue;
+                }
+
+                minTimestamp = frameTimestamp;
+                idx = j;
+            }
+        }
+    }
+
+    if (metadataIdx) {
+        *metadataIdx = idx;
+    }
+
+    return minTimestamp;
+}
+
+void ZslProcessor3::onBufferAcquired(const BufferInfo& /*bufferInfo*/) {
+    // Intentionally left empty
+    // Although theoretically we could use this to get better dump info
+}
+
+void ZslProcessor3::onBufferReleased(const BufferInfo& bufferInfo) {
+    Mutex::Autolock l(mInputMutex);
+
+    // ignore output buffers
+    if (bufferInfo.mOutput) {
+        return;
+    }
+
+    // TODO: Verify that the buffer is in our queue by looking at timestamp
+    // theoretically unnecessary unless we change the following assumptions:
+    // -- only 1 buffer reprocessed at a time (which is the case now)
+
+    // Erase entire ZSL queue since we've now completed the capture and preview
+    // is stopped.
+    //
+    // We need to guarantee that if we do two back-to-back captures,
+    // the second won't use a buffer that's older/the same as the first, which
+    // is theoretically possible if we don't clear out the queue and the
+    // selection criteria is something like 'newest'. Clearing out the queue
+    // on a completed capture ensures we'll only use new data.
+    ALOGV("%s: Memory optimization, clearing ZSL queue",
+          __FUNCTION__);
+    clearZslQueueLocked();
+
+    // Required so we accept more ZSL requests
+    mState = RUNNING;
+}
+
+}; // namespace camera2
+}; // namespace android
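
The selection policy in getCandidateTimestampLocked() above is a minimum search with an AE-state filter: pick the oldest frame whose auto-exposure has converged or is locked, and return -1 when nothing qualifies. A minimal standalone sketch of that policy, using plain std types in place of CameraMetadata (the FrameInfo struct and the sample values are illustrative only):

    #include <cstdint>
    #include <cstdio>
    #include <vector>

    // Hypothetical stand-ins for the two metadata fields the processor reads:
    // ANDROID_SENSOR_TIMESTAMP and ANDROID_CONTROL_AE_STATE.
    enum AeState { AE_SEARCHING, AE_CONVERGED, AE_LOCKED };
    struct FrameInfo {
        int64_t timestampNs;
        AeState aeState;
    };

    // Return the smallest timestamp among frames with converged/locked AE,
    // or -1 if no frame qualifies; also report the matching index.
    static int64_t pickZslCandidate(const std::vector<FrameInfo>& frames,
                                    size_t* idxOut) {
        int64_t minTimestamp = -1;
        size_t idx = 0;
        for (size_t j = 0; j < frames.size(); j++) {
            const FrameInfo& f = frames[j];
            if (f.aeState != AE_CONVERGED && f.aeState != AE_LOCKED) continue;
            if (minTimestamp == -1 || f.timestampNs < minTimestamp) {
                minTimestamp = f.timestampNs;
                idx = j;
            }
        }
        if (idxOut) *idxOut = idx;
        return minTimestamp;
    }

    int main() {
        std::vector<FrameInfo> frames = {
            {3000, AE_SEARCHING}, {2000, AE_CONVERGED}, {4000, AE_LOCKED}};
        size_t idx = 0;
        int64_t ts = pickZslCandidate(frames, &idx);
        std::printf("candidate ts=%lld at index %zu\n", (long long) ts, idx);
        return 0;  // prints: candidate ts=2000 at index 1
    }

The `|| minTimestamp == -1` arm matters: without it the -1 sentinel is never replaced and every ZSL attempt would fall back to a full capture.
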
diff --git a/services/camera/libcameraservice/camera2/ZslProcessor3.h b/services/camera/libcameraservice/camera2/ZslProcessor3.h
new file mode 100644
index 0000000..cb98b99
--- /dev/null
+++ b/services/camera/libcameraservice/camera2/ZslProcessor3.h
@@ -0,0 +1,137 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_SERVERS_CAMERA_CAMERA2_ZSLPROCESSOR3_H
+#define ANDROID_SERVERS_CAMERA_CAMERA2_ZSLPROCESSOR3_H
+
+#include <utils/Thread.h>
+#include <utils/String16.h>
+#include <utils/Vector.h>
+#include <utils/Mutex.h>
+#include <utils/Condition.h>
+#include <gui/BufferItemConsumer.h>
+#include "Parameters.h"
+#include "FrameProcessor.h"
+#include "camera/CameraMetadata.h"
+#include "Camera2Heap.h"
+#include "../CameraDeviceBase.h"
+#include "ZslProcessorInterface.h"
+#include "../camera3/Camera3ZslStream.h"
+
+namespace android {
+
+class Camera2Client;
+
+namespace camera2 {
+
+class CaptureSequencer;
+
+/***
+ * ZSL queue processing
+ */
+class ZslProcessor3 :
+                    public ZslProcessorInterface,
+                    public camera3::Camera3StreamBufferListener,
+            virtual public Thread,
+            virtual public FrameProcessor::FilteredListener {
+  public:
+    ZslProcessor3(sp<Camera2Client> client, wp<CaptureSequencer> sequencer);
+    ~ZslProcessor3();
+
+    // From FrameProcessor
+    virtual void onFrameAvailable(int32_t frameId, const CameraMetadata &frame);
+
+    /**
+     ****************************************
+     * ZslProcessorInterface implementation *
+     ****************************************
+     */
+
+    virtual status_t updateStream(const Parameters &params);
+    virtual status_t deleteStream();
+    virtual int getStreamId() const;
+
+    virtual status_t pushToReprocess(int32_t requestId);
+    virtual status_t clearZslQueue();
+
+    void dump(int fd, const Vector<String16>& args) const;
+
+  protected:
+    /**
+     **********************************************
+     * Camera3StreamBufferListener implementation *
+     **********************************************
+     */
+    typedef camera3::Camera3StreamBufferListener::BufferInfo BufferInfo;
+    // Buffer was acquired by the HAL
+    virtual void onBufferAcquired(const BufferInfo& bufferInfo);
+    // Buffer was released by the HAL
+    virtual void onBufferReleased(const BufferInfo& bufferInfo);
+
+  private:
+    static const nsecs_t kWaitDuration = 10000000; // 10 ms
+
+    enum {
+        RUNNING,
+        LOCKED
+    } mState;
+
+    wp<Camera2Client> mClient;
+    wp<CaptureSequencer> mSequencer;
+
+    const int mId;
+
+    mutable Mutex mInputMutex;
+
+    enum {
+        NO_STREAM = -1
+    };
+
+    int mZslStreamId;
+    sp<camera3::Camera3ZslStream> mZslStream;
+
+    struct ZslPair {
+        BufferItemConsumer::BufferItem buffer;
+        CameraMetadata frame;
+    };
+
+    static const size_t kZslBufferDepth = 4;
+    static const size_t kFrameListDepth = kZslBufferDepth * 2;
+    Vector<CameraMetadata> mFrameList;
+    size_t mFrameListHead;
+
+    ZslPair mNextPair;
+
+    Vector<ZslPair> mZslQueue;
+    size_t mZslQueueHead;
+    size_t mZslQueueTail;
+
+    CameraMetadata mLatestCapturedRequest;
+
+    virtual bool threadLoop();
+
+    status_t clearZslQueueLocked();
+
+    void dumpZslQueue(int fd) const;
+
+    nsecs_t getCandidateTimestampLocked(size_t* metadataIdx) const;
+};
+
+
+}; //namespace camera2
+}; //namespace android
+
+#endif
diff --git a/services/camera/libcameraservice/camera2/ZslProcessorInterface.h b/services/camera/libcameraservice/camera2/ZslProcessorInterface.h
new file mode 100644
index 0000000..183c0c2
--- /dev/null
+++ b/services/camera/libcameraservice/camera2/ZslProcessorInterface.h
@@ -0,0 +1,59 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_SERVERS_CAMERA_CAMERA2_ZSLPROCESSORINTERFACE_H
+#define ANDROID_SERVERS_CAMERA_CAMERA2_ZSLPROCESSORINTERFACE_H
+
+#include <utils/Errors.h>
+#include <utils/RefBase.h>
+
+namespace android {
+namespace camera2 {
+
+class Parameters;
+
+class ZslProcessorInterface : virtual public RefBase {
+public:
+
+    // Get ID for use with android.request.outputStreams / inputStreams
+    virtual int getStreamId() const = 0;
+
+    // Update the streams by recreating them if the size/format has changed
+    virtual status_t updateStream(const Parameters& params) = 0;
+
+    // Delete the underlying CameraDevice streams
+    virtual status_t deleteStream() = 0;
+
+    /**
+     * Submits a ZSL capture request (id = requestId)
+     *
+     * An appropriate ZSL buffer is selected by the closest timestamp,
+     * then we push that buffer to be reprocessed by the HAL.
+     * A capture request is created and submitted on behalf of the client.
+     */
+    virtual status_t pushToReprocess(int32_t requestId) = 0;
+
+    // Flush the ZSL buffer queue, freeing up all the buffers
+    virtual status_t clearZslQueue() = 0;
+
+    // (Debugging only) Dump the current state to the specified file descriptor
+    virtual void dump(int fd, const Vector<String16>& args) const = 0;
+};
+
+}; //namespace camera2
+}; //namespace android
+
+#endif
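
Taking the method contracts above together, a caller drives the interface roughly as follows. This is only a sketch against the declarations in this header; the runZslCapture helper and its error handling are illustrative, not part of the patch:

    #include <utils/Errors.h>
    #include <utils/StrongPointer.h>
    #include "Parameters.h"
    #include "ZslProcessorInterface.h"

    using namespace android;
    using namespace android::camera2;

    // Drive one ZSL capture through any ZslProcessorInterface implementation
    // (ZslProcessor for HAL2 devices, ZslProcessor3 for HAL3 devices).
    static status_t runZslCapture(const sp<ZslProcessorInterface>& zsl,
                                  const Parameters& params,
                                  int32_t requestId) {
        // (Re)create the ZSL stream if the size or format changed.
        status_t res = zsl->updateStream(params);
        if (res != OK) return res;

        // The stream ID feeds android.request.outputStreams / inputStreams.
        const int streamId = zsl->getStreamId();
        (void) streamId;

        // Select a buffer and submit the reprocess request for the client.
        res = zsl->pushToReprocess(requestId);
        if (res == NOT_ENOUGH_DATA) {
            // No suitable ZSL buffer yet: the caller would fall back to a
            // regular capture instead of failing the shot.
        }
        return res;
    }
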
diff --git a/services/camera/libcameraservice/camera3/Camera3InputStream.cpp b/services/camera/libcameraservice/camera3/Camera3InputStream.cpp
index 8a48ee5..c7dd12a 100644
--- a/services/camera/libcameraservice/camera3/Camera3InputStream.cpp
+++ b/services/camera/libcameraservice/camera3/Camera3InputStream.cpp
@@ -18,6 +18,9 @@
 #define ATRACE_TAG ATRACE_TAG_CAMERA
 //#define LOG_NDEBUG 0
 
+// This is needed for stdint.h to define INT64_MAX in C++
+#define __STDC_LIMIT_MACROS
+
 #include <utils/Log.h>
 #include <utils/Trace.h>
 #include "Camera3InputStream.h"
@@ -28,38 +31,262 @@
 
 Camera3InputStream::Camera3InputStream(int id,
         uint32_t width, uint32_t height, int format) :
-        Camera3Stream(id, CAMERA3_STREAM_INPUT, width, height, 0, format) {
+        Camera3Stream(id, CAMERA3_STREAM_INPUT, width, height, 0, format),
+        mTotalBufferCount(0),
+        mDequeuedBufferCount(0),
+        mFrameCount(0),
+        mLastTimestamp(0) {
+    mCombinedFence = new Fence();
+
+    if (format == HAL_PIXEL_FORMAT_BLOB) {
+        ALOGE("%s: Bad format, BLOB not supported", __FUNCTION__);
+        mState = STATE_ERROR;
+    }
 }
 
-status_t Camera3InputStream::getBufferLocked(camera3_stream_buffer *buffer) {
-    (void) buffer;
-    ALOGE("%s: Not implemented", __FUNCTION__);
-    return INVALID_OPERATION;
+Camera3InputStream::~Camera3InputStream() {
+    disconnectLocked();
 }
 
-status_t Camera3InputStream::returnBufferLocked(
-        const camera3_stream_buffer &buffer,
-        nsecs_t timestamp) {
-    (void) timestamp;
-    (void) buffer;
-    ALOGE("%s: Not implemented", __FUNCTION__);
-    return INVALID_OPERATION;
+status_t Camera3InputStream::getInputBufferLocked(
+        camera3_stream_buffer *buffer) {
+    ATRACE_CALL();
+    status_t res;
+
+    // FIXME: will not work in (re-)registration
+    if (mState == STATE_IN_CONFIG || mState == STATE_IN_RECONFIG) {
+        ALOGE("%s: Stream %d: Buffer registration for input streams"
+              " not implemented (state %d)",
+              __FUNCTION__, mId, mState);
+        return INVALID_OPERATION;
+    }
+
+    // Allow acquire during IN_[RE]CONFIG for registration
+    if (mState != STATE_CONFIGURED &&
+            mState != STATE_IN_CONFIG && mState != STATE_IN_RECONFIG) {
+        ALOGE("%s: Stream %d: Can't get buffers in unconfigured state %d",
+                __FUNCTION__, mId, mState);
+        return INVALID_OPERATION;
+    }
+
+    // Only limit acquire amount when fully configured
+    if (mState == STATE_CONFIGURED &&
+            mDequeuedBufferCount == camera3_stream::max_buffers) {
+        ALOGE("%s: Stream %d: Already acquired maximum number of simultaneous"
+                " buffers (%d)", __FUNCTION__, mId,
+                camera3_stream::max_buffers);
+        return INVALID_OPERATION;
+    }
+
+    ANativeWindowBuffer* anb;
+    int fenceFd;
+
+    assert(mConsumer != 0);
+
+    BufferItem bufferItem;
+    res = mConsumer->acquireBuffer(&bufferItem, /*waitForFence*/false);
+
+    if (res != OK) {
+        ALOGE("%s: Stream %d: Can't acquire next output buffer: %s (%d)",
+                __FUNCTION__, mId, strerror(-res), res);
+        return res;
+    }
+
+    anb = bufferItem.mGraphicBuffer->getNativeBuffer();
+    assert(anb != NULL);
+    fenceFd = bufferItem.mFence->dup();
+    /**
+     * FenceFD now owned by HAL except in case of error,
+     * in which case we reassign it to acquire_fence
+     */
+
+    // Handing out a raw pointer to this object. Increment internal refcount.
+    incStrong(this);
+    buffer->stream = this;
+    buffer->buffer = &(anb->handle);
+    buffer->acquire_fence = fenceFd;
+    buffer->release_fence = -1;
+    buffer->status = CAMERA3_BUFFER_STATUS_OK;
+
+    mDequeuedBufferCount++;
+
+    mBuffersInFlight.push_back(bufferItem);
+
+    return OK;
+}
+
+status_t Camera3InputStream::returnInputBufferLocked(
+        const camera3_stream_buffer &buffer) {
+    ATRACE_CALL();
+    status_t res;
+
+    // returnBuffer may be called from a raw pointer, not a sp<>, and we'll be
+    // decrementing the internal refcount next. In case this is the last ref, we
+    // might get destructed on the decStrong(), so keep an sp around until the
+    // end of the call - otherwise have to sprinkle the decStrong on all exit
+    // points.
+    sp<Camera3InputStream> keepAlive(this);
+    decStrong(this);
+
+    // Allow buffers to be returned in the error state, to allow for disconnect
+    // and in the in-config states for registration
+    if (mState == STATE_CONSTRUCTED) {
+        ALOGE("%s: Stream %d: Can't return buffers in unconfigured state %d",
+                __FUNCTION__, mId, mState);
+        return INVALID_OPERATION;
+    }
+    if (mDequeuedBufferCount == 0) {
+        ALOGE("%s: Stream %d: No buffers outstanding to return", __FUNCTION__,
+                mId);
+        return INVALID_OPERATION;
+    }
+
+    bool bufferFound = false;
+    BufferItem bufferItem;
+    {
+        // Find the buffer we are returning
+        Vector<BufferItem>::iterator it, end;
+        for (it = mBuffersInFlight.begin(), end = mBuffersInFlight.end();
+             it != end;
+             ++it) {
+
+            const BufferItem& tmp = *it;
+            ANativeWindowBuffer *anb = tmp.mGraphicBuffer->getNativeBuffer();
+            if (anb != NULL && &(anb->handle) == buffer.buffer) {
+                bufferFound = true;
+                bufferItem = tmp;
+                mBuffersInFlight.erase(it);
+                mDequeuedBufferCount--;
+            }
+        }
+    }
+    if (!bufferFound) {
+        ALOGE("%s: Stream %d: Can't return buffer that wasn't sent to HAL",
+              __FUNCTION__, mId);
+        return INVALID_OPERATION;
+    }
+
+    if (buffer.status == CAMERA3_BUFFER_STATUS_ERROR) {
+        if (buffer.release_fence != -1) {
+            ALOGE("%s: Stream %d: HAL should not set release_fence(%d) when "
+                  "there is an error", __FUNCTION__, mId, buffer.release_fence);
+            close(buffer.release_fence);
+        }
+
+        /**
+         * Reassign release fence as the acquire fence in case of error
+         */
+        const_cast<camera3_stream_buffer*>(&buffer)->release_fence =
+                buffer.acquire_fence;
+    }
+
+    /**
+     * Unconditionally return buffer to the buffer queue.
+     * - Fwk takes over the release_fence ownership
+     */
+    sp<Fence> releaseFence = new Fence(buffer.release_fence);
+    res = mConsumer->releaseBuffer(bufferItem, releaseFence);
+    if (res != OK) {
+        ALOGE("%s: Stream %d: Error releasing buffer back to buffer queue:"
+                " %s (%d)", __FUNCTION__, mId, strerror(-res), res);
+        return res;
+    }
+
+    mCombinedFence = Fence::merge(mName, mCombinedFence, releaseFence);
+
+    mBufferReturnedSignal.signal();
+
+    return OK;
+
 }
 
 bool Camera3InputStream::hasOutstandingBuffersLocked() const {
-    ALOGE("%s: Not implemented", __FUNCTION__);
+    nsecs_t signalTime = mCombinedFence->getSignalTime();
+    ALOGV("%s: Stream %d: Has %d outstanding buffers,"
+            " buffer signal time is %lld",
+            __FUNCTION__, mId, mDequeuedBufferCount, signalTime);
+    if (mDequeuedBufferCount > 0 || signalTime == INT64_MAX) {
+        return true;
+    }
     return false;
 }
 
 status_t Camera3InputStream::waitUntilIdle(nsecs_t timeout) {
-    (void) timeout;
-    ALOGE("%s: Not implemented", __FUNCTION__);
-    return INVALID_OPERATION;
+    status_t res;
+    {
+        Mutex::Autolock l(mLock);
+        while (mDequeuedBufferCount > 0) {
+            if (timeout != TIMEOUT_NEVER) {
+                nsecs_t startTime = systemTime();
+                res = mBufferReturnedSignal.waitRelative(mLock, timeout);
+                if (res == TIMED_OUT) {
+                    return res;
+                } else if (res != OK) {
+                    ALOGE("%s: Error waiting for outstanding buffers: %s (%d)",
+                            __FUNCTION__, strerror(-res), res);
+                    return res;
+                }
+                nsecs_t deltaTime = systemTime() - startTime;
+                if (timeout <= deltaTime) {
+                    timeout = 0;
+                } else {
+                    timeout -= deltaTime;
+                }
+            } else {
+                res = mBufferReturnedSignal.wait(mLock);
+                if (res != OK) {
+                    ALOGE("%s: Error waiting for outstanding buffers: %s (%d)",
+                            __FUNCTION__, strerror(-res), res);
+                    return res;
+                }
+            }
+        }
+    }
+
+    // No lock
+
+    unsigned int timeoutMs;
+    if (timeout == TIMEOUT_NEVER) {
+        timeoutMs = Fence::TIMEOUT_NEVER;
+    } else if (timeout == 0) {
+        timeoutMs = 0;
+    } else {
+        // Round up to wait at least 1 ms
+        timeoutMs = (timeout + 999999) / 1000000;
+    }
+
+    return mCombinedFence->wait(timeoutMs);
+}
+
+size_t Camera3InputStream::getBufferCountLocked() {
+    return mTotalBufferCount;
 }
 
 status_t Camera3InputStream::disconnectLocked() {
-    ALOGE("%s: Not implemented", __FUNCTION__);
-    return INVALID_OPERATION;
+    switch (mState) {
+        case STATE_IN_RECONFIG:
+        case STATE_CONFIGURED:
+            // OK
+            break;
+        default:
+            // No connection, nothing to do
+            return OK;
+    }
+
+    if (mDequeuedBufferCount > 0) {
+        ALOGE("%s: Can't disconnect with %d buffers still acquired!",
+                __FUNCTION__, mDequeuedBufferCount);
+        return INVALID_OPERATION;
+    }
+
+    assert(mBuffersInFlight.size() == 0);
+
+    /**
+     *  no-op since we can't disconnect the producer from the consumer-side
+     */
+
+    mState = (mState == STATE_IN_RECONFIG) ? STATE_IN_CONFIG : STATE_CONSTRUCTED;
+    return OK;
 }
 
 sp<IGraphicBufferProducer> Camera3InputStream::getProducerInterface() const {
@@ -67,9 +294,71 @@
 }
 
 void Camera3InputStream::dump(int fd, const Vector<String16> &args) const {
-    (void) fd;
     (void) args;
-    ALOGE("%s: Not implemented", __FUNCTION__);
+    String8 lines;
+    lines.appendFormat("    Stream[%d]: Input\n", mId);
+    lines.appendFormat("      State: %d\n", mState);
+    lines.appendFormat("      Dims: %d x %d, format 0x%x\n",
+            camera3_stream::width, camera3_stream::height,
+            camera3_stream::format);
+    lines.appendFormat("      Max size: %d\n", mMaxSize);
+    lines.appendFormat("      Usage: %d, max HAL buffers: %d\n",
+            camera3_stream::usage, camera3_stream::max_buffers);
+    lines.appendFormat("      Frames produced: %d, last timestamp: %lld ns\n",
+            mFrameCount, mLastTimestamp);
+    lines.appendFormat("      Total buffers: %d, currently acquired: %d\n",
+            mTotalBufferCount, mDequeuedBufferCount);
+    write(fd, lines.string(), lines.size());
+}
+
+status_t Camera3InputStream::configureQueueLocked() {
+    status_t res;
+
+    switch (mState) {
+        case STATE_IN_RECONFIG:
+            res = disconnectLocked();
+            if (res != OK) {
+                return res;
+            }
+            break;
+        case STATE_IN_CONFIG:
+            // OK
+            break;
+        default:
+            ALOGE("%s: Bad state: %d", __FUNCTION__, mState);
+            return INVALID_OPERATION;
+    }
+
+    assert(mMaxSize == 0);
+    assert(camera3_stream::format != HAL_PIXEL_FORMAT_BLOB);
+
+    mTotalBufferCount = BufferQueue::MIN_UNDEQUEUED_BUFFERS +
+                        camera3_stream::max_buffers;
+    mDequeuedBufferCount = 0;
+    mFrameCount = 0;
+
+    if (mConsumer.get() == 0) {
+        mConsumer = new BufferItemConsumer(camera3_stream::usage,
+                                           mTotalBufferCount,
+                                           /*synchronousMode*/true);
+        mConsumer->setName(String8::format("Camera3-InputStream-%d", mId));
+    }
+
+    res = mConsumer->setDefaultBufferSize(camera3_stream::width,
+                                          camera3_stream::height);
+    if (res != OK) {
+        ALOGE("%s: Stream %d: Could not set buffer dimensions %dx%d",
+              __FUNCTION__, mId, camera3_stream::width, camera3_stream::height);
+        return res;
+    }
+    res = mConsumer->setDefaultBufferFormat(camera3_stream::format);
+    if (res != OK) {
+        ALOGE("%s: Stream %d: Could not set buffer format %d",
+              __FUNCTION__, mId, camera3_stream::format);
+        return res;
+    }
+
+    return OK;
 }
 
 }; // namespace camera3
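
waitUntilIdle() above combines two waits: first the condition variable until no buffers are dequeued, charging every wakeup against the remaining timeout, then the merged release fence with whatever budget is left. A framework-free sketch of just the budget accounting (spinUntil and its arguments are hypothetical names):

    #include <chrono>
    #include <condition_variable>
    #include <cstdint>
    #include <mutex>

    // Wait until 'done' becomes true or the budget runs out, reducing the
    // budget by the elapsed time after every wakeup, the same way
    // Camera3InputStream::waitUntilIdle() treats mBufferReturnedSignal.
    // A negative budget means "wait forever" (TIMEOUT_NEVER).
    static bool spinUntil(std::mutex& m, std::condition_variable& cv,
                          const bool& done, int64_t timeoutNs) {
        std::unique_lock<std::mutex> lock(m);
        while (!done) {
            if (timeoutNs < 0) {            // TIMEOUT_NEVER-style wait
                cv.wait(lock);
                continue;
            }
            const auto start = std::chrono::steady_clock::now();
            if (cv.wait_for(lock, std::chrono::nanoseconds(timeoutNs)) ==
                    std::cv_status::timeout) {
                return false;               // budget exhausted
            }
            const int64_t delta =
                    std::chrono::duration_cast<std::chrono::nanoseconds>(
                            std::chrono::steady_clock::now() - start).count();
            timeoutNs = (timeoutNs <= delta) ? 0 : timeoutNs - delta;
        }
        return true;                        // remaining budget is then spent on
                                            // the combined fence wait
    }
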
diff --git a/services/camera/libcameraservice/camera3/Camera3InputStream.h b/services/camera/libcameraservice/camera3/Camera3InputStream.h
index c4b5dd9..fd9f464 100644
--- a/services/camera/libcameraservice/camera3/Camera3InputStream.h
+++ b/services/camera/libcameraservice/camera3/Camera3InputStream.h
@@ -29,6 +29,10 @@
 
 /**
  * A class for managing a single stream of input data to the camera device.
+ *
+ * This class serves as a consumer adapter for the HAL, and will consume the
+ * buffers by feeding them into the HAL, as well as releasing the buffers back
+ * to the buffer queue once the HAL is done with them.
  */
 class Camera3InputStream : public Camera3Stream {
   public:
@@ -36,6 +40,7 @@
      * Set up a stream for formats that have fixed size, such as RAW and YUV.
      */
     Camera3InputStream(int id, uint32_t width, uint32_t height, int format);
+    ~Camera3InputStream();
 
     virtual status_t waitUntilIdle(nsecs_t timeout);
     virtual void     dump(int fd, const Vector<String16> &args) const;
@@ -49,18 +54,32 @@
 
   private:
 
+    typedef BufferItemConsumer::BufferItem BufferItem;
+
     sp<BufferItemConsumer> mConsumer;
+    Vector<BufferItem> mBuffersInFlight;
+    size_t            mTotalBufferCount;
+    size_t            mDequeuedBufferCount;
+    Condition         mBufferReturnedSignal;
+    uint32_t          mFrameCount;
+    nsecs_t           mLastTimestamp;
+
+    // The merged release fence for all returned buffers
+    sp<Fence>         mCombinedFence;
 
     /**
      * Camera3Stream interface
      */
 
-    virtual status_t getBufferLocked(camera3_stream_buffer *buffer);
-    virtual status_t returnBufferLocked(const camera3_stream_buffer &buffer,
-            nsecs_t timestamp);
+    virtual status_t getInputBufferLocked(camera3_stream_buffer *buffer);
+    virtual status_t returnInputBufferLocked(
+            const camera3_stream_buffer &buffer);
     virtual bool     hasOutstandingBuffersLocked() const;
     virtual status_t disconnectLocked();
 
+    virtual status_t configureQueueLocked();
+    virtual size_t   getBufferCountLocked();
+
 }; // class Camera3InputStream
 
 }; // namespace camera3
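
As returnInputBufferLocked() shows, the class records every buffer handed to the HAL in mBuffersInFlight and matches the handle when it comes back, rejecting handles it never issued. The same bookkeeping reduced to plain std containers (the handle type and struct names are stand-ins for the real gralloc/BufferItem types):

    #include <cstdint>
    #include <vector>

    typedef std::intptr_t BufferHandle;   // stand-in for buffer_handle_t*

    struct InFlightBuffer {
        BufferHandle handle;
        int acquireFence;
    };

    // Find and remove the in-flight entry matching a returned handle. Returns
    // false if the HAL handed back a buffer that was never dequeued, which the
    // real code treats as INVALID_OPERATION.
    static bool retireBuffer(std::vector<InFlightBuffer>& inFlight,
                             BufferHandle handle, InFlightBuffer* out) {
        for (std::vector<InFlightBuffer>::iterator it = inFlight.begin();
                it != inFlight.end(); ++it) {
            if (it->handle == handle) {
                if (out) *out = *it;
                inFlight.erase(it);
                return true;
            }
        }
        return false;
    }
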
diff --git a/services/camera/libcameraservice/camera3/Camera3OutputStream.cpp b/services/camera/libcameraservice/camera3/Camera3OutputStream.cpp
index 276b940..ec8cf0d 100644
--- a/services/camera/libcameraservice/camera3/Camera3OutputStream.cpp
+++ b/services/camera/libcameraservice/camera3/Camera3OutputStream.cpp
@@ -298,7 +298,7 @@
 
     switch (mState) {
         case STATE_IN_RECONFIG:
-            res = disconnect();
+            res = disconnectLocked();
             if (res != OK) {
                 return res;
             }
diff --git a/services/camera/libcameraservice/camera3/Camera3OutputStream.h b/services/camera/libcameraservice/camera3/Camera3OutputStream.h
index d331a94..2464dce 100644
--- a/services/camera/libcameraservice/camera3/Camera3OutputStream.h
+++ b/services/camera/libcameraservice/camera3/Camera3OutputStream.h
@@ -21,6 +21,7 @@
 #include <gui/Surface.h>
 
 #include "Camera3Stream.h"
+#include "Camera3OutputStreamInterface.h"
 
 namespace android {
 
@@ -29,7 +30,9 @@
 /**
  * A class for managing a single stream of output data from the camera device.
  */
-class Camera3OutputStream : public Camera3Stream {
+class Camera3OutputStream :
+        public Camera3Stream,
+        public Camera3OutputStreamInterface {
   public:
     /**
      * Set up a stream for formats that have 2 dimensions, such as RAW and YUV.
diff --git a/services/camera/libcameraservice/camera3/Camera3OutputStreamInterface.h b/services/camera/libcameraservice/camera3/Camera3OutputStreamInterface.h
new file mode 100644
index 0000000..aae72cf
--- /dev/null
+++ b/services/camera/libcameraservice/camera3/Camera3OutputStreamInterface.h
@@ -0,0 +1,43 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_SERVERS_CAMERA3_OUTPUT_STREAM_INTERFACE_H
+#define ANDROID_SERVERS_CAMERA3_OUTPUT_STREAM_INTERFACE_H
+
+#include "Camera3StreamInterface.h"
+
+namespace android {
+
+namespace camera3 {
+
+/**
+ * An interface for managing a single stream of output data from the camera
+ * device.
+ */
+class Camera3OutputStreamInterface : public virtual Camera3StreamInterface {
+  public:
+    /**
+     * Set the transform on the output stream; one of the
+     * HAL_TRANSFORM_* / NATIVE_WINDOW_TRANSFORM_* constants.
+     */
+    virtual status_t setTransform(int transform) = 0;
+};
+
+} // namespace camera3
+
+} // namespace android
+
+#endif
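
A small usage sketch for the interface above. The rotationToTransform helper is illustrative; the NATIVE_WINDOW_TRANSFORM_* values it returns are the constants the setTransform() comment refers to (from system/window.h):

    #include <system/window.h>                    // NATIVE_WINDOW_TRANSFORM_*
    #include <utils/Errors.h>
    #include <utils/StrongPointer.h>
    #include "Camera3OutputStreamInterface.h"     // added by this patch

    using namespace android;
    using namespace android::camera3;

    // Map a clockwise display rotation (degrees) to a transform flag.
    static int rotationToTransform(int degrees) {
        switch (degrees) {
            case 90:  return NATIVE_WINDOW_TRANSFORM_ROT_90;
            case 180: return NATIVE_WINDOW_TRANSFORM_ROT_180;
            case 270: return NATIVE_WINDOW_TRANSFORM_ROT_270;
            default:  return 0;                   // identity
        }
    }

    static status_t applyRotation(const sp<Camera3OutputStreamInterface>& stream,
                                  int degrees) {
        return stream->setTransform(rotationToTransform(degrees));
    }
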
diff --git a/services/camera/libcameraservice/camera3/Camera3Stream.cpp b/services/camera/libcameraservice/camera3/Camera3Stream.cpp
index cf3072b..f137227 100644
--- a/services/camera/libcameraservice/camera3/Camera3Stream.cpp
+++ b/services/camera/libcameraservice/camera3/Camera3Stream.cpp
@@ -178,14 +178,75 @@
 status_t Camera3Stream::getBuffer(camera3_stream_buffer *buffer) {
     ATRACE_CALL();
     Mutex::Autolock l(mLock);
-    return getBufferLocked(buffer);
+
+    status_t res = getBufferLocked(buffer);
+    if (res == OK) {
+        fireBufferListenersLocked(*buffer, /*acquired*/true, /*output*/true);
+    }
+
+    return res;
 }
 
 status_t Camera3Stream::returnBuffer(const camera3_stream_buffer &buffer,
         nsecs_t timestamp) {
     ATRACE_CALL();
     Mutex::Autolock l(mLock);
-    return returnBufferLocked(buffer, timestamp);
+
+    status_t res = returnBufferLocked(buffer, timestamp);
+    if (res == OK) {
+        fireBufferListenersLocked(buffer, /*acquired*/false, /*output*/true);
+    }
+
+    return res;
+}
+
+status_t Camera3Stream::getInputBuffer(camera3_stream_buffer *buffer) {
+    ATRACE_CALL();
+    Mutex::Autolock l(mLock);
+
+    status_t res = getInputBufferLocked(buffer);
+    if (res == OK) {
+        fireBufferListenersLocked(*buffer, /*acquired*/true, /*output*/false);
+    }
+
+    return res;
+}
+
+status_t Camera3Stream::returnInputBuffer(const camera3_stream_buffer &buffer) {
+    ATRACE_CALL();
+    Mutex::Autolock l(mLock);
+
+    status_t res = returnInputBufferLocked(buffer);
+    if (res == OK) {
+        fireBufferListenersLocked(buffer, /*acquired*/false, /*output*/false);
+    }
+    return res;
+}
+
+void Camera3Stream::fireBufferListenersLocked(
+        const camera3_stream_buffer& /*buffer*/, bool acquired, bool output) {
+    List<wp<Camera3StreamBufferListener> >::iterator it, end;
+
+    // TODO: finish implementing
+
+    Camera3StreamBufferListener::BufferInfo info =
+        Camera3StreamBufferListener::BufferInfo();
+    info.mOutput = output;
+    // TODO: rest of fields
+
+    for (it = mBufferListenerList.begin(), end = mBufferListenerList.end();
+         it != end;
+         ++it) {
+
+        sp<Camera3StreamBufferListener> listener = it->promote();
+        if (listener != 0) {
+            if (acquired) {
+                listener->onBufferAcquired(info);
+            } else {
+                listener->onBufferReleased(info);
+            }
+        }
+    }
 }
 
 bool Camera3Stream::hasOutstandingBuffers() const {
@@ -259,6 +320,55 @@
     return res;
 }
 
+status_t Camera3Stream::getBufferLocked(camera3_stream_buffer *) {
+    ALOGE("%s: This type of stream does not support output", __FUNCTION__);
+    return INVALID_OPERATION;
+}
+status_t Camera3Stream::returnBufferLocked(const camera3_stream_buffer &,
+                                           nsecs_t) {
+    ALOGE("%s: This type of stream does not support output", __FUNCTION__);
+    return INVALID_OPERATION;
+}
+status_t Camera3Stream::getInputBufferLocked(camera3_stream_buffer *) {
+    ALOGE("%s: This type of stream does not support input", __FUNCTION__);
+    return INVALID_OPERATION;
+}
+status_t Camera3Stream::returnInputBufferLocked(
+        const camera3_stream_buffer &) {
+    ALOGE("%s: This type of stream does not support input", __FUNCTION__);
+    return INVALID_OPERATION;
+}
+
+void Camera3Stream::addBufferListener(
+        wp<Camera3StreamBufferListener> listener) {
+    Mutex::Autolock l(mLock);
+    mBufferListenerList.push_back(listener);
+}
+
+void Camera3Stream::removeBufferListener(
+        const sp<Camera3StreamBufferListener>& listener) {
+    Mutex::Autolock l(mLock);
+
+    bool erased = false;
+    List<wp<Camera3StreamBufferListener> >::iterator it, end;
+    for (it = mBufferListenerList.begin(), end = mBufferListenerList.end();
+         it != end;
+         ) {
+
+        if (*it == listener) {
+            it = mBufferListenerList.erase(it);
+            erased = true;
+        } else {
+            ++it;
+        }
+    }
+
+    if (!erased) {
+        ALOGW("%s: Could not find listener to remove, already removed",
+              __FUNCTION__);
+    }
+}
+
 }; // namespace camera3
 
 }; // namespace android
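
fireBufferListenersLocked() walks a list of weak references and notifies only listeners that still promote, so a listener that has already been destroyed is skipped rather than dereferenced. The same dispatch pattern expressed with standard C++ smart pointers (the Listener type and notifyAll are illustrative):

    #include <list>
    #include <memory>

    struct Listener {
        virtual ~Listener() {}
        virtual void onBufferAcquired() = 0;
        virtual void onBufferReleased() = 0;
    };

    // Notify every listener that is still alive; expired weak references are
    // skipped, mirroring the wp<>::promote() check in fireBufferListenersLocked().
    static void notifyAll(std::list< std::weak_ptr<Listener> >& listeners,
                          bool acquired) {
        for (std::list< std::weak_ptr<Listener> >::iterator it = listeners.begin();
                it != listeners.end(); ++it) {
            std::shared_ptr<Listener> l = it->lock();
            if (!l) continue;                 // listener already destroyed
            if (acquired) {
                l->onBufferAcquired();
            } else {
                l->onBufferReleased();
            }
        }
    }
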
diff --git a/services/camera/libcameraservice/camera3/Camera3Stream.h b/services/camera/libcameraservice/camera3/Camera3Stream.h
index 2364cfd..d992cfe 100644
--- a/services/camera/libcameraservice/camera3/Camera3Stream.h
+++ b/services/camera/libcameraservice/camera3/Camera3Stream.h
@@ -21,9 +21,13 @@
 #include <utils/RefBase.h>
 #include <utils/String8.h>
 #include <utils/String16.h>
+#include <utils/List.h>
 
 #include "hardware/camera3.h"
 
+#include "Camera3StreamBufferListener.h"
+#include "Camera3StreamInterface.h"
+
 namespace android {
 
 namespace camera3 {
@@ -81,7 +85,8 @@
  */
 class Camera3Stream :
         protected camera3_stream,
-        public LightRefBase<Camera3Stream> {
+        public virtual Camera3StreamInterface,
+        public virtual RefBase {
   public:
 
     virtual ~Camera3Stream();
@@ -157,6 +162,25 @@
             nsecs_t timestamp);
 
     /**
+     * Fill in the camera3_stream_buffer with the next valid buffer for this
+     * stream, to hand over to the HAL.
+     *
+     * This method may only be called once finishConfiguration has been called.
+     * For bidirectional streams, this method applies to the input-side
+     * buffers.
+     *
+     */
+    status_t         getInputBuffer(camera3_stream_buffer *buffer);
+
+    /**
+     * Return a buffer to the stream after use by the HAL.
+     *
+     * This method may only be called for buffers provided by getInputBuffer().
+     * For bidirectional streams, this method applies to the input-side buffers
+     */
+    status_t         returnInputBuffer(const camera3_stream_buffer &buffer);
+
+    /**
      * Whether any of the stream's buffers are currently in use by the HAL,
      * including buffers that have been returned but not yet had their
      * release fence signaled.
@@ -186,6 +210,11 @@
      */
     virtual void     dump(int fd, const Vector<String16> &args) const = 0;
 
+    void             addBufferListener(
+            wp<Camera3StreamBufferListener> listener);
+    void             removeBufferListener(
+            const sp<Camera3StreamBufferListener>& listener);
+
   protected:
     const int mId;
     const String8 mName;
@@ -215,9 +244,12 @@
     // cast to camera3_stream*, implementations must increment the
     // refcount of the stream manually in getBufferLocked, and decrement it in
     // returnBufferLocked.
-    virtual status_t getBufferLocked(camera3_stream_buffer *buffer) = 0;
+    virtual status_t getBufferLocked(camera3_stream_buffer *buffer);
     virtual status_t returnBufferLocked(const camera3_stream_buffer &buffer,
-            nsecs_t timestamp) = 0;
+            nsecs_t timestamp);
+    virtual status_t getInputBufferLocked(camera3_stream_buffer *buffer);
+    virtual status_t returnInputBufferLocked(
+            const camera3_stream_buffer &buffer);
     virtual bool     hasOutstandingBuffersLocked() const = 0;
     virtual status_t disconnectLocked() = 0;
 
@@ -239,6 +271,10 @@
     // Gets all buffers from endpoint and registers them with the HAL.
     status_t registerBuffersLocked(camera3_device *hal3Device);
 
+    void fireBufferListenersLocked(const camera3_stream_buffer& buffer,
+                                  bool acquired, bool output);
+    List<wp<Camera3StreamBufferListener> > mBufferListenerList;
+
 }; // class Camera3Stream
 
 }; // namespace camera3
diff --git a/services/camera/libcameraservice/camera3/Camera3StreamBufferListener.h b/services/camera/libcameraservice/camera3/Camera3StreamBufferListener.h
new file mode 100644
index 0000000..62ea6c0
--- /dev/null
+++ b/services/camera/libcameraservice/camera3/Camera3StreamBufferListener.h
@@ -0,0 +1,48 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_SERVERS_CAMERA3_STREAMBUFFERLISTENER_H
+#define ANDROID_SERVERS_CAMERA3_STREAMBUFFERLISTENER_H
+
+#include <gui/Surface.h>
+#include <utils/RefBase.h>
+
+namespace android {
+
+namespace camera3 {
+
+class Camera3StreamBufferListener : public virtual RefBase {
+public:
+
+    struct BufferInfo {
+        bool mOutput; // if false then input buffer
+        Rect mCrop;
+        uint32_t mTransform;
+        uint32_t mScalingMode;
+        int64_t mTimestamp;
+        uint64_t mFrameNumber;
+    };
+
+    // Buffer was acquired by the HAL
+    virtual void onBufferAcquired(const BufferInfo& bufferInfo) = 0;
+    // Buffer was released by the HAL
+    virtual void onBufferReleased(const BufferInfo& bufferInfo) = 0;
+};
+
+}; //namespace camera3
+}; //namespace android
+
+#endif
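
A minimal implementation of the listener above, assuming only this header; it tallies input-buffer releases, which is the event ZslProcessor3 reacts to. The class name and counter are illustrative:

    #include "Camera3StreamBufferListener.h"   // added by this patch

    namespace android {
    namespace camera3 {

    // Counts how many input (reprocess) buffers the HAL has handed back.
    class InputReleaseCounter : public Camera3StreamBufferListener {
      public:
        InputReleaseCounter() : mInputReleases(0) {}

        virtual void onBufferAcquired(const BufferInfo& /*bufferInfo*/) {
            // Not interesting for this example.
        }
        virtual void onBufferReleased(const BufferInfo& bufferInfo) {
            if (!bufferInfo.mOutput) {         // false means an input buffer
                mInputReleases++;
            }
        }
        int inputReleases() const { return mInputReleases; }

      private:
        int mInputReleases;
    };

    }; // namespace camera2-style closing kept for consistency
    }; // namespace android

An instance would be attached to a stream with addBufferListener() and detached with removeBufferListener(), both added to Camera3Stream in this patch.
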
diff --git a/services/camera/libcameraservice/camera3/Camera3StreamInterface.h b/services/camera/libcameraservice/camera3/Camera3StreamInterface.h
new file mode 100644
index 0000000..4768536
--- /dev/null
+++ b/services/camera/libcameraservice/camera3/Camera3StreamInterface.h
@@ -0,0 +1,162 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_SERVERS_CAMERA3_STREAM_INTERFACE_H
+#define ANDROID_SERVERS_CAMERA3_STREAM_INTERFACE_H
+
+#include <utils/RefBase.h>
+#include "Camera3StreamBufferListener.h"
+
+struct camera3_stream_buffer;
+
+namespace android {
+
+namespace camera3 {
+
+/**
+ * An interface for managing a single stream of input and/or output data from
+ * the camera device.
+ */
+class Camera3StreamInterface : public virtual RefBase {
+  public:
+    /**
+     * Get the stream's ID
+     */
+    virtual int      getId() const = 0;
+
+    /**
+     * Get the stream's dimensions and format
+     */
+    virtual uint32_t getWidth() const = 0;
+    virtual uint32_t getHeight() const = 0;
+    virtual int      getFormat() const = 0;
+
+    /**
+     * Start the stream configuration process. Returns a handle to the stream's
+     * information to be passed into the HAL device's configure_streams call.
+     *
+     * Until finishConfiguration() is called, no other methods on the stream may
+     * be called. The usage and max_buffers fields of camera3_stream may be
+     * modified between start/finishConfiguration, but may not be changed after
+     * that. The priv field of camera3_stream may be modified at any time after
+     * startConfiguration.
+     *
+     * Returns NULL in case of error starting configuration.
+     */
+    virtual camera3_stream* startConfiguration() = 0;
+
+    /**
+     * Check if the stream is mid-configuration (start has been called, but not
+     * finish).  Used for lazy completion of configuration.
+     */
+    virtual bool    isConfiguring() const = 0;
+
+    /**
+     * Completes the stream configuration process. During this call, the stream
+     * may call the device's register_stream_buffers() method. The stream
+     * information structure returned by startConfiguration() may no longer be
+     * modified after this call, but can still be read until the destruction of
+     * the stream.
+     *
+     * Returns:
+     *   OK on a successful configuration
+     *   NO_INIT in case of a serious error from the HAL device
+     *   NO_MEMORY in case of an error registering buffers
+     *   INVALID_OPERATION in case connecting to the consumer failed
+     */
+    virtual status_t finishConfiguration(camera3_device *hal3Device) = 0;
+
+    /**
+     * Fill in the camera3_stream_buffer with the next valid buffer for this
+     * stream, to hand over to the HAL.
+     *
+     * This method may only be called once finishConfiguration has been called.
+     * For bidirectional streams, this method applies to the output-side
+     * buffers.
+     *
+     */
+    virtual status_t getBuffer(camera3_stream_buffer *buffer) = 0;
+
+    /**
+     * Return a buffer to the stream after use by the HAL.
+     *
+     * This method may only be called for buffers provided by getBuffer().
+     * For bidirectional streams, this method applies to the output-side buffers.
+     */
+    virtual status_t returnBuffer(const camera3_stream_buffer &buffer,
+            nsecs_t timestamp) = 0;
+
+    /**
+     * Fill in the camera3_stream_buffer with the next valid buffer for this
+     * stream, to hand over to the HAL.
+     *
+     * This method may only be called once finishConfiguration has been called.
+     * For bidirectional streams, this method applies to the input-side
+     * buffers.
+     *
+     */
+    virtual status_t getInputBuffer(camera3_stream_buffer *buffer) = 0;
+
+    /**
+     * Return a buffer to the stream after use by the HAL.
+     *
+     * This method may only be called for buffers provided by getInputBuffer().
+     * For bidirectional streams, this method applies to the input-side buffers.
+     */
+    virtual status_t returnInputBuffer(const camera3_stream_buffer &buffer) = 0;
+
+    /**
+     * Whether any of the stream's buffers are currently in use by the HAL,
+     * including buffers that have been returned but not yet had their
+     * release fence signaled.
+     */
+    virtual bool     hasOutstandingBuffers() const = 0;
+
+    enum {
+        TIMEOUT_NEVER = -1
+    };
+    /**
+     * Wait until the HAL is done with all of this stream's buffers, including
+     * signalling all release fences. Returns TIMED_OUT if the timeout is
+     * exceeded, OK on success. Pass in TIMEOUT_NEVER for timeout to indicate
+     * an indefinite wait.
+     */
+    virtual status_t waitUntilIdle(nsecs_t timeout) = 0;
+
+    /**
+     * Disconnect stream from its non-HAL endpoint. After this,
+     * start/finishConfiguration must be called before the stream can be used
+     * again. This cannot be called if the stream has outstanding dequeued
+     * buffers.
+     */
+    virtual status_t disconnect() = 0;
+
+    /**
+     * Debug dump of the stream's state.
+     */
+    virtual void     dump(int fd, const Vector<String16> &args) const = 0;
+
+    virtual void     addBufferListener(
+            wp<Camera3StreamBufferListener> listener) = 0;
+    virtual void     removeBufferListener(
+            const sp<Camera3StreamBufferListener>& listener) = 0;
+};
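+
+/**
+ * Illustrative sketch only (not part of this change), showing the intended
+ * configuration and buffer lifecycle described above; 'stream', 'hal3Device'
+ * and 'timestamp' are assumed to come from the caller:
+ *
+ *   camera3_stream *halStream = stream->startConfiguration();
+ *   // Optionally adjust halStream->usage / halStream->max_buffers, hand
+ *   // halStream to the HAL via configure_streams(), then:
+ *   status_t res = stream->finishConfiguration(hal3Device);
+ *
+ *   camera3_stream_buffer buffer;
+ *   res = stream->getBuffer(&buffer);               // hand buffer to the HAL
+ *   // ... HAL fills the buffer ...
+ *   res = stream->returnBuffer(buffer, timestamp);  // framework takes it back
+ *
+ *   stream->waitUntilIdle(Camera3StreamInterface::TIMEOUT_NEVER);
+ *   stream->disconnect();
+ */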
+
+} // namespace camera3
+
+} // namespace android
+
+#endif
diff --git a/services/camera/libcameraservice/camera3/Camera3ZslStream.cpp b/services/camera/libcameraservice/camera3/Camera3ZslStream.cpp
index e8a5ca6..0345d5b 100644
--- a/services/camera/libcameraservice/camera3/Camera3ZslStream.cpp
+++ b/services/camera/libcameraservice/camera3/Camera3ZslStream.cpp
@@ -18,70 +18,652 @@
 #define ATRACE_TAG ATRACE_TAG_CAMERA
 //#define LOG_NDEBUG 0
 
+// This is needed for stdint.h to define INT64_MAX in C++
+#define __STDC_LIMIT_MACROS
+
 #include <utils/Log.h>
 #include <utils/Trace.h>
 #include "Camera3ZslStream.h"
 
+#ifndef container_of
+#define container_of(ptr, type, member) \
+    (type *)((char*)(ptr) - offsetof(type, member))
+#endif
+
+typedef android::RingBufferConsumer::PinnedBufferItem PinnedBufferItem;
+
 namespace android {
 
 namespace camera3 {
 
+namespace {
+struct TimestampFinder : public RingBufferConsumer::RingBufferComparator {
+    typedef RingBufferConsumer::BufferInfo BufferInfo;
+
+    enum {
+        SELECT_I1 = -1,
+        SELECT_I2 = 1,
+        SELECT_NEITHER = 0,
+    };
+
+    TimestampFinder(nsecs_t timestamp) : mTimestamp(timestamp) {}
+    ~TimestampFinder() {}
+
+    template <typename T>
+    static void swap(T& a, T& b) {
+        T tmp = a;
+        a = b;
+        b = tmp;
+    }
+
+    /**
+     * Try to find the best candidate for a ZSL buffer.
+     * Match priority from best to worst:
+     *  1) Timestamps match.
+     *  2) Timestamp is closest to the needle (and lower).
+     *  3) Timestamp is closest to the needle (and higher).
+     *
+     */
+    virtual int compare(const BufferInfo *i1,
+                        const BufferInfo *i2) const {
+        // Try to select non-null object first.
+        if (i1 == NULL) {
+            return SELECT_I2;
+        } else if (i2 == NULL) {
+            return SELECT_I1;
+        }
+
+        // Best result: timestamp is identical
+        if (i1->mTimestamp == mTimestamp) {
+            return SELECT_I1;
+        } else if (i2->mTimestamp == mTimestamp) {
+            return SELECT_I2;
+        }
+
+        const BufferInfo* infoPtrs[2] = {
+            i1,
+            i2
+        };
+        int infoSelectors[2] = {
+            SELECT_I1,
+            SELECT_I2
+        };
+
+        // Order i1,i2 so that always i1.timestamp < i2.timestamp
+        if (i1->mTimestamp > i2->mTimestamp) {
+            swap(infoPtrs[0], infoPtrs[1]);
+            swap(infoSelectors[0], infoSelectors[1]);
+        }
+
+        // Second best: closest (lower) timestamp
+        if (infoPtrs[1]->mTimestamp < mTimestamp) {
+            return infoSelectors[1];
+        } else if (infoPtrs[0]->mTimestamp < mTimestamp) {
+            return infoSelectors[0];
+        }
+
+        // Worst: closest (higher) timestamp
+        return infoSelectors[0];
+
+        /**
+         * The above cases should cover all the possibilities,
+         * and we get an 'empty' result only if the ring buffer
+         * was empty itself
+         */
+    }
+
+    const nsecs_t mTimestamp;
+}; // struct TimestampFinder
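+
+/**
+ * Worked example of the priority above (illustrative only): with buffers at
+ * timestamps {100, 200, 300} and a needle of 250, the scan keeps 200 (closest
+ * lower timestamp). With a needle of 50 no lower timestamp exists, so 100
+ * (closest higher) is selected. With a needle of 300 the exact match wins
+ * immediately.
+ */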
+} // namespace anonymous
+
 Camera3ZslStream::Camera3ZslStream(int id, uint32_t width, uint32_t height,
         int depth) :
         Camera3Stream(id, CAMERA3_STREAM_BIDIRECTIONAL, width, height, 0,
                 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED),
-        mDepth(depth) {
+        mDepth(depth),
+        mProducer(new RingBufferConsumer(GRALLOC_USAGE_HW_CAMERA_ZSL,
+                                         depth)),
+        mConsumer(new Surface(mProducer->getProducerInterface())),
+        //mTransform(0),
+        mTotalBufferCount(0),
+        mDequeuedBufferCount(0),
+        mFrameCount(0),
+        mLastTimestamp(0),
+        mCombinedFence(new Fence()) {
+}
+
+Camera3ZslStream::~Camera3ZslStream() {
+    disconnectLocked();
 }
 
 status_t Camera3ZslStream::getBufferLocked(camera3_stream_buffer *buffer) {
-    (void) buffer;
-    ALOGE("%s: Not implemented", __FUNCTION__);
-    return INVALID_OPERATION;
+    // same as output stream code
+    ATRACE_CALL();
+    status_t res;
+
+    // Allow dequeue during IN_[RE]CONFIG for registration
+    if (mState != STATE_CONFIGURED &&
+            mState != STATE_IN_CONFIG && mState != STATE_IN_RECONFIG) {
+        ALOGE("%s: Stream %d: Can't get buffers in unconfigured state %d",
+                __FUNCTION__, mId, mState);
+        return INVALID_OPERATION;
+    }
+
+    // Only limit dequeue amount when fully configured
+    if (mState == STATE_CONFIGURED &&
+            mDequeuedBufferCount == camera3_stream::max_buffers) {
+        ALOGE("%s: Stream %d: Already dequeued maximum number of simultaneous"
+                " buffers (%d)", __FUNCTION__, mId,
+                camera3_stream::max_buffers);
+        return INVALID_OPERATION;
+    }
+
+    ANativeWindowBuffer* anb;
+    int fenceFd;
+
+    res = mConsumer->dequeueBuffer(mConsumer.get(), &anb, &fenceFd);
+    if (res != OK) {
+        ALOGE("%s: Stream %d: Can't dequeue next output buffer: %s (%d)",
+                __FUNCTION__, mId, strerror(-res), res);
+        return res;
+    }
+
+    // Handing out a raw pointer to this object. Increment internal refcount.
+    incStrong(this);
+    buffer->stream = this;
+    buffer->buffer = &(anb->handle);
+    buffer->acquire_fence = fenceFd;
+    buffer->release_fence = -1;
+    buffer->status = CAMERA3_BUFFER_STATUS_OK;
+
+    mDequeuedBufferCount++;
+
+    return OK;
 }
 
 status_t Camera3ZslStream::returnBufferLocked(
         const camera3_stream_buffer &buffer,
         nsecs_t timestamp) {
-    (void) buffer;
-    (void) timestamp;
-    ALOGE("%s: Not implemented", __FUNCTION__);
-    return INVALID_OPERATION;
+    // same as output stream code
+    ATRACE_CALL();
+    status_t res;
+
+    // returnBuffer may be called from a raw pointer, not a sp<>, and we'll be
+    // decrementing the internal refcount next. In case this is the last ref, we
+    // might get destructed on the decStrong(), so keep an sp around until the
+    // end of the call - otherwise have to sprinkle the decStrong on all exit
+    // points.
+    sp<Camera3ZslStream> keepAlive(this);
+    decStrong(this);
+
+    // Allow buffers to be returned in the error state, to allow for disconnect
+    // and in the in-config states for registration
+    if (mState == STATE_CONSTRUCTED) {
+        ALOGE("%s: Stream %d: Can't return buffers in unconfigured state %d",
+                __FUNCTION__, mId, mState);
+        return INVALID_OPERATION;
+    }
+    if (mDequeuedBufferCount == 0) {
+        ALOGE("%s: Stream %d: No buffers outstanding to return", __FUNCTION__,
+                mId);
+        return INVALID_OPERATION;
+    }
+    if (buffer.status == CAMERA3_BUFFER_STATUS_ERROR) {
+        res = mConsumer->cancelBuffer(mConsumer.get(),
+                container_of(buffer.buffer, ANativeWindowBuffer, handle),
+                buffer.release_fence);
+        if (res != OK) {
+            ALOGE("%s: Stream %d: Error cancelling buffer to native window:"
+                    " %s (%d)", __FUNCTION__, mId, strerror(-res), res);
+            return res;
+        }
+    } else {
+        res = native_window_set_buffers_timestamp(mConsumer.get(), timestamp);
+        if (res != OK) {
+            ALOGE("%s: Stream %d: Error setting timestamp: %s (%d)",
+                    __FUNCTION__, mId, strerror(-res), res);
+            return res;
+        }
+
+        sp<Fence> releaseFence = new Fence(buffer.release_fence);
+        int anwReleaseFence = releaseFence->dup();
+
+        res = mConsumer->queueBuffer(mConsumer.get(),
+                container_of(buffer.buffer, ANativeWindowBuffer, handle),
+                anwReleaseFence);
+        if (res != OK) {
+            ALOGE("%s: Stream %d: Error queueing buffer to native window: %s (%d)",
+                    __FUNCTION__, mId, strerror(-res), res);
+            close(anwReleaseFence);
+            return res;
+        }
+
+        mCombinedFence = Fence::merge(mName, mCombinedFence, releaseFence);
+    }
+
+    mDequeuedBufferCount--;
+    mBufferReturnedSignal.signal();
+    mLastTimestamp = timestamp;
+
+    return OK;
 }
 
 bool Camera3ZslStream::hasOutstandingBuffersLocked() const {
-    ALOGE("%s: Not implemented", __FUNCTION__);
+    // same as output stream
+    nsecs_t signalTime = mCombinedFence->getSignalTime();
+    ALOGV("%s: Stream %d: Has %d outstanding buffers,"
+            " buffer signal time is %lld",
+            __FUNCTION__, mId, mDequeuedBufferCount, signalTime);
+    if (mDequeuedBufferCount > 0 || signalTime == INT64_MAX) {
+        return true;
+    }
     return false;
 }
 
 status_t Camera3ZslStream::waitUntilIdle(nsecs_t timeout) {
-    (void) timeout;
-    ALOGE("%s: Not implemented", __FUNCTION__);
-    return INVALID_OPERATION;
+    // same as output stream
+    status_t res;
+    {
+        Mutex::Autolock l(mLock);
+        while (mDequeuedBufferCount > 0) {
+            if (timeout != TIMEOUT_NEVER) {
+                nsecs_t startTime = systemTime();
+                res = mBufferReturnedSignal.waitRelative(mLock, timeout);
+                if (res == TIMED_OUT) {
+                    return res;
+                } else if (res != OK) {
+                    ALOGE("%s: Error waiting for outstanding buffers: %s (%d)",
+                            __FUNCTION__, strerror(-res), res);
+                    return res;
+                }
+                nsecs_t deltaTime = systemTime() - startTime;
+                if (timeout <= deltaTime) {
+                    timeout = 0;
+                } else {
+                    timeout -= deltaTime;
+                }
+            } else {
+                res = mBufferReturnedSignal.wait(mLock);
+                if (res != OK) {
+                    ALOGE("%s: Error waiting for outstanding buffers: %s (%d)",
+                            __FUNCTION__, strerror(-res), res);
+                    return res;
+                }
+            }
+        }
+    }
+
+    // No lock
+
+    unsigned int timeoutMs;
+    if (timeout == TIMEOUT_NEVER) {
+        timeoutMs = Fence::TIMEOUT_NEVER;
+    } else if (timeout == 0) {
+        timeoutMs = 0;
+    } else {
+        // Round up to wait at least 1 ms
+        timeoutMs = (timeout + 999999) / 1000000;
+    }
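+    // e.g. a residual timeout of 1 ns still waits 1 ms; 2500000 ns rounds up
+    // to 3 ms.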
+
+    return mCombinedFence->wait(timeoutMs);
+}
+
+status_t Camera3ZslStream::configureQueueLocked() {
+    status_t res;
+
+    switch (mState) {
+        case STATE_IN_RECONFIG:
+            res = disconnectLocked();
+            if (res != OK) {
+                return res;
+            }
+            break;
+        case STATE_IN_CONFIG:
+            // OK
+            break;
+        default:
+            ALOGE("%s: Bad state: %d", __FUNCTION__, mState);
+            return INVALID_OPERATION;
+    }
+
+    // Configure consumer-side ANativeWindow interface
+    res = native_window_api_connect(mConsumer.get(),
+            NATIVE_WINDOW_API_CAMERA);
+    if (res != OK) {
+        ALOGE("%s: Unable to connect to native window for stream %d",
+                __FUNCTION__, mId);
+        return res;
+    }
+
+    res = native_window_set_usage(mConsumer.get(), camera3_stream::usage);
+    if (res != OK) {
+        ALOGE("%s: Unable to configure usage %08x for stream %d",
+                __FUNCTION__, camera3_stream::usage, mId);
+        return res;
+    }
+
+    res = native_window_set_scaling_mode(mConsumer.get(),
+            NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW);
+    if (res != OK) {
+        ALOGE("%s: Unable to configure stream scaling: %s (%d)",
+                __FUNCTION__, strerror(-res), res);
+        return res;
+    }
+
+    if (mMaxSize == 0) {
+        // For buffers of known size
+        res = native_window_set_buffers_geometry(mConsumer.get(),
+                camera3_stream::width, camera3_stream::height,
+                camera3_stream::format);
+    } else {
+        // For buffers with bounded size
+        res = native_window_set_buffers_geometry(mConsumer.get(),
+                mMaxSize, 1,
+                camera3_stream::format);
+    }
+    if (res != OK) {
+        ALOGE("%s: Unable to configure stream buffer geometry"
+                " %d x %d, format %x for stream %d",
+                __FUNCTION__, camera3_stream::width, camera3_stream::height,
+                camera3_stream::format, mId);
+        return res;
+    }
+
+    int maxConsumerBuffers;
+    res = mConsumer->query(mConsumer.get(),
+            NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, &maxConsumerBuffers);
+    if (res != OK) {
+        ALOGE("%s: Unable to query consumer undequeued"
+                " buffer count for stream %d", __FUNCTION__, mId);
+        return res;
+    }
+
+    ALOGV("%s: Consumer wants %d buffers", __FUNCTION__,
+            maxConsumerBuffers);
+
+    mTotalBufferCount = maxConsumerBuffers + camera3_stream::max_buffers;
+    mDequeuedBufferCount = 0;
+    mFrameCount = 0;
+    mLastTimestamp = 0;
+
+    res = native_window_set_buffer_count(mConsumer.get(),
+            mTotalBufferCount);
+    if (res != OK) {
+        ALOGE("%s: Unable to set buffer count for stream %d",
+                __FUNCTION__, mId);
+        return res;
+    }
+
+    return OK;
+}
+
+size_t Camera3ZslStream::getBufferCountLocked() {
+    return mTotalBufferCount;
 }
 
 status_t Camera3ZslStream::disconnectLocked() {
-    ALOGE("%s: Not implemented", __FUNCTION__);
-    return INVALID_OPERATION;
+    status_t res;
+
+    switch (mState) {
+        case STATE_IN_RECONFIG:
+        case STATE_CONFIGURED:
+            // OK
+            break;
+        default:
+            // No connection, nothing to do
+            return OK;
+    }
+
+    if (mDequeuedBufferCount > 0) {
+        ALOGE("%s: Can't disconnect with %d buffers still dequeued!",
+                __FUNCTION__, mDequeuedBufferCount);
+        return INVALID_OPERATION;
+    }
+
+    res = native_window_api_disconnect(mConsumer.get(), NATIVE_WINDOW_API_CAMERA);
+
+    /**
+     * This is not an error. If the client calling process dies, the window
+     * will also die, and all calls to it will return DEAD_OBJECT, thus it's
+     * already "disconnected".
+     */
+    if (res == DEAD_OBJECT) {
+        ALOGW("%s: While disconnecting stream %d from native window, the"
+                " native window died from under us", __FUNCTION__, mId);
+    } else if (res != OK) {
+        ALOGE("%s: Unable to disconnect stream %d from native window (error %d %s)",
+                __FUNCTION__, mId, res, strerror(-res));
+        mState = STATE_ERROR;
+        return res;
+    }
+
+    mState = (mState == STATE_IN_RECONFIG) ? STATE_IN_CONFIG : STATE_CONSTRUCTED;
+    return OK;
 }
 
-status_t Camera3ZslStream::getInputBuffer(camera3_stream_buffer *buffer,
-        nsecs_t timestamp) {
-    (void) buffer;
-    (void) timestamp;
-    ALOGE("%s: Not implemented", __FUNCTION__);
-    return INVALID_OPERATION;
+status_t Camera3ZslStream::getInputBufferLocked(camera3_stream_buffer *buffer) {
+    ATRACE_CALL();
+
+    // TODO: potentially register buffers from getInputBufferLocked as well.
+    // This should be OK for now; registerBuffersLocked only calls getBuffer,
+    // i.e. ZSL streams register in output mode instead of input mode.
+    if (mState == STATE_IN_CONFIG || mState == STATE_IN_RECONFIG) {
+        ALOGE("%s: Stream %d: Buffer registration for input streams"
+              " not implemented (state %d)",
+              __FUNCTION__, mId, mState);
+        return INVALID_OPERATION;
+    }
+
+    // Allow dequeue during IN_[RE]CONFIG for registration
+    if (mState != STATE_CONFIGURED &&
+            mState != STATE_IN_CONFIG && mState != STATE_IN_RECONFIG) {
+        ALOGE("%s: Stream %d: Can't get buffers in unconfigured state %d",
+                __FUNCTION__, mId, mState);
+        return INVALID_OPERATION;
+    }
+
+    // Only limit dequeue amount when fully configured
+    if (mState == STATE_CONFIGURED &&
+            mDequeuedBufferCount == camera3_stream::max_buffers) {
+        ALOGE("%s: Stream %d: Already dequeued maximum number of simultaneous"
+                " buffers (%d)", __FUNCTION__, mId,
+                camera3_stream::max_buffers);
+        return INVALID_OPERATION;
+    }
+
+    ANativeWindowBuffer* anb;
+    int fenceFd;
+
+    assert(mProducer != 0);
+
+    sp<PinnedBufferItem> bufferItem;
+    {
+        List<sp<RingBufferConsumer::PinnedBufferItem> >::iterator it, end;
+        it = mInputBufferQueue.begin();
+        end = mInputBufferQueue.end();
+
+        // Need to call enqueueInputBufferByTimestamp as a prerequisite
+        if (it == end) {
+            ALOGE("%s: Stream %d: No input buffer was queued",
+                    __FUNCTION__, mId);
+            return INVALID_OPERATION;
+        }
+        bufferItem = *it;
+        mInputBufferQueue.erase(it);
+    }
+
+    anb = bufferItem->getBufferItem().mGraphicBuffer->getNativeBuffer();
+    assert(anb != NULL);
+    fenceFd = bufferItem->getBufferItem().mFence->dup();
+
+    /**
+     * FenceFD now owned by HAL except in case of error,
+     * in which case we reassign it to acquire_fence
+     */
+
+    // Handing out a raw pointer to this object. Increment internal refcount.
+    incStrong(this);
+    buffer->stream = this;
+    buffer->buffer = &(anb->handle);
+    buffer->acquire_fence = fenceFd;
+    buffer->release_fence = -1;
+    buffer->status = CAMERA3_BUFFER_STATUS_OK;
+
+    mDequeuedBufferCount++;
+
+    mBuffersInFlight.push_back(bufferItem);
+
+    return OK;
 }
 
-status_t Camera3ZslStream::returnInputBuffer(const camera3_stream_buffer &buffer) {
-    (void) buffer;
-    ALOGE("%s: Not implemented", __FUNCTION__);
-    return INVALID_OPERATION;
+status_t Camera3ZslStream::returnInputBufferLocked(
+        const camera3_stream_buffer &buffer) {
+    ATRACE_CALL();
+
+    // returnBuffer may be called from a raw pointer, not a sp<>, and we'll be
+    // decrementing the internal refcount next. In case this is the last ref, we
+    // might get destructed on the decStrong(), so keep an sp around until the
+    // end of the call - otherwise have to sprinkle the decStrong on all exit
+    // points.
+    sp<Camera3ZslStream> keepAlive(this);
+    decStrong(this);
+
+    // Allow buffers to be returned in the error state, to allow for disconnect
+    // and in the in-config states for registration
+    if (mState == STATE_CONSTRUCTED) {
+        ALOGE("%s: Stream %d: Can't return buffers in unconfigured state %d",
+                __FUNCTION__, mId, mState);
+        return INVALID_OPERATION;
+    }
+    if (mDequeuedBufferCount == 0) {
+        ALOGE("%s: Stream %d: No buffers outstanding to return", __FUNCTION__,
+                mId);
+        return INVALID_OPERATION;
+    }
+
+    bool bufferFound = false;
+    sp<PinnedBufferItem> bufferItem;
+    {
+        // Find the buffer we are returning
+        Vector<sp<PinnedBufferItem> >::iterator it, end;
+        for (it = mBuffersInFlight.begin(), end = mBuffersInFlight.end();
+             it != end;
+             ++it) {
+
+            const sp<PinnedBufferItem>& tmp = *it;
+            ANativeWindowBuffer *anb =
+                    tmp->getBufferItem().mGraphicBuffer->getNativeBuffer();
+            if (anb != NULL && &(anb->handle) == buffer.buffer) {
+                bufferFound = true;
+                bufferItem = tmp;
+                mBuffersInFlight.erase(it);
+                mDequeuedBufferCount--;
+            }
+        }
+    }
+    if (!bufferFound) {
+        ALOGE("%s: Stream %d: Can't return buffer that wasn't sent to HAL",
+              __FUNCTION__, mId);
+        return INVALID_OPERATION;
+    }
+
+    int releaseFenceFd = buffer.release_fence;
+
+    if (buffer.status == CAMERA3_BUFFER_STATUS_ERROR) {
+        if (buffer.release_fence != -1) {
+            ALOGE("%s: Stream %d: HAL should not set release_fence(%d) when "
+                  "there is an error", __FUNCTION__, mId, buffer.release_fence);
+            close(buffer.release_fence);
+        }
+
+        /**
+         * Reassign release fence as the acquire fence in case of error
+         */
+        releaseFenceFd = buffer.acquire_fence;
+    }
+
+    /**
+     * Unconditionally return buffer to the buffer queue.
+     * - Fwk takes over the release_fence ownership
+     */
+    sp<Fence> releaseFence = new Fence(releaseFenceFd);
+    bufferItem->getBufferItem().mFence = releaseFence;
+    bufferItem.clear(); // dropping last reference unpins buffer
+
+    mCombinedFence = Fence::merge(mName, mCombinedFence, releaseFence);
+
+    mBufferReturnedSignal.signal();
+
+    return OK;
 }
 
 void Camera3ZslStream::dump(int fd, const Vector<String16> &args) const {
-    (void) fd;
     (void) args;
-    ALOGE("%s: Not implemented", __FUNCTION__);
+
+    String8 lines;
+    lines.appendFormat("    Stream[%d]: ZSL\n", mId);
+    lines.appendFormat("      State: %d\n", mState);
+    lines.appendFormat("      Dims: %d x %d, format 0x%x\n",
+            camera3_stream::width, camera3_stream::height,
+            camera3_stream::format);
+    lines.appendFormat("      Usage: %d, max HAL buffers: %d\n",
+            camera3_stream::usage, camera3_stream::max_buffers);
+    lines.appendFormat("      Frames produced: %d, last timestamp: %lld ns\n",
+            mFrameCount, mLastTimestamp);
+    lines.appendFormat("      Total buffers: %d, currently dequeued: %d\n",
+            mTotalBufferCount, mDequeuedBufferCount);
+    lines.appendFormat("      Input buffers pending: %d, in flight %d\n",
+            mInputBufferQueue.size(), mBuffersInFlight.size());
+    write(fd, lines.string(), lines.size());
+}
+
+status_t Camera3ZslStream::enqueueInputBufferByTimestamp(
+        nsecs_t timestamp,
+        nsecs_t* actualTimestamp) {
+
+    Mutex::Autolock l(mLock);
+
+    TimestampFinder timestampFinder = TimestampFinder(timestamp);
+
+    sp<RingBufferConsumer::PinnedBufferItem> pinnedBuffer =
+            mProducer->pinSelectedBuffer(timestampFinder,
+                                        /*waitForFence*/false);
+
+    if (pinnedBuffer == 0) {
+        ALOGE("%s: No ZSL buffers were available yet", __FUNCTION__);
+        return NO_BUFFER_AVAILABLE;
+    }
+
+    nsecs_t actual = pinnedBuffer->getBufferItem().mTimestamp;
+
+    if (actual != timestamp) {
+        ALOGW("%s: ZSL buffer candidate search didn't find an exact match --"
+              " requested timestamp = %lld, actual timestamp = %lld",
+              __FUNCTION__, timestamp, actual);
+    }
+
+    mInputBufferQueue.push_back(pinnedBuffer);
+
+    if (actualTimestamp != NULL) {
+        *actualTimestamp = actual;
+    }
+
+    return OK;
+}
+
+status_t Camera3ZslStream::clearInputRingBuffer() {
+    Mutex::Autolock l(mLock);
+
+    mInputBufferQueue.clear();
+
+    return mProducer->clear();
+}
+
+status_t Camera3ZslStream::setTransform(int /*transform*/) {
+    ALOGV("%s: Not implemented", __FUNCTION__);
+    return INVALID_OPERATION;
 }
 
 }; // namespace camera3
diff --git a/services/camera/libcameraservice/camera3/Camera3ZslStream.h b/services/camera/libcameraservice/camera3/Camera3ZslStream.h
index 39d5995..b863e7f 100644
--- a/services/camera/libcameraservice/camera3/Camera3ZslStream.h
+++ b/services/camera/libcameraservice/camera3/Camera3ZslStream.h
@@ -19,8 +19,10 @@
 
 #include <utils/RefBase.h>
 #include <gui/Surface.h>
+#include <gui/RingBufferConsumer.h>
 
 #include "Camera3Stream.h"
+#include "Camera3OutputStreamInterface.h"
 
 namespace android {
 
@@ -32,32 +34,62 @@
  * most output buffers, and when directed, pushes a buffer back to the HAL for
  * processing.
  */
-class Camera3ZslStream: public Camera3Stream {
+class Camera3ZslStream :
+        public Camera3Stream,
+        public Camera3OutputStreamInterface {
   public:
     /**
      * Set up a ZSL stream of a given resolution. Depth is the number of buffers
      * cached within the stream that can be retrieved for input.
      */
     Camera3ZslStream(int id, uint32_t width, uint32_t height, int depth);
+    ~Camera3ZslStream();
 
     virtual status_t waitUntilIdle(nsecs_t timeout);
     virtual void     dump(int fd, const Vector<String16> &args) const;
 
-    /**
-     * Get an input buffer matching a specific timestamp. If no buffer matching
-     * the timestamp is available, NO_MEMORY is returned.
-     */
-    status_t getInputBuffer(camera3_stream_buffer *buffer, nsecs_t timestamp);
+    enum { NO_BUFFER_AVAILABLE = BufferQueue::NO_BUFFER_AVAILABLE };
 
     /**
-     * Return input buffer from HAL. The buffer is then marked as unfilled, and
-     * returned to the output-side stream for refilling.
+     * Locate a buffer matching this timestamp in the RingBufferConsumer,
+     * and mark it to be queued at the next getInputBufferLocked invocation.
+     *
+     * Errors: Returns NO_BUFFER_AVAILABLE if we could not find a match.
+     *
      */
-    status_t returnInputBuffer(const camera3_stream_buffer &buffer);
+    status_t enqueueInputBufferByTimestamp(nsecs_t timestamp,
+                                           nsecs_t* actualTimestamp);
+
+    /**
+     * Clears the buffers that can be used by enqueueInputBufferByTimestamp
+     */
+    status_t clearInputRingBuffer();
+
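+    /**
+     * Illustrative sketch only (not part of this change): the typical ZSL
+     * reprocess sequence from the device layer, where 'zslStream' and
+     * 'shutterTimestamp' are assumed to come from the caller:
+     *
+     *   nsecs_t actual;
+     *   status_t res = zslStream->enqueueInputBufferByTimestamp(
+     *           shutterTimestamp, &actual);
+     *   if (res == Camera3ZslStream::NO_BUFFER_AVAILABLE) {
+     *       // No candidate cached yet; fall back to a regular capture.
+     *   }
+     *   // The next input buffer handed to the HAL (via getInputBufferLocked)
+     *   // is the pinned candidate; when the HAL returns it through
+     *   // returnInputBufferLocked, the buffer is unpinned automatically.
+     */
+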
+    /**
+     * Camera3OutputStreamInterface implementation
+     */
+    status_t setTransform(int transform);
 
   private:
 
     int mDepth;
+    // Input buffers pending to be queued into HAL
+    List<sp<RingBufferConsumer::PinnedBufferItem> > mInputBufferQueue;
+    sp<RingBufferConsumer>                          mProducer;
+    sp<ANativeWindow>                               mConsumer;
+
+    // Input buffers in flight to HAL
+    Vector<sp<RingBufferConsumer::PinnedBufferItem> > mBuffersInFlight;
+    size_t                                          mTotalBufferCount;
+    // sum of input and output buffers that are currently acquired by HAL
+    size_t                                          mDequeuedBufferCount;
+    Condition                                       mBufferReturnedSignal;
+    uint32_t                                        mFrameCount;
+    // Last received output buffer's timestamp
+    nsecs_t                                         mLastTimestamp;
+
+    // The merged release fence for all returned buffers
+    sp<Fence>                                       mCombinedFence;
 
     /**
      * Camera3Stream interface
@@ -67,9 +99,18 @@
     virtual status_t getBufferLocked(camera3_stream_buffer *buffer);
     virtual status_t returnBufferLocked(const camera3_stream_buffer &buffer,
             nsecs_t timestamp);
+    // getInputBuffer/returnInputBuffer operate on the input stream side of the
+    // ZslStream.
+    virtual status_t getInputBufferLocked(camera3_stream_buffer *buffer);
+    virtual status_t returnInputBufferLocked(
+            const camera3_stream_buffer &buffer);
+
     virtual bool     hasOutstandingBuffersLocked() const;
     virtual status_t disconnectLocked();
 
+    virtual status_t configureQueueLocked();
+    virtual size_t   getBufferCountLocked();
+
 }; // class Camera3ZslStream
 
 }; // namespace camera3
diff --git a/services/camera/libcameraservice/gui/RingBufferConsumer.cpp b/services/camera/libcameraservice/gui/RingBufferConsumer.cpp
new file mode 100644
index 0000000..1b2a717
--- /dev/null
+++ b/services/camera/libcameraservice/gui/RingBufferConsumer.cpp
@@ -0,0 +1,346 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#undef NDEBUG
+#include <cassert>
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "RingBufferConsumer"
+#define ATRACE_TAG ATRACE_TAG_GRAPHICS
+#include <utils/Log.h>
+
+#include <gui/RingBufferConsumer.h>
+
+#define BI_LOGV(x, ...) ALOGV("[%s] "x, mName.string(), ##__VA_ARGS__)
+#define BI_LOGD(x, ...) ALOGD("[%s] "x, mName.string(), ##__VA_ARGS__)
+#define BI_LOGI(x, ...) ALOGI("[%s] "x, mName.string(), ##__VA_ARGS__)
+#define BI_LOGW(x, ...) ALOGW("[%s] "x, mName.string(), ##__VA_ARGS__)
+#define BI_LOGE(x, ...) ALOGE("[%s] "x, mName.string(), ##__VA_ARGS__)
+
+typedef android::RingBufferConsumer::PinnedBufferItem PinnedBufferItem;
+
+namespace android {
+
+RingBufferConsumer::RingBufferConsumer(uint32_t consumerUsage,
+        int bufferCount) :
+    ConsumerBase(new BufferQueue(true)),
+    mBufferCount(bufferCount)
+{
+    mBufferQueue->setConsumerUsageBits(consumerUsage);
+    mBufferQueue->setSynchronousMode(true);
+    mBufferQueue->setMaxAcquiredBufferCount(bufferCount);
+
+    assert(bufferCount > 0);
+}
+
+RingBufferConsumer::~RingBufferConsumer() {
+}
+
+void RingBufferConsumer::setName(const String8& name) {
+    Mutex::Autolock _l(mMutex);
+    mName = name;
+    mBufferQueue->setConsumerName(name);
+}
+
+sp<PinnedBufferItem> RingBufferConsumer::pinSelectedBuffer(
+        const RingBufferComparator& filter,
+        bool waitForFence) {
+
+    sp<PinnedBufferItem> pinnedBuffer;
+
+    {
+        List<RingBufferItem>::iterator it, end, accIt;
+        BufferInfo acc, cur;
+        BufferInfo* accPtr = NULL;
+
+        Mutex::Autolock _l(mMutex);
+
+        for (it = mBufferItemList.begin(), end = mBufferItemList.end();
+             it != end;
+             ++it) {
+
+            const RingBufferItem& item = *it;
+
+            cur.mCrop = item.mCrop;
+            cur.mTransform = item.mTransform;
+            cur.mScalingMode = item.mScalingMode;
+            cur.mTimestamp = item.mTimestamp;
+            cur.mFrameNumber = item.mFrameNumber;
+            cur.mPinned = item.mPinCount > 0;
+
+            int ret = filter.compare(accPtr, &cur);
+
+            if (ret == 0) {
+                accPtr = NULL;
+            } else if (ret > 0) {
+                acc = cur;
+                accPtr = &acc;
+                accIt = it;
+            } // else acc = acc
+        }
+
+        if (!accPtr) {
+            return NULL;
+        }
+
+        pinnedBuffer = new PinnedBufferItem(this, *accIt);
+        pinBufferLocked(pinnedBuffer->getBufferItem());
+
+    } // end scope of mMutex autolock
+
+    if (pinnedBuffer != 0) {
+        BI_LOGV("Pinned buffer frame %lld, timestamp %lld",
+                pinnedBuffer->getBufferItem().mFrameNumber,
+                pinnedBuffer->getBufferItem().mTimestamp);
+    }
+
+    if (waitForFence) {
+        status_t err = pinnedBuffer->getBufferItem().mFence->waitForever(1000,
+                "RingBufferConsumer::pinSelectedBuffer");
+        if (err != OK) {
+            BI_LOGE("Failed to wait for fence of acquired buffer: %s (%d)",
+                    strerror(-err), err);
+        }
+    }
+
+    return pinnedBuffer;
+}
+
+status_t RingBufferConsumer::clear() {
+
+    status_t err;
+    Mutex::Autolock _l(mMutex);
+
+    BI_LOGV("%s", __FUNCTION__);
+
+    // Avoid annoying log warnings by returning early
+    if (mBufferItemList.size() == 0) {
+        return OK;
+    }
+
+    do {
+        size_t pinnedFrames = 0;
+        err = releaseOldestBufferLocked(&pinnedFrames);
+
+        if (err == NO_BUFFER_AVAILABLE) {
+            assert(pinnedFrames == mBufferItemList.size());
+            break;
+        }
+
+        if (err == NOT_ENOUGH_DATA) {
+            // Fine. Empty buffer item list.
+            break;
+        }
+
+        if (err != OK) {
+            BI_LOGE("Clear failed, could not release buffer");
+            return err;
+        }
+
+    } while(true);
+
+    return OK;
+}
+
+void RingBufferConsumer::pinBufferLocked(const BufferItem& item) {
+    List<RingBufferItem>::iterator it, end;
+
+    for (it = mBufferItemList.begin(), end = mBufferItemList.end();
+         it != end;
+         ++it) {
+
+        RingBufferItem& find = *it;
+        if (item.mGraphicBuffer == find.mGraphicBuffer) {
+            find.mPinCount++;
+            break;
+        }
+    }
+
+    if (it == end) {
+        BI_LOGE("Failed to pin buffer (timestamp %lld, framenumber %lld)",
+                 item.mTimestamp, item.mFrameNumber);
+    }
+}
+
+status_t RingBufferConsumer::releaseOldestBufferLocked(size_t* pinnedFrames) {
+    status_t err = OK;
+
+    List<RingBufferItem>::iterator it, end, accIt;
+
+    it = mBufferItemList.begin();
+    end = mBufferItemList.end();
+    accIt = it;
+
+    if (it == end) {
+        /**
+         * This is fine. We really care about being able to acquire a buffer
+         * successfully after this function completes, not about it releasing
+         * some buffer.
+         */
+        BI_LOGV("%s: No buffers yet acquired, can't release anything",
+              __FUNCTION__);
+        return NOT_ENOUGH_DATA;
+    }
+
+    for (; it != end; ++it) {
+        RingBufferItem& find = *it;
+        if (find.mTimestamp < accIt->mTimestamp && find.mPinCount <= 0) {
+            accIt = it;
+        }
+
+        if (find.mPinCount > 0 && pinnedFrames != NULL) {
+            ++(*pinnedFrames);
+        }
+    }
+
+    if (accIt != end) {
+        RingBufferItem& item = *accIt;
+
+        // In case the object was never pinned, pass the acquire fence
+        // back to the release fence. If the fence was already waited on,
+        // it'll just be a no-op to wait on it again.
+        err = addReleaseFenceLocked(item.mBuf, item.mFence);
+
+        if (err != OK) {
+            BI_LOGE("Failed to add release fence to buffer "
+                    "(timestamp %lld, framenumber %lld)",
+                    item.mTimestamp, item.mFrameNumber);
+            return err;
+        }
+
+        BI_LOGV("Attempting to release buffer timestamp %lld, frame %lld",
+                item.mTimestamp, item.mFrameNumber);
+
+        err = releaseBufferLocked(item.mBuf,
+                                  EGL_NO_DISPLAY,
+                                  EGL_NO_SYNC_KHR);
+        if (err != OK) {
+            BI_LOGE("Failed to release buffer: %s (%d)",
+                    strerror(-err), err);
+            return err;
+        }
+
+        BI_LOGV("Buffer timestamp %lld, frame %lld evicted",
+                item.mTimestamp, item.mFrameNumber);
+
+        size_t currentSize = mBufferItemList.size();
+        mBufferItemList.erase(accIt);
+        assert(mBufferItemList.size() == currentSize - 1);
+    } else {
+        BI_LOGW("All buffers pinned, could not find any to release");
+        return NO_BUFFER_AVAILABLE;
+    }
+
+    return OK;
+}
+
+void RingBufferConsumer::onFrameAvailable() {
+    status_t err;
+
+    {
+        Mutex::Autolock _l(mMutex);
+
+        /**
+         * Release oldest frame
+         */
+        if (mBufferItemList.size() >= (size_t)mBufferCount) {
+            err = releaseOldestBufferLocked(/*pinnedFrames*/NULL);
+            assert(err != NOT_ENOUGH_DATA);
+
+            // TODO: implement the case for NO_BUFFER_AVAILABLE
+            assert(err != NO_BUFFER_AVAILABLE);
+            if (err != OK) {
+                return;
+            }
+            // TODO: in unpinBuffer rerun this routine if we had buffers
+            // we could've locked but didn't because there was no space
+        }
+
+        RingBufferItem& item = *mBufferItemList.insert(mBufferItemList.end(),
+                                                       RingBufferItem());
+
+        /**
+         * Acquire new frame
+         */
+        err = acquireBufferLocked(&item);
+        if (err != OK) {
+            if (err != NO_BUFFER_AVAILABLE) {
+                BI_LOGE("Error acquiring buffer: %s (%d)", strerror(-err), err);
+            }
+
+            mBufferItemList.erase(--mBufferItemList.end());
+            return;
+        }
+
+        BI_LOGV("New buffer acquired (timestamp %lld), "
+                "buffer items %u out of %d",
+                item.mTimestamp,
+                mBufferItemList.size(), mBufferCount);
+
+        item.mGraphicBuffer = mSlots[item.mBuf].mGraphicBuffer;
+    } // end of mMutex lock
+
+    ConsumerBase::onFrameAvailable();
+}
+
+void RingBufferConsumer::unpinBuffer(const BufferItem& item) {
+    Mutex::Autolock _l(mMutex);
+
+    List<RingBufferItem>::iterator it, end, accIt;
+
+    for (it = mBufferItemList.begin(), end = mBufferItemList.end();
+         it != end;
+         ++it) {
+
+        RingBufferItem& find = *it;
+        if (item.mGraphicBuffer == find.mGraphicBuffer) {
+            status_t res = addReleaseFenceLocked(item.mBuf, item.mFence);
+
+            if (res != OK) {
+                BI_LOGE("Failed to add release fence to buffer "
+                        "(timestamp %lld, framenumber %lld)",
+                        item.mTimestamp, item.mFrameNumber);
+                return;
+            }
+
+            find.mPinCount--;
+            break;
+        }
+    }
+
+    if (it == end) {
+        BI_LOGE("Failed to unpin buffer (timestamp %lld, framenumber %lld",
+                 item.mTimestamp, item.mFrameNumber);
+    }
+}
+
+status_t RingBufferConsumer::setDefaultBufferSize(uint32_t w, uint32_t h) {
+    Mutex::Autolock _l(mMutex);
+    return mBufferQueue->setDefaultBufferSize(w, h);
+}
+
+status_t RingBufferConsumer::setDefaultBufferFormat(uint32_t defaultFormat) {
+    Mutex::Autolock _l(mMutex);
+    return mBufferQueue->setDefaultBufferFormat(defaultFormat);
+}
+
+status_t RingBufferConsumer::setConsumerUsage(uint32_t usage) {
+    Mutex::Autolock _l(mMutex);
+    return mBufferQueue->setConsumerUsageBits(usage);
+}
+
+} // namespace android
diff --git a/services/camera/libcameraservice/gui/RingBufferConsumer.h b/services/camera/libcameraservice/gui/RingBufferConsumer.h
new file mode 100644
index 0000000..454fbae
--- /dev/null
+++ b/services/camera/libcameraservice/gui/RingBufferConsumer.h
@@ -0,0 +1,189 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_GUI_RINGBUFFERCONSUMER_H
+#define ANDROID_GUI_RINGBUFFERCONSUMER_H
+
+#include <gui/ConsumerBase.h>
+
+#include <ui/GraphicBuffer.h>
+
+#include <utils/String8.h>
+#include <utils/Vector.h>
+#include <utils/threads.h>
+#include <utils/List.h>
+
+#define ANDROID_GRAPHICS_RINGBUFFERCONSUMER_JNI_ID "mRingBufferConsumer"
+
+namespace android {
+
+/**
+ * The RingBufferConsumer maintains a ring buffer of BufferItem objects,
+ * (which are 'acquired' as long as they are part of the ring buffer, and
+ *  'released' when they leave the ring buffer).
+ *
+ * When a new buffer is produced, the oldest non-pinned buffer item is
+ * immediately dropped from the ring buffer and replaced with the newest
+ * buffer.
+ *
+ * Users can only access a buffer item after pinning it (which also guarantees
+ * that it will not be released back into the BufferQueue for as long as it
+ * stays pinned).
+ *
+ * Note that the 'oldest' buffer is the one with the smallest timestamp.
+ *
+ * Edge cases:
+ *  - If the ring buffer is not full, no drops occur when a buffer is produced.
+ *  - If all the buffers get filled or pinned then there will be no empty
+ *    buffers left, so the producer will block on dequeue.
+ */
+class RingBufferConsumer : public ConsumerBase,
+                           public ConsumerBase::FrameAvailableListener
+{
+  public:
+    typedef ConsumerBase::FrameAvailableListener FrameAvailableListener;
+
+    typedef BufferQueue::BufferItem BufferItem;
+
+    enum { INVALID_BUFFER_SLOT = BufferQueue::INVALID_BUFFER_SLOT };
+    enum { NO_BUFFER_AVAILABLE = BufferQueue::NO_BUFFER_AVAILABLE };
+
+    // Create a new ring buffer consumer. The consumerUsage parameter determines
+    // the consumer usage flags passed to the graphics allocator. The
+    // bufferCount parameter specifies how many buffers can be pinned for user
+    // access at the same time.
+    RingBufferConsumer(uint32_t consumerUsage,
+            int bufferCount = BufferQueue::MIN_UNDEQUEUED_BUFFERS);
+
+    virtual ~RingBufferConsumer();
+
+    // set the name of the RingBufferConsumer that will be used to identify it in
+    // log messages.
+    void setName(const String8& name);
+
+    sp<IGraphicBufferProducer> getProducerInterface() const { return getBufferQueue(); }
+
+    // setDefaultBufferSize is used to set the size of buffers returned by
+    // requestBuffers when a width and height of zero is requested.
+    status_t setDefaultBufferSize(uint32_t w, uint32_t h);
+
+    // setDefaultBufferFormat allows the BufferQueue to create
+    // GraphicBuffers of a defaultFormat if no format is specified
+    // by the producer endpoint.
+    status_t setDefaultBufferFormat(uint32_t defaultFormat);
+
+    // setConsumerUsage allows the BufferQueue consumer usage to be
+    // set at a later time after construction.
+    status_t setConsumerUsage(uint32_t usage);
+
+    // Buffer info, minus the graphics buffer/slot itself.
+    struct BufferInfo {
+        // mCrop is the current crop rectangle for this buffer slot.
+        Rect mCrop;
+
+        // mTransform is the current transform flags for this buffer slot.
+        uint32_t mTransform;
+
+        // mScalingMode is the current scaling mode for this buffer slot.
+        uint32_t mScalingMode;
+
+        // mTimestamp is the current timestamp for this buffer slot. This gets
+        // set by queueBuffer each time this slot is queued.
+        int64_t mTimestamp;
+
+        // mFrameNumber is the number of the queued frame for this slot.
+        uint64_t mFrameNumber;
+
+        // mPinned is whether or not the buffer has been pinned already.
+        bool mPinned;
+    };
+
+    struct RingBufferComparator {
+        // Return < 0 to select i1, > 0 to select i2, 0 for neither.
+        // Either i1 or i2 can be NULL.
+        //
+        // The comparator has to implement a total ordering. Otherwise
+        // a linear scan won't find the most preferred buffer.
+        virtual int compare(const BufferInfo* i1,
+                            const BufferInfo* i2) const = 0;
+
+        virtual ~RingBufferComparator() {}
+    };
+
+    struct PinnedBufferItem : public LightRefBase<PinnedBufferItem> {
+        PinnedBufferItem(wp<RingBufferConsumer> consumer,
+                         const BufferItem& item) :
+                mConsumer(consumer),
+                mBufferItem(item) {
+        }
+
+        ~PinnedBufferItem() {
+            sp<RingBufferConsumer> consumer = mConsumer.promote();
+            if (consumer != NULL) {
+                consumer->unpinBuffer(mBufferItem);
+            }
+        }
+
+        bool isEmpty() {
+            return mBufferItem.mBuf == BufferQueue::INVALID_BUFFER_SLOT;
+        }
+
+        BufferItem& getBufferItem() { return mBufferItem; }
+        const BufferItem& getBufferItem() const { return mBufferItem; }
+
+      private:
+        wp<RingBufferConsumer> mConsumer;
+        BufferItem             mBufferItem;
+    };
+
+    // Find a buffer using the filter, then pin it before returning it.
+    //
+    // The filter will be invoked on each buffer item in the ring buffer,
+    // passing the item that was selected from each previous iteration,
+    // as well as the current iteration's item.
+    //
+    // Pinning will ensure that the buffer will not be dropped when a new
+    // frame is available.
+    sp<PinnedBufferItem> pinSelectedBuffer(const RingBufferComparator& filter,
+                                           bool waitForFence = true);
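+
+    // Illustrative sketch only (not part of this change): a hypothetical
+    // comparator that always pins the newest available frame. Returning a
+    // negative value keeps i1, a positive value selects i2:
+    //
+    //   struct NewestFrameFinder : public RingBufferConsumer::RingBufferComparator {
+    //       typedef RingBufferConsumer::BufferInfo BufferInfo;
+    //       virtual int compare(const BufferInfo* i1,
+    //                           const BufferInfo* i2) const {
+    //           if (i1 == NULL) return 1;   // only i2 is valid
+    //           if (i2 == NULL) return -1;  // only i1 is valid
+    //           return (i1->mTimestamp >= i2->mTimestamp) ? -1 : 1;
+    //       }
+    //   };
+    //
+    //   sp<RingBufferConsumer::PinnedBufferItem> newest =
+    //           ringBuffer->pinSelectedBuffer(NewestFrameFinder(),
+    //                                         /*waitForFence*/true);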
+
+    // Release all the non-pinned buffers in the ring buffer
+    status_t clear();
+
+  private:
+
+    // Override ConsumerBase::onFrameAvailable
+    virtual void onFrameAvailable();
+
+    void pinBufferLocked(const BufferItem& item);
+    void unpinBuffer(const BufferItem& item);
+
+    // Releases oldest buffer. Returns NO_BUFFER_AVAILABLE
+    // if all the buffers were pinned.
+    // Returns NOT_ENOUGH_DATA if list was empty.
+    status_t releaseOldestBufferLocked(size_t* pinnedFrames);
+
+    struct RingBufferItem : public BufferItem {
+        RingBufferItem() : BufferItem(), mPinCount(0) {}
+        int mPinCount;
+    };
+
+    // List of acquired buffers in our ring buffer
+    List<RingBufferItem>       mBufferItemList;
+    const int                  mBufferCount;
+};
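+
+/**
+ * Illustrative sketch only (not part of this change): typical wiring of a
+ * RingBufferConsumer behind a Surface, as the ZSL stream does. The usage
+ * flag, depth and dimensions here are placeholders:
+ *
+ *   sp<RingBufferConsumer> ringBuffer =
+ *           new RingBufferConsumer(GRALLOC_USAGE_HW_CAMERA_ZSL, 4);
+ *   ringBuffer->setName(String8("Camera3-ZslRingBuffer"));
+ *   ringBuffer->setDefaultBufferSize(width, height);
+ *
+ *   sp<Surface> surface = new Surface(ringBuffer->getProducerInterface());
+ *   // The producer queues frames into 'surface'; the newest frames are
+ *   // retained, can later be pinned with pinSelectedBuffer(), and all
+ *   // non-pinned frames can be dropped with clear() at teardown.
+ */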
+
+} // namespace android
+
+#endif // ANDROID_GUI_RINGBUFFERCONSUMER_H