QCamera3: Support YUV outputs

Configure all YUV output streams for the HDR+ client.

Add function to register and unregister YUV buffers in order
to use the buffer def to submit HDR+ requests.

Handle multiple outputs in an HDR+ result.

With this CL, the HAL will produce 1 JPEG or 1 YUV output to
the framework.

Test: TestingCamera2 and CTS in HDR+ mode
Bug: 36693254
Change-Id: If68f41cfda96ebc21e3015e0817ae0432690dac8
diff --git a/msm8998/QCamera2/HAL3/QCamera3HWI.cpp b/msm8998/QCamera2/HAL3/QCamera3HWI.cpp
index 067fc47..8120397 100644
--- a/msm8998/QCamera2/HAL3/QCamera3HWI.cpp
+++ b/msm8998/QCamera2/HAL3/QCamera3HWI.cpp
@@ -152,6 +152,7 @@
 std::condition_variable gHdrPlusClientOpenCond; // Used to synchronize HDR+ client opening.
 bool gEaselProfilingEnabled = false; // If Easel profiling is enabled.
 bool gExposeEnableZslKey = false; // If HAL makes android.control.enableZsl available.
+bool gEnableMultipleHdrplusOutputs = false; // Whether to enable multiple outputs from Easel HDR+.
 
 // If Easel is in bypass only mode. If true, Easel HDR+ won't be enabled.
 bool gEaselBypassOnly;
@@ -2236,6 +2237,7 @@
             stream_info->stream = newStream;
             stream_info->status = VALID;
             stream_info->channel = NULL;
+            stream_info->id = i;
             mStreamInfo.push_back(stream_info);
         }
         /* Covers Opaque ZSL and API1 F/W ZSL */
@@ -4781,8 +4783,8 @@
 }
 
 status_t QCamera3HardwareInterface::fillPbStreamConfig(
-        pbcamera::StreamConfiguration *config, uint32_t pbStreamId, int pbStreamFormat,
-        QCamera3Channel *channel, uint32_t streamIndex) {
+        pbcamera::StreamConfiguration *config, uint32_t pbStreamId, QCamera3Channel *channel,
+        uint32_t streamIndex) {
     if (config == nullptr) {
         LOGE("%s: config is null", __FUNCTION__);
         return BAD_VALUE;
@@ -4809,14 +4811,30 @@
     config->image.width = streamInfo->dim.width;
     config->image.height = streamInfo->dim.height;
     config->image.padding = 0;
-    config->image.format = pbStreamFormat;
+
+    int bytesPerPixel = 0;
+
+    switch (streamInfo->fmt) {
+        case CAM_FORMAT_YUV_420_NV21:
+            config->image.format = HAL_PIXEL_FORMAT_YCrCb_420_SP;
+            bytesPerPixel = 1;
+            break;
+        case CAM_FORMAT_YUV_420_NV12:
+        case CAM_FORMAT_YUV_420_NV12_VENUS:
+            config->image.format = HAL_PIXEL_FORMAT_YCbCr_420_SP;
+            bytesPerPixel = 1;
+            break;
+        default:
+            ALOGE("%s: Stream format %d not supported.", __FUNCTION__, streamInfo->fmt);
+            return BAD_VALUE;
+    }
 
     uint32_t totalPlaneSize = 0;
 
     // Fill plane information.
     for (uint32_t i = 0; i < streamInfo->buf_planes.plane_info.num_planes; i++) {
         pbcamera::PlaneConfiguration plane;
-        plane.stride = streamInfo->buf_planes.plane_info.mp[i].stride_in_bytes;
+        plane.stride = streamInfo->buf_planes.plane_info.mp[i].stride * bytesPerPixel;
         plane.scanline = streamInfo->buf_planes.plane_info.mp[i].scanline;
         config->image.planes.push_back(plane);
 
@@ -10974,7 +10992,8 @@
         // If Easel is present, power on Easel and suspend it immediately.
         status_t res = gEaselManagerClient->open();
         if (res != OK) {
-            ALOGE("%s: Opening Easel manager client failed: %s (%d)", __FUNCTION__, strerror(-res), res);
+            ALOGE("%s: Opening Easel manager client failed: %s (%d)", __FUNCTION__, strerror(-res),
+                    res);
             return res;
         }
 
@@ -10987,6 +11006,8 @@
 
         gEaselBypassOnly = !property_get_bool("persist.camera.hdrplus.enable", true);
         gEaselProfilingEnabled = property_get_bool("persist.camera.hdrplus.profiling", false);
+        gEnableMultipleHdrplusOutputs =
+                property_get_bool("persist.camera.hdrplus.multiple_outputs", false);
 
         // Expose enableZsl key only when HDR+ mode is enabled.
         gExposeEnableZslKey = !gEaselBypassOnly;
@@ -14852,22 +14873,67 @@
         return false;
     }
 
+
     // TODO (b/36693254, b/36690506): support other outputs.
-    if (request.num_output_buffers != 1 ||
-            request.output_buffers[0].stream->format != HAL_PIXEL_FORMAT_BLOB) {
-        ALOGV("%s: Not an HDR+ request: Only Jpeg output is supported.", __FUNCTION__);
-        for (uint32_t i = 0; i < request.num_output_buffers; i++) {
-            ALOGV("%s: output_buffers[%u]: %dx%d format %d", __FUNCTION__, i,
-                    request.output_buffers[0].stream->width,
-                    request.output_buffers[0].stream->height,
-                    request.output_buffers[0].stream->format);
-        }
+    if (!gEnableMultipleHdrplusOutputs && request.num_output_buffers != 1) {
+        ALOGV("%s: Only support 1 output: %d", __FUNCTION__, request.num_output_buffers);
         return false;
     }
 
+    switch (request.output_buffers[0].stream->format) {
+        case HAL_PIXEL_FORMAT_BLOB:
+            break;
+        case HAL_PIXEL_FORMAT_YCbCr_420_888:
+        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
+            // TODO (b/36693254): Only support full size.
+            if (!gEnableMultipleHdrplusOutputs) {
+                if (static_cast<int>(request.output_buffers[0].stream->width) !=
+                        gCamCapability[mCameraId]->picture_sizes_tbl[0].width ||
+                    static_cast<int>(request.output_buffers[0].stream->height) !=
+                        gCamCapability[mCameraId]->picture_sizes_tbl[0].height) {
+                    ALOGV("%s: Only full size is supported.", __FUNCTION__);
+                    return false;
+                }
+            }
+            break;
+        default:
+            ALOGV("%s: Not an HDR+ request: Only Jpeg and YUV output is supported.", __FUNCTION__);
+            for (uint32_t i = 0; i < request.num_output_buffers; i++) {
+                ALOGV("%s: output_buffers[%u]: %dx%d format %d", __FUNCTION__, i,
+                        request.output_buffers[0].stream->width,
+                        request.output_buffers[0].stream->height,
+                        request.output_buffers[0].stream->format);
+            }
+            return false;
+    }
+
     return true;
 }
 
+void QCamera3HardwareInterface::abortPendingHdrplusRequest(HdrPlusPendingRequest *hdrPlusRequest) {
+    if (hdrPlusRequest == nullptr) return;
+
+    for (auto & outputBufferIter : hdrPlusRequest->outputBuffers) {
+        // Find the stream for this buffer.
+        for (auto streamInfo : mStreamInfo) {
+            if (streamInfo->id == outputBufferIter.first) {
+                if (streamInfo->channel == mPictureChannel) {
+                    // For picture channel, this buffer is internally allocated so return this
+                    // buffer to picture channel.
+                    mPictureChannel->returnYuvBuffer(outputBufferIter.second.get());
+                } else {
+                    // Unregister this buffer for other channels.
+                    streamInfo->channel->unregisterBuffer(outputBufferIter.second.get());
+                }
+                break;
+            }
+        }
+    }
+
+    hdrPlusRequest->outputBuffers.clear();
+    hdrPlusRequest->frameworkOutputBuffers.clear();
+}
+
 bool QCamera3HardwareInterface::trySubmittingHdrPlusRequestLocked(
         HdrPlusPendingRequest *hdrPlusRequest, const camera3_capture_request_t &request,
         const CameraMetadata &metadata)
@@ -14875,37 +14941,85 @@
     if (hdrPlusRequest == nullptr) return false;
     if (!isRequestHdrPlusCompatible(request, metadata)) return false;
 
-    // Get a YUV buffer from pic channel.
-    QCamera3PicChannel *picChannel = (QCamera3PicChannel*)request.output_buffers[0].stream->priv;
-    auto yuvBuffer = std::make_shared<mm_camera_buf_def_t>();
-    status_t res = picChannel->getYuvBufferForRequest(yuvBuffer.get(), request.frame_number);
-    if (res != OK) {
-        ALOGE("%s: Getting an available YUV buffer from pic channel failed: %s (%d)",
-                __FUNCTION__, strerror(-res), res);
-        return false;
-    }
-
-    pbcamera::StreamBuffer buffer;
-    buffer.streamId = kPbYuvOutputStreamId;
-    buffer.dmaBufFd = yuvBuffer->fd;
-    buffer.data = yuvBuffer->fd == -1 ? yuvBuffer->buffer : nullptr;
-    buffer.dataSize = yuvBuffer->frame_len;
-
+    status_t res = OK;
     pbcamera::CaptureRequest pbRequest;
     pbRequest.id = request.frame_number;
-    pbRequest.outputBuffers.push_back(buffer);
+    // Iterate through all requested output buffers and add them to an HDR+ request.
+    for (uint32_t i = 0; i < request.num_output_buffers; i++) {
+        // Find the index of the stream in mStreamInfo.
+        uint32_t pbStreamId = 0;
+        bool found = false;
+        for (auto streamInfo : mStreamInfo) {
+            if (streamInfo->stream == request.output_buffers[i].stream) {
+                pbStreamId = streamInfo->id;
+                found = true;
+                break;
+            }
+        }
+
+        if (!found) {
+            ALOGE("%s: requested stream was not configured.", __FUNCTION__);
+            abortPendingHdrplusRequest(hdrPlusRequest);
+            return false;
+        }
+        auto outBuffer = std::make_shared<mm_camera_buf_def_t>();
+        switch (request.output_buffers[i].stream->format) {
+            case HAL_PIXEL_FORMAT_BLOB:
+            {
+                // For jpeg output, get a YUV buffer from pic channel.
+                QCamera3PicChannel *picChannel =
+                        (QCamera3PicChannel*)request.output_buffers[i].stream->priv;
+                res = picChannel->getYuvBufferForRequest(outBuffer.get(), request.frame_number);
+                if (res != OK) {
+                    ALOGE("%s: Getting an available YUV buffer from pic channel failed: %s (%d)",
+                            __FUNCTION__, strerror(-res), res);
+                    abortPendingHdrplusRequest(hdrPlusRequest);
+                    return false;
+                }
+                break;
+            }
+            case HAL_PIXEL_FORMAT_YCbCr_420_888:
+            case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
+            {
+                // For YUV output, register the buffer and get the buffer def from the channel.
+                QCamera3ProcessingChannel *channel =
+                        (QCamera3ProcessingChannel*)request.output_buffers[i].stream->priv;
+                res = channel->registerBufferAndGetBufDef(request.output_buffers[i].buffer,
+                        outBuffer.get());
+                if (res != OK) {
+                    ALOGE("%s: Getting the buffer def failed: %s (%d)", __FUNCTION__,
+                            strerror(-res), res);
+                    abortPendingHdrplusRequest(hdrPlusRequest);
+                    return false;
+                }
+                break;
+            }
+            default:
+                abortPendingHdrplusRequest(hdrPlusRequest);
+                return false;
+        }
+
+        pbcamera::StreamBuffer buffer;
+        buffer.streamId = pbStreamId;
+        buffer.dmaBufFd = outBuffer->fd;
+        buffer.data = outBuffer->fd == -1 ? outBuffer->buffer : nullptr;
+        buffer.dataSize = outBuffer->frame_len;
+
+        pbRequest.outputBuffers.push_back(buffer);
+
+        hdrPlusRequest->outputBuffers.emplace(pbStreamId, outBuffer);
+        hdrPlusRequest->frameworkOutputBuffers.emplace(pbStreamId, request.output_buffers[i]);
+    }
 
     // Submit an HDR+ capture request to HDR+ service.
     res = gHdrPlusClient->submitCaptureRequest(&pbRequest, metadata);
     if (res != OK) {
         ALOGE("%s: %d: Submitting a capture request failed: %s (%d)", __FUNCTION__, __LINE__,
                 strerror(-res), res);
+        abortPendingHdrplusRequest(hdrPlusRequest);
         return false;
     }
 
-    hdrPlusRequest->yuvBuffer = yuvBuffer;
-    hdrPlusRequest->frameworkOutputBuffers.push_back(request.output_buffers[0]);
-
     return true;
 }
 
@@ -15000,13 +15114,20 @@
 
 bool QCamera3HardwareInterface::isSessionHdrPlusModeCompatible()
 {
-    // Check if mPictureChannel is valid.
-    // TODO: Support YUV (b/36693254) and RAW (b/36690506)
-    if (mPictureChannel == nullptr) {
-        return false;
+    // Check that at least one YUV or one JPEG output is configured.
+    // TODO: Support RAW (b/36690506)
+    for (auto streamInfo : mStreamInfo) {
+        if (streamInfo != nullptr && streamInfo->stream != nullptr) {
+            if (streamInfo->stream->stream_type == CAMERA3_STREAM_OUTPUT &&
+                    (streamInfo->stream->format == HAL_PIXEL_FORMAT_BLOB ||
+                     streamInfo->stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888 ||
+                     streamInfo->stream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED)) {
+                return true;
+            }
+        }
     }
 
-    return true;
+    return false;
 }
 
 status_t QCamera3HardwareInterface::configureHdrPlusStreamsLocked()
@@ -15015,59 +15136,50 @@
     std::vector<pbcamera::StreamConfiguration> outputStreamConfigs;
     status_t res = OK;
 
-    // Configure HDR+ client streams.
-    // Get input config.
-    if (mHdrPlusRawSrcChannel) {
-        // HDR+ input buffers will be provided by HAL.
-        res = fillPbStreamConfig(&inputConfig.streamConfig, kPbRaw10InputStreamId,
-                HAL_PIXEL_FORMAT_RAW10, mHdrPlusRawSrcChannel, /*stream index*/0);
-        if (res != OK) {
-            LOGE("%s: Failed to get fill stream config for HDR+ raw src stream: %s (%d)",
-                __FUNCTION__, strerror(-res), res);
-            return res;
-        }
-
-        inputConfig.isSensorInput = false;
-    } else {
-        // Sensor MIPI will send data to Easel.
-        inputConfig.isSensorInput = true;
-        inputConfig.sensorMode.cameraId = mCameraId;
-        inputConfig.sensorMode.pixelArrayWidth = mSensorModeInfo.pixel_array_size.width;
-        inputConfig.sensorMode.pixelArrayHeight = mSensorModeInfo.pixel_array_size.height;
-        inputConfig.sensorMode.activeArrayWidth = mSensorModeInfo.active_array_size.width;
-        inputConfig.sensorMode.activeArrayHeight = mSensorModeInfo.active_array_size.height;
-        inputConfig.sensorMode.outputPixelClkHz = mSensorModeInfo.op_pixel_clk;
-        inputConfig.sensorMode.timestampOffsetNs = mSensorModeInfo.timestamp_offset;
-        if (mSensorModeInfo.num_raw_bits != 10) {
-            ALOGE("%s: Only RAW10 is supported but this sensor mode has %d raw bits.", __FUNCTION__,
-                    mSensorModeInfo.num_raw_bits);
-            return BAD_VALUE;
-        }
-
-        inputConfig.sensorMode.format = HAL_PIXEL_FORMAT_RAW10;
+    // Sensor MIPI will send data to Easel.
+    inputConfig.isSensorInput = true;
+    inputConfig.sensorMode.cameraId = mCameraId;
+    inputConfig.sensorMode.pixelArrayWidth = mSensorModeInfo.pixel_array_size.width;
+    inputConfig.sensorMode.pixelArrayHeight = mSensorModeInfo.pixel_array_size.height;
+    inputConfig.sensorMode.activeArrayWidth = mSensorModeInfo.active_array_size.width;
+    inputConfig.sensorMode.activeArrayHeight = mSensorModeInfo.active_array_size.height;
+    inputConfig.sensorMode.outputPixelClkHz = mSensorModeInfo.op_pixel_clk;
+    inputConfig.sensorMode.timestampOffsetNs = mSensorModeInfo.timestamp_offset;
+    if (mSensorModeInfo.num_raw_bits != 10) {
+        ALOGE("%s: Only RAW10 is supported but this sensor mode has %d raw bits.", __FUNCTION__,
+                mSensorModeInfo.num_raw_bits);
+        return BAD_VALUE;
     }
 
-    // Get output configurations.
-    // Easel may need to output RAW16 buffers if mRawChannel was created.
-    // TODO: handle RAW16 outputs.
+    inputConfig.sensorMode.format = HAL_PIXEL_FORMAT_RAW10;
 
-    // Easel may need to output YUV output buffers if mPictureChannel was created.
-    pbcamera::StreamConfiguration yuvOutputConfig;
-    if (mPictureChannel != nullptr) {
-        res = fillPbStreamConfig(&yuvOutputConfig, kPbYuvOutputStreamId,
-                HAL_PIXEL_FORMAT_YCrCb_420_SP, mPictureChannel, /*stream index*/0);
-        if (res != OK) {
-            LOGE("%s: Failed to get fill stream config for YUV stream: %s (%d)",
-                __FUNCTION__, strerror(-res), res);
+    // Iterate through configured output streams in HAL and configure those streams in HDR+
+    // service.
+    for (auto streamInfo : mStreamInfo) {
+        pbcamera::StreamConfiguration outputConfig;
+        if (streamInfo->stream->stream_type == CAMERA3_STREAM_OUTPUT) {
+            switch (streamInfo->stream->format) {
+                case HAL_PIXEL_FORMAT_BLOB:
+                case HAL_PIXEL_FORMAT_YCbCr_420_888:
+                case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
+                    res = fillPbStreamConfig(&outputConfig, streamInfo->id,
+                            streamInfo->channel, /*stream index*/0);
+                    if (res != OK) {
+                        LOGE("%s: Failed to get fill stream config for YUV stream: %s (%d)",
+                            __FUNCTION__, strerror(-res), res);
 
-            return res;
+                        return res;
+                    }
+
+                    outputStreamConfigs.push_back(outputConfig);
+                    break;
+                default:
+                    // TODO: handle RAW16 outputs if mRawChannel was created. (b/36690506)
+                    break;
+            }
         }
-
-        outputStreamConfigs.push_back(yuvOutputConfig);
     }
 
-    // TODO: consider other channels for YUV output buffers.
-
     res = gHdrPlusClient->configureStreams(inputConfig, outputStreamConfigs);
     if (res != OK) {
         LOGE("%d: Failed to configure streams with HDR+ client: %s (%d)", __FUNCTION__,
@@ -15264,101 +15376,128 @@
 void QCamera3HardwareInterface::onCaptureResult(pbcamera::CaptureResult *result,
         const camera_metadata_t &resultMetadata)
 {
-    if (result != nullptr) {
-        if (result->outputBuffers.size() != 1) {
-            ALOGE("%s: Number of output buffers (%u) is not supported.", __FUNCTION__,
-                result->outputBuffers.size());
-            return;
+    if (result == nullptr) {
+        ALOGE("%s: result is nullptr.", __FUNCTION__);
+        return;
+    }
+
+
+    // TODO (b/34854987): initiate this from HDR+ service.
+    onNextCaptureReady(result->requestId);
+
+    // Find the pending HDR+ request.
+    HdrPlusPendingRequest pendingRequest;
+    {
+        Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
+        auto req = mHdrPlusPendingRequests.find(result->requestId);
+        pendingRequest = req->second;
+    }
+
+    // Update the result metadata with the settings of the HDR+ still capture request because
+    // the result metadata belongs to a ZSL buffer.
+    CameraMetadata metadata;
+    metadata = &resultMetadata;
+    updateHdrPlusResultMetadata(metadata, pendingRequest.settings);
+    camera_metadata_t* updatedResultMetadata = metadata.release();
+
+    uint32_t halSnapshotStreamId = 0;
+    if (mPictureChannel != nullptr) {
+        halSnapshotStreamId = mPictureChannel->getStreamID(mPictureChannel->getStreamTypeMask());
+    }
+
+    auto halMetadata = std::make_shared<metadata_buffer_t>();
+    clear_metadata_buffer(halMetadata.get());
+
+    // Convert updated result metadata to HAL metadata.
+    status_t res = translateFwkMetadataToHalMetadata(updatedResultMetadata, halMetadata.get(),
+            halSnapshotStreamId, /*minFrameDuration*/0);
+    if (res != 0) {
+        ALOGE("%s: Translating metadata failed: %s (%d)", __FUNCTION__, strerror(-res), res);
+    }
+
+    for (auto &outputBuffer : result->outputBuffers) {
+        uint32_t streamId = outputBuffer.streamId;
+
+        // Find the framework output buffer in the pending request.
+        auto frameworkOutputBufferIter = pendingRequest.frameworkOutputBuffers.find(streamId);
+        if (frameworkOutputBufferIter == pendingRequest.frameworkOutputBuffers.end()) {
+            ALOGE("%s: Couldn't find framework output buffers for stream id %u", __FUNCTION__,
+                    streamId);
+            continue;
         }
 
-        if (result->outputBuffers[0].streamId != kPbYuvOutputStreamId) {
-            ALOGE("%s: Only YUV output stream is supported. (stream id %d).", __FUNCTION__,
-                result->outputBuffers[0].streamId);
-            return;
+        camera3_stream_buffer_t *frameworkOutputBuffer = &frameworkOutputBufferIter->second;
+
+        // Find the channel for the output buffer.
+        QCamera3ProcessingChannel *channel =
+                (QCamera3ProcessingChannel*)frameworkOutputBuffer->stream->priv;
+
+        // Find the output buffer def.
+        auto outputBufferIter = pendingRequest.outputBuffers.find(streamId);
+        if (outputBufferIter == pendingRequest.outputBuffers.end()) {
+            ALOGE("%s: Cannot find output buffer", __FUNCTION__);
+            continue;
         }
 
-        // TODO (b/34854987): initiate this from HDR+ service.
-        onNextCaptureReady(result->requestId);
+        std::shared_ptr<mm_camera_buf_def_t> outputBufferDef = outputBufferIter->second;
 
-        // Find the pending HDR+ request.
-        HdrPlusPendingRequest pendingRequest;
-        {
-            Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
-            auto req = mHdrPlusPendingRequests.find(result->requestId);
-            pendingRequest = req->second;
-        }
+        // Check whether to dump the buffer.
+        if (frameworkOutputBuffer->stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888 ||
+                frameworkOutputBuffer->stream->format == HAL_PIXEL_FORMAT_BLOB) {
+            // If the stream format is YUV or jpeg, check if dumping HDR+ YUV output is enabled.
+            char prop[PROPERTY_VALUE_MAX];
+            property_get("persist.camera.hdrplus.dump_yuv", prop, "0");
+            bool dumpYuvOutput = atoi(prop);
 
-        // Update the result metadata with the settings of the HDR+ still capture request because
-        // the result metadata belongs to a ZSL buffer.
-        CameraMetadata metadata;
-        metadata = &resultMetadata;
-        updateHdrPlusResultMetadata(metadata, pendingRequest.settings);
-        camera_metadata_t* updatedResultMetadata = metadata.release();
+            if (dumpYuvOutput) {
+                // Dump yuv buffer to a ppm file.
+                pbcamera::StreamConfiguration outputConfig;
+                status_t rc = fillPbStreamConfig(&outputConfig, streamId,
+                        channel, /*stream index*/0);
+                if (rc == OK) {
+                    char buf[FILENAME_MAX] = {};
+                    snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"s_%d_%d_%dx%d.ppm",
+                            result->requestId, streamId,
+                            outputConfig.image.width, outputConfig.image.height);
 
-        QCamera3PicChannel *picChannel =
-            (QCamera3PicChannel*)pendingRequest.frameworkOutputBuffers[0].stream->priv;
-
-        // Check if dumping HDR+ YUV output is enabled.
-        char prop[PROPERTY_VALUE_MAX];
-        property_get("persist.camera.hdrplus.dump_yuv", prop, "0");
-        bool dumpYuvOutput = atoi(prop);
-
-        if (dumpYuvOutput) {
-            // Dump yuv buffer to a ppm file.
-            pbcamera::StreamConfiguration outputConfig;
-            status_t rc = fillPbStreamConfig(&outputConfig, kPbYuvOutputStreamId,
-                    HAL_PIXEL_FORMAT_YCrCb_420_SP, picChannel, /*stream index*/0);
-            if (rc == OK) {
-                char buf[FILENAME_MAX] = {};
-                snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"s_%d_%d_%dx%d.ppm",
-                        result->requestId, result->outputBuffers[0].streamId,
-                        outputConfig.image.width, outputConfig.image.height);
-
-                hdrplus_client_utils::writePpm(buf, outputConfig, result->outputBuffers[0]);
-            } else {
-                LOGW("%s: Couldn't dump YUV buffer because getting stream config failed: %s (%d).",
-                        __FUNCTION__, strerror(-rc), rc);
+                    hdrplus_client_utils::writePpm(buf, outputConfig, outputBuffer);
+                } else {
+                    LOGW("%s: Couldn't dump YUV buffer because getting stream config failed: "
+                            "%s (%d).", __FUNCTION__, strerror(-rc), rc);
+                }
             }
         }
 
-        uint32_t halStreamId = picChannel->getStreamID(picChannel->getStreamTypeMask());
-        auto halMetadata = std::make_shared<metadata_buffer_t>();
-        clear_metadata_buffer(halMetadata.get());
-
-        // Convert updated result metadata to HAL metadata and return the yuv buffer for Jpeg
-        // encoding.
-        status_t res = translateFwkMetadataToHalMetadata(updatedResultMetadata, halMetadata.get(),
-                halStreamId, /*minFrameDuration*/0);
-        if (res == OK) {
+        if (channel == mPictureChannel) {
             // Return the buffer to pic channel for encoding.
-            picChannel->returnYuvBufferAndEncode(pendingRequest.yuvBuffer.get(),
-                    pendingRequest.frameworkOutputBuffers[0].buffer, result->requestId,
+            mPictureChannel->returnYuvBufferAndEncode(outputBufferDef.get(),
+                    frameworkOutputBuffer->buffer, result->requestId,
                     halMetadata);
         } else {
-            // Return the buffer without encoding.
-            // TODO: This should not happen but we may want to report an error buffer to camera
-            // service.
-            picChannel->returnYuvBuffer(pendingRequest.yuvBuffer.get());
-            ALOGE("%s: Translate framework metadata to HAL metadata failed: %s (%d).", __FUNCTION__,
-                    strerror(-res), res);
-        }
-
-        // Send HDR+ metadata to framework.
-        {
+            // Return the buffer to camera framework.
             pthread_mutex_lock(&mMutex);
-
-            // updatedResultMetadata will be freed in handlePendingResultMetadataWithLock.
-            handlePendingResultMetadataWithLock(result->requestId, updatedResultMetadata);
+            handleBufferWithLock(frameworkOutputBuffer, result->requestId);
             pthread_mutex_unlock(&mMutex);
-        }
 
-        // Remove the HDR+ pending request.
-        {
-            Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
-            auto req = mHdrPlusPendingRequests.find(result->requestId);
-            mHdrPlusPendingRequests.erase(req);
+            channel->unregisterBuffer(outputBufferDef.get());
         }
     }
+
+    // Send HDR+ metadata to framework.
+    {
+        pthread_mutex_lock(&mMutex);
+
+        // updatedResultMetadata will be freed in handlePendingResultMetadataWithLock.
+        handlePendingResultMetadataWithLock(result->requestId, updatedResultMetadata);
+        pthread_mutex_unlock(&mMutex);
+    }
+
+    // Remove the HDR+ pending request.
+    {
+        Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
+        auto req = mHdrPlusPendingRequests.find(result->requestId);
+        mHdrPlusPendingRequests.erase(req);
+    }
 }
 
 void QCamera3HardwareInterface::onFailedCaptureResult(pbcamera::CaptureResult *failedResult)
@@ -15370,17 +15509,58 @@
 
     ALOGE("%s: Got a failed HDR+ result for request %d", __FUNCTION__, failedResult->requestId);
 
-    // Remove the pending HDR+ request.
+    // Find the pending HDR+ request.
+    HdrPlusPendingRequest pendingRequest;
     {
         Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
-        auto pendingRequest = mHdrPlusPendingRequests.find(failedResult->requestId);
+        auto req = mHdrPlusPendingRequests.find(failedResult->requestId);
+        if (req == mHdrPlusPendingRequests.end()) {
+            ALOGE("%s: Couldn't find pending request %d", __FUNCTION__, failedResult->requestId);
+            return;
+        }
+        pendingRequest = req->second;
+    }
 
-        // Return the buffer to pic channel.
-        QCamera3PicChannel *picChannel =
-                (QCamera3PicChannel*)pendingRequest->second.frameworkOutputBuffers[0].stream->priv;
-        picChannel->returnYuvBuffer(pendingRequest->second.yuvBuffer.get());
+    for (auto &outputBuffer : failedResult->outputBuffers) {
+        uint32_t streamId = outputBuffer.streamId;
 
-        mHdrPlusPendingRequests.erase(pendingRequest);
+        // Find the framework output buffer for this stream in the pending
+        // request.
+        auto frameworkOutputBufferIter = pendingRequest.frameworkOutputBuffers.find(streamId);
+        if (frameworkOutputBufferIter == pendingRequest.frameworkOutputBuffers.end()) {
+            ALOGE("%s: Couldn't find framework output buffers for stream id %u", __FUNCTION__,
+                    streamId);
+            continue;
+        }
+
+        camera3_stream_buffer_t *frameworkOutputBuffer = &frameworkOutputBufferIter->second;
+
+        // Find the channel for the output buffer.
+        QCamera3ProcessingChannel *channel =
+                (QCamera3ProcessingChannel*)frameworkOutputBuffer->stream->priv;
+
+        // Find the output buffer def.
+        auto outputBufferIter = pendingRequest.outputBuffers.find(streamId);
+        if (outputBufferIter == pendingRequest.outputBuffers.end()) {
+            ALOGE("%s: Cannot find output buffer", __FUNCTION__);
+            continue;
+        }
+
+        std::shared_ptr<mm_camera_buf_def_t> outputBufferDef = outputBufferIter->second;
+
+        if (channel == mPictureChannel) {
+            // Return the buffer to pic channel.
+            mPictureChannel->returnYuvBuffer(outputBufferDef.get());
+        } else {
+            channel->unregisterBuffer(outputBufferDef.get());
+        }
+    }
+
+    // Remove the HDR+ pending request.
+    {
+        Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
+        auto req = mHdrPlusPendingRequests.find(failedResult->requestId);
+        mHdrPlusPendingRequests.erase(req);
     }
 
     pthread_mutex_lock(&mMutex);