Merge "QCamera2: HAL3: Add scene distance metadata" into oc-mr1-dev
diff --git a/msm8998/QCamera2/Android.mk b/msm8998/QCamera2/Android.mk
index 8bfa474..e8a6811 100755
--- a/msm8998/QCamera2/Android.mk
+++ b/msm8998/QCamera2/Android.mk
@@ -118,9 +118,12 @@
LOCAL_CFLAGS += -DVENUS_PRESENT
endif
+# Disable UBWC for Easel HDR+.
+ifeq ($(TARGET_USES_EASEL), false)
ifneq (,$(filter msm8996 msmcobalt sdm660 msm8998,$(TARGET_BOARD_PLATFORM)))
LOCAL_CFLAGS += -DUBWC_PRESENT
endif
+endif
ifneq (,$(filter msm8996,$(TARGET_BOARD_PLATFORM)))
LOCAL_CFLAGS += -DTARGET_MSM8996
diff --git a/msm8998/QCamera2/HAL3/QCamera3Channel.cpp b/msm8998/QCamera2/HAL3/QCamera3Channel.cpp
index 2b21ab4..fb04c9a 100644
--- a/msm8998/QCamera2/HAL3/QCamera3Channel.cpp
+++ b/msm8998/QCamera2/HAL3/QCamera3Channel.cpp
@@ -1211,6 +1211,58 @@
return rc;
}
+int32_t QCamera3ProcessingChannel::registerBufferAndGetBufDef(buffer_handle_t *buffer,
+ mm_camera_buf_def_t *frame)
+{
+ if (buffer == nullptr || frame == nullptr) {
+ ALOGE("%s: buffer and frame cannot be nullptr.", __FUNCTION__);
+ return BAD_VALUE;
+ }
+
+ status_t rc;
+
+ // Get the buffer index.
+ int index = mMemory.getMatchBufIndex((void*)buffer);
+ if(index < 0) {
+ // Register the buffer if it was not registered.
+ rc = registerBuffer(buffer, mIsType);
+ if (rc != OK) {
+            ALOGE("%s: Registering buffer failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
+ return rc;
+ }
+
+ index = mMemory.getMatchBufIndex((void*)buffer);
+ if (index < 0) {
+ ALOGE("%s: Could not find object among registered buffers", __FUNCTION__);
+ return DEAD_OBJECT;
+ }
+ }
+
+ cam_frame_len_offset_t offset = {};
+ mStreams[0]->getFrameOffset(offset);
+
+ // Get the buffer def.
+ rc = mMemory.getBufDef(offset, *frame, index, mMapStreamBuffers);
+ if (rc != 0) {
+ ALOGE("%s: Getting a frame failed: %s (%d).", __FUNCTION__, strerror(-rc), rc);
+ return rc;
+ }
+
+ // Set the frame's stream ID because it's not set in getBufDef.
+ frame->stream_id = mStreams[0]->getMyHandle();
+ return 0;
+}
+
+void QCamera3ProcessingChannel::unregisterBuffer(mm_camera_buf_def_t *frame)
+{
+ if (frame == nullptr) {
+ ALOGE("%s: frame is nullptr", __FUNCTION__);
+ return;
+ }
+
+ mMemory.unregisterBuffer(frame->buf_idx);
+}
+
/*===========================================================================
* FUNCTION : setFwkInputPPData
*
diff --git a/msm8998/QCamera2/HAL3/QCamera3Channel.h b/msm8998/QCamera2/HAL3/QCamera3Channel.h
index e3c1eaf..a23acd5 100644
--- a/msm8998/QCamera2/HAL3/QCamera3Channel.h
+++ b/msm8998/QCamera2/HAL3/QCamera3Channel.h
@@ -220,7 +220,10 @@
virtual QCamera3StreamMem *getStreamBufs(uint32_t len);
virtual void putStreamBufs();
virtual int32_t registerBuffer(buffer_handle_t *buffer, cam_is_type_t isType);
-
+ // Register a buffer and get the buffer def for the registered buffer.
+ virtual int32_t registerBufferAndGetBufDef(buffer_handle_t *buffer, mm_camera_buf_def_t *frame);
+ // Unregister a buffer.
+ virtual void unregisterBuffer(mm_camera_buf_def_t *frame);
virtual int32_t stop();
virtual reprocess_type_t getReprocessType() = 0;
diff --git a/msm8998/QCamera2/HAL3/QCamera3HWI.cpp b/msm8998/QCamera2/HAL3/QCamera3HWI.cpp
index 3ff6dc4..589a094 100644
--- a/msm8998/QCamera2/HAL3/QCamera3HWI.cpp
+++ b/msm8998/QCamera2/HAL3/QCamera3HWI.cpp
@@ -152,6 +152,7 @@
std::condition_variable gHdrPlusClientOpenCond; // Used to synchronize HDR+ client opening.
bool gEaselProfilingEnabled = false; // If Easel profiling is enabled.
bool gExposeEnableZslKey = false; // If HAL makes android.control.enableZsl available.
+bool gEnableMultipleHdrplusOutputs = false; // Whether to enable multiple outputs from Easel HDR+.
// If Easel is in bypass only mode. If true, Easel HDR+ won't be enabled.
bool gEaselBypassOnly;
@@ -2237,6 +2238,7 @@
stream_info->stream = newStream;
stream_info->status = VALID;
stream_info->channel = NULL;
+ stream_info->id = i;
mStreamInfo.push_back(stream_info);
}
/* Covers Opaque ZSL and API1 F/W ZSL */
@@ -4782,8 +4784,8 @@
}
status_t QCamera3HardwareInterface::fillPbStreamConfig(
- pbcamera::StreamConfiguration *config, uint32_t pbStreamId, int pbStreamFormat,
- QCamera3Channel *channel, uint32_t streamIndex) {
+ pbcamera::StreamConfiguration *config, uint32_t pbStreamId, QCamera3Channel *channel,
+ uint32_t streamIndex) {
if (config == nullptr) {
LOGE("%s: config is null", __FUNCTION__);
return BAD_VALUE;
@@ -4810,14 +4812,30 @@
config->image.width = streamInfo->dim.width;
config->image.height = streamInfo->dim.height;
config->image.padding = 0;
- config->image.format = pbStreamFormat;
+
+ int bytesPerPixel = 0;
+
+ switch (streamInfo->fmt) {
+ case CAM_FORMAT_YUV_420_NV21:
+ config->image.format = HAL_PIXEL_FORMAT_YCrCb_420_SP;
+ bytesPerPixel = 1;
+ break;
+ case CAM_FORMAT_YUV_420_NV12:
+ case CAM_FORMAT_YUV_420_NV12_VENUS:
+ config->image.format = HAL_PIXEL_FORMAT_YCbCr_420_SP;
+ bytesPerPixel = 1;
+ break;
+ default:
+ ALOGE("%s: Stream format %d not supported.", __FUNCTION__, streamInfo->fmt);
+ return BAD_VALUE;
+ }
uint32_t totalPlaneSize = 0;
// Fill plane information.
for (uint32_t i = 0; i < streamInfo->buf_planes.plane_info.num_planes; i++) {
pbcamera::PlaneConfiguration plane;
- plane.stride = streamInfo->buf_planes.plane_info.mp[i].stride_in_bytes;
+ plane.stride = streamInfo->buf_planes.plane_info.mp[i].stride * bytesPerPixel;
plane.scanline = streamInfo->buf_planes.plane_info.mp[i].scanline;
config->image.planes.push_back(plane);
@@ -10994,7 +11012,8 @@
// If Easel is present, power on Easel and suspend it immediately.
status_t res = gEaselManagerClient->open();
if (res != OK) {
- ALOGE("%s: Opening Easel manager client failed: %s (%d)", __FUNCTION__, strerror(-res), res);
+ ALOGE("%s: Opening Easel manager client failed: %s (%d)", __FUNCTION__, strerror(-res),
+ res);
return res;
}
@@ -11007,6 +11026,8 @@
gEaselBypassOnly = !property_get_bool("persist.camera.hdrplus.enable", true);
gEaselProfilingEnabled = property_get_bool("persist.camera.hdrplus.profiling", false);
+ gEnableMultipleHdrplusOutputs =
+ property_get_bool("persist.camera.hdrplus.multiple_outputs", false);
// Expose enableZsl key only when HDR+ mode is enabled.
gExposeEnableZslKey = !gEaselBypassOnly;
@@ -14872,22 +14893,67 @@
return false;
}
+
// TODO (b/36693254, b/36690506): support other outputs.
- if (request.num_output_buffers != 1 ||
- request.output_buffers[0].stream->format != HAL_PIXEL_FORMAT_BLOB) {
- ALOGV("%s: Not an HDR+ request: Only Jpeg output is supported.", __FUNCTION__);
- for (uint32_t i = 0; i < request.num_output_buffers; i++) {
- ALOGV("%s: output_buffers[%u]: %dx%d format %d", __FUNCTION__, i,
- request.output_buffers[0].stream->width,
- request.output_buffers[0].stream->height,
- request.output_buffers[0].stream->format);
- }
+ if (!gEnableMultipleHdrplusOutputs && request.num_output_buffers != 1) {
+ ALOGV("%s: Only support 1 output: %d", __FUNCTION__, request.num_output_buffers);
return false;
}
+ switch (request.output_buffers[0].stream->format) {
+ case HAL_PIXEL_FORMAT_BLOB:
+ break;
+ case HAL_PIXEL_FORMAT_YCbCr_420_888:
+ case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
+ // TODO (b/36693254): Only support full size.
+ if (!gEnableMultipleHdrplusOutputs) {
+ if (static_cast<int>(request.output_buffers[0].stream->width) !=
+ gCamCapability[mCameraId]->picture_sizes_tbl[0].width ||
+ static_cast<int>(request.output_buffers[0].stream->height) !=
+ gCamCapability[mCameraId]->picture_sizes_tbl[0].height) {
+ ALOGV("%s: Only full size is supported.", __FUNCTION__);
+ return false;
+ }
+ }
+ break;
+ default:
+            ALOGV("%s: Not an HDR+ request: Only Jpeg and YUV outputs are supported.", __FUNCTION__);
+ for (uint32_t i = 0; i < request.num_output_buffers; i++) {
+ ALOGV("%s: output_buffers[%u]: %dx%d format %d", __FUNCTION__, i,
+ request.output_buffers[0].stream->width,
+ request.output_buffers[0].stream->height,
+ request.output_buffers[0].stream->format);
+ }
+ return false;
+ }
+
return true;
}
+void QCamera3HardwareInterface::abortPendingHdrplusRequest(HdrPlusPendingRequest *hdrPlusRequest) {
+ if (hdrPlusRequest == nullptr) return;
+
+ for (auto & outputBufferIter : hdrPlusRequest->outputBuffers) {
+ // Find the stream for this buffer.
+ for (auto streamInfo : mStreamInfo) {
+ if (streamInfo->id == outputBufferIter.first) {
+ if (streamInfo->channel == mPictureChannel) {
+ // For picture channel, this buffer is internally allocated so return this
+ // buffer to picture channel.
+ mPictureChannel->returnYuvBuffer(outputBufferIter.second.get());
+ } else {
+ // Unregister this buffer for other channels.
+ streamInfo->channel->unregisterBuffer(outputBufferIter.second.get());
+ }
+ break;
+ }
+ }
+ }
+
+ hdrPlusRequest->outputBuffers.clear();
+ hdrPlusRequest->frameworkOutputBuffers.clear();
+}
+
bool QCamera3HardwareInterface::trySubmittingHdrPlusRequestLocked(
HdrPlusPendingRequest *hdrPlusRequest, const camera3_capture_request_t &request,
const CameraMetadata &metadata)
@@ -14895,37 +14961,85 @@
if (hdrPlusRequest == nullptr) return false;
if (!isRequestHdrPlusCompatible(request, metadata)) return false;
- // Get a YUV buffer from pic channel.
- QCamera3PicChannel *picChannel = (QCamera3PicChannel*)request.output_buffers[0].stream->priv;
- auto yuvBuffer = std::make_shared<mm_camera_buf_def_t>();
- status_t res = picChannel->getYuvBufferForRequest(yuvBuffer.get(), request.frame_number);
- if (res != OK) {
- ALOGE("%s: Getting an available YUV buffer from pic channel failed: %s (%d)",
- __FUNCTION__, strerror(-res), res);
- return false;
- }
-
- pbcamera::StreamBuffer buffer;
- buffer.streamId = kPbYuvOutputStreamId;
- buffer.dmaBufFd = yuvBuffer->fd;
- buffer.data = yuvBuffer->fd == -1 ? yuvBuffer->buffer : nullptr;
- buffer.dataSize = yuvBuffer->frame_len;
-
+ status_t res = OK;
pbcamera::CaptureRequest pbRequest;
pbRequest.id = request.frame_number;
- pbRequest.outputBuffers.push_back(buffer);
+ // Iterate through all requested output buffers and add them to an HDR+ request.
+ for (uint32_t i = 0; i < request.num_output_buffers; i++) {
+ // Find the index of the stream in mStreamInfo.
+ uint32_t pbStreamId = 0;
+ bool found = false;
+ for (auto streamInfo : mStreamInfo) {
+ if (streamInfo->stream == request.output_buffers[i].stream) {
+ pbStreamId = streamInfo->id;
+ found = true;
+ break;
+ }
+ }
+
+ if (!found) {
+ ALOGE("%s: requested stream was not configured.", __FUNCTION__);
+ abortPendingHdrplusRequest(hdrPlusRequest);
+ return false;
+ }
+ auto outBuffer = std::make_shared<mm_camera_buf_def_t>();
+ switch (request.output_buffers[i].stream->format) {
+ case HAL_PIXEL_FORMAT_BLOB:
+ {
+ // For jpeg output, get a YUV buffer from pic channel.
+ QCamera3PicChannel *picChannel =
+ (QCamera3PicChannel*)request.output_buffers[i].stream->priv;
+ res = picChannel->getYuvBufferForRequest(outBuffer.get(), request.frame_number);
+ if (res != OK) {
+ ALOGE("%s: Getting an available YUV buffer from pic channel failed: %s (%d)",
+ __FUNCTION__, strerror(-res), res);
+ abortPendingHdrplusRequest(hdrPlusRequest);
+ return false;
+ }
+ break;
+ }
+ case HAL_PIXEL_FORMAT_YCbCr_420_888:
+ case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
+ {
+ // For YUV output, register the buffer and get the buffer def from the channel.
+ QCamera3ProcessingChannel *channel =
+ (QCamera3ProcessingChannel*)request.output_buffers[i].stream->priv;
+ res = channel->registerBufferAndGetBufDef(request.output_buffers[i].buffer,
+ outBuffer.get());
+ if (res != OK) {
+ ALOGE("%s: Getting the buffer def failed: %s (%d)", __FUNCTION__,
+ strerror(-res), res);
+ abortPendingHdrplusRequest(hdrPlusRequest);
+ return false;
+ }
+ break;
+ }
+ default:
+ abortPendingHdrplusRequest(hdrPlusRequest);
+ return false;
+ }
+
+ pbcamera::StreamBuffer buffer;
+ buffer.streamId = pbStreamId;
+ buffer.dmaBufFd = outBuffer->fd;
+ buffer.data = outBuffer->fd == -1 ? outBuffer->buffer : nullptr;
+ buffer.dataSize = outBuffer->frame_len;
+
+ pbRequest.outputBuffers.push_back(buffer);
+
+ hdrPlusRequest->outputBuffers.emplace(pbStreamId, outBuffer);
+ hdrPlusRequest->frameworkOutputBuffers.emplace(pbStreamId, request.output_buffers[i]);
+ }
// Submit an HDR+ capture request to HDR+ service.
res = gHdrPlusClient->submitCaptureRequest(&pbRequest, metadata);
if (res != OK) {
ALOGE("%s: %d: Submitting a capture request failed: %s (%d)", __FUNCTION__, __LINE__,
strerror(-res), res);
+ abortPendingHdrplusRequest(hdrPlusRequest);
return false;
}
- hdrPlusRequest->yuvBuffer = yuvBuffer;
- hdrPlusRequest->frameworkOutputBuffers.push_back(request.output_buffers[0]);
-
return true;
}
@@ -15020,13 +15134,20 @@
bool QCamera3HardwareInterface::isSessionHdrPlusModeCompatible()
{
- // Check if mPictureChannel is valid.
- // TODO: Support YUV (b/36693254) and RAW (b/36690506)
- if (mPictureChannel == nullptr) {
- return false;
+ // Check that at least one YUV or one JPEG output is configured.
+ // TODO: Support RAW (b/36690506)
+ for (auto streamInfo : mStreamInfo) {
+ if (streamInfo != nullptr && streamInfo->stream != nullptr) {
+ if (streamInfo->stream->stream_type == CAMERA3_STREAM_OUTPUT &&
+ (streamInfo->stream->format == HAL_PIXEL_FORMAT_BLOB ||
+ streamInfo->stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888 ||
+ streamInfo->stream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED)) {
+ return true;
+ }
+ }
}
- return true;
+ return false;
}
status_t QCamera3HardwareInterface::configureHdrPlusStreamsLocked()
@@ -15035,59 +15156,50 @@
std::vector<pbcamera::StreamConfiguration> outputStreamConfigs;
status_t res = OK;
- // Configure HDR+ client streams.
- // Get input config.
- if (mHdrPlusRawSrcChannel) {
- // HDR+ input buffers will be provided by HAL.
- res = fillPbStreamConfig(&inputConfig.streamConfig, kPbRaw10InputStreamId,
- HAL_PIXEL_FORMAT_RAW10, mHdrPlusRawSrcChannel, /*stream index*/0);
- if (res != OK) {
- LOGE("%s: Failed to get fill stream config for HDR+ raw src stream: %s (%d)",
- __FUNCTION__, strerror(-res), res);
- return res;
- }
-
- inputConfig.isSensorInput = false;
- } else {
- // Sensor MIPI will send data to Easel.
- inputConfig.isSensorInput = true;
- inputConfig.sensorMode.cameraId = mCameraId;
- inputConfig.sensorMode.pixelArrayWidth = mSensorModeInfo.pixel_array_size.width;
- inputConfig.sensorMode.pixelArrayHeight = mSensorModeInfo.pixel_array_size.height;
- inputConfig.sensorMode.activeArrayWidth = mSensorModeInfo.active_array_size.width;
- inputConfig.sensorMode.activeArrayHeight = mSensorModeInfo.active_array_size.height;
- inputConfig.sensorMode.outputPixelClkHz = mSensorModeInfo.op_pixel_clk;
- inputConfig.sensorMode.timestampOffsetNs = mSensorModeInfo.timestamp_offset;
- if (mSensorModeInfo.num_raw_bits != 10) {
- ALOGE("%s: Only RAW10 is supported but this sensor mode has %d raw bits.", __FUNCTION__,
- mSensorModeInfo.num_raw_bits);
- return BAD_VALUE;
- }
-
- inputConfig.sensorMode.format = HAL_PIXEL_FORMAT_RAW10;
+ // Sensor MIPI will send data to Easel.
+ inputConfig.isSensorInput = true;
+ inputConfig.sensorMode.cameraId = mCameraId;
+ inputConfig.sensorMode.pixelArrayWidth = mSensorModeInfo.pixel_array_size.width;
+ inputConfig.sensorMode.pixelArrayHeight = mSensorModeInfo.pixel_array_size.height;
+ inputConfig.sensorMode.activeArrayWidth = mSensorModeInfo.active_array_size.width;
+ inputConfig.sensorMode.activeArrayHeight = mSensorModeInfo.active_array_size.height;
+ inputConfig.sensorMode.outputPixelClkHz = mSensorModeInfo.op_pixel_clk;
+ inputConfig.sensorMode.timestampOffsetNs = mSensorModeInfo.timestamp_offset;
+ if (mSensorModeInfo.num_raw_bits != 10) {
+ ALOGE("%s: Only RAW10 is supported but this sensor mode has %d raw bits.", __FUNCTION__,
+ mSensorModeInfo.num_raw_bits);
+ return BAD_VALUE;
}
- // Get output configurations.
- // Easel may need to output RAW16 buffers if mRawChannel was created.
- // TODO: handle RAW16 outputs.
+ inputConfig.sensorMode.format = HAL_PIXEL_FORMAT_RAW10;
- // Easel may need to output YUV output buffers if mPictureChannel was created.
- pbcamera::StreamConfiguration yuvOutputConfig;
- if (mPictureChannel != nullptr) {
- res = fillPbStreamConfig(&yuvOutputConfig, kPbYuvOutputStreamId,
- HAL_PIXEL_FORMAT_YCrCb_420_SP, mPictureChannel, /*stream index*/0);
- if (res != OK) {
- LOGE("%s: Failed to get fill stream config for YUV stream: %s (%d)",
- __FUNCTION__, strerror(-res), res);
+ // Iterate through configured output streams in HAL and configure those streams in HDR+
+ // service.
+ for (auto streamInfo : mStreamInfo) {
+ pbcamera::StreamConfiguration outputConfig;
+ if (streamInfo->stream->stream_type == CAMERA3_STREAM_OUTPUT) {
+ switch (streamInfo->stream->format) {
+ case HAL_PIXEL_FORMAT_BLOB:
+ case HAL_PIXEL_FORMAT_YCbCr_420_888:
+ case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
+ res = fillPbStreamConfig(&outputConfig, streamInfo->id,
+ streamInfo->channel, /*stream index*/0);
+ if (res != OK) {
+ LOGE("%s: Failed to get fill stream config for YUV stream: %s (%d)",
+ __FUNCTION__, strerror(-res), res);
- return res;
+ return res;
+ }
+
+ outputStreamConfigs.push_back(outputConfig);
+ break;
+ default:
+ // TODO: handle RAW16 outputs if mRawChannel was created. (b/36690506)
+ break;
+ }
}
-
- outputStreamConfigs.push_back(yuvOutputConfig);
}
- // TODO: consider other channels for YUV output buffers.
-
res = gHdrPlusClient->configureStreams(inputConfig, outputStreamConfigs);
if (res != OK) {
LOGE("%d: Failed to configure streams with HDR+ client: %s (%d)", __FUNCTION__,
@@ -15284,101 +15396,124 @@
void QCamera3HardwareInterface::onCaptureResult(pbcamera::CaptureResult *result,
const camera_metadata_t &resultMetadata)
{
- if (result != nullptr) {
- if (result->outputBuffers.size() != 1) {
- ALOGE("%s: Number of output buffers (%u) is not supported.", __FUNCTION__,
- result->outputBuffers.size());
- return;
+ if (result == nullptr) {
+ ALOGE("%s: result is nullptr.", __FUNCTION__);
+ return;
+ }
+
+ // Find the pending HDR+ request.
+ HdrPlusPendingRequest pendingRequest;
+ {
+ Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
+ auto req = mHdrPlusPendingRequests.find(result->requestId);
+ pendingRequest = req->second;
+ }
+
+ // Update the result metadata with the settings of the HDR+ still capture request because
+ // the result metadata belongs to a ZSL buffer.
+ CameraMetadata metadata;
+ metadata = &resultMetadata;
+ updateHdrPlusResultMetadata(metadata, pendingRequest.settings);
+ camera_metadata_t* updatedResultMetadata = metadata.release();
+
+ uint32_t halSnapshotStreamId = 0;
+ if (mPictureChannel != nullptr) {
+ halSnapshotStreamId = mPictureChannel->getStreamID(mPictureChannel->getStreamTypeMask());
+ }
+
+ auto halMetadata = std::make_shared<metadata_buffer_t>();
+ clear_metadata_buffer(halMetadata.get());
+
+ // Convert updated result metadata to HAL metadata.
+ status_t res = translateFwkMetadataToHalMetadata(updatedResultMetadata, halMetadata.get(),
+ halSnapshotStreamId, /*minFrameDuration*/0);
+ if (res != 0) {
+ ALOGE("%s: Translating metadata failed: %s (%d)", __FUNCTION__, strerror(-res), res);
+ }
+
+ for (auto &outputBuffer : result->outputBuffers) {
+ uint32_t streamId = outputBuffer.streamId;
+
+ // Find the framework output buffer in the pending request.
+ auto frameworkOutputBufferIter = pendingRequest.frameworkOutputBuffers.find(streamId);
+ if (frameworkOutputBufferIter == pendingRequest.frameworkOutputBuffers.end()) {
+ ALOGE("%s: Couldn't find framework output buffers for stream id %u", __FUNCTION__,
+ streamId);
+ continue;
}
- if (result->outputBuffers[0].streamId != kPbYuvOutputStreamId) {
- ALOGE("%s: Only YUV output stream is supported. (stream id %d).", __FUNCTION__,
- result->outputBuffers[0].streamId);
- return;
+ camera3_stream_buffer_t *frameworkOutputBuffer = &frameworkOutputBufferIter->second;
+
+ // Find the channel for the output buffer.
+ QCamera3ProcessingChannel *channel =
+ (QCamera3ProcessingChannel*)frameworkOutputBuffer->stream->priv;
+
+ // Find the output buffer def.
+ auto outputBufferIter = pendingRequest.outputBuffers.find(streamId);
+ if (outputBufferIter == pendingRequest.outputBuffers.end()) {
+ ALOGE("%s: Cannot find output buffer", __FUNCTION__);
+ continue;
}
- // TODO (b/34854987): initiate this from HDR+ service.
- onNextCaptureReady(result->requestId);
+ std::shared_ptr<mm_camera_buf_def_t> outputBufferDef = outputBufferIter->second;
- // Find the pending HDR+ request.
- HdrPlusPendingRequest pendingRequest;
- {
- Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
- auto req = mHdrPlusPendingRequests.find(result->requestId);
- pendingRequest = req->second;
- }
+ // Check whether to dump the buffer.
+ if (frameworkOutputBuffer->stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888 ||
+ frameworkOutputBuffer->stream->format == HAL_PIXEL_FORMAT_BLOB) {
+ // If the stream format is YUV or jpeg, check if dumping HDR+ YUV output is enabled.
+ char prop[PROPERTY_VALUE_MAX];
+ property_get("persist.camera.hdrplus.dump_yuv", prop, "0");
+ bool dumpYuvOutput = atoi(prop);
- // Update the result metadata with the settings of the HDR+ still capture request because
- // the result metadata belongs to a ZSL buffer.
- CameraMetadata metadata;
- metadata = &resultMetadata;
- updateHdrPlusResultMetadata(metadata, pendingRequest.settings);
- camera_metadata_t* updatedResultMetadata = metadata.release();
+ if (dumpYuvOutput) {
+ // Dump yuv buffer to a ppm file.
+ pbcamera::StreamConfiguration outputConfig;
+ status_t rc = fillPbStreamConfig(&outputConfig, streamId,
+ channel, /*stream index*/0);
+ if (rc == OK) {
+ char buf[FILENAME_MAX] = {};
+ snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"s_%d_%d_%dx%d.ppm",
+ result->requestId, streamId,
+ outputConfig.image.width, outputConfig.image.height);
- QCamera3PicChannel *picChannel =
- (QCamera3PicChannel*)pendingRequest.frameworkOutputBuffers[0].stream->priv;
-
- // Check if dumping HDR+ YUV output is enabled.
- char prop[PROPERTY_VALUE_MAX];
- property_get("persist.camera.hdrplus.dump_yuv", prop, "0");
- bool dumpYuvOutput = atoi(prop);
-
- if (dumpYuvOutput) {
- // Dump yuv buffer to a ppm file.
- pbcamera::StreamConfiguration outputConfig;
- status_t rc = fillPbStreamConfig(&outputConfig, kPbYuvOutputStreamId,
- HAL_PIXEL_FORMAT_YCrCb_420_SP, picChannel, /*stream index*/0);
- if (rc == OK) {
- char buf[FILENAME_MAX] = {};
- snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"s_%d_%d_%dx%d.ppm",
- result->requestId, result->outputBuffers[0].streamId,
- outputConfig.image.width, outputConfig.image.height);
-
- hdrplus_client_utils::writePpm(buf, outputConfig, result->outputBuffers[0]);
- } else {
- LOGW("%s: Couldn't dump YUV buffer because getting stream config failed: %s (%d).",
- __FUNCTION__, strerror(-rc), rc);
+ hdrplus_client_utils::writePpm(buf, outputConfig, outputBuffer);
+ } else {
+ LOGW("%s: Couldn't dump YUV buffer because getting stream config failed: "
+ "%s (%d).", __FUNCTION__, strerror(-rc), rc);
+ }
}
}
- uint32_t halStreamId = picChannel->getStreamID(picChannel->getStreamTypeMask());
- auto halMetadata = std::make_shared<metadata_buffer_t>();
- clear_metadata_buffer(halMetadata.get());
-
- // Convert updated result metadata to HAL metadata and return the yuv buffer for Jpeg
- // encoding.
- status_t res = translateFwkMetadataToHalMetadata(updatedResultMetadata, halMetadata.get(),
- halStreamId, /*minFrameDuration*/0);
- if (res == OK) {
+ if (channel == mPictureChannel) {
// Return the buffer to pic channel for encoding.
- picChannel->returnYuvBufferAndEncode(pendingRequest.yuvBuffer.get(),
- pendingRequest.frameworkOutputBuffers[0].buffer, result->requestId,
+ mPictureChannel->returnYuvBufferAndEncode(outputBufferDef.get(),
+ frameworkOutputBuffer->buffer, result->requestId,
halMetadata);
} else {
- // Return the buffer without encoding.
- // TODO: This should not happen but we may want to report an error buffer to camera
- // service.
- picChannel->returnYuvBuffer(pendingRequest.yuvBuffer.get());
- ALOGE("%s: Translate framework metadata to HAL metadata failed: %s (%d).", __FUNCTION__,
- strerror(-res), res);
- }
-
- // Send HDR+ metadata to framework.
- {
+ // Return the buffer to camera framework.
pthread_mutex_lock(&mMutex);
-
- // updatedResultMetadata will be freed in handlePendingResultMetadataWithLock.
- handlePendingResultMetadataWithLock(result->requestId, updatedResultMetadata);
+ handleBufferWithLock(frameworkOutputBuffer, result->requestId);
pthread_mutex_unlock(&mMutex);
- }
- // Remove the HDR+ pending request.
- {
- Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
- auto req = mHdrPlusPendingRequests.find(result->requestId);
- mHdrPlusPendingRequests.erase(req);
+ channel->unregisterBuffer(outputBufferDef.get());
}
}
+
+ // Send HDR+ metadata to framework.
+ {
+ pthread_mutex_lock(&mMutex);
+
+ // updatedResultMetadata will be freed in handlePendingResultMetadataWithLock.
+ handlePendingResultMetadataWithLock(result->requestId, updatedResultMetadata);
+ pthread_mutex_unlock(&mMutex);
+ }
+
+ // Remove the HDR+ pending request.
+ {
+ Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
+ auto req = mHdrPlusPendingRequests.find(result->requestId);
+ mHdrPlusPendingRequests.erase(req);
+ }
}
void QCamera3HardwareInterface::onFailedCaptureResult(pbcamera::CaptureResult *failedResult)
@@ -15390,17 +15525,58 @@
ALOGE("%s: Got a failed HDR+ result for request %d", __FUNCTION__, failedResult->requestId);
- // Remove the pending HDR+ request.
+ // Find the pending HDR+ request.
+ HdrPlusPendingRequest pendingRequest;
{
Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
- auto pendingRequest = mHdrPlusPendingRequests.find(failedResult->requestId);
+ auto req = mHdrPlusPendingRequests.find(failedResult->requestId);
+ if (req == mHdrPlusPendingRequests.end()) {
+ ALOGE("%s: Couldn't find pending request %d", __FUNCTION__, failedResult->requestId);
+ return;
+ }
+ pendingRequest = req->second;
+ }
- // Return the buffer to pic channel.
- QCamera3PicChannel *picChannel =
- (QCamera3PicChannel*)pendingRequest->second.frameworkOutputBuffers[0].stream->priv;
- picChannel->returnYuvBuffer(pendingRequest->second.yuvBuffer.get());
+ for (auto &outputBuffer : failedResult->outputBuffers) {
+ uint32_t streamId = outputBuffer.streamId;
- mHdrPlusPendingRequests.erase(pendingRequest);
+ // Find the channel
+ // Find the framework output buffer in the pending request.
+ auto frameworkOutputBufferIter = pendingRequest.frameworkOutputBuffers.find(streamId);
+ if (frameworkOutputBufferIter == pendingRequest.frameworkOutputBuffers.end()) {
+ ALOGE("%s: Couldn't find framework output buffers for stream id %u", __FUNCTION__,
+ streamId);
+ continue;
+ }
+
+ camera3_stream_buffer_t *frameworkOutputBuffer = &frameworkOutputBufferIter->second;
+
+ // Find the channel for the output buffer.
+ QCamera3ProcessingChannel *channel =
+ (QCamera3ProcessingChannel*)frameworkOutputBuffer->stream->priv;
+
+ // Find the output buffer def.
+ auto outputBufferIter = pendingRequest.outputBuffers.find(streamId);
+ if (outputBufferIter == pendingRequest.outputBuffers.end()) {
+ ALOGE("%s: Cannot find output buffer", __FUNCTION__);
+ continue;
+ }
+
+ std::shared_ptr<mm_camera_buf_def_t> outputBufferDef = outputBufferIter->second;
+
+ if (channel == mPictureChannel) {
+ // Return the buffer to pic channel.
+ mPictureChannel->returnYuvBuffer(outputBufferDef.get());
+ } else {
+ channel->unregisterBuffer(outputBufferDef.get());
+ }
+ }
+
+ // Remove the HDR+ pending request.
+ {
+ Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
+ auto req = mHdrPlusPendingRequests.find(failedResult->requestId);
+ mHdrPlusPendingRequests.erase(req);
}
pthread_mutex_lock(&mMutex);
diff --git a/msm8998/QCamera2/HAL3/QCamera3HWI.h b/msm8998/QCamera2/HAL3/QCamera3HWI.h
index fe7828b..8df7ea4 100644
--- a/msm8998/QCamera2/HAL3/QCamera3HWI.h
+++ b/msm8998/QCamera2/HAL3/QCamera3HWI.h
@@ -108,6 +108,7 @@
stream_status_t status;
int registered;
QCamera3ProcessingChannel *channel;
+ uint32_t id; // unique ID
} stream_info_t;
typedef struct {
@@ -771,23 +772,17 @@
Mutex mFlushLock;
bool m60HzZone;
- // Stream IDs used in stream configuration with HDR+ client.
- const static uint32_t kPbRaw10InputStreamId = 0;
- const static uint32_t kPbYuvOutputStreamId = 1;
- const static uint32_t kPbRaw16OutputStreamId = 2;
-
// Issue an additional RAW for every 10 requests to control RAW capture rate. Requesting RAW
// too often will cause frame drops due to latency of sending RAW to HDR+ service.
const static uint32_t kHdrPlusRawPeriod = 10;
// Define a pending HDR+ request submitted to HDR+ service and not yet received by HAL.
struct HdrPlusPendingRequest {
- // YUV buffer from QCamera3PicChannel to be filled by HDR+ client with an HDR+ processed
- // frame.
- std::shared_ptr<mm_camera_buf_def_t> yuvBuffer;
+ // HDR+ stream ID -> output buffer to be filled by HDR+ client with an HDR+ processed frame.
+ std::map<uint32_t, std::shared_ptr<mm_camera_buf_def_t>> outputBuffers;
- // Output buffers in camera framework's request.
- std::vector<camera3_stream_buffer_t> frameworkOutputBuffers;
+ // HDR+ stream ID -> output buffers in camera framework's request.
+ std::map<uint32_t, camera3_stream_buffer_t> frameworkOutputBuffers;
// Settings in camera framework's request.
std::shared_ptr<metadata_buffer_t> settings;
@@ -795,7 +790,7 @@
// Fill pbcamera::StreamConfiguration based on the channel stream.
status_t fillPbStreamConfig(pbcamera::StreamConfiguration *config, uint32_t pbStreamId,
- int pbStreamFormat, QCamera3Channel *channel, uint32_t streamIndex);
+ QCamera3Channel *channel, uint32_t streamIndex);
// Open HDR+ client asynchronously.
status_t openHdrPlusClientAsyncLocked();
@@ -822,6 +817,10 @@
bool trySubmittingHdrPlusRequestLocked(HdrPlusPendingRequest *hdrPlusRequest,
const camera3_capture_request_t &request, const CameraMetadata &metadata);
+ // Abort an HDR+ request that was not submitted successfully in
+ // trySubmittingHdrPlusRequestLocked.
+ void abortPendingHdrplusRequest(HdrPlusPendingRequest *hdrPlusRequest);
+
// Update HDR+ result metadata with the still capture's request settings.
void updateHdrPlusResultMetadata(CameraMetadata &resultMetadata,
std::shared_ptr<metadata_buffer_t> settings);
@@ -839,7 +838,7 @@
const camera_metadata_t &resultMetadata) override;
void onFailedCaptureResult(pbcamera::CaptureResult *failedResult) override;
void onShutter(uint32_t requestId, int64_t apSensorTimestampNs) override;
- void onNextCaptureReady(uint32_t requestId);
+ void onNextCaptureReady(uint32_t requestId) override;
void onPostview(uint32_t requestId, std::unique_ptr<std::vector<uint8_t>> postview,
uint32_t width, uint32_t height, uint32_t stride, int32_t format) override;