Merge "QCamera2: HAL3: Extend available request/result/charact. keys" into oc-mr1-dev
diff --git a/msm8998/QCamera2/HAL3/QCamera3Channel.cpp b/msm8998/QCamera2/HAL3/QCamera3Channel.cpp
index 5865a16..bb12822 100644
--- a/msm8998/QCamera2/HAL3/QCamera3Channel.cpp
+++ b/msm8998/QCamera2/HAL3/QCamera3Channel.cpp
@@ -2454,6 +2454,8 @@
uint32_t raw16_stride = ((uint32_t)dim.width + 15U) & ~15U;
uint16_t* raw16_buffer = (uint16_t *)frame->buffer;
+ uint8_t first_quintuple[5];
+ memcpy(first_quintuple, raw16_buffer, sizeof(first_quintuple));
// In-place format conversion.
// Raw16 format always occupy more memory than opaque raw10.
@@ -2476,6 +2478,12 @@
raw16_buffer[y*raw16_stride+x] = raw16_pixel;
}
}
+
+ // Re-convert the first 2 pixels of the buffer because the loop above messes
+ // them up by reading the first quintuple while modifying it.
+ raw16_buffer[0] = ((uint16_t)first_quintuple[0]<<2) | (first_quintuple[4] & 0x3);
+ raw16_buffer[1] = ((uint16_t)first_quintuple[1]<<2) | ((first_quintuple[4] >> 2) & 0x3);
+
} else {
LOGE("Could not find stream");
}
diff --git a/msm8998/QCamera2/HAL3/QCamera3HWI.cpp b/msm8998/QCamera2/HAL3/QCamera3HWI.cpp
index 1030760..124405d 100644
--- a/msm8998/QCamera2/HAL3/QCamera3HWI.cpp
+++ b/msm8998/QCamera2/HAL3/QCamera3HWI.cpp
@@ -152,7 +152,6 @@
std::condition_variable gHdrPlusClientOpenCond; // Used to synchronize HDR+ client opening.
bool gEaselProfilingEnabled = false; // If Easel profiling is enabled.
bool gExposeEnableZslKey = false; // If HAL makes android.control.enableZsl available.
-bool gEnableMultipleHdrplusOutputs = false; // Whether to enable multiple output from Easel HDR+.
// If Easel is in bypass only mode. If true, Easel HDR+ won't be enabled.
bool gEaselBypassOnly;
@@ -511,6 +510,7 @@
mAecSkipDisplayFrameBound(0),
mInstantAecFrameIdxCount(0),
mLastRequestedLensShadingMapMode(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF),
+ mLastRequestedFaceDetectMode(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF),
mCurrFeatureState(0),
mLdafCalibExist(false),
mLastCustIntentFrmNum(-1),
@@ -2243,7 +2243,7 @@
stream_info->stream = newStream;
stream_info->status = VALID;
stream_info->channel = NULL;
- stream_info->id = i;
+ stream_info->id = i; // ID will be re-assigned in cleanAndSortStreamInfo().
mStreamInfo.push_back(stream_info);
}
/* Covers Opaque ZSL and API1 F/W ZSL */
@@ -3734,9 +3734,10 @@
i->frame_number, urgent_frame_number);
if ((!i->input_buffer) && (!i->hdrplus) && (i->frame_number < urgent_frame_number) &&
- (i->partial_result_cnt == 0)) {
+ (i->partial_result_cnt == 0)) {
LOGE("Error: HAL missed urgent metadata for frame number %d",
i->frame_number);
+ i->partialResultDropped = true;
i->partial_result_cnt++;
}
@@ -3835,7 +3836,13 @@
for (auto & pendingRequest : mPendingRequestsList) {
// Find the pending request with the frame number.
- if (pendingRequest.frame_number == frame_number) {
+ if (pendingRequest.frame_number < frame_number) {
+ // Workaround for case where shutter is missing due to dropped
+ // metadata
+ if (!pendingRequest.hdrplus && (pendingRequest.input_buffer == nullptr)) {
+ mShutterDispatcher.markShutterReady(pendingRequest.frame_number, capture_time);
+ }
+ } else if (pendingRequest.frame_number == frame_number) {
// Update the sensor timestamp.
pendingRequest.timestamp = capture_time;
@@ -4226,6 +4233,34 @@
}
}
+void QCamera3HardwareInterface::removeUnrequestedMetadata(pendingRequestIterator requestIter,
+ camera_metadata_t *resultMetadata) {
+ CameraMetadata metadata;
+ metadata.acquire(resultMetadata);
+
+ // Remove lens shading map if it's not requested.
+ if (requestIter->requestedLensShadingMapMode == ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF &&
+ metadata.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE) &&
+ metadata.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0] !=
+ ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF) {
+ metadata.erase(ANDROID_STATISTICS_LENS_SHADING_MAP);
+ metadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE,
+ &requestIter->requestedLensShadingMapMode, 1);
+ }
+
+ // Remove face information if it's not requested.
+ if (requestIter->requestedFaceDetectMode == ANDROID_STATISTICS_FACE_DETECT_MODE_OFF &&
+ metadata.exists(ANDROID_STATISTICS_FACE_DETECT_MODE) &&
+ metadata.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0] !=
+ ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
+ metadata.erase(ANDROID_STATISTICS_FACE_RECTANGLES);
+ metadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE,
+ &requestIter->requestedFaceDetectMode, 1);
+ }
+
+ requestIter->resultMetadata = metadata.release();
+}
+
void QCamera3HardwareInterface::handlePendingResultMetadataWithLock(uint32_t frameNumber,
camera_metadata_t *resultMetadata)
{
@@ -4268,15 +4303,8 @@
}
}
- // Remove len shading map if it's not requested.
- if (requestIter->requestedLensShadingMapMode == ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF) {
- CameraMetadata metadata;
- metadata.acquire(resultMetadata);
- metadata.erase(ANDROID_STATISTICS_LENS_SHADING_MAP);
- metadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE,
- &requestIter->requestedLensShadingMapMode, 1);
-
- requestIter->resultMetadata = metadata.release();
+ if (requestIter->input_buffer == nullptr) {
+ removeUnrequestedMetadata(requestIter, resultMetadata);
}
dispatchResultMetadataWithLock(frameNumber, liveRequest);
@@ -4300,6 +4328,7 @@
}
bool thisLiveRequest = iter->hdrplus == false && iter->input_buffer == nullptr;
+ bool errorResult = false;
camera3_capture_result_t result = {};
result.frame_number = iter->frame_number;
@@ -4316,30 +4345,27 @@
iter++;
continue;
}
+ // Notify ERROR_RESULT if partial result was dropped.
+ errorResult = iter->partialResultDropped;
} else if (iter->frame_number < frameNumber && isLiveRequest && thisLiveRequest) {
// If the result metadata belongs to a live request, notify errors for previous pending
// live requests.
mPendingLiveRequest--;
- CameraMetadata dummyMetadata;
- dummyMetadata.update(ANDROID_REQUEST_ID, &(iter->request_id), 1);
- result.result = dummyMetadata.release();
-
- notifyError(iter->frame_number, CAMERA3_MSG_ERROR_RESULT);
-
- // partial_result should be PARTIAL_RESULT_CNT in case of
- // ERROR_RESULT.
- iter->partial_result_cnt = PARTIAL_RESULT_COUNT;
- result.partial_result = PARTIAL_RESULT_COUNT;
+ LOGE("Error: HAL missed metadata for frame number %d", iter->frame_number);
+ errorResult = true;
} else {
iter++;
continue;
}
- result.output_buffers = nullptr;
- result.num_output_buffers = 0;
- orchestrateResult(&result);
-
+ if (errorResult) {
+ notifyError(iter->frame_number, CAMERA3_MSG_ERROR_RESULT);
+ } else {
+ result.output_buffers = nullptr;
+ result.num_output_buffers = 0;
+ orchestrateResult(&result);
+ }
// For reprocessing, result metadata is the same as settings so do not free it here to
// avoid double free.
if (result.result != iter->settings) {
@@ -5439,6 +5465,11 @@
requestedLensShadingMapMode = mLastRequestedLensShadingMapMode;
}
+ if (meta.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
+ mLastRequestedFaceDetectMode =
+ meta.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
+ }
+
bool hdrPlusRequest = false;
HdrPlusPendingRequest pendingHdrPlusRequest = {};
@@ -5478,12 +5509,14 @@
}
{
- // If HDR+ mode is enabled, override lens shading mode to ON so lens shading map
- // will be reported in result metadata.
+ // If HDR+ mode is enabled, override the following modes so the necessary metadata
+ // will be included in the result metadata sent to Easel HDR+.
std::unique_lock<std::mutex> l(gHdrPlusClientLock);
if (mHdrPlusModeEnabled) {
ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_LENS_SHADING_MAP_MODE,
ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON);
+ ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STATS_FACEDETECT_MODE,
+ ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
}
}
}
@@ -5547,6 +5580,7 @@
pendingRequest.blob_request = blob_request;
pendingRequest.timestamp = 0;
pendingRequest.requestedLensShadingMapMode = requestedLensShadingMapMode;
+ pendingRequest.requestedFaceDetectMode = mLastRequestedFaceDetectMode;
if (request->input_buffer) {
pendingRequest.input_buffer =
(camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
@@ -8203,16 +8237,10 @@
// OIS Data
IF_META_AVAILABLE(cam_frame_ois_info_t, frame_ois_data, CAM_INTF_META_FRAME_OIS_DATA, metadata) {
- camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_VSYNC,
- &(frame_ois_data->frame_sof_timestamp_vsync), 1);
camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_BOOTTIME,
&(frame_ois_data->frame_sof_timestamp_boottime), 1);
camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_TIMESTAMPS_BOOTTIME,
frame_ois_data->ois_sample_timestamp_boottime, frame_ois_data->num_ois_sample);
- camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_X,
- frame_ois_data->ois_sample_shift_x, frame_ois_data->num_ois_sample);
- camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_Y,
- frame_ois_data->ois_sample_shift_y, frame_ois_data->num_ois_sample);
camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_PIXEL_X,
frame_ois_data->ois_sample_shift_pixel_x, frame_ois_data->num_ois_sample);
camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_PIXEL_Y,
@@ -8681,6 +8709,13 @@
}
mStreamInfo = newStreamInfo;
+
+ // Make sure that stream IDs are unique.
+ uint32_t id = 0;
+ for (auto streamInfo : mStreamInfo) {
+ streamInfo->id = id++;
+ }
+
}
/*===========================================================================
@@ -11141,8 +11176,6 @@
gEaselBypassOnly = !property_get_bool("persist.camera.hdrplus.enable", false);
gEaselProfilingEnabled = property_get_bool("persist.camera.hdrplus.profiling", false);
- gEnableMultipleHdrplusOutputs =
- property_get_bool("persist.camera.hdrplus.multiple_outputs", false);
// Expose enableZsl key only when HDR+ mode is enabled.
gExposeEnableZslKey = !gEaselBypassOnly;
@@ -12436,6 +12469,8 @@
rc = BAD_VALUE;
}
}
+ } else {
+ LOGE("Fatal: Missing ANDROID_CONTROL_AF_MODE");
}
} else {
uint8_t focusMode = (uint8_t)CAM_FOCUS_MODE_INFINITY;
@@ -14874,29 +14909,41 @@
IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, settings) {
resultMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
+ } else {
+ resultMetadata.erase(ANDROID_JPEG_GPS_COORDINATES);
}
IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, settings) {
String8 str((const char *)gps_methods);
resultMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
+ } else {
+ resultMetadata.erase(ANDROID_JPEG_GPS_PROCESSING_METHOD);
}
IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, settings) {
resultMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
+ } else {
+ resultMetadata.erase(ANDROID_JPEG_GPS_TIMESTAMP);
}
IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, settings) {
resultMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
+ } else {
+ resultMetadata.erase(ANDROID_JPEG_ORIENTATION);
}
IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, settings) {
uint8_t fwk_jpeg_quality = static_cast<uint8_t>(*jpeg_quality);
resultMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
+ } else {
+ resultMetadata.erase(ANDROID_JPEG_QUALITY);
}
IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, settings) {
uint8_t fwk_thumb_quality = static_cast<uint8_t>(*thumb_quality);
resultMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
+ } else {
+ resultMetadata.erase(ANDROID_JPEG_THUMBNAIL_QUALITY);
}
IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, settings) {
@@ -14904,11 +14951,15 @@
fwk_thumb_size[0] = thumb_size->width;
fwk_thumb_size[1] = thumb_size->height;
resultMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
+ } else {
+ resultMetadata.erase(ANDROID_JPEG_THUMBNAIL_SIZE);
}
IF_META_AVAILABLE(uint32_t, intent, CAM_INTF_META_CAPTURE_INTENT, settings) {
uint8_t fwk_intent = intent[0];
resultMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &fwk_intent, 1);
+ } else {
+ resultMetadata.erase(ANDROID_CONTROL_CAPTURE_INTENT);
}
}
@@ -14970,6 +15021,13 @@
return false;
}
+ // TODO (b/66500626): support AE compensation.
+ if (!metadata.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION) ||
+ metadata.find(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0] != 0) {
+ ALOGV("%s: ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION is not 0.", __FUNCTION__);
+ return false;
+ }
+
// TODO (b/32585046): support non-ZSL.
if (!metadata.exists(ANDROID_CONTROL_ENABLE_ZSL) ||
metadata.find(ANDROID_CONTROL_ENABLE_ZSL).data.u8[0] != ANDROID_CONTROL_ENABLE_ZSL_TRUE) {
@@ -14990,28 +15048,10 @@
return false;
}
-
- // TODO (b/36693254, b/36690506): support other outputs.
- if (!gEnableMultipleHdrplusOutputs && request.num_output_buffers != 1) {
- ALOGV("%s: Only support 1 output: %d", __FUNCTION__, request.num_output_buffers);
- return false;
- }
-
switch (request.output_buffers[0].stream->format) {
case HAL_PIXEL_FORMAT_BLOB:
- break;
case HAL_PIXEL_FORMAT_YCbCr_420_888:
case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
- // TODO (b/36693254): Only support full size.
- if (!gEnableMultipleHdrplusOutputs) {
- if (static_cast<int>(request.output_buffers[0].stream->width) !=
- gCamCapability[mCameraId]->picture_sizes_tbl[0].width ||
- static_cast<int>(request.output_buffers[0].stream->height) !=
- gCamCapability[mCameraId]->picture_sizes_tbl[0].height) {
- ALOGV("%s: Only full size is supported.", __FUNCTION__);
- return false;
- }
- }
break;
default:
ALOGV("%s: Not an HDR+ request: Only Jpeg and YUV output is supported.", __FUNCTION__);
@@ -15262,6 +15302,8 @@
inputConfig.sensorMode.activeArrayHeight = mSensorModeInfo.active_array_size.height;
inputConfig.sensorMode.outputPixelClkHz = mSensorModeInfo.op_pixel_clk;
inputConfig.sensorMode.timestampOffsetNs = mSensorModeInfo.timestamp_offset;
+ inputConfig.sensorMode.timestampCropOffsetNs = mSensorModeInfo.timestamp_crop_offset;
+
if (mSensorModeInfo.num_raw_bits != 10) {
ALOGE("%s: Only RAW10 is supported but this sensor mode has %d raw bits.", __FUNCTION__,
mSensorModeInfo.num_raw_bits);
@@ -15612,9 +15654,8 @@
// Return the buffer to camera framework.
pthread_mutex_lock(&mMutex);
handleBufferWithLock(frameworkOutputBuffer, result->requestId);
- pthread_mutex_unlock(&mMutex);
-
channel->unregisterBuffer(outputBufferDef.get());
+ pthread_mutex_unlock(&mMutex);
}
}
@@ -15792,6 +15833,11 @@
shutters = &mShutters;
}
+ if (shutter->second.ready) {
+ // If shutter is already ready, don't update timestamp again.
+ return;
+ }
+
// Make this frame's shutter ready.
shutter->second.ready = true;
shutter->second.timestamp = timestamp;
diff --git a/msm8998/QCamera2/HAL3/QCamera3HWI.h b/msm8998/QCamera2/HAL3/QCamera3HWI.h
index 52cbac0..4eaf8a8 100644
--- a/msm8998/QCamera2/HAL3/QCamera3HWI.h
+++ b/msm8998/QCamera2/HAL3/QCamera3HWI.h
@@ -607,6 +607,8 @@
bool enableZsl; // If ZSL is enabled.
bool hdrplus; // If this is an HDR+ request.
uint8_t requestedLensShadingMapMode; // Lens shading map mode for this request.
+ uint8_t requestedFaceDetectMode; // Face detect mode for this request.
+ bool partialResultDropped; // Whether partial metadata is dropped.
} PendingRequestInfo;
typedef struct {
uint32_t frame_number;
@@ -696,6 +698,8 @@
QCamera3CropRegionMapper mCropRegionMapper;
// Last lens shading map mode framework requsted.
uint8_t mLastRequestedLensShadingMapMode;
+ // Last face detect mode framework requested.
+ uint8_t mLastRequestedFaceDetectMode;
cam_feature_mask_t mCurrFeatureState;
/* Ldaf calibration data */
@@ -748,6 +752,11 @@
static const QCameraPropMap CDS_MAP[];
pendingRequestIterator erasePendingRequest(pendingRequestIterator i);
+
+ // Remove unrequested metadata due to Easel HDR+.
+ void removeUnrequestedMetadata(pendingRequestIterator requestIter,
+ camera_metadata_t *resultMetadata);
+
//GPU library to read buffer padding details.
void *lib_surface_utils;
int (*LINK_get_surface_pixel_alignment)();
diff --git a/msm8998/QCamera2/HAL3/QCamera3Stream.cpp b/msm8998/QCamera2/HAL3/QCamera3Stream.cpp
index b2f7e2d..85887fd 100644
--- a/msm8998/QCamera2/HAL3/QCamera3Stream.cpp
+++ b/msm8998/QCamera2/HAL3/QCamera3Stream.cpp
@@ -714,10 +714,10 @@
break;
case CAMERA_CMD_TYPE_EXIT:
LOGH("Exit");
+ pme->flushFreeBatchBufQ();
/* flush data buf queue */
pme->mDataQ.flush();
pme->mTimeoutFrameQ.clear();
- pme->flushFreeBatchBufQ();
running = 0;
break;
default:
diff --git a/msm8998/QCamera2/stack/common/cam_types.h b/msm8998/QCamera2/stack/common/cam_types.h
index 8d2dfef..82c5f2c 100644
--- a/msm8998/QCamera2/stack/common/cam_types.h
+++ b/msm8998/QCamera2/stack/common/cam_types.h
@@ -619,6 +619,7 @@
uint32_t op_pixel_clk; // Sensor output rate.
uint32_t num_raw_bits; // Number of bits for RAW. 0 if not RAW.
int64_t timestamp_offset; // Timestamp offset with gyro sensor. 0 if uncalibrated.
+ int64_t timestamp_crop_offset; // Timestamp offset due to crop on top of active array.
} cam_sensor_mode_info_t;
typedef struct {
@@ -998,12 +999,9 @@
} cam_ois_data_t;
typedef struct {
- int64_t frame_sof_timestamp_vsync;
int64_t frame_sof_timestamp_boottime;
int32_t num_ois_sample;
int64_t ois_sample_timestamp_boottime[MAX_OIS_SAMPLE_NUM_PER_FRAME];
- int32_t ois_sample_shift_x[MAX_OIS_SAMPLE_NUM_PER_FRAME];
- int32_t ois_sample_shift_y[MAX_OIS_SAMPLE_NUM_PER_FRAME];
float ois_sample_shift_pixel_x[MAX_OIS_SAMPLE_NUM_PER_FRAME];
float ois_sample_shift_pixel_y[MAX_OIS_SAMPLE_NUM_PER_FRAME];
} cam_frame_ois_info_t;