blob: 9ce8d53f29fe98792e934086dcf3e99252a095c1 [file] [log] [blame]
Thierry Strudel3d639192016-09-09 11:52:26 -07001/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
2*
3* Redistribution and use in source and binary forms, with or without
4* modification, are permitted provided that the following conditions are
5* met:
6* * Redistributions of source code must retain the above copyright
7* notice, this list of conditions and the following disclaimer.
8* * Redistributions in binary form must reproduce the above
9* copyright notice, this list of conditions and the following
10* disclaimer in the documentation and/or other materials provided
11* with the distribution.
12* * Neither the name of The Linux Foundation nor the names of its
13* contributors may be used to endorse or promote products derived
14* from this software without specific prior written permission.
15*
16* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27*
28*/
29
30#define LOG_TAG "QCamera3HWI"
31//#define LOG_NDEBUG 0
32
33#define __STDC_LIMIT_MACROS
34
35// To remove
36#include <cutils/properties.h>
37
38// System dependencies
39#include <dlfcn.h>
40#include <fcntl.h>
41#include <stdio.h>
42#include <stdlib.h>
43#include "utils/Timers.h"
44#include "sys/ioctl.h"
45#include <sync/sync.h>
46#include "gralloc_priv.h"
47
48// Display dependencies
49#include "qdMetaData.h"
50
51// Camera dependencies
52#include "android/QCamera3External.h"
53#include "util/QCameraFlash.h"
54#include "QCamera3HWI.h"
55#include "QCamera3VendorTags.h"
56#include "QCameraTrace.h"
57
58extern "C" {
59#include "mm_camera_dbg.h"
60}
61
using namespace android;

namespace qcamera {

// Convenience accessor: CPU address of buffer INDEX inside a heap-memory object.
#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )

#define EMPTY_PIPELINE_DELAY 2
#define PARTIAL_RESULT_COUNT 2
#define FRAME_SKIP_DELAY 0

// Max pixel values for common sensor bit depths.
#define MAX_VALUE_8BIT ((1<<8)-1)
#define MAX_VALUE_10BIT ((1<<10)-1)
#define MAX_VALUE_12BIT ((1<<12)-1)

#define VIDEO_4K_WIDTH 3840
#define VIDEO_4K_HEIGHT 2160

// Electronic image stabilization is only applied up to this resolution.
#define MAX_EIS_WIDTH 1920
#define MAX_EIS_HEIGHT 1080

// Per-configuration stream-count limits enforced by this HAL.
#define MAX_RAW_STREAMS 1
#define MAX_STALLING_STREAMS 1
#define MAX_PROCESSED_STREAMS 3
/* Batch mode is enabled only if FPS set is equal to or greater than this */
#define MIN_FPS_FOR_BATCH_MODE (120)
#define PREVIEW_FPS_FOR_HFR (30)
#define DEFAULT_VIDEO_FPS (30.0)
#define MAX_HFR_BATCH_SIZE (8)
// ANDROID_CONTROL_A{E,F,WB}_REGIONS is (xmin, ymin, xmax, ymax, weight).
#define REGIONS_TUPLE_COUNT 5
#define HDR_PLUS_PERF_TIME_OUT  (7000) // milliseconds
#define BURST_REPROCESS_PERF_TIME_OUT (1000) // milliseconds
// Set a threshold for detection of missing buffers //seconds
#define MISSING_REQUEST_BUF_TIMEOUT 3
#define FLUSH_TIMEOUT 3
// Element count of a statically sized lookup table.
#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))

// Default post-processing feature mask applied to HAL3 processed streams.
#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3 ( CAM_QCOM_FEATURE_DENOISE2D |\
                                            CAM_QCOM_FEATURE_CROP |\
                                            CAM_QCOM_FEATURE_ROTATION |\
                                            CAM_QCOM_FEATURE_SHARPNESS |\
                                            CAM_QCOM_FEATURE_SCALE |\
                                            CAM_QCOM_FEATURE_CAC |\
                                            CAM_QCOM_FEATURE_CDS )

#define TIMEOUT_NEVER -1

// Per-sensor capability/static-metadata caches, shared across HAL instances.
cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
extern pthread_mutex_t gCamLock;                 // guards the session bookkeeping below
volatile uint32_t gCamHal3LogLevel = 1;
extern uint8_t gNumCameraSessions;               // open-session count (see openCamera/closeCamera)

// Maps the persist.camera CDS property strings to mm-camera CDS modes.
const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
    {"On", CAM_CDS_MODE_ON},
    {"Off", CAM_CDS_MODE_OFF},
    {"Auto",CAM_CDS_MODE_AUTO}
};
119
// Android framework enum -> mm-camera enum translation tables.
// Consumed via METADATA_MAP_SIZE() lookups when converting capture-request
// settings to backend parameters (and back for capture results).

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_effect_mode_t,
        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,       CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_awb_mode_t,
        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_scene_mode_t,
        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    // STEADYPHOTO is backed by the antishake scene mode on this backend.
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE}
};

// NOTE: AF_MODE_OFF appears twice (OFF and FIXED both map to it); when
// translating HAL -> Android the first match wins, so keep OFF first.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_af_mode_t,
        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_color_correction_aberration_mode_t,
        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_antibanding_mode_t,
        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};
200
// AE mode -> flash behavior. Both plain ON and OFF disable the flash; the
// AUTO_FLASH and AUTO_FLASH_REDEYE variants both select backend auto flash.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_flash_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_statistics_face_detect_mode_t,
        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
            CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
            CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
            CAM_FOCUS_CALIBRATED }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_state_t,
        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
    { ANDROID_LENS_STATE_STATIONARY, CAM_AF_LENS_STATE_STATIONARY},
    { ANDROID_LENS_STATE_MOVING,     CAM_AF_LENS_STATE_MOVING}
};

// Supported JPEG thumbnail sizes as flat (width, height) pairs; the leading
// (0, 0) entry means "no thumbnail", as required by the camera3 spec.
const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             240, 144,
                                             256, 144,
                                             240, 160,
                                             256, 154,
                                             240, 240,
                                             320, 240};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_test_pattern_mode_t,
        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,         CAM_TEST_PATTERN_OFF },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR, CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,  CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,         CAM_TEST_PATTERN_PN9 },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1,     CAM_TEST_PATTERN_CUSTOM1},
};
264
/* Since there is no mapping for all the options some Android enum are not listed.
 * Also, the order in this list is important because while mapping from HAL to Android it will
 * traverse from lower to higher index which means that for HAL values that are map to different
 * Android values, the traverse logic will select the first one found.
 */
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_reference_illuminant1_t,
        cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT,            CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT,   CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A,             CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55,                    CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65,                    CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75,                    CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50,                    CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN,    CAM_AWB_CUSTOM_A},
    // Entries below reuse HAL illuminants already claimed above; per the note
    // at the top of this table, the earlier entry wins for HAL -> Android.
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT,               CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN,               CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER,           CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER,         CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE,                  CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT,  CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT,      CAM_AWB_COLD_FLO},
};

// Requested video fps -> backend HFR mode.
const QCamera3HardwareInterface::QCameraMap<
        int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
    { 60,  CAM_HFR_MODE_60FPS},
    { 90,  CAM_HFR_MODE_90FPS},
    { 120, CAM_HFR_MODE_120FPS},
    { 150, CAM_HFR_MODE_150FPS},
    { 180, CAM_HFR_MODE_180FPS},
    { 210, CAM_HFR_MODE_210FPS},
    { 240, CAM_HFR_MODE_240FPS},
    { 480, CAM_HFR_MODE_480FPS},
};
302
// camera3_device_ops vtable handed to the framework via mCameraDevice.ops.
// register_stream_buffers and get_metadata_vendor_tag_ops are NULL: both are
// unsupported/deprecated entry points at device API v3.3.
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    .initialize                         = QCamera3HardwareInterface::initialize,
    .configure_streams                  = QCamera3HardwareInterface::configure_streams,
    .register_stream_buffers            = NULL,
    .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
    .process_capture_request            = QCamera3HardwareInterface::process_capture_request,
    .get_metadata_vendor_tag_ops        = NULL,
    .dump                               = QCamera3HardwareInterface::dump,
    .flush                              = QCamera3HardwareInterface::flush,
    .reserved                           = {0},
};

// initialise to some default value
// 0xDEADBEEF marks "no active session"; closeCamera() resets entries to it.
uint32_t QCamera3HardwareInterface::sessionId[] = {0xDEADBEEF, 0xDEADBEEF, 0xDEADBEEF};
317
318/*===========================================================================
319 * FUNCTION : QCamera3HardwareInterface
320 *
321 * DESCRIPTION: constructor of QCamera3HardwareInterface
322 *
323 * PARAMETERS :
324 * @cameraId : camera ID
325 *
326 * RETURN : none
327 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
        const camera_module_callbacks_t *callbacks)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mSupportChannel(NULL),
      mAnalysisChannel(NULL),
      mRawDumpChannel(NULL),
      mDummyBatchChannel(NULL),
      m_perfLock(),
      mCommon(),
      mChannelHandle(0),
      mFirstConfiguration(true),
      mFlush(false),
      mFlushPerf(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mPrevParameters(NULL),
      m_bIsVideo(false),
      m_bIs4KVideo(false),
      m_bEisSupportedSize(false),
      m_bEisEnable(false),
      m_MobicatMask(0),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      mMetaFrameCount(0U),
      mUpdateDebugLevel(false),
      mCallbacks(callbacks),
      mCaptureIntent(0),
      mCacMode(0),
      mBatchSize(0),
      mToBeQueuedVidBufs(0),
      mHFRVideoFps(DEFAULT_VIDEO_FPS),
      mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
      mFirstFrameNumberInBatch(0),
      mNeedSensorRestart(false),
      mLdafCalibExist(false),
      mPowerHintEnabled(false),
      mLastCustIntentFrmNum(-1),
      mState(CLOSED),
      mIsDeviceLinked(false),
      mIsMainCamera(true),
      mLinkedCameraId(0),
      m_pRelCamSyncHeap(NULL),
      m_pRelCamSyncBuf(NULL)
{
    getLogLevel();
    m_perfLock.lock_init();
    mCommon.init(gCamCapability[cameraId]);
    // Publish the camera3_device_t the framework will talk to; this HAL
    // advertises device API v3.3.
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    pthread_cond_init(&mBuffersCond, NULL);

    pthread_cond_init(&mRequestCond, NULL);
    mPendingLiveRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    // Default request templates are built lazily; start with all slots empty.
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

    // Getting system props of different kinds
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.raw.dump", prop, "0");
    mEnableRawDump = atoi(prop);
    if (mEnableRawDump)
        LOGD("Raw dump from Camera HAL enabled");

    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
    memset(mLdafCalib, 0, sizeof(mLdafCalib));

    // TNR (temporal noise reduction) toggles for preview and video streams.
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.preview", prop, "0");
    m_bTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.video", prop, "0");
    m_bTnrVideo = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.avtimer.debug", prop, "0");
    m_debug_avtimer = (uint8_t)atoi(prop);

    //Load and read GPU library.
    // Query the GPU's preferred pixel alignment; fall back to 32-byte padding
    // if libadreno_utils.so or the symbol is unavailable. Only the queried
    // value is retained, so the library handle is closed immediately.
    lib_surface_utils = NULL;
    LINK_get_surface_pixel_alignment = NULL;
    mSurfaceStridePadding = CAM_PAD_TO_32;
    lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
    if (lib_surface_utils) {
        *(void **)&LINK_get_surface_pixel_alignment =
                dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
         if (LINK_get_surface_pixel_alignment) {
             mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
         }
         dlclose(lib_surface_utils);
    }
}
439
440/*===========================================================================
441 * FUNCTION : ~QCamera3HardwareInterface
442 *
443 * DESCRIPTION: destructor of QCamera3HardwareInterface
444 *
445 * PARAMETERS : none
446 *
447 * RETURN : none
448 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    LOGD("E");

    /* Turn off current power hint before acquiring perfLock in case they
     * conflict with each other */
    disablePowerHint();

    m_perfLock.lock_acq();

    // Teardown order below is deliberate: stop every channel first, then
    // stop the bundled channel, then delete the channel objects.

    /* We need to stop all streams before deleting any stream */
    if (mRawDumpChannel) {
        mRawDumpChannel->stop();
    }

    // NOTE: 'camera3_stream_t *' objects are already freed at
    // this stage by the framework
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel) {
            channel->stop();
        }
    }
    if (mSupportChannel)
        mSupportChannel->stop();

    if (mAnalysisChannel) {
        mAnalysisChannel->stop();
    }
    if (mMetadataChannel) {
        mMetadataChannel->stop();
    }
    if (mChannelHandle) {
        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGD("stopping channel %d", mChannelHandle);
    }

    // All channels stopped; now free the channel objects and stream_info
    // records (stream_info_t was malloc'd, hence free()).
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel)
            delete channel;
        free (*it);
    }
    if (mSupportChannel) {
        delete mSupportChannel;
        mSupportChannel = NULL;
    }

    if (mAnalysisChannel) {
        delete mAnalysisChannel;
        mAnalysisChannel = NULL;
    }
    if (mRawDumpChannel) {
        delete mRawDumpChannel;
        mRawDumpChannel = NULL;
    }
    if (mDummyBatchChannel) {
        delete mDummyBatchChannel;
        mDummyBatchChannel = NULL;
    }

    // Picture channel is owned via mStreamInfo above; just drop the alias.
    mPictureChannel = NULL;

    if (mMetadataChannel) {
        delete mMetadataChannel;
        mMetadataChannel = NULL;
    }

    /* Clean up all channels */
    if (mCameraInitialized) {
        if(!mFirstConfiguration){
            //send the last unconfigure
            // An empty CAM_INTF_META_STREAM_INFO tells the backend all
            // streams are gone before parameters are torn down.
            cam_stream_size_info_t stream_config_info;
            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
            stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
            stream_config_info.buffer_info.max_buffers =
                    m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
                    stream_config_info);
            int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
            if (rc < 0) {
                LOGE("set_parms failed for unconfigure");
            }
        }
        deinitParameters();
    }

    if (mChannelHandle) {
        mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGH("deleting channel %d", mChannelHandle);
        mChannelHandle = 0;
    }

    if (mState != CLOSED)
        closeCamera();

    // Drop any bookkeeping for requests that never completed.
    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        req.mPendingBufferList.clear();
    }
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    mPendingReprocessResultList.clear();
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end();) {
        i = erasePendingRequest(i);
    }
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    m_perfLock.lock_rel();
    m_perfLock.lock_deinit();

    pthread_cond_destroy(&mRequestCond);

    pthread_cond_destroy(&mBuffersCond);

    pthread_mutex_destroy(&mMutex);
    LOGD("X");
}
572
573/*===========================================================================
574 * FUNCTION : erasePendingRequest
575 *
576 * DESCRIPTION: function to erase a desired pending request after freeing any
577 * allocated memory
578 *
579 * PARAMETERS :
580 * @i : iterator pointing to pending request to be erased
581 *
582 * RETURN : iterator pointing to the next request
583 *==========================================================================*/
584QCamera3HardwareInterface::pendingRequestIterator
585 QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
586{
587 if (i->input_buffer != NULL) {
588 free(i->input_buffer);
589 i->input_buffer = NULL;
590 }
591 if (i->settings != NULL)
592 free_camera_metadata((camera_metadata_t*)i->settings);
593 return mPendingRequestsList.erase(i);
594}
595
596/*===========================================================================
597 * FUNCTION : camEvtHandle
598 *
599 * DESCRIPTION: Function registered to mm-camera-interface to handle events
600 *
601 * PARAMETERS :
602 * @camera_handle : interface layer camera handle
603 * @evt : ptr to event
604 * @user_data : user data ptr
605 *
606 * RETURN : none
607 *==========================================================================*/
608void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
609 mm_camera_event_t *evt,
610 void *user_data)
611{
612 QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
613 if (obj && evt) {
614 switch(evt->server_event_type) {
615 case CAM_EVENT_TYPE_DAEMON_DIED:
616 pthread_mutex_lock(&obj->mMutex);
617 obj->mState = ERROR;
618 pthread_mutex_unlock(&obj->mMutex);
619 LOGE("Fatal, camera daemon died");
620 break;
621
622 case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
623 LOGD("HAL got request pull from Daemon");
624 pthread_mutex_lock(&obj->mMutex);
625 obj->mWokenUpByDaemon = true;
626 obj->unblockRequestIfNecessary();
627 pthread_mutex_unlock(&obj->mMutex);
628 break;
629
630 default:
631 LOGW("Warning: Unhandled event %d",
632 evt->server_event_type);
633 break;
634 }
635 } else {
636 LOGE("NULL user_data/evt");
637 }
638}
639
640/*===========================================================================
641 * FUNCTION : openCamera
642 *
643 * DESCRIPTION: open camera
644 *
645 * PARAMETERS :
646 * @hw_device : double ptr for camera device struct
647 *
648 * RETURN : int32_t type of status
649 * NO_ERROR -- success
650 * none-zero failure code
651 *==========================================================================*/
652int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
653{
654 int rc = 0;
655 if (mState != CLOSED) {
656 *hw_device = NULL;
657 return PERMISSION_DENIED;
658 }
659
660 m_perfLock.lock_acq();
661 LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
662 mCameraId);
663
664 rc = openCamera();
665 if (rc == 0) {
666 *hw_device = &mCameraDevice.common;
667 } else
668 *hw_device = NULL;
669
670 m_perfLock.lock_rel();
671 LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
672 mCameraId, rc);
673
674 if (rc == NO_ERROR) {
675 mState = OPENED;
676 }
677 return rc;
678}
679
680/*===========================================================================
681 * FUNCTION : openCamera
682 *
683 * DESCRIPTION: open camera
684 *
685 * PARAMETERS : none
686 *
687 * RETURN : int32_t type of status
688 * NO_ERROR -- success
689 * none-zero failure code
690 *==========================================================================*/
691int QCamera3HardwareInterface::openCamera()
692{
693 int rc = 0;
694 char value[PROPERTY_VALUE_MAX];
695
696 KPI_ATRACE_CALL();
697 if (mCameraHandle) {
698 LOGE("Failure: Camera already opened");
699 return ALREADY_EXISTS;
700 }
701
702 rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
703 if (rc < 0) {
704 LOGE("Failed to reserve flash for camera id: %d",
705 mCameraId);
706 return UNKNOWN_ERROR;
707 }
708
709 rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
710 if (rc) {
711 LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
712 return rc;
713 }
714
715 if (!mCameraHandle) {
716 LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
717 return -ENODEV;
718 }
719
720 rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
721 camEvtHandle, (void *)this);
722
723 if (rc < 0) {
724 LOGE("Error, failed to register event callback");
725 /* Not closing camera here since it is already handled in destructor */
726 return FAILED_TRANSACTION;
727 }
728
729 mExifParams.debug_params =
730 (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
731 if (mExifParams.debug_params) {
732 memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
733 } else {
734 LOGE("Out of Memory. Allocation failed for 3A debug exif params");
735 return NO_MEMORY;
736 }
737 mFirstConfiguration = true;
738
739 //Notify display HAL that a camera session is active.
740 //But avoid calling the same during bootup because camera service might open/close
741 //cameras at boot time during its initialization and display service will also internally
742 //wait for camera service to initialize first while calling this display API, resulting in a
743 //deadlock situation. Since boot time camera open/close calls are made only to fetch
744 //capabilities, no need of this display bw optimization.
745 //Use "service.bootanim.exit" property to know boot status.
746 property_get("service.bootanim.exit", value, "0");
747 if (atoi(value) == 1) {
748 pthread_mutex_lock(&gCamLock);
749 if (gNumCameraSessions++ == 0) {
750 setCameraLaunchStatus(true);
751 }
752 pthread_mutex_unlock(&gCamLock);
753 }
754
755 //fill the session id needed while linking dual cam
756 pthread_mutex_lock(&gCamLock);
757 rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
758 &sessionId[mCameraId]);
759 pthread_mutex_unlock(&gCamLock);
760
761 if (rc < 0) {
762 LOGE("Error, failed to get sessiion id");
763 return UNKNOWN_ERROR;
764 } else {
765 //Allocate related cam sync buffer
766 //this is needed for the payload that goes along with bundling cmd for related
767 //camera use cases
768 m_pRelCamSyncHeap = new QCamera3HeapMemory(1);
769 rc = m_pRelCamSyncHeap->allocate(sizeof(cam_sync_related_sensors_event_info_t));
770 if(rc != OK) {
771 rc = NO_MEMORY;
772 LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
773 return NO_MEMORY;
774 }
775
776 //Map memory for related cam sync buffer
777 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
778 CAM_MAPPING_BUF_TYPE_SYNC_RELATED_SENSORS_BUF,
779 m_pRelCamSyncHeap->getFd(0),
780 sizeof(cam_sync_related_sensors_event_info_t),
781 m_pRelCamSyncHeap->getPtr(0));
782 if(rc < 0) {
783 LOGE("Dualcam: failed to map Related cam sync buffer");
784 rc = FAILED_TRANSACTION;
785 return NO_MEMORY;
786 }
787 m_pRelCamSyncBuf =
788 (cam_sync_related_sensors_event_info_t*) DATA_PTR(m_pRelCamSyncHeap,0);
789 }
790
791 LOGH("mCameraId=%d",mCameraId);
792
793 return NO_ERROR;
794}
795
796/*===========================================================================
797 * FUNCTION : closeCamera
798 *
799 * DESCRIPTION: close camera
800 *
801 * PARAMETERS : none
802 *
803 * RETURN : int32_t type of status
804 * NO_ERROR -- success
805 * none-zero failure code
806 *==========================================================================*/
int QCamera3HardwareInterface::closeCamera()
{
    KPI_ATRACE_CALL();
    int rc = NO_ERROR;
    char value[PROPERTY_VALUE_MAX];

    LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
             mCameraId);
    // NOTE(review): mCameraHandle is dereferenced unconditionally here —
    // assumes closeCamera() is only reached after a successful openCamera().
    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
    mCameraHandle = NULL;

    //reset session id to some invalid id
    pthread_mutex_lock(&gCamLock);
    sessionId[mCameraId] = 0xDEADBEEF;
    pthread_mutex_unlock(&gCamLock);

    //Notify display HAL that there is no active camera session
    //but avoid calling the same during bootup. Refer to openCamera
    //for more details.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (--gNumCameraSessions == 0) {
            setCameraLaunchStatus(false);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    // Free the dual-cam sync buffer allocated in openCamera(), if any.
    if (NULL != m_pRelCamSyncHeap) {
        m_pRelCamSyncHeap->deallocate();
        delete m_pRelCamSyncHeap;
        m_pRelCamSyncHeap = NULL;
        m_pRelCamSyncBuf = NULL;
    }

    // Free the 3A debug exif scratch buffer allocated in openCamera().
    if (mExifParams.debug_params) {
        free(mExifParams.debug_params);
        mExifParams.debug_params = NULL;
    }
    // Flash release failure is logged but does not fail the close.
    if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
        LOGW("Failed to release flash for camera id: %d",
                mCameraId);
    }
    mState = CLOSED;
    LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
         mCameraId, rc);
    return rc;
}
855
856/*===========================================================================
857 * FUNCTION : initialize
858 *
859 * DESCRIPTION: Initialize frameworks callback functions
860 *
861 * PARAMETERS :
862 * @callback_ops : callback function to frameworks
863 *
864 * RETURN :
865 *
866 *==========================================================================*/
867int QCamera3HardwareInterface::initialize(
868 const struct camera3_callback_ops *callback_ops)
869{
870 ATRACE_CALL();
871 int rc;
872
873 LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
874 pthread_mutex_lock(&mMutex);
875
876 // Validate current state
877 switch (mState) {
878 case OPENED:
879 /* valid state */
880 break;
881 default:
882 LOGE("Invalid state %d", mState);
883 rc = -ENODEV;
884 goto err1;
885 }
886
887 rc = initParameters();
888 if (rc < 0) {
889 LOGE("initParamters failed %d", rc);
890 goto err1;
891 }
892 mCallbackOps = callback_ops;
893
894 mChannelHandle = mCameraHandle->ops->add_channel(
895 mCameraHandle->camera_handle, NULL, NULL, this);
896 if (mChannelHandle == 0) {
897 LOGE("add_channel failed");
898 rc = -ENOMEM;
899 pthread_mutex_unlock(&mMutex);
900 return rc;
901 }
902
903 pthread_mutex_unlock(&mMutex);
904 mCameraInitialized = true;
905 mState = INITIALIZED;
906 LOGI("X");
907 return 0;
908
909err1:
910 pthread_mutex_unlock(&mMutex);
911 return rc;
912}
913
914/*===========================================================================
915 * FUNCTION : validateStreamDimensions
916 *
917 * DESCRIPTION: Check if the configuration requested are those advertised
918 *
919 * PARAMETERS :
920 * @stream_list : streams to be configured
921 *
922 * RETURN :
923 *
924 *==========================================================================*/
int QCamera3HardwareInterface::validateStreamDimensions(
        camera3_stream_configuration_t *streamList)
{
    int rc = NO_ERROR;
    size_t count = 0;

    camera3_stream_t *inputStream = NULL;
    /*
    * Loop through all streams to find the input stream, if it exists.
    * At most one input stream is permitted per configuration.
    */
    for (size_t i = 0; i< streamList->num_streams; i++) {
        if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
            if (inputStream != NULL) {
                LOGE("Error, Multiple input streams requested");
                return -EINVAL;
            }
            inputStream = streamList->streams[i];
        }
    }
    /*
    * Loop through all streams requested in configuration
    * Check if unsupported sizes have been requested on any of them
    */
    for (size_t j = 0; j < streamList->num_streams; j++) {
        bool sizeFound = false;
        camera3_stream_t *newStream = streamList->streams[j];

        // For 90/270-degree rotations the buffer dimensions are swapped
        // relative to the sensor, so validate the un-rotated size.
        uint32_t rotatedHeight = newStream->height;
        uint32_t rotatedWidth = newStream->width;
        if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
            (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
            rotatedHeight = newStream->width;
            rotatedWidth = newStream->height;
        }

        /*
        * Sizes are different for each type of stream format check against
        * appropriate table.
        */
        switch (newStream->format) {
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
        case HAL_PIXEL_FORMAT_RAW10:
            // RAW streams must exactly match one of the advertised raw
            // dimensions for this sensor.
            count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
                        (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_BLOB:
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
            /* Verify set size against generated sizes table */
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_YCbCr_420_888:
        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
        default:
            // ZSL/bidirectional/input streams at full active array size are
            // accepted immediately; note the break below exits the switch.
            if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
                    || newStream->stream_type == CAMERA3_STREAM_INPUT
                    || IS_USAGE_ZSL(newStream->usage)) {
                if (((int32_t)rotatedWidth ==
                                gCamCapability[mCameraId]->active_array_size.width) &&
                                ((int32_t)rotatedHeight ==
                                gCamCapability[mCameraId]->active_array_size.height)) {
                    sizeFound = true;
                    break;
                }
                /* We could potentially break here to enforce ZSL stream
                 * set from frameworks always is full active array size
                 * but it is not clear from the spec if framework will always
                 * follow that, also we have logic to override to full array
                 * size, so keeping the logic lenient at the moment
                 */
            }
            // All other processed streams validate against the picture
            // sizes table.
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
                    MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                            gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                            ((int32_t)rotatedHeight ==
                            gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        } /* End of switch(newStream->format) */

        /* We error out even if a single stream has unsupported size set */
        if (!sizeFound) {
            LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
                    rotatedWidth, rotatedHeight, newStream->format,
                    gCamCapability[mCameraId]->active_array_size.width,
                    gCamCapability[mCameraId]->active_array_size.height);
            rc = -EINVAL;
            break;
        }
    } /* End of for each stream */
    return rc;
}
1036
1037/*==============================================================================
1038 * FUNCTION : isSupportChannelNeeded
1039 *
 * DESCRIPTION: Simple heuristic func to determine if a support channel is needed
1041 *
1042 * PARAMETERS :
1043 * @stream_list : streams to be configured
1044 * @stream_config_info : the config info for streams to be configured
1045 *
 * RETURN : Boolean true/false decision
1047 *
1048 *==========================================================================*/
1049bool QCamera3HardwareInterface::isSupportChannelNeeded(
1050 camera3_stream_configuration_t *streamList,
1051 cam_stream_size_info_t stream_config_info)
1052{
1053 uint32_t i;
1054 bool pprocRequested = false;
1055 /* Check for conditions where PProc pipeline does not have any streams*/
1056 for (i = 0; i < stream_config_info.num_streams; i++) {
1057 if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
1058 stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
1059 pprocRequested = true;
1060 break;
1061 }
1062 }
1063
1064 if (pprocRequested == false )
1065 return true;
1066
1067 /* Dummy stream needed if only raw or jpeg streams present */
1068 for (i = 0; i < streamList->num_streams; i++) {
1069 switch(streamList->streams[i]->format) {
1070 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1071 case HAL_PIXEL_FORMAT_RAW10:
1072 case HAL_PIXEL_FORMAT_RAW16:
1073 case HAL_PIXEL_FORMAT_BLOB:
1074 break;
1075 default:
1076 return false;
1077 }
1078 }
1079 return true;
1080}
1081
1082/*==============================================================================
1083 * FUNCTION : getSensorOutputSize
1084 *
 * DESCRIPTION: Get sensor output size based on current stream configuration
1086 *
1087 * PARAMETERS :
1088 * @sensor_dim : sensor output dimension (output)
1089 *
1090 * RETURN : int32_t type of status
1091 * NO_ERROR -- success
 *              non-zero failure code
1093 *
1094 *==========================================================================*/
1095int32_t QCamera3HardwareInterface::getSensorOutputSize(cam_dimension_t &sensor_dim)
1096{
1097 int32_t rc = NO_ERROR;
1098
1099 cam_dimension_t max_dim = {0, 0};
1100 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
1101 if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
1102 max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
1103 if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
1104 max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
1105 }
1106
1107 clear_metadata_buffer(mParameters);
1108
1109 rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
1110 max_dim);
1111 if (rc != NO_ERROR) {
1112 LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
1113 return rc;
1114 }
1115
1116 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
1117 if (rc != NO_ERROR) {
1118 LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
1119 return rc;
1120 }
1121
1122 clear_metadata_buffer(mParameters);
1123 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_RAW_DIMENSION);
1124
1125 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1126 mParameters);
1127 if (rc != NO_ERROR) {
1128 LOGE("Failed to get CAM_INTF_PARM_RAW_DIMENSION");
1129 return rc;
1130 }
1131
1132 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_RAW_DIMENSION, sensor_dim);
1133 LOGH("sensor output dimension = %d x %d", sensor_dim.width, sensor_dim.height);
1134
1135 return rc;
1136}
1137
1138/*==============================================================================
1139 * FUNCTION : enablePowerHint
1140 *
1141 * DESCRIPTION: enable single powerhint for preview and different video modes.
1142 *
1143 * PARAMETERS :
1144 *
1145 * RETURN : NULL
1146 *
1147 *==========================================================================*/
1148void QCamera3HardwareInterface::enablePowerHint()
1149{
1150 if (!mPowerHintEnabled) {
1151 m_perfLock.powerHint(POWER_HINT_VIDEO_ENCODE, true);
1152 mPowerHintEnabled = true;
1153 }
1154}
1155
1156/*==============================================================================
1157 * FUNCTION : disablePowerHint
1158 *
1159 * DESCRIPTION: disable current powerhint.
1160 *
1161 * PARAMETERS :
1162 *
1163 * RETURN : NULL
1164 *
1165 *==========================================================================*/
1166void QCamera3HardwareInterface::disablePowerHint()
1167{
1168 if (mPowerHintEnabled) {
1169 m_perfLock.powerHint(POWER_HINT_VIDEO_ENCODE, false);
1170 mPowerHintEnabled = false;
1171 }
1172}
1173
1174/*==============================================================================
1175 * FUNCTION : addToPPFeatureMask
1176 *
1177 * DESCRIPTION: add additional features to pp feature mask based on
1178 * stream type and usecase
1179 *
1180 * PARAMETERS :
1181 * @stream_format : stream type for feature mask
1182 * @stream_idx : stream idx within postprocess_mask list to change
1183 *
1184 * RETURN : NULL
1185 *
1186 *==========================================================================*/
1187void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
1188 uint32_t stream_idx)
1189{
1190 char feature_mask_value[PROPERTY_VALUE_MAX];
1191 cam_feature_mask_t feature_mask;
1192 int args_converted;
1193 int property_len;
1194
1195 /* Get feature mask from property */
1196 property_len = property_get("persist.camera.hal3.feature",
1197 feature_mask_value, "0");
1198 if ((property_len > 2) && (feature_mask_value[0] == '0') &&
1199 (feature_mask_value[1] == 'x')) {
1200 args_converted = sscanf(feature_mask_value, "0x%llx", &feature_mask);
1201 } else {
1202 args_converted = sscanf(feature_mask_value, "%lld", &feature_mask);
1203 }
1204 if (1 != args_converted) {
1205 feature_mask = 0;
1206 LOGE("Wrong feature mask %s", feature_mask_value);
1207 return;
1208 }
1209
1210 switch (stream_format) {
1211 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
1212 /* Add LLVD to pp feature mask only if video hint is enabled */
1213 if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
1214 mStreamConfigInfo.postprocess_mask[stream_idx]
1215 |= CAM_QTI_FEATURE_SW_TNR;
1216 LOGH("Added SW TNR to pp feature mask");
1217 } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
1218 mStreamConfigInfo.postprocess_mask[stream_idx]
1219 |= CAM_QCOM_FEATURE_LLVD;
1220 LOGH("Added LLVD SeeMore to pp feature mask");
1221 }
1222 break;
1223 }
1224 default:
1225 break;
1226 }
1227 LOGD("PP feature mask %llx",
1228 mStreamConfigInfo.postprocess_mask[stream_idx]);
1229}
1230
1231/*==============================================================================
1232 * FUNCTION : updateFpsInPreviewBuffer
1233 *
1234 * DESCRIPTION: update FPS information in preview buffer.
1235 *
1236 * PARAMETERS :
1237 * @metadata : pointer to metadata buffer
1238 * @frame_number: frame_number to look for in pending buffer list
1239 *
1240 * RETURN : None
1241 *
1242 *==========================================================================*/
1243void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
1244 uint32_t frame_number)
1245{
1246 // Mark all pending buffers for this particular request
1247 // with corresponding framerate information
1248 for (List<PendingBuffersInRequest>::iterator req =
1249 mPendingBuffersMap.mPendingBuffersInRequest.begin();
1250 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1251 for(List<PendingBufferInfo>::iterator j =
1252 req->mPendingBufferList.begin();
1253 j != req->mPendingBufferList.end(); j++) {
1254 QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
1255 if ((req->frame_number == frame_number) &&
1256 (channel->getStreamTypeMask() &
1257 (1U << CAM_STREAM_TYPE_PREVIEW))) {
1258 IF_META_AVAILABLE(cam_fps_range_t, float_range,
1259 CAM_INTF_PARM_FPS_RANGE, metadata) {
1260 typeof (MetaData_t::refreshrate) cameraFps = float_range->max_fps;
1261 struct private_handle_t *priv_handle =
1262 (struct private_handle_t *)(*(j->buffer));
1263 setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
1264 }
1265 }
1266 }
1267 }
1268}
1269
1270/*===========================================================================
1271 * FUNCTION : configureStreams
1272 *
1273 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1274 * and output streams.
1275 *
1276 * PARAMETERS :
1277 * @stream_list : streams to be configured
1278 *
1279 * RETURN :
1280 *
1281 *==========================================================================*/
1282int QCamera3HardwareInterface::configureStreams(
1283 camera3_stream_configuration_t *streamList)
1284{
1285 ATRACE_CALL();
1286 int rc = 0;
1287
1288 // Acquire perfLock before configure streams
1289 m_perfLock.lock_acq();
1290 rc = configureStreamsPerfLocked(streamList);
1291 m_perfLock.lock_rel();
1292
1293 return rc;
1294}
1295
1296/*===========================================================================
1297 * FUNCTION : configureStreamsPerfLocked
1298 *
1299 * DESCRIPTION: configureStreams while perfLock is held.
1300 *
1301 * PARAMETERS :
1302 * @stream_list : streams to be configured
1303 *
1304 * RETURN : int32_t type of status
1305 * NO_ERROR -- success
 *              non-zero failure code
1307 *==========================================================================*/
1308int QCamera3HardwareInterface::configureStreamsPerfLocked(
1309 camera3_stream_configuration_t *streamList)
1310{
1311 ATRACE_CALL();
1312 int rc = 0;
1313
1314 // Sanity check stream_list
1315 if (streamList == NULL) {
1316 LOGE("NULL stream configuration");
1317 return BAD_VALUE;
1318 }
1319 if (streamList->streams == NULL) {
1320 LOGE("NULL stream list");
1321 return BAD_VALUE;
1322 }
1323
1324 if (streamList->num_streams < 1) {
1325 LOGE("Bad number of streams requested: %d",
1326 streamList->num_streams);
1327 return BAD_VALUE;
1328 }
1329
1330 if (streamList->num_streams >= MAX_NUM_STREAMS) {
1331 LOGE("Maximum number of streams %d exceeded: %d",
1332 MAX_NUM_STREAMS, streamList->num_streams);
1333 return BAD_VALUE;
1334 }
1335
1336 mOpMode = streamList->operation_mode;
1337 LOGD("mOpMode: %d", mOpMode);
1338
1339 /* first invalidate all the steams in the mStreamList
1340 * if they appear again, they will be validated */
1341 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1342 it != mStreamInfo.end(); it++) {
1343 QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1344 if (channel) {
1345 channel->stop();
1346 }
1347 (*it)->status = INVALID;
1348 }
1349
1350 if (mRawDumpChannel) {
1351 mRawDumpChannel->stop();
1352 delete mRawDumpChannel;
1353 mRawDumpChannel = NULL;
1354 }
1355
1356 if (mSupportChannel)
1357 mSupportChannel->stop();
1358
1359 if (mAnalysisChannel) {
1360 mAnalysisChannel->stop();
1361 }
1362 if (mMetadataChannel) {
1363 /* If content of mStreamInfo is not 0, there is metadata stream */
1364 mMetadataChannel->stop();
1365 }
1366 if (mChannelHandle) {
1367 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
1368 mChannelHandle);
1369 LOGD("stopping channel %d", mChannelHandle);
1370 }
1371
1372 pthread_mutex_lock(&mMutex);
1373
1374 // Check state
1375 switch (mState) {
1376 case INITIALIZED:
1377 case CONFIGURED:
1378 case STARTED:
1379 /* valid state */
1380 break;
1381 default:
1382 LOGE("Invalid state %d", mState);
1383 pthread_mutex_unlock(&mMutex);
1384 return -ENODEV;
1385 }
1386
1387 /* Check whether we have video stream */
1388 m_bIs4KVideo = false;
1389 m_bIsVideo = false;
1390 m_bEisSupportedSize = false;
1391 m_bTnrEnabled = false;
1392 bool isZsl = false;
1393 uint32_t videoWidth = 0U;
1394 uint32_t videoHeight = 0U;
1395 size_t rawStreamCnt = 0;
1396 size_t stallStreamCnt = 0;
1397 size_t processedStreamCnt = 0;
1398 // Number of streams on ISP encoder path
1399 size_t numStreamsOnEncoder = 0;
1400 size_t numYuv888OnEncoder = 0;
1401 bool bYuv888OverrideJpeg = false;
1402 cam_dimension_t largeYuv888Size = {0, 0};
1403 cam_dimension_t maxViewfinderSize = {0, 0};
1404 bool bJpegExceeds4K = false;
1405 bool bJpegOnEncoder = false;
1406 bool bUseCommonFeatureMask = false;
1407 cam_feature_mask_t commonFeatureMask = 0;
1408 bool bSmallJpegSize = false;
1409 uint32_t width_ratio;
1410 uint32_t height_ratio;
1411 maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1412 camera3_stream_t *inputStream = NULL;
1413 bool isJpeg = false;
1414 cam_dimension_t jpegSize = {0, 0};
1415
1416 cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
1417
1418 /*EIS configuration*/
1419 bool eisSupported = false;
1420 bool oisSupported = false;
1421 int32_t margin_index = -1;
1422 uint8_t eis_prop_set;
1423 uint32_t maxEisWidth = 0;
1424 uint32_t maxEisHeight = 0;
1425
1426 memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1427
1428 size_t count = IS_TYPE_MAX;
1429 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1430 for (size_t i = 0; i < count; i++) {
1431 if ((gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
1432 (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0))
1433 {
1434 eisSupported = true;
1435 margin_index = (int32_t)i;
1436 break;
1437 }
1438 }
1439
1440 count = CAM_OPT_STAB_MAX;
1441 count = MIN(gCamCapability[mCameraId]->optical_stab_modes_count, count);
1442 for (size_t i = 0; i < count; i++) {
1443 if (gCamCapability[mCameraId]->optical_stab_modes[i] == CAM_OPT_STAB_ON) {
1444 oisSupported = true;
1445 break;
1446 }
1447 }
1448
1449 if (eisSupported) {
1450 maxEisWidth = MAX_EIS_WIDTH;
1451 maxEisHeight = MAX_EIS_HEIGHT;
1452 }
1453
1454 /* EIS setprop control */
1455 char eis_prop[PROPERTY_VALUE_MAX];
1456 memset(eis_prop, 0, sizeof(eis_prop));
1457 property_get("persist.camera.eis.enable", eis_prop, "0");
1458 eis_prop_set = (uint8_t)atoi(eis_prop);
1459
1460 m_bEisEnable = eis_prop_set && (!oisSupported && eisSupported) &&
1461 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
1462
1463 /* stream configurations */
1464 for (size_t i = 0; i < streamList->num_streams; i++) {
1465 camera3_stream_t *newStream = streamList->streams[i];
1466 LOGI("stream[%d] type = %d, format = %d, width = %d, "
1467 "height = %d, rotation = %d, usage = 0x%x",
1468 i, newStream->stream_type, newStream->format,
1469 newStream->width, newStream->height, newStream->rotation,
1470 newStream->usage);
1471 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1472 newStream->stream_type == CAMERA3_STREAM_INPUT){
1473 isZsl = true;
1474 }
1475 if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1476 inputStream = newStream;
1477 }
1478
1479 if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
1480 isJpeg = true;
1481 jpegSize.width = newStream->width;
1482 jpegSize.height = newStream->height;
1483 if (newStream->width > VIDEO_4K_WIDTH ||
1484 newStream->height > VIDEO_4K_HEIGHT)
1485 bJpegExceeds4K = true;
1486 }
1487
1488 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1489 (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1490 m_bIsVideo = true;
1491 videoWidth = newStream->width;
1492 videoHeight = newStream->height;
1493 if ((VIDEO_4K_WIDTH <= newStream->width) &&
1494 (VIDEO_4K_HEIGHT <= newStream->height)) {
1495 m_bIs4KVideo = true;
1496 }
1497 m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
1498 (newStream->height <= maxEisHeight);
1499 }
1500 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1501 newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
1502 switch (newStream->format) {
1503 case HAL_PIXEL_FORMAT_BLOB:
1504 stallStreamCnt++;
1505 if (isOnEncoder(maxViewfinderSize, newStream->width,
1506 newStream->height)) {
1507 numStreamsOnEncoder++;
1508 bJpegOnEncoder = true;
1509 }
1510 width_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.width,
1511 newStream->width);
1512 height_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.height,
1513 newStream->height);;
1514 FATAL_IF(gCamCapability[mCameraId]->max_downscale_factor == 0,
1515 "FATAL: max_downscale_factor cannot be zero and so assert");
1516 if ( (width_ratio > gCamCapability[mCameraId]->max_downscale_factor) ||
1517 (height_ratio > gCamCapability[mCameraId]->max_downscale_factor)) {
1518 LOGH("Setting small jpeg size flag to true");
1519 bSmallJpegSize = true;
1520 }
1521 break;
1522 case HAL_PIXEL_FORMAT_RAW10:
1523 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1524 case HAL_PIXEL_FORMAT_RAW16:
1525 rawStreamCnt++;
1526 break;
1527 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1528 processedStreamCnt++;
1529 if (isOnEncoder(maxViewfinderSize, newStream->width,
1530 newStream->height)) {
1531 if (newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL &&
1532 !IS_USAGE_ZSL(newStream->usage)) {
1533 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1534 }
1535 numStreamsOnEncoder++;
1536 }
1537 break;
1538 case HAL_PIXEL_FORMAT_YCbCr_420_888:
1539 processedStreamCnt++;
1540 if (isOnEncoder(maxViewfinderSize, newStream->width,
1541 newStream->height)) {
1542 // If Yuv888 size is not greater than 4K, set feature mask
1543 // to SUPERSET so that it support concurrent request on
1544 // YUV and JPEG.
1545 if (newStream->width <= VIDEO_4K_WIDTH &&
1546 newStream->height <= VIDEO_4K_HEIGHT) {
1547 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1548 }
1549 numStreamsOnEncoder++;
1550 numYuv888OnEncoder++;
1551 largeYuv888Size.width = newStream->width;
1552 largeYuv888Size.height = newStream->height;
1553 }
1554 break;
1555 default:
1556 processedStreamCnt++;
1557 if (isOnEncoder(maxViewfinderSize, newStream->width,
1558 newStream->height)) {
1559 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1560 numStreamsOnEncoder++;
1561 }
1562 break;
1563 }
1564
1565 }
1566 }
1567
1568 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
1569 gCamCapability[mCameraId]->position == CAM_POSITION_FRONT_AUX ||
1570 !m_bIsVideo) {
1571 m_bEisEnable = false;
1572 }
1573
1574 /* Logic to enable/disable TNR based on specific config size/etc.*/
1575 if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
1576 ((videoWidth == 1920 && videoHeight == 1080) ||
1577 (videoWidth == 1280 && videoHeight == 720)) &&
1578 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
1579 m_bTnrEnabled = true;
1580
1581 /* Check if num_streams is sane */
1582 if (stallStreamCnt > MAX_STALLING_STREAMS ||
1583 rawStreamCnt > MAX_RAW_STREAMS ||
1584 processedStreamCnt > MAX_PROCESSED_STREAMS) {
1585 LOGE("Invalid stream configu: stall: %d, raw: %d, processed %d",
1586 stallStreamCnt, rawStreamCnt, processedStreamCnt);
1587 pthread_mutex_unlock(&mMutex);
1588 return -EINVAL;
1589 }
1590 /* Check whether we have zsl stream or 4k video case */
1591 if (isZsl && m_bIsVideo) {
1592 LOGE("Currently invalid configuration ZSL&Video!");
1593 pthread_mutex_unlock(&mMutex);
1594 return -EINVAL;
1595 }
1596 /* Check if stream sizes are sane */
1597 if (numStreamsOnEncoder > 2) {
1598 LOGE("Number of streams on ISP encoder path exceeds limits of 2");
1599 pthread_mutex_unlock(&mMutex);
1600 return -EINVAL;
1601 } else if (1 < numStreamsOnEncoder){
1602 bUseCommonFeatureMask = true;
1603 LOGH("Multiple streams above max viewfinder size, common mask needed");
1604 }
1605
1606 /* Check if BLOB size is greater than 4k in 4k recording case */
1607 if (m_bIs4KVideo && bJpegExceeds4K) {
1608 LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
1609 pthread_mutex_unlock(&mMutex);
1610 return -EINVAL;
1611 }
1612
1613 // When JPEG and preview streams share VFE output, CPP will not apply CAC2
1614 // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
1615 // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
1616 // is not true. Otherwise testMandatoryOutputCombinations will fail with following
1617 // configurations:
1618 // {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
1619 // {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
1620 // (These two configurations will not have CAC2 enabled even in HQ modes.)
1621 if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
1622 ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
1623 __func__);
1624 pthread_mutex_unlock(&mMutex);
1625 return -EINVAL;
1626 }
1627
1628 // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
1629 // the YUV stream's size is greater or equal to the JPEG size, set common
1630 // postprocess mask to NONE, so that we can take advantage of postproc bypass.
1631 if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
1632 jpegSize.width, jpegSize.height) &&
1633 largeYuv888Size.width > jpegSize.width &&
1634 largeYuv888Size.height > jpegSize.height) {
1635 bYuv888OverrideJpeg = true;
1636 } else if (!isJpeg && numStreamsOnEncoder > 1) {
1637 commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1638 }
1639
1640 LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %llx",
1641 maxViewfinderSize.width, maxViewfinderSize.height, isZsl, bUseCommonFeatureMask,
1642 commonFeatureMask);
1643 LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
1644 numStreamsOnEncoder, processedStreamCnt, stallStreamCnt, bSmallJpegSize);
1645
1646 rc = validateStreamDimensions(streamList);
1647 if (rc == NO_ERROR) {
1648 rc = validateStreamRotations(streamList);
1649 }
1650 if (rc != NO_ERROR) {
1651 LOGE("Invalid stream configuration requested!");
1652 pthread_mutex_unlock(&mMutex);
1653 return rc;
1654 }
1655
1656 camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
1657 for (size_t i = 0; i < streamList->num_streams; i++) {
1658 camera3_stream_t *newStream = streamList->streams[i];
1659 LOGH("newStream type = %d, stream format = %d "
1660 "stream size : %d x %d, stream rotation = %d",
1661 newStream->stream_type, newStream->format,
1662 newStream->width, newStream->height, newStream->rotation);
1663 //if the stream is in the mStreamList validate it
1664 bool stream_exists = false;
1665 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
1666 it != mStreamInfo.end(); it++) {
1667 if ((*it)->stream == newStream) {
1668 QCamera3ProcessingChannel *channel =
1669 (QCamera3ProcessingChannel*)(*it)->stream->priv;
1670 stream_exists = true;
1671 if (channel)
1672 delete channel;
1673 (*it)->status = VALID;
1674 (*it)->stream->priv = NULL;
1675 (*it)->channel = NULL;
1676 }
1677 }
1678 if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
1679 //new stream
1680 stream_info_t* stream_info;
1681 stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
1682 if (!stream_info) {
1683 LOGE("Could not allocate stream info");
1684 rc = -ENOMEM;
1685 pthread_mutex_unlock(&mMutex);
1686 return rc;
1687 }
1688 stream_info->stream = newStream;
1689 stream_info->status = VALID;
1690 stream_info->channel = NULL;
1691 mStreamInfo.push_back(stream_info);
1692 }
1693 /* Covers Opaque ZSL and API1 F/W ZSL */
1694 if (IS_USAGE_ZSL(newStream->usage)
1695 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
1696 if (zslStream != NULL) {
1697 LOGE("Multiple input/reprocess streams requested!");
1698 pthread_mutex_unlock(&mMutex);
1699 return BAD_VALUE;
1700 }
1701 zslStream = newStream;
1702 }
1703 /* Covers YUV reprocess */
1704 if (inputStream != NULL) {
1705 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
1706 && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
1707 && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
1708 && inputStream->width == newStream->width
1709 && inputStream->height == newStream->height) {
1710 if (zslStream != NULL) {
1711 /* This scenario indicates multiple YUV streams with same size
1712 * as input stream have been requested, since zsl stream handle
1713 * is solely use for the purpose of overriding the size of streams
1714 * which share h/w streams we will just make a guess here as to
1715 * which of the stream is a ZSL stream, this will be refactored
1716 * once we make generic logic for streams sharing encoder output
1717 */
1718 LOGH("Warning, Multiple ip/reprocess streams requested!");
1719 }
1720 zslStream = newStream;
1721 }
1722 }
1723 }
1724
1725 /* If a zsl stream is set, we know that we have configured at least one input or
1726 bidirectional stream */
1727 if (NULL != zslStream) {
1728 mInputStreamInfo.dim.width = (int32_t)zslStream->width;
1729 mInputStreamInfo.dim.height = (int32_t)zslStream->height;
1730 mInputStreamInfo.format = zslStream->format;
1731 mInputStreamInfo.usage = zslStream->usage;
1732 LOGD("Input stream configured! %d x %d, format %d, usage %d",
1733 mInputStreamInfo.dim.width,
1734 mInputStreamInfo.dim.height,
1735 mInputStreamInfo.format, mInputStreamInfo.usage);
1736 }
1737
1738 cleanAndSortStreamInfo();
1739 if (mMetadataChannel) {
1740 delete mMetadataChannel;
1741 mMetadataChannel = NULL;
1742 }
1743 if (mSupportChannel) {
1744 delete mSupportChannel;
1745 mSupportChannel = NULL;
1746 }
1747
1748 if (mAnalysisChannel) {
1749 delete mAnalysisChannel;
1750 mAnalysisChannel = NULL;
1751 }
1752
1753 if (mDummyBatchChannel) {
1754 delete mDummyBatchChannel;
1755 mDummyBatchChannel = NULL;
1756 }
1757
1758 //Create metadata channel and initialize it
1759 cam_feature_mask_t metadataFeatureMask = CAM_QCOM_FEATURE_NONE;
1760 setPAAFSupport(metadataFeatureMask, CAM_STREAM_TYPE_METADATA,
1761 gCamCapability[mCameraId]->color_arrangement);
1762 mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
1763 mChannelHandle, mCameraHandle->ops, captureResultCb,
1764 &padding_info, metadataFeatureMask, this);
1765 if (mMetadataChannel == NULL) {
1766 LOGE("failed to allocate metadata channel");
1767 rc = -ENOMEM;
1768 pthread_mutex_unlock(&mMutex);
1769 return rc;
1770 }
1771 rc = mMetadataChannel->initialize(IS_TYPE_NONE);
1772 if (rc < 0) {
1773 LOGE("metadata channel initialization failed");
1774 delete mMetadataChannel;
1775 mMetadataChannel = NULL;
1776 pthread_mutex_unlock(&mMutex);
1777 return rc;
1778 }
1779
1780 // Create analysis stream all the time, even when h/w support is not available
1781 {
1782 cam_feature_mask_t analysisFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1783 setPAAFSupport(analysisFeatureMask, CAM_STREAM_TYPE_ANALYSIS,
1784 gCamCapability[mCameraId]->color_arrangement);
1785 cam_analysis_info_t analysisInfo;
1786 rc = mCommon.getAnalysisInfo(
1787 FALSE,
1788 TRUE,
1789 analysisFeatureMask,
1790 &analysisInfo);
1791 if (rc != NO_ERROR) {
1792 LOGE("getAnalysisInfo failed, ret = %d", rc);
1793 }
1794 if (rc == NO_ERROR) {
1795 mAnalysisChannel = new QCamera3SupportChannel(
1796 mCameraHandle->camera_handle,
1797 mChannelHandle,
1798 mCameraHandle->ops,
1799 &analysisInfo.analysis_padding_info,
1800 analysisFeatureMask,
1801 CAM_STREAM_TYPE_ANALYSIS,
1802 &analysisInfo.analysis_max_res,
1803 (analysisInfo.analysis_format
1804 == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
1805 : CAM_FORMAT_YUV_420_NV21),
1806 analysisInfo.hw_analysis_supported,
1807 gCamCapability[mCameraId]->color_arrangement,
1808 this,
1809 0); // force buffer count to 0
1810 if (!mAnalysisChannel) {
1811 LOGE("H/W Analysis channel cannot be created");
1812 pthread_mutex_unlock(&mMutex);
1813 return -ENOMEM;
1814 }
1815 }
1816 }
1817
1818 bool isRawStreamRequested = false;
1819 memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
1820 /* Allocate channel objects for the requested streams */
1821 for (size_t i = 0; i < streamList->num_streams; i++) {
1822 camera3_stream_t *newStream = streamList->streams[i];
1823 uint32_t stream_usage = newStream->usage;
1824 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
1825 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
1826 struct camera_info *p_info = NULL;
1827 pthread_mutex_lock(&gCamLock);
1828 p_info = get_cam_info(mCameraId, &mStreamConfigInfo.sync_type);
1829 pthread_mutex_unlock(&gCamLock);
1830 if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
1831 || IS_USAGE_ZSL(newStream->usage)) &&
1832 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
1833 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
1834 if (bUseCommonFeatureMask) {
1835 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1836 commonFeatureMask;
1837 } else {
1838 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1839 CAM_QCOM_FEATURE_NONE;
1840 }
1841
1842 } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
1843 LOGH("Input stream configured, reprocess config");
1844 } else {
1845 //for non zsl streams find out the format
1846 switch (newStream->format) {
1847 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
1848 {
1849 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1850 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1851 /* add additional features to pp feature mask */
1852 addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
1853 mStreamConfigInfo.num_streams);
1854
1855 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
1856 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
1857 CAM_STREAM_TYPE_VIDEO;
1858 if (m_bTnrEnabled && m_bTnrVideo) {
1859 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
1860 CAM_QCOM_FEATURE_CPP_TNR;
1861 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
1862 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
1863 ~CAM_QCOM_FEATURE_CDS;
1864 }
1865 } else {
1866 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
1867 CAM_STREAM_TYPE_PREVIEW;
1868 if (m_bTnrEnabled && m_bTnrPreview) {
1869 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
1870 CAM_QCOM_FEATURE_CPP_TNR;
1871 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
1872 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
1873 ~CAM_QCOM_FEATURE_CDS;
1874 }
1875 padding_info.width_padding = mSurfaceStridePadding;
1876 padding_info.height_padding = CAM_PAD_TO_2;
1877 }
1878 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
1879 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
1880 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
1881 newStream->height;
1882 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
1883 newStream->width;
1884 }
1885 }
1886 break;
1887 case HAL_PIXEL_FORMAT_YCbCr_420_888:
1888 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
1889 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
1890 if (bUseCommonFeatureMask)
1891 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1892 commonFeatureMask;
1893 else
1894 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1895 CAM_QCOM_FEATURE_NONE;
1896 } else {
1897 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1898 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1899 }
1900 break;
1901 case HAL_PIXEL_FORMAT_BLOB:
1902 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
1903 // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
1904 if ((m_bIs4KVideo && !isZsl) || (bSmallJpegSize && !isZsl)) {
1905 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1906 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1907 } else {
1908 if (bUseCommonFeatureMask &&
1909 isOnEncoder(maxViewfinderSize, newStream->width,
1910 newStream->height)) {
1911 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
1912 } else {
1913 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
1914 }
1915 }
1916 if (isZsl) {
1917 if (zslStream) {
1918 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
1919 (int32_t)zslStream->width;
1920 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
1921 (int32_t)zslStream->height;
1922 } else {
1923 LOGE("Error, No ZSL stream identified");
1924 pthread_mutex_unlock(&mMutex);
1925 return -EINVAL;
1926 }
1927 } else if (m_bIs4KVideo) {
1928 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)videoWidth;
1929 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)videoHeight;
1930 } else if (bYuv888OverrideJpeg) {
1931 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
1932 (int32_t)largeYuv888Size.width;
1933 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
1934 (int32_t)largeYuv888Size.height;
1935 }
1936 break;
1937 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1938 case HAL_PIXEL_FORMAT_RAW16:
1939 case HAL_PIXEL_FORMAT_RAW10:
1940 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
1941 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
1942 isRawStreamRequested = true;
1943 break;
1944 default:
1945 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
1946 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
1947 break;
1948 }
1949 }
1950
1951 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
1952 (cam_stream_type_t) mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
1953 gCamCapability[mCameraId]->color_arrangement);
1954
1955 if (newStream->priv == NULL) {
1956 //New stream, construct channel
1957 switch (newStream->stream_type) {
1958 case CAMERA3_STREAM_INPUT:
1959 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
1960 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
1961 break;
1962 case CAMERA3_STREAM_BIDIRECTIONAL:
1963 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
1964 GRALLOC_USAGE_HW_CAMERA_WRITE;
1965 break;
1966 case CAMERA3_STREAM_OUTPUT:
1967 /* For video encoding stream, set read/write rarely
1968 * flag so that they may be set to un-cached */
1969 if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
1970 newStream->usage |=
1971 (GRALLOC_USAGE_SW_READ_RARELY |
1972 GRALLOC_USAGE_SW_WRITE_RARELY |
1973 GRALLOC_USAGE_HW_CAMERA_WRITE);
1974 else if (IS_USAGE_ZSL(newStream->usage))
1975 {
1976 LOGD("ZSL usage flag skipping");
1977 }
1978 else if (newStream == zslStream
1979 || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
1980 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
1981 } else
1982 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
1983 break;
1984 default:
1985 LOGE("Invalid stream_type %d", newStream->stream_type);
1986 break;
1987 }
1988
1989 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
1990 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
1991 QCamera3ProcessingChannel *channel = NULL;
1992 switch (newStream->format) {
1993 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1994 if ((newStream->usage &
1995 private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
1996 (streamList->operation_mode ==
1997 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
1998 ) {
1999 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2000 mChannelHandle, mCameraHandle->ops, captureResultCb,
2001 &gCamCapability[mCameraId]->padding_info,
2002 this,
2003 newStream,
2004 (cam_stream_type_t)
2005 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2006 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2007 mMetadataChannel,
2008 0); //heap buffers are not required for HFR video channel
2009 if (channel == NULL) {
2010 LOGE("allocation of channel failed");
2011 pthread_mutex_unlock(&mMutex);
2012 return -ENOMEM;
2013 }
2014 //channel->getNumBuffers() will return 0 here so use
2015 //MAX_INFLIGH_HFR_REQUESTS
2016 newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
2017 newStream->priv = channel;
2018 LOGI("num video buffers in HFR mode: %d",
2019 MAX_INFLIGHT_HFR_REQUESTS);
2020 } else {
2021 /* Copy stream contents in HFR preview only case to create
2022 * dummy batch channel so that sensor streaming is in
2023 * HFR mode */
2024 if (!m_bIsVideo && (streamList->operation_mode ==
2025 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
2026 mDummyBatchStream = *newStream;
2027 }
2028 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2029 mChannelHandle, mCameraHandle->ops, captureResultCb,
2030 &gCamCapability[mCameraId]->padding_info,
2031 this,
2032 newStream,
2033 (cam_stream_type_t)
2034 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2035 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2036 mMetadataChannel,
2037 MAX_INFLIGHT_REQUESTS);
2038 if (channel == NULL) {
2039 LOGE("allocation of channel failed");
2040 pthread_mutex_unlock(&mMutex);
2041 return -ENOMEM;
2042 }
2043 newStream->max_buffers = channel->getNumBuffers();
2044 newStream->priv = channel;
2045 }
2046 break;
2047 case HAL_PIXEL_FORMAT_YCbCr_420_888: {
2048 channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
2049 mChannelHandle,
2050 mCameraHandle->ops, captureResultCb,
2051 &padding_info,
2052 this,
2053 newStream,
2054 (cam_stream_type_t)
2055 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2056 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2057 mMetadataChannel);
2058 if (channel == NULL) {
2059 LOGE("allocation of YUV channel failed");
2060 pthread_mutex_unlock(&mMutex);
2061 return -ENOMEM;
2062 }
2063 newStream->max_buffers = channel->getNumBuffers();
2064 newStream->priv = channel;
2065 break;
2066 }
2067 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2068 case HAL_PIXEL_FORMAT_RAW16:
2069 case HAL_PIXEL_FORMAT_RAW10:
2070 mRawChannel = new QCamera3RawChannel(
2071 mCameraHandle->camera_handle, mChannelHandle,
2072 mCameraHandle->ops, captureResultCb,
2073 &padding_info,
2074 this, newStream,
2075 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2076 mMetadataChannel,
2077 (newStream->format == HAL_PIXEL_FORMAT_RAW16));
2078 if (mRawChannel == NULL) {
2079 LOGE("allocation of raw channel failed");
2080 pthread_mutex_unlock(&mMutex);
2081 return -ENOMEM;
2082 }
2083 newStream->max_buffers = mRawChannel->getNumBuffers();
2084 newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
2085 break;
2086 case HAL_PIXEL_FORMAT_BLOB:
2087 // Max live snapshot inflight buffer is 1. This is to mitigate
2088 // frame drop issues for video snapshot. The more buffers being
2089 // allocated, the more frame drops there are.
2090 mPictureChannel = new QCamera3PicChannel(
2091 mCameraHandle->camera_handle, mChannelHandle,
2092 mCameraHandle->ops, captureResultCb,
2093 &padding_info, this, newStream,
2094 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2095 m_bIs4KVideo, isZsl, mMetadataChannel,
2096 (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
2097 if (mPictureChannel == NULL) {
2098 LOGE("allocation of channel failed");
2099 pthread_mutex_unlock(&mMutex);
2100 return -ENOMEM;
2101 }
2102 newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
2103 newStream->max_buffers = mPictureChannel->getNumBuffers();
2104 mPictureChannel->overrideYuvSize(
2105 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
2106 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
2107 break;
2108
2109 default:
2110 LOGE("not a supported format 0x%x", newStream->format);
2111 break;
2112 }
2113 } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
2114 newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
2115 } else {
2116 LOGE("Error, Unknown stream type");
2117 pthread_mutex_unlock(&mMutex);
2118 return -EINVAL;
2119 }
2120
2121 QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
2122 if (channel != NULL && channel->isUBWCEnabled()) {
2123 cam_format_t fmt = channel->getStreamDefaultFormat(
2124 mStreamConfigInfo.type[mStreamConfigInfo.num_streams]);
2125 if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
2126 newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
2127 }
2128 }
2129
2130 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2131 it != mStreamInfo.end(); it++) {
2132 if ((*it)->stream == newStream) {
2133 (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
2134 break;
2135 }
2136 }
2137 } else {
2138 // Channel already exists for this stream
2139 // Do nothing for now
2140 }
2141 padding_info = gCamCapability[mCameraId]->padding_info;
2142
2143 /* Do not add entries for input stream in metastream info
2144 * since there is no real stream associated with it
2145 */
2146 if (newStream->stream_type != CAMERA3_STREAM_INPUT)
2147 mStreamConfigInfo.num_streams++;
2148 }
2149
2150 //RAW DUMP channel
2151 if (mEnableRawDump && isRawStreamRequested == false){
2152 cam_dimension_t rawDumpSize;
2153 rawDumpSize = getMaxRawSize(mCameraId);
2154 cam_feature_mask_t rawDumpFeatureMask = CAM_QCOM_FEATURE_NONE;
2155 setPAAFSupport(rawDumpFeatureMask,
2156 CAM_STREAM_TYPE_RAW,
2157 gCamCapability[mCameraId]->color_arrangement);
2158 mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
2159 mChannelHandle,
2160 mCameraHandle->ops,
2161 rawDumpSize,
2162 &padding_info,
2163 this, rawDumpFeatureMask);
2164 if (!mRawDumpChannel) {
2165 LOGE("Raw Dump channel cannot be created");
2166 pthread_mutex_unlock(&mMutex);
2167 return -ENOMEM;
2168 }
2169 }
2170
2171
2172 if (mAnalysisChannel) {
2173 cam_analysis_info_t analysisInfo;
2174 memset(&analysisInfo, 0, sizeof(cam_analysis_info_t));
2175 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2176 CAM_STREAM_TYPE_ANALYSIS;
2177 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2178 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2179 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2180 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2181 gCamCapability[mCameraId]->color_arrangement);
2182 rc = mCommon.getAnalysisInfo(FALSE, TRUE,
2183 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2184 &analysisInfo);
2185 if (rc != NO_ERROR) {
2186 LOGE("getAnalysisInfo failed, ret = %d", rc);
2187 pthread_mutex_unlock(&mMutex);
2188 return rc;
2189 }
2190 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2191 analysisInfo.analysis_max_res;
2192 mStreamConfigInfo.num_streams++;
2193 }
2194
2195 if (isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
2196 cam_analysis_info_t supportInfo;
2197 memset(&supportInfo, 0, sizeof(cam_analysis_info_t));
2198 cam_feature_mask_t callbackFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2199 setPAAFSupport(callbackFeatureMask,
2200 CAM_STREAM_TYPE_CALLBACK,
2201 gCamCapability[mCameraId]->color_arrangement);
2202 rc = mCommon.getAnalysisInfo(FALSE, TRUE, callbackFeatureMask, &supportInfo);
2203 if (rc != NO_ERROR) {
2204 LOGE("getAnalysisInfo failed, ret = %d", rc);
2205 pthread_mutex_unlock(&mMutex);
2206 return rc;
2207 }
2208 mSupportChannel = new QCamera3SupportChannel(
2209 mCameraHandle->camera_handle,
2210 mChannelHandle,
2211 mCameraHandle->ops,
2212 &gCamCapability[mCameraId]->padding_info,
2213 callbackFeatureMask,
2214 CAM_STREAM_TYPE_CALLBACK,
2215 &QCamera3SupportChannel::kDim,
2216 CAM_FORMAT_YUV_420_NV21,
2217 supportInfo.hw_analysis_supported,
2218 gCamCapability[mCameraId]->color_arrangement,
2219 this);
2220 if (!mSupportChannel) {
2221 LOGE("dummy channel cannot be created");
2222 pthread_mutex_unlock(&mMutex);
2223 return -ENOMEM;
2224 }
2225 }
2226
2227 if (mSupportChannel) {
2228 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2229 QCamera3SupportChannel::kDim;
2230 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2231 CAM_STREAM_TYPE_CALLBACK;
2232 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2233 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2234 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2235 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2236 gCamCapability[mCameraId]->color_arrangement);
2237 mStreamConfigInfo.num_streams++;
2238 }
2239
2240 if (mRawDumpChannel) {
2241 cam_dimension_t rawSize;
2242 rawSize = getMaxRawSize(mCameraId);
2243 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2244 rawSize;
2245 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2246 CAM_STREAM_TYPE_RAW;
2247 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2248 CAM_QCOM_FEATURE_NONE;
2249 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2250 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2251 gCamCapability[mCameraId]->color_arrangement);
2252 mStreamConfigInfo.num_streams++;
2253 }
2254 /* In HFR mode, if video stream is not added, create a dummy channel so that
2255 * ISP can create a batch mode even for preview only case. This channel is
2256 * never 'start'ed (no stream-on), it is only 'initialized' */
2257 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2258 !m_bIsVideo) {
2259 cam_feature_mask_t dummyFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2260 setPAAFSupport(dummyFeatureMask,
2261 CAM_STREAM_TYPE_VIDEO,
2262 gCamCapability[mCameraId]->color_arrangement);
2263 mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2264 mChannelHandle,
2265 mCameraHandle->ops, captureResultCb,
2266 &gCamCapability[mCameraId]->padding_info,
2267 this,
2268 &mDummyBatchStream,
2269 CAM_STREAM_TYPE_VIDEO,
2270 dummyFeatureMask,
2271 mMetadataChannel);
2272 if (NULL == mDummyBatchChannel) {
2273 LOGE("creation of mDummyBatchChannel failed."
2274 "Preview will use non-hfr sensor mode ");
2275 }
2276 }
2277 if (mDummyBatchChannel) {
2278 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2279 mDummyBatchStream.width;
2280 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2281 mDummyBatchStream.height;
2282 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2283 CAM_STREAM_TYPE_VIDEO;
2284 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2285 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2286 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2287 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2288 gCamCapability[mCameraId]->color_arrangement);
2289 mStreamConfigInfo.num_streams++;
2290 }
2291
2292 mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
2293 mStreamConfigInfo.buffer_info.max_buffers =
2294 m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
2295
2296 /* Initialize mPendingRequestInfo and mPendingBuffersMap */
2297 for (pendingRequestIterator i = mPendingRequestsList.begin();
2298 i != mPendingRequestsList.end();) {
2299 i = erasePendingRequest(i);
2300 }
2301 mPendingFrameDropList.clear();
2302 // Initialize/Reset the pending buffers list
2303 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
2304 req.mPendingBufferList.clear();
2305 }
2306 mPendingBuffersMap.mPendingBuffersInRequest.clear();
2307
2308 mPendingReprocessResultList.clear();
2309
2310 mCurJpegMeta.clear();
2311 //Get min frame duration for this streams configuration
2312 deriveMinFrameDuration();
2313
2314 // Update state
2315 mState = CONFIGURED;
2316
2317 pthread_mutex_unlock(&mMutex);
2318
2319 return rc;
2320}
2321
2322/*===========================================================================
2323 * FUNCTION : validateCaptureRequest
2324 *
2325 * DESCRIPTION: validate a capture request from camera service
2326 *
2327 * PARAMETERS :
2328 * @request : request from framework to process
2329 *
 * RETURN : NO_ERROR on a well-formed request; BAD_VALUE on a malformed one
2331 *
2332 *==========================================================================*/
2333int QCamera3HardwareInterface::validateCaptureRequest(
2334 camera3_capture_request_t *request)
2335{
2336 ssize_t idx = 0;
2337 const camera3_stream_buffer_t *b;
2338 CameraMetadata meta;
2339
2340 /* Sanity check the request */
2341 if (request == NULL) {
2342 LOGE("NULL capture request");
2343 return BAD_VALUE;
2344 }
2345
2346 if ((request->settings == NULL) && (mState == CONFIGURED)) {
2347 /*settings cannot be null for the first request*/
2348 return BAD_VALUE;
2349 }
2350
2351 uint32_t frameNumber = request->frame_number;
2352 if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
2353 LOGE("Request %d: No output buffers provided!",
2354 __FUNCTION__, frameNumber);
2355 return BAD_VALUE;
2356 }
2357 if (request->num_output_buffers >= MAX_NUM_STREAMS) {
2358 LOGE("Number of buffers %d equals or is greater than maximum number of streams!",
2359 request->num_output_buffers, MAX_NUM_STREAMS);
2360 return BAD_VALUE;
2361 }
2362 if (request->input_buffer != NULL) {
2363 b = request->input_buffer;
2364 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2365 LOGE("Request %d: Buffer %ld: Status not OK!",
2366 frameNumber, (long)idx);
2367 return BAD_VALUE;
2368 }
2369 if (b->release_fence != -1) {
2370 LOGE("Request %d: Buffer %ld: Has a release fence!",
2371 frameNumber, (long)idx);
2372 return BAD_VALUE;
2373 }
2374 if (b->buffer == NULL) {
2375 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
2376 frameNumber, (long)idx);
2377 return BAD_VALUE;
2378 }
2379 }
2380
2381 // Validate all buffers
2382 b = request->output_buffers;
2383 do {
2384 QCamera3ProcessingChannel *channel =
2385 static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
2386 if (channel == NULL) {
2387 LOGE("Request %d: Buffer %ld: Unconfigured stream!",
2388 frameNumber, (long)idx);
2389 return BAD_VALUE;
2390 }
2391 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2392 LOGE("Request %d: Buffer %ld: Status not OK!",
2393 frameNumber, (long)idx);
2394 return BAD_VALUE;
2395 }
2396 if (b->release_fence != -1) {
2397 LOGE("Request %d: Buffer %ld: Has a release fence!",
2398 frameNumber, (long)idx);
2399 return BAD_VALUE;
2400 }
2401 if (b->buffer == NULL) {
2402 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
2403 frameNumber, (long)idx);
2404 return BAD_VALUE;
2405 }
2406 if (*(b->buffer) == NULL) {
2407 LOGE("Request %d: Buffer %ld: NULL private handle!",
2408 frameNumber, (long)idx);
2409 return BAD_VALUE;
2410 }
2411 idx++;
2412 b = request->output_buffers + idx;
2413 } while (idx < (ssize_t)request->num_output_buffers);
2414
2415 return NO_ERROR;
2416}
2417
2418/*===========================================================================
2419 * FUNCTION : deriveMinFrameDuration
2420 *
 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
2422 * on currently configured streams.
2423 *
2424 * PARAMETERS : NONE
2425 *
2426 * RETURN : NONE
2427 *
2428 *==========================================================================*/
2429void QCamera3HardwareInterface::deriveMinFrameDuration()
2430{
2431 int32_t maxJpegDim, maxProcessedDim, maxRawDim;
2432
2433 maxJpegDim = 0;
2434 maxProcessedDim = 0;
2435 maxRawDim = 0;
2436
2437 // Figure out maximum jpeg, processed, and raw dimensions
2438 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
2439 it != mStreamInfo.end(); it++) {
2440
2441 // Input stream doesn't have valid stream_type
2442 if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
2443 continue;
2444
2445 int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
2446 if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
2447 if (dimension > maxJpegDim)
2448 maxJpegDim = dimension;
2449 } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
2450 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
2451 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
2452 if (dimension > maxRawDim)
2453 maxRawDim = dimension;
2454 } else {
2455 if (dimension > maxProcessedDim)
2456 maxProcessedDim = dimension;
2457 }
2458 }
2459
2460 size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
2461 MAX_SIZES_CNT);
2462
2463 //Assume all jpeg dimensions are in processed dimensions.
2464 if (maxJpegDim > maxProcessedDim)
2465 maxProcessedDim = maxJpegDim;
2466 //Find the smallest raw dimension that is greater or equal to jpeg dimension
2467 if (maxProcessedDim > maxRawDim) {
2468 maxRawDim = INT32_MAX;
2469
2470 for (size_t i = 0; i < count; i++) {
2471 int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
2472 gCamCapability[mCameraId]->raw_dim[i].height;
2473 if (dimension >= maxProcessedDim && dimension < maxRawDim)
2474 maxRawDim = dimension;
2475 }
2476 }
2477
2478 //Find minimum durations for processed, jpeg, and raw
2479 for (size_t i = 0; i < count; i++) {
2480 if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
2481 gCamCapability[mCameraId]->raw_dim[i].height) {
2482 mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
2483 break;
2484 }
2485 }
2486 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
2487 for (size_t i = 0; i < count; i++) {
2488 if (maxProcessedDim ==
2489 gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
2490 gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
2491 mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
2492 mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
2493 break;
2494 }
2495 }
2496}
2497
2498/*===========================================================================
2499 * FUNCTION : getMinFrameDuration
2500 *
 * DESCRIPTION: get minimum frame duration based on the current maximum frame durations
 * and current request configuration.
 *
 * PARAMETERS : @request: request sent by the frameworks
 *
 * RETURN : min frame duration for a particular request
2507 *
2508 *==========================================================================*/
2509int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
2510{
2511 bool hasJpegStream = false;
2512 bool hasRawStream = false;
2513 for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
2514 const camera3_stream_t *stream = request->output_buffers[i].stream;
2515 if (stream->format == HAL_PIXEL_FORMAT_BLOB)
2516 hasJpegStream = true;
2517 else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
2518 stream->format == HAL_PIXEL_FORMAT_RAW10 ||
2519 stream->format == HAL_PIXEL_FORMAT_RAW16)
2520 hasRawStream = true;
2521 }
2522
2523 if (!hasJpegStream)
2524 return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
2525 else
2526 return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
2527}
2528
2529/*===========================================================================
2530 * FUNCTION : handleBuffersDuringFlushLock
2531 *
2532 * DESCRIPTION: Account for buffers returned from back-end during flush
2533 * This function is executed while mMutex is held by the caller.
2534 *
2535 * PARAMETERS :
2536 * @buffer: image buffer for the callback
2537 *
2538 * RETURN :
2539 *==========================================================================*/
2540void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
2541{
2542 bool buffer_found = false;
2543 for (List<PendingBuffersInRequest>::iterator req =
2544 mPendingBuffersMap.mPendingBuffersInRequest.begin();
2545 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
2546 for (List<PendingBufferInfo>::iterator i =
2547 req->mPendingBufferList.begin();
2548 i != req->mPendingBufferList.end(); i++) {
2549 if (i->buffer == buffer->buffer) {
2550 mPendingBuffersMap.numPendingBufsAtFlush--;
2551 LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
2552 buffer->buffer, req->frame_number,
2553 mPendingBuffersMap.numPendingBufsAtFlush);
2554 buffer_found = true;
2555 break;
2556 }
2557 }
2558 if (buffer_found) {
2559 break;
2560 }
2561 }
2562 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
2563 //signal the flush()
2564 LOGD("All buffers returned to HAL. Continue flush");
2565 pthread_cond_signal(&mBuffersCond);
2566 }
2567}
2568
2569
2570/*===========================================================================
2571 * FUNCTION : handlePendingReprocResults
2572 *
2573 * DESCRIPTION: check and notify on any pending reprocess results
2574 *
2575 * PARAMETERS :
2576 * @frame_number : Pending request frame number
2577 *
2578 * RETURN : int32_t type of status
2579 * NO_ERROR -- success
2580 * none-zero failure code
2581 *==========================================================================*/
int32_t QCamera3HardwareInterface::handlePendingReprocResults(uint32_t frame_number)
{
    // Look for a deferred reprocess result recorded for this frame number.
    for (List<PendingReprocessResult>::iterator j = mPendingReprocessResultList.begin();
            j != mPendingReprocessResultList.end(); j++) {
        if (j->frame_number == frame_number) {
            // Deliver the notify message that was held back until now.
            mCallbackOps->notify(mCallbackOps, &j->notify_msg);

            LOGD("Delayed reprocess notify %d",
                    frame_number);

            // Find the matching entry in the pending-requests list so its
            // input buffer and settings can be attached to the result.
            for (pendingRequestIterator k = mPendingRequestsList.begin();
                    k != mPendingRequestsList.end(); k++) {

                if (k->frame_number == j->frame_number) {
                    LOGD("Found reprocess frame number %d in pending reprocess List "
                            "Take it out!!",
                            k->frame_number);

                    // Assemble the capture result: the single reprocessed
                    // output buffer from the deferred entry, plus the input
                    // buffer/settings from the pending request.
                    camera3_capture_result result;
                    memset(&result, 0, sizeof(camera3_capture_result));
                    result.frame_number = frame_number;
                    result.num_output_buffers = 1;
                    result.output_buffers = &j->buffer;
                    result.input_buffer = k->input_buffer;
                    result.result = k->settings;
                    result.partial_result = PARTIAL_RESULT_COUNT;
                    mCallbackOps->process_capture_result(mCallbackOps, &result);

                    // Erase invalidates iterator k; we break out immediately,
                    // so k is never dereferenced afterwards.
                    erasePendingRequest(k);
                    break;
                }
            }
            // Consume the deferred entry; erase invalidates j, hence break.
            mPendingReprocessResultList.erase(j);
            break;
        }
    }
    return NO_ERROR;
}
2620
2621/*===========================================================================
2622 * FUNCTION : handleBatchMetadata
2623 *
2624 * DESCRIPTION: Handles metadata buffer callback in batch mode
2625 *
2626 * PARAMETERS : @metadata_buf: metadata buffer
2627 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
2628 * the meta buf in this method
2629 *
2630 * RETURN :
2631 *
2632 *==========================================================================*/
2633void QCamera3HardwareInterface::handleBatchMetadata(
2634 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
2635{
2636 ATRACE_CALL();
2637
2638 if (NULL == metadata_buf) {
2639 LOGE("metadata_buf is NULL");
2640 return;
2641 }
    /* In batch mode, the metadata will contain the frame number and timestamp of
     * the last frame in the batch. Eg: a batch containing buffers from request
     * 5,6,7 and 8 will have frame number and timestamp corresponding to 8.
     * multiple process_capture_requests => 1 set_param => 1 handleBatchMetadata =>
2646 * multiple process_capture_results */
2647 metadata_buffer_t *metadata =
2648 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
2649 int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
2650 uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
2651 uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
2652 uint32_t frame_number = 0, urgent_frame_number = 0;
2653 int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
2654 bool invalid_metadata = false;
2655 size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
2656 size_t loopCount = 1;
2657
2658 int32_t *p_frame_number_valid =
2659 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
2660 uint32_t *p_frame_number =
2661 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
2662 int64_t *p_capture_time =
2663 POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
2664 int32_t *p_urgent_frame_number_valid =
2665 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
2666 uint32_t *p_urgent_frame_number =
2667 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
2668
2669 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
2670 (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
2671 (NULL == p_urgent_frame_number)) {
2672 LOGE("Invalid metadata");
2673 invalid_metadata = true;
2674 } else {
2675 frame_number_valid = *p_frame_number_valid;
2676 last_frame_number = *p_frame_number;
2677 last_frame_capture_time = *p_capture_time;
2678 urgent_frame_number_valid = *p_urgent_frame_number_valid;
2679 last_urgent_frame_number = *p_urgent_frame_number;
2680 }
2681
2682 /* In batchmode, when no video buffers are requested, set_parms are sent
2683 * for every capture_request. The difference between consecutive urgent
2684 * frame numbers and frame numbers should be used to interpolate the
2685 * corresponding frame numbers and time stamps */
2686 pthread_mutex_lock(&mMutex);
2687 if (urgent_frame_number_valid) {
2688 first_urgent_frame_number =
2689 mPendingBatchMap.valueFor(last_urgent_frame_number);
2690 urgentFrameNumDiff = last_urgent_frame_number + 1 -
2691 first_urgent_frame_number;
2692
2693 LOGD("urgent_frm: valid: %d frm_num: %d - %d",
2694 urgent_frame_number_valid,
2695 first_urgent_frame_number, last_urgent_frame_number);
2696 }
2697
2698 if (frame_number_valid) {
2699 first_frame_number = mPendingBatchMap.valueFor(last_frame_number);
2700 frameNumDiff = last_frame_number + 1 -
2701 first_frame_number;
2702 mPendingBatchMap.removeItem(last_frame_number);
2703
2704 LOGD("frm: valid: %d frm_num: %d - %d",
2705 frame_number_valid,
2706 first_frame_number, last_frame_number);
2707
2708 }
2709 pthread_mutex_unlock(&mMutex);
2710
2711 if (urgent_frame_number_valid || frame_number_valid) {
2712 loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
2713 if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
2714 LOGE("urgentFrameNumDiff: %d urgentFrameNum: %d",
2715 urgentFrameNumDiff, last_urgent_frame_number);
2716 if (frameNumDiff > MAX_HFR_BATCH_SIZE)
2717 LOGE("frameNumDiff: %d frameNum: %d",
2718 frameNumDiff, last_frame_number);
2719 }
2720
2721 for (size_t i = 0; i < loopCount; i++) {
2722 /* handleMetadataWithLock is called even for invalid_metadata for
2723 * pipeline depth calculation */
2724 if (!invalid_metadata) {
2725 /* Infer frame number. Batch metadata contains frame number of the
2726 * last frame */
2727 if (urgent_frame_number_valid) {
2728 if (i < urgentFrameNumDiff) {
2729 urgent_frame_number =
2730 first_urgent_frame_number + i;
2731 LOGD("inferred urgent frame_number: %d",
2732 urgent_frame_number);
2733 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
2734 CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
2735 } else {
2736 /* This is to handle when urgentFrameNumDiff < frameNumDiff */
2737 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
2738 CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
2739 }
2740 }
2741
2742 /* Infer frame number. Batch metadata contains frame number of the
2743 * last frame */
2744 if (frame_number_valid) {
2745 if (i < frameNumDiff) {
2746 frame_number = first_frame_number + i;
2747 LOGD("inferred frame_number: %d", frame_number);
2748 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
2749 CAM_INTF_META_FRAME_NUMBER, frame_number);
2750 } else {
2751 /* This is to handle when urgentFrameNumDiff > frameNumDiff */
2752 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
2753 CAM_INTF_META_FRAME_NUMBER_VALID, 0);
2754 }
2755 }
2756
2757 if (last_frame_capture_time) {
2758 //Infer timestamp
2759 first_frame_capture_time = last_frame_capture_time -
2760 (((loopCount - 1) * NSEC_PER_SEC) / mHFRVideoFps);
2761 capture_time =
2762 first_frame_capture_time + (i * NSEC_PER_SEC / mHFRVideoFps);
2763 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
2764 CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
2765 LOGD("batch capture_time: %lld, capture_time: %lld",
2766 last_frame_capture_time, capture_time);
2767 }
2768 }
2769 pthread_mutex_lock(&mMutex);
2770 handleMetadataWithLock(metadata_buf,
2771 false /* free_and_bufdone_meta_buf */);
2772 pthread_mutex_unlock(&mMutex);
2773 }
2774
2775 /* BufDone metadata buffer */
2776 if (free_and_bufdone_meta_buf) {
2777 mMetadataChannel->bufDone(metadata_buf);
2778 free(metadata_buf);
2779 }
2780}
2781
2782/*===========================================================================
2783 * FUNCTION : handleMetadataWithLock
2784 *
2785 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
2786 *
2787 * PARAMETERS : @metadata_buf: metadata buffer
2788 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
2789 * the meta buf in this method
2790 *
2791 * RETURN :
2792 *
2793 *==========================================================================*/
void QCamera3HardwareInterface::handleMetadataWithLock(
    mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
{
    ATRACE_CALL();
    // Fast-exit paths: while flushing or after a fatal error, metadata is not
    // forwarded to the framework. The buffer is still returned to the channel
    // (and freed) when the caller handed over ownership.
    if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
        //during flush do not send metadata from this thread
        LOGD("not sending metadata during flush or when mState is error");
        if (free_and_bufdone_meta_buf) {
            mMetadataChannel->bufDone(metadata_buf);
            free(metadata_buf);
        }
        return;
    }

    //not in flush
    metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
    int32_t frame_number_valid, urgent_frame_number_valid;
    uint32_t frame_number, urgent_frame_number;
    int64_t capture_time;
    nsecs_t currentSysTime;

    // Extract the bookkeeping entries (frame numbers, timestamp, frame-drop
    // info) from the HAL metadata buffer. Any NULL pointer here means the
    // buffer is malformed and is handled below.
    int32_t *p_frame_number_valid =
            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
    uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
    int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
    int32_t *p_urgent_frame_number_valid =
            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
    uint32_t *p_urgent_frame_number =
            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
    IF_META_AVAILABLE(cam_stream_ID_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
            metadata) {
        LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
             *p_frame_number_valid, *p_frame_number);
    }

    if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
            (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
        LOGE("Invalid metadata");
        if (free_and_bufdone_meta_buf) {
            mMetadataChannel->bufDone(metadata_buf);
            free(metadata_buf);
        }
        // goto (not return) so pipeline_depth is still bumped for pending
        // requests at done_metadata.
        goto done_metadata;
    }
    frame_number_valid = *p_frame_number_valid;
    frame_number = *p_frame_number;
    capture_time = *p_capture_time;
    urgent_frame_number_valid = *p_urgent_frame_number_valid;
    urgent_frame_number = *p_urgent_frame_number;
    currentSysTime = systemTime(CLOCK_MONOTONIC);

    // Detect if buffers from any requests are overdue (diagnostic only; no
    // recovery is attempted here, buffers are merely logged as missing).
    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        if ( (currentSysTime - req.timestamp) >
            s2ns(MISSING_REQUEST_BUF_TIMEOUT) ) {
            for (auto &missed : req.mPendingBufferList) {
                LOGE("Current frame: %d. Missing: frame = %d, buffer = %p,"
                    "stream type = %d, stream format = %d",
                    frame_number, req.frame_number, missed.buffer,
                    missed.stream->stream_type, missed.stream->format);
            }
        }
    }
    //Partial result on process_capture_result for timestamp
    if (urgent_frame_number_valid) {
        LOGD("valid urgent frame_number = %u, capture_time = %lld",
             urgent_frame_number, capture_time);

        //Received an urgent Frame Number, handle it
        //using partial results
        for (pendingRequestIterator i =
                mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
            LOGD("Iterator Frame = %d urgent frame = %d",
                 i->frame_number, urgent_frame_number);

            // Any earlier non-reprocess request that never got its urgent
            // metadata indicates a missed partial result.
            if ((!i->input_buffer) && (i->frame_number < urgent_frame_number) &&
                (i->partial_result_cnt == 0)) {
                LOGE("Error: HAL missed urgent metadata for frame number %d",
                     i->frame_number);
            }

            if (i->frame_number == urgent_frame_number &&
                     i->bUrgentReceived == 0) {

                camera3_capture_result_t result;
                memset(&result, 0, sizeof(camera3_capture_result_t));

                i->partial_result_cnt++;
                i->bUrgentReceived = 1;
                // Extract 3A metadata
                result.result =
                    translateCbUrgentMetadataToResultMetadata(metadata);
                // Populate metadata result
                result.frame_number = urgent_frame_number;
                result.num_output_buffers = 0;
                result.output_buffers = NULL;
                result.partial_result = i->partial_result_cnt;

                mCallbackOps->process_capture_result(mCallbackOps, &result);
                LOGD("urgent frame_number = %u, capture_time = %lld",
                      result.frame_number, capture_time);
                // result.result was allocated by the translate call above.
                free_camera_metadata((camera_metadata_t *)result.result);
                break;
            }
        }
    }

    // A metadata callback carrying only an urgent frame number is used as a
    // start-of-frame marker; there is no full result to emit.
    if (!frame_number_valid) {
        LOGD("Not a valid normal frame number, used as SOF only");
        if (free_and_bufdone_meta_buf) {
            mMetadataChannel->bufDone(metadata_buf);
            free(metadata_buf);
        }
        goto done_metadata;
    }
    LOGH("valid frame_number = %u, capture_time = %lld",
            frame_number, capture_time);

    // Note: increment happens inside the body (i++ / erasePendingRequest),
    // not in the for-statement.
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end() && i->frame_number <= frame_number;) {
        // Flush out all entries with less or equal frame numbers.

        camera3_capture_result_t result;
        memset(&result, 0, sizeof(camera3_capture_result_t));

        LOGD("frame_number in the list is %u", i->frame_number);
        i->partial_result_cnt++;
        result.partial_result = i->partial_result_cnt;

        // Check whether any stream buffer corresponding to this is dropped or not
        // If dropped, then send the ERROR_BUFFER for the corresponding stream
        // The API does not expect a blob buffer to be dropped
        if (p_cam_frame_drop) {
            /* Clear notify_msg structure */
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                    j != i->buffers.end(); j++) {
                if (j->stream->format != HAL_PIXEL_FORMAT_BLOB) {
                    QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel *)j->stream->priv;
                    uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
                    for (uint32_t k = 0; k < p_cam_frame_drop->num_streams; k++) {
                        if (streamID == p_cam_frame_drop->streamID[k]) {
                            // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
                            LOGE("Start of reporting error frame#=%u, streamID=%u",
                                     i->frame_number, streamID);
                            notify_msg.type = CAMERA3_MSG_ERROR;
                            notify_msg.message.error.frame_number = i->frame_number;
                            notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
                            notify_msg.message.error.error_stream = j->stream;
                            mCallbackOps->notify(mCallbackOps, &notify_msg);
                            LOGE("End of reporting error frame#=%u, streamID=%u",
                                    i->frame_number, streamID);
                            PendingFrameDropInfo PendingFrameDrop;
                            PendingFrameDrop.frame_number=i->frame_number;
                            PendingFrameDrop.stream_ID = streamID;
                            // Add the Frame drop info to mPendingFrameDropList
                            // so the matching buffer callback can mark the
                            // buffer CAMERA3_BUFFER_STATUS_ERROR later.
                            mPendingFrameDropList.push_back(PendingFrameDrop);
                        }
                    }
                } else {
                    LOGE("JPEG buffer dropped for frame number %d",
                             i->frame_number);
                }
            }
        }

        // Send empty metadata with already filled buffers for dropped metadata
        // and send valid metadata with already filled buffers for current metadata
        /* we could hit this case when we either
         * 1. have a pending reprocess request or
         * 2. miss a metadata buffer callback */
        if (i->frame_number < frame_number) {
            if (i->input_buffer) {
                /* this will be handled in handleInputBufferWithLock */
                i++;
                continue;
            } else {
                // A live request with a smaller frame number never received
                // its metadata: unrecoverable, mark the device in error.
                LOGE("Fatal: Missing metadata buffer for frame number %d", i->frame_number);
                if (free_and_bufdone_meta_buf) {
                    mMetadataChannel->bufDone(metadata_buf);
                    free(metadata_buf);
                }
                mState = ERROR;
                goto done_metadata;
            }
        } else {
            mPendingLiveRequest--;
            /* Clear notify_msg structure */
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));

            // Send shutter notify to frameworks
            notify_msg.type = CAMERA3_MSG_SHUTTER;
            notify_msg.message.shutter.frame_number = i->frame_number;
            notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
            mCallbackOps->notify(mCallbackOps, &notify_msg);

            i->timestamp = capture_time;

            // Find channel requiring metadata, meaning internal offline postprocess
            // is needed.
            //TODO: for now, we don't support two streams requiring metadata at the same time.
            // (because we are not making copies, and metadata buffer is not reference counted.
            bool internalPproc = false;
            for (pendingBufferIterator iter = i->buffers.begin();
                    iter != i->buffers.end(); iter++) {
                if (iter->need_metadata) {
                    internalPproc = true;
                    QCamera3ProcessingChannel *channel =
                            (QCamera3ProcessingChannel *)iter->stream->priv;
                    // Channel takes over the metadata buffer for reprocess;
                    // it must NOT also be buf-done'd below (see !internalPproc).
                    channel->queueReprocMetadata(metadata_buf);
                    break;
                }
            }

            result.result = translateFromHalMetadata(metadata,
                    i->timestamp, i->request_id, i->jpegMetadata, i->pipeline_depth,
                    i->capture_intent, internalPproc, i->fwkCacMode);

            saveExifParams(metadata);

            if (i->blob_request) {
                {
                    //Dump tuning metadata if enabled and available
                    char prop[PROPERTY_VALUE_MAX];
                    memset(prop, 0, sizeof(prop));
                    property_get("persist.camera.dumpmetadata", prop, "0");
                    int32_t enabled = atoi(prop);
                    if (enabled && metadata->is_tuning_params_valid) {
                        dumpMetadataToFile(metadata->tuning_params,
                               mMetaFrameCount,
                               enabled,
                               "Snapshot",
                               frame_number);
                    }
                }
            }

            if (!internalPproc) {
                LOGD("couldn't find need_metadata for this metadata");
                // Return metadata buffer
                if (free_and_bufdone_meta_buf) {
                    mMetadataChannel->bufDone(metadata_buf);
                    free(metadata_buf);
                }
            }
        }
        if (!result.result) {
            LOGE("metadata is NULL");
        }
        result.frame_number = i->frame_number;
        result.input_buffer = i->input_buffer;
        result.num_output_buffers = 0;
        result.output_buffers = NULL;
        // Count the buffers already cached for this request by earlier
        // handleBufferWithLock callbacks; they are delivered together with
        // the metadata below.
        for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                j != i->buffers.end(); j++) {
            if (j->buffer) {
                result.num_output_buffers++;
            }
        }

        updateFpsInPreviewBuffer(metadata, i->frame_number);

        if (result.num_output_buffers > 0) {
            camera3_stream_buffer_t *result_buffers =
                new camera3_stream_buffer_t[result.num_output_buffers];
            if (result_buffers != NULL) {
                size_t result_buffers_idx = 0;
                for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                        j != i->buffers.end(); j++) {
                    if (j->buffer) {
                        // Flag the buffer as in-error if a frame drop was
                        // recorded for this stream/frame combination.
                        for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
                                m != mPendingFrameDropList.end(); m++) {
                            QCamera3Channel *channel = (QCamera3Channel *)j->buffer->stream->priv;
                            uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
                            if((m->stream_ID == streamID) && (m->frame_number==frame_number)) {
                                j->buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
                                LOGE("Stream STATUS_ERROR frame_number=%u, streamID=%u",
                                        frame_number, streamID);
                                m = mPendingFrameDropList.erase(m);
                                break;
                            }
                        }
                        mPendingBuffersMap.removeBuf(j->buffer->buffer);
                        result_buffers[result_buffers_idx++] = *(j->buffer);
                        free(j->buffer);
                        j->buffer = NULL;
                    }
                }
                result.output_buffers = result_buffers;
                mCallbackOps->process_capture_result(mCallbackOps, &result);
                LOGD("meta frame_number = %u, capture_time = %lld",
                        result.frame_number, i->timestamp);
                free_camera_metadata((camera_metadata_t *)result.result);
                delete[] result_buffers;
            }else {
                LOGE("Fatal error: out of memory");
            }
        } else {
            // Metadata-only result: no buffers ready yet for this request.
            mCallbackOps->process_capture_result(mCallbackOps, &result);
            LOGD("meta frame_number = %u, capture_time = %lld",
                    result.frame_number, i->timestamp);
            free_camera_metadata((camera_metadata_t *)result.result);
        }

        // erasePendingRequest returns the next iterator; this is the loop's
        // only advance on the non-input-buffer path.
        i = erasePendingRequest(i);

        if (!mPendingReprocessResultList.empty()) {
            handlePendingReprocResults(frame_number + 1);
        }
    }

done_metadata:
    // Every request still pending has seen one more metadata callback go by;
    // pipeline depth is reported back to the framework in the result metadata.
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end() ;i++) {
        i->pipeline_depth++;
    }
    LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
    unblockRequestIfNecessary();
}
3115
3116/*===========================================================================
3117 * FUNCTION : hdrPlusPerfLock
3118 *
3119 * DESCRIPTION: perf lock for HDR+ using custom intent
3120 *
3121 * PARAMETERS : @metadata_buf: Metadata super_buf pointer
3122 *
3123 * RETURN : None
3124 *
3125 *==========================================================================*/
3126void QCamera3HardwareInterface::hdrPlusPerfLock(
3127 mm_camera_super_buf_t *metadata_buf)
3128{
3129 if (NULL == metadata_buf) {
3130 LOGE("metadata_buf is NULL");
3131 return;
3132 }
3133 metadata_buffer_t *metadata =
3134 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3135 int32_t *p_frame_number_valid =
3136 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3137 uint32_t *p_frame_number =
3138 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3139
3140 if (p_frame_number_valid == NULL || p_frame_number == NULL) {
3141 LOGE("%s: Invalid metadata", __func__);
3142 return;
3143 }
3144
3145 //acquire perf lock for 5 sec after the last HDR frame is captured
3146 if ((p_frame_number_valid != NULL) && *p_frame_number_valid) {
3147 if ((p_frame_number != NULL) &&
3148 (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
3149 m_perfLock.lock_acq_timed(HDR_PLUS_PERF_TIME_OUT);
3150 }
3151 }
3152
3153 //release lock after perf lock timer is expired. If lock is already released,
3154 //isTimerReset returns false
3155 if (m_perfLock.isTimerReset()) {
3156 mLastCustIntentFrmNum = -1;
3157 m_perfLock.lock_rel_timed();
3158 }
3159}
3160
3161/*===========================================================================
3162 * FUNCTION : handleInputBufferWithLock
3163 *
3164 * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
3165 *
3166 * PARAMETERS : @frame_number: frame number of the input buffer
3167 *
3168 * RETURN :
3169 *
3170 *==========================================================================*/
3171void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
3172{
3173 ATRACE_CALL();
3174 pendingRequestIterator i = mPendingRequestsList.begin();
3175 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
3176 i++;
3177 }
3178 if (i != mPendingRequestsList.end() && i->input_buffer) {
3179 //found the right request
3180 if (!i->shutter_notified) {
3181 CameraMetadata settings;
3182 camera3_notify_msg_t notify_msg;
3183 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3184 nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
3185 if(i->settings) {
3186 settings = i->settings;
3187 if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
3188 capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
3189 } else {
3190 LOGE("No timestamp in input settings! Using current one.");
3191 }
3192 } else {
3193 LOGE("Input settings missing!");
3194 }
3195
3196 notify_msg.type = CAMERA3_MSG_SHUTTER;
3197 notify_msg.message.shutter.frame_number = frame_number;
3198 notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
3199 mCallbackOps->notify(mCallbackOps, &notify_msg);
3200 i->shutter_notified = true;
3201 LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
3202 i->frame_number, notify_msg.message.shutter.timestamp);
3203 }
3204
3205 if (i->input_buffer->release_fence != -1) {
3206 int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
3207 close(i->input_buffer->release_fence);
3208 if (rc != OK) {
3209 LOGE("input buffer sync wait failed %d", rc);
3210 }
3211 }
3212
3213 camera3_capture_result result;
3214 memset(&result, 0, sizeof(camera3_capture_result));
3215 result.frame_number = frame_number;
3216 result.result = i->settings;
3217 result.input_buffer = i->input_buffer;
3218 result.partial_result = PARTIAL_RESULT_COUNT;
3219
3220 mCallbackOps->process_capture_result(mCallbackOps, &result);
3221 LOGD("Input request metadata and input buffer frame_number = %u",
3222 i->frame_number);
3223 i = erasePendingRequest(i);
3224 } else {
3225 LOGE("Could not find input request for frame number %d", frame_number);
3226 }
3227}
3228
3229/*===========================================================================
3230 * FUNCTION : handleBufferWithLock
3231 *
3232 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
3233 *
3234 * PARAMETERS : @buffer: image buffer for the callback
3235 * @frame_number: frame number of the image buffer
3236 *
3237 * RETURN :
3238 *
3239 *==========================================================================*/
void QCamera3HardwareInterface::handleBufferWithLock(
    camera3_stream_buffer_t *buffer, uint32_t frame_number)
{
    ATRACE_CALL();
    /* Nothing to be done during error state */
    if ((ERROR == mState) || (DEINIT == mState)) {
        return;
    }
    if (mFlushPerf) {
        handleBuffersDuringFlushLock(buffer);
        return;
    }
    //not in flush
    // If the frame number doesn't exist in the pending request list,
    // directly send the buffer to the frameworks, and update pending buffers map
    // Otherwise, book-keep the buffer.
    pendingRequestIterator i = mPendingRequestsList.begin();
    while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
        i++;
    }
    if (i == mPendingRequestsList.end()) {
        // Metadata for this frame was already delivered (the request was
        // erased); send the buffer out on its own.
        // Verify all pending requests frame_numbers are greater
        for (pendingRequestIterator j = mPendingRequestsList.begin();
                j != mPendingRequestsList.end(); j++) {
            if ((j->frame_number < frame_number) && !(j->input_buffer)) {
                LOGW("Error: pending live frame number %d is smaller than %d",
                         j->frame_number, frame_number);
            }
        }
        camera3_capture_result_t result;
        memset(&result, 0, sizeof(camera3_capture_result_t));
        result.result = NULL;
        result.frame_number = frame_number;
        result.num_output_buffers = 1;
        // partial_result 0: this callback carries a buffer only, no metadata.
        result.partial_result = 0;
        // If a frame drop was recorded for this stream/frame pair, mark the
        // buffer as errored and retire the drop entry.
        for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
                m != mPendingFrameDropList.end(); m++) {
            QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
            uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
            if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
                buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
                LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
                         frame_number, streamID);
                m = mPendingFrameDropList.erase(m);
                break;
            }
        }
        result.output_buffers = buffer;
        LOGH("result frame_number = %d, buffer = %p",
                 frame_number, buffer->buffer);

        mPendingBuffersMap.removeBuf(buffer->buffer);

        mCallbackOps->process_capture_result(mCallbackOps, &result);
    } else {
        if (i->input_buffer) {
            // Reprocess request path: build the shutter message, wait out the
            // input buffer's release fence, then either notify immediately or
            // cache the result until earlier frames have completed.
            CameraMetadata settings;
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
            nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
            if(i->settings) {
                settings = i->settings;
                if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
                    capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
                } else {
                    LOGW("No timestamp in input settings! Using current one.");
                }
            } else {
                LOGE("Input settings missing!");
            }

            notify_msg.type = CAMERA3_MSG_SHUTTER;
            notify_msg.message.shutter.frame_number = frame_number;
            notify_msg.message.shutter.timestamp = (uint64_t)capture_time;

            if (i->input_buffer->release_fence != -1) {
               int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
               close(i->input_buffer->release_fence);
               if (rc != OK) {
                   LOGE("input buffer sync wait failed %d", rc);
               }
            }
            mPendingBuffersMap.removeBuf(buffer->buffer);

            // Results must be returned in frame-number order: only notify now
            // if no pending request has a smaller frame number.
            bool notifyNow = true;
            for (pendingRequestIterator j = mPendingRequestsList.begin();
                    j != mPendingRequestsList.end(); j++) {
                if (j->frame_number < frame_number) {
                    notifyNow = false;
                    break;
                }
            }

            if (notifyNow) {
                camera3_capture_result result;
                memset(&result, 0, sizeof(camera3_capture_result));
                result.frame_number = frame_number;
                result.result = i->settings;
                result.input_buffer = i->input_buffer;
                result.num_output_buffers = 1;
                result.output_buffers = buffer;
                result.partial_result = PARTIAL_RESULT_COUNT;

                mCallbackOps->notify(mCallbackOps, &notify_msg);
                mCallbackOps->process_capture_result(mCallbackOps, &result);
                LOGD("Notify reprocess now %d!", frame_number);
                i = erasePendingRequest(i);
            } else {
                // Cache reprocess result for later
                // (delivered by handlePendingReprocResults once earlier
                // frames have been flushed out).
                PendingReprocessResult pendingResult;
                memset(&pendingResult, 0, sizeof(PendingReprocessResult));
                pendingResult.notify_msg = notify_msg;
                pendingResult.buffer = *buffer;
                pendingResult.frame_number = frame_number;
                mPendingReprocessResultList.push_back(pendingResult);
                LOGD("Cache reprocess result %d!", frame_number);
            }
        } else {
            // Live request whose metadata hasn't arrived yet: cache a copy of
            // the buffer on the request; handleMetadataWithLock sends it with
            // the metadata result and frees the copy.
            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                j != i->buffers.end(); j++) {
                if (j->stream == buffer->stream) {
                    if (j->buffer != NULL) {
                        LOGE("Error: buffer is already set");
                    } else {
                        j->buffer = (camera3_stream_buffer_t *)malloc(
                            sizeof(camera3_stream_buffer_t));
                        *(j->buffer) = *buffer;
                        LOGH("cache buffer %p at result frame_number %u",
                             buffer->buffer, frame_number);
                    }
                }
            }
        }
    }
}
3375
3376/*===========================================================================
3377 * FUNCTION : unblockRequestIfNecessary
3378 *
3379 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
3380 * that mMutex is held when this function is called.
3381 *
3382 * PARAMETERS :
3383 *
3384 * RETURN :
3385 *
3386 *==========================================================================*/
void QCamera3HardwareInterface::unblockRequestIfNecessary()
{
    // Unblock process_capture_request
    // Wake any thread blocked on mRequestCond; per the function header,
    // mMutex is held by the caller, and the waiter re-checks its predicate
    // after wakeup, so an unnecessary signal is harmless.
    pthread_cond_signal(&mRequestCond);
}
3392
3393
3394/*===========================================================================
3395 * FUNCTION : processCaptureRequest
3396 *
3397 * DESCRIPTION: process a capture request from camera service
3398 *
3399 * PARAMETERS :
3400 * @request : request from framework to process
3401 *
3402 * RETURN :
3403 *
3404 *==========================================================================*/
3405int QCamera3HardwareInterface::processCaptureRequest(
3406 camera3_capture_request_t *request)
3407{
3408 ATRACE_CALL();
3409 int rc = NO_ERROR;
3410 int32_t request_id;
3411 CameraMetadata meta;
3412 uint32_t minInFlightRequests = MIN_INFLIGHT_REQUESTS;
3413 uint32_t maxInFlightRequests = MAX_INFLIGHT_REQUESTS;
3414 bool isVidBufRequested = false;
3415 camera3_stream_buffer_t *pInputBuffer = NULL;
3416
3417 pthread_mutex_lock(&mMutex);
3418
3419 // Validate current state
3420 switch (mState) {
3421 case CONFIGURED:
3422 case STARTED:
3423 /* valid state */
3424 break;
3425
3426 case ERROR:
3427 pthread_mutex_unlock(&mMutex);
3428 handleCameraDeviceError();
3429 return -ENODEV;
3430
3431 default:
3432 LOGE("Invalid state %d", mState);
3433 pthread_mutex_unlock(&mMutex);
3434 return -ENODEV;
3435 }
3436
3437 rc = validateCaptureRequest(request);
3438 if (rc != NO_ERROR) {
3439 LOGE("incoming request is not valid");
3440 pthread_mutex_unlock(&mMutex);
3441 return rc;
3442 }
3443
3444 meta = request->settings;
3445
3446 // For first capture request, send capture intent, and
3447 // stream on all streams
3448 if (mState == CONFIGURED) {
3449 // send an unconfigure to the backend so that the isp
3450 // resources are deallocated
3451 if (!mFirstConfiguration) {
3452 cam_stream_size_info_t stream_config_info;
3453 int32_t hal_version = CAM_HAL_V3;
3454 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
3455 stream_config_info.buffer_info.min_buffers =
3456 MIN_INFLIGHT_REQUESTS;
3457 stream_config_info.buffer_info.max_buffers =
3458 m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
3459 clear_metadata_buffer(mParameters);
3460 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3461 CAM_INTF_PARM_HAL_VERSION, hal_version);
3462 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3463 CAM_INTF_META_STREAM_INFO, stream_config_info);
3464 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
3465 mParameters);
3466 if (rc < 0) {
3467 LOGE("set_parms for unconfigure failed");
3468 pthread_mutex_unlock(&mMutex);
3469 return rc;
3470 }
3471 }
3472 m_perfLock.lock_acq();
3473 /* get eis information for stream configuration */
3474 cam_is_type_t is_type;
3475 char is_type_value[PROPERTY_VALUE_MAX];
3476 property_get("persist.camera.is_type", is_type_value, "0");
3477 is_type = static_cast<cam_is_type_t>(atoi(is_type_value));
3478
3479 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
3480 int32_t hal_version = CAM_HAL_V3;
3481 uint8_t captureIntent =
3482 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
3483 mCaptureIntent = captureIntent;
3484 clear_metadata_buffer(mParameters);
3485 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
3486 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
3487 }
3488
3489 //If EIS is enabled, turn it on for video
3490 bool setEis = m_bEisEnable && m_bEisSupportedSize;
3491 int32_t vsMode;
3492 vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
3493 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
3494 rc = BAD_VALUE;
3495 }
3496
3497 //IS type will be 0 unless EIS is supported. If EIS is supported
3498 //it could either be 1 or 4 depending on the stream and video size
3499 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
3500 if (setEis) {
3501 if (!m_bEisSupportedSize) {
3502 is_type = IS_TYPE_DIS;
3503 } else {
3504 if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_PREVIEW) {
3505 is_type = IS_TYPE_EIS_2_0;
3506 }else if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_VIDEO) {
3507 is_type = IS_TYPE_EIS_3_0;
3508 }else {
3509 is_type = IS_TYPE_NONE;
3510 }
3511 }
3512 mStreamConfigInfo.is_type[i] = is_type;
3513 }
3514 else {
3515 mStreamConfigInfo.is_type[i] = IS_TYPE_NONE;
3516 }
3517 }
3518
3519 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3520 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
3521
3522 int32_t tintless_value = 1;
3523 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3524 CAM_INTF_PARM_TINTLESS, tintless_value);
3525 //Disable CDS for HFR mode or if DIS/EIS is on.
3526 //CDS is a session parameter in the backend/ISP, so need to be set/reset
3527 //after every configure_stream
3528 if ((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
3529 (m_bIsVideo)) {
3530 int32_t cds = CAM_CDS_MODE_OFF;
3531 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3532 CAM_INTF_PARM_CDS_MODE, cds))
3533 LOGE("Failed to disable CDS for HFR mode");
3534
3535 }
3536 setMobicat();
3537
3538 /* Set fps and hfr mode while sending meta stream info so that sensor
3539 * can configure appropriate streaming mode */
3540 mHFRVideoFps = DEFAULT_VIDEO_FPS;
3541 if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
3542 rc = setHalFpsRange(meta, mParameters);
3543 if (rc != NO_ERROR) {
3544 LOGE("setHalFpsRange failed");
3545 }
3546 }
3547 if (meta.exists(ANDROID_CONTROL_MODE)) {
3548 uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
3549 rc = extractSceneMode(meta, metaMode, mParameters);
3550 if (rc != NO_ERROR) {
3551 LOGE("extractSceneMode failed");
3552 }
3553 }
3554
3555 //TODO: validate the arguments, HSV scenemode should have only the
3556 //advertised fps ranges
3557
3558 /*set the capture intent, hal version, tintless, stream info,
3559 *and disenable parameters to the backend*/
3560 LOGD("set_parms META_STREAM_INFO " );
3561 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
3562 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x "
3563 "Format:%d",
3564 mStreamConfigInfo.type[i],
3565 mStreamConfigInfo.stream_sizes[i].width,
3566 mStreamConfigInfo.stream_sizes[i].height,
3567 mStreamConfigInfo.postprocess_mask[i],
3568 mStreamConfigInfo.format[i]);
3569 }
3570 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
3571 mParameters);
3572 if (rc < 0) {
3573 LOGE("set_parms failed for hal version, stream info");
3574 }
3575
3576 cam_dimension_t sensor_dim;
3577 memset(&sensor_dim, 0, sizeof(sensor_dim));
3578 rc = getSensorOutputSize(sensor_dim);
3579 if (rc != NO_ERROR) {
3580 LOGE("Failed to get sensor output size");
3581 pthread_mutex_unlock(&mMutex);
3582 goto error_exit;
3583 }
3584
3585 mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
3586 gCamCapability[mCameraId]->active_array_size.height,
3587 sensor_dim.width, sensor_dim.height);
3588
3589 /* Set batchmode before initializing channel. Since registerBuffer
3590 * internally initializes some of the channels, better set batchmode
3591 * even before first register buffer */
3592 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3593 it != mStreamInfo.end(); it++) {
3594 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
3595 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
3596 && mBatchSize) {
3597 rc = channel->setBatchSize(mBatchSize);
3598 //Disable per frame map unmap for HFR/batchmode case
3599 rc |= channel->setPerFrameMapUnmap(false);
3600 if (NO_ERROR != rc) {
3601 LOGE("Channel init failed %d", rc);
3602 pthread_mutex_unlock(&mMutex);
3603 goto error_exit;
3604 }
3605 }
3606 }
3607
3608 //First initialize all streams
3609 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3610 it != mStreamInfo.end(); it++) {
3611 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
3612 if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
3613 ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
3614 setEis)
3615 rc = channel->initialize(is_type);
3616 else {
3617 rc = channel->initialize(IS_TYPE_NONE);
3618 }
3619 if (NO_ERROR != rc) {
3620 LOGE("Channel initialization failed %d", rc);
3621 pthread_mutex_unlock(&mMutex);
3622 goto error_exit;
3623 }
3624 }
3625
3626 if (mRawDumpChannel) {
3627 rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
3628 if (rc != NO_ERROR) {
3629 LOGE("Error: Raw Dump Channel init failed");
3630 pthread_mutex_unlock(&mMutex);
3631 goto error_exit;
3632 }
3633 }
3634 if (mSupportChannel) {
3635 rc = mSupportChannel->initialize(IS_TYPE_NONE);
3636 if (rc < 0) {
3637 LOGE("Support channel initialization failed");
3638 pthread_mutex_unlock(&mMutex);
3639 goto error_exit;
3640 }
3641 }
3642 if (mAnalysisChannel) {
3643 rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
3644 if (rc < 0) {
3645 LOGE("Analysis channel initialization failed");
3646 pthread_mutex_unlock(&mMutex);
3647 goto error_exit;
3648 }
3649 }
3650 if (mDummyBatchChannel) {
3651 rc = mDummyBatchChannel->setBatchSize(mBatchSize);
3652 if (rc < 0) {
3653 LOGE("mDummyBatchChannel setBatchSize failed");
3654 pthread_mutex_unlock(&mMutex);
3655 goto error_exit;
3656 }
3657 rc = mDummyBatchChannel->initialize(is_type);
3658 if (rc < 0) {
3659 LOGE("mDummyBatchChannel initialization failed");
3660 pthread_mutex_unlock(&mMutex);
3661 goto error_exit;
3662 }
3663 }
3664
3665 // Set bundle info
3666 rc = setBundleInfo();
3667 if (rc < 0) {
3668 LOGE("setBundleInfo failed %d", rc);
3669 pthread_mutex_unlock(&mMutex);
3670 goto error_exit;
3671 }
3672
3673 //update settings from app here
3674 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
3675 mIsDeviceLinked = meta.find(QCAMERA3_DUALCAM_LINK_ENABLE).data.u8[0];
3676 LOGH("Dualcam: setting On=%d id =%d", mIsDeviceLinked, mCameraId);
3677 }
3678 if (meta.exists(QCAMERA3_DUALCAM_LINK_IS_MAIN)) {
3679 mIsMainCamera = meta.find(QCAMERA3_DUALCAM_LINK_IS_MAIN).data.u8[0];
3680 LOGH("Dualcam: Is this main camera = %d id =%d", mIsMainCamera, mCameraId);
3681 }
3682 if (meta.exists(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID)) {
3683 mLinkedCameraId = meta.find(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID).data.u8[0];
3684 LOGH("Dualcam: Linked camera Id %d id =%d", mLinkedCameraId, mCameraId);
3685
3686 if ( (mLinkedCameraId >= MM_CAMERA_MAX_NUM_SENSORS) &&
3687 (mLinkedCameraId != mCameraId) ) {
3688 LOGE("Dualcam: mLinkedCameraId %d is invalid, current cam id = %d",
3689 mLinkedCameraId, mCameraId);
3690 goto error_exit;
3691 }
3692 }
3693
3694 // add bundle related cameras
3695 LOGH("%s: Dualcam: id =%d, mIsDeviceLinked=%d", __func__,mCameraId, mIsDeviceLinked);
3696 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
3697 if (mIsDeviceLinked)
3698 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_ON;
3699 else
3700 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
3701
3702 pthread_mutex_lock(&gCamLock);
3703
3704 if (sessionId[mLinkedCameraId] == 0xDEADBEEF) {
3705 LOGE("Dualcam: Invalid Session Id ");
3706 pthread_mutex_unlock(&gCamLock);
3707 goto error_exit;
3708 }
3709
3710 if (mIsMainCamera == 1) {
3711 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
3712 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
3713 // related session id should be session id of linked session
3714 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
3715 } else {
3716 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
3717 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
3718 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
3719 }
3720 pthread_mutex_unlock(&gCamLock);
3721
3722 rc = mCameraHandle->ops->sync_related_sensors(
3723 mCameraHandle->camera_handle, m_pRelCamSyncBuf);
3724 if (rc < 0) {
3725 LOGE("Dualcam: link failed");
3726 goto error_exit;
3727 }
3728 }
3729
3730 //Then start them.
3731 LOGH("Start META Channel");
3732 rc = mMetadataChannel->start();
3733 if (rc < 0) {
3734 LOGE("META channel start failed");
3735 pthread_mutex_unlock(&mMutex);
3736 goto error_exit;
3737 }
3738
3739 if (mAnalysisChannel) {
3740 rc = mAnalysisChannel->start();
3741 if (rc < 0) {
3742 LOGE("Analysis channel start failed");
3743 mMetadataChannel->stop();
3744 pthread_mutex_unlock(&mMutex);
3745 goto error_exit;
3746 }
3747 }
3748
3749 if (mSupportChannel) {
3750 rc = mSupportChannel->start();
3751 if (rc < 0) {
3752 LOGE("Support channel start failed");
3753 mMetadataChannel->stop();
3754 /* Although support and analysis are mutually exclusive today
3755 adding it in anycase for future proofing */
3756 if (mAnalysisChannel) {
3757 mAnalysisChannel->stop();
3758 }
3759 pthread_mutex_unlock(&mMutex);
3760 goto error_exit;
3761 }
3762 }
3763 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3764 it != mStreamInfo.end(); it++) {
3765 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
3766 LOGH("Start Processing Channel mask=%d",
3767 channel->getStreamTypeMask());
3768 rc = channel->start();
3769 if (rc < 0) {
3770 LOGE("channel start failed");
3771 pthread_mutex_unlock(&mMutex);
3772 goto error_exit;
3773 }
3774 }
3775
3776 if (mRawDumpChannel) {
3777 LOGD("Starting raw dump stream");
3778 rc = mRawDumpChannel->start();
3779 if (rc != NO_ERROR) {
3780 LOGE("Error Starting Raw Dump Channel");
3781 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3782 it != mStreamInfo.end(); it++) {
3783 QCamera3Channel *channel =
3784 (QCamera3Channel *)(*it)->stream->priv;
3785 LOGH("Stopping Processing Channel mask=%d",
3786 channel->getStreamTypeMask());
3787 channel->stop();
3788 }
3789 if (mSupportChannel)
3790 mSupportChannel->stop();
3791 if (mAnalysisChannel) {
3792 mAnalysisChannel->stop();
3793 }
3794 mMetadataChannel->stop();
3795 pthread_mutex_unlock(&mMutex);
3796 goto error_exit;
3797 }
3798 }
3799
3800 if (mChannelHandle) {
3801
3802 rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
3803 mChannelHandle);
3804 if (rc != NO_ERROR) {
3805 LOGE("start_channel failed %d", rc);
3806 pthread_mutex_unlock(&mMutex);
3807 goto error_exit;
3808 }
3809 }
3810
3811 goto no_error;
3812error_exit:
3813 m_perfLock.lock_rel();
3814 return rc;
3815no_error:
3816 m_perfLock.lock_rel();
3817
3818 mWokenUpByDaemon = false;
3819 mPendingLiveRequest = 0;
3820 mFirstConfiguration = false;
3821 enablePowerHint();
3822 }
3823
3824 uint32_t frameNumber = request->frame_number;
3825 cam_stream_ID_t streamID;
3826
3827 if (mFlushPerf) {
3828 //we cannot accept any requests during flush
3829 LOGE("process_capture_request cannot proceed during flush");
3830 pthread_mutex_unlock(&mMutex);
3831 return NO_ERROR; //should return an error
3832 }
3833
3834 if (meta.exists(ANDROID_REQUEST_ID)) {
3835 request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
3836 mCurrentRequestId = request_id;
3837 LOGD("Received request with id: %d", request_id);
3838 } else if (mState == CONFIGURED || mCurrentRequestId == -1){
3839 LOGE("Unable to find request id field, \
3840 & no previous id available");
3841 pthread_mutex_unlock(&mMutex);
3842 return NAME_NOT_FOUND;
3843 } else {
3844 LOGD("Re-using old request id");
3845 request_id = mCurrentRequestId;
3846 }
3847
3848 LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
3849 request->num_output_buffers,
3850 request->input_buffer,
3851 frameNumber);
3852 // Acquire all request buffers first
3853 streamID.num_streams = 0;
3854 int blob_request = 0;
3855 uint32_t snapshotStreamId = 0;
3856 for (size_t i = 0; i < request->num_output_buffers; i++) {
3857 const camera3_stream_buffer_t& output = request->output_buffers[i];
3858 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
3859
3860 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
3861 //Call function to store local copy of jpeg data for encode params.
3862 blob_request = 1;
3863 snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
3864 }
3865
3866 if (output.acquire_fence != -1) {
3867 rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
3868 close(output.acquire_fence);
3869 if (rc != OK) {
3870 LOGE("sync wait failed %d", rc);
3871 pthread_mutex_unlock(&mMutex);
3872 return rc;
3873 }
3874 }
3875
3876 streamID.streamID[streamID.num_streams] =
3877 channel->getStreamID(channel->getStreamTypeMask());
3878 streamID.num_streams++;
3879
3880 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
3881 isVidBufRequested = true;
3882 }
3883 }
3884
3885 if (blob_request) {
3886 KPI_ATRACE_INT("SNAPSHOT", 1);
3887 }
3888 if (blob_request && mRawDumpChannel) {
3889 LOGD("Trigger Raw based on blob request if Raw dump is enabled");
3890 streamID.streamID[streamID.num_streams] =
3891 mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
3892 streamID.num_streams++;
3893 }
3894
3895 if(request->input_buffer == NULL) {
3896 /* Parse the settings:
3897 * - For every request in NORMAL MODE
3898 * - For every request in HFR mode during preview only case
3899 * - For first request of every batch in HFR mode during video
3900 * recording. In batchmode the same settings except frame number is
3901 * repeated in each request of the batch.
3902 */
3903 if (!mBatchSize ||
3904 (mBatchSize && !isVidBufRequested) ||
3905 (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
3906 rc = setFrameParameters(request, streamID, blob_request, snapshotStreamId);
3907 if (rc < 0) {
3908 LOGE("fail to set frame parameters");
3909 pthread_mutex_unlock(&mMutex);
3910 return rc;
3911 }
3912 }
3913 /* For batchMode HFR, setFrameParameters is not called for every
3914 * request. But only frame number of the latest request is parsed.
3915 * Keep track of first and last frame numbers in a batch so that
3916 * metadata for the frame numbers of batch can be duplicated in
3917 * handleBatchMetadta */
3918 if (mBatchSize) {
3919 if (!mToBeQueuedVidBufs) {
3920 //start of the batch
3921 mFirstFrameNumberInBatch = request->frame_number;
3922 }
3923 if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3924 CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
3925 LOGE("Failed to set the frame number in the parameters");
3926 return BAD_VALUE;
3927 }
3928 }
3929 if (mNeedSensorRestart) {
3930 /* Unlock the mutex as restartSensor waits on the channels to be
3931 * stopped, which in turn calls stream callback functions -
3932 * handleBufferWithLock and handleMetadataWithLock */
3933 pthread_mutex_unlock(&mMutex);
3934 rc = dynamicUpdateMetaStreamInfo();
3935 if (rc != NO_ERROR) {
3936 LOGE("Restarting the sensor failed");
3937 return BAD_VALUE;
3938 }
3939 mNeedSensorRestart = false;
3940 pthread_mutex_lock(&mMutex);
3941 }
3942 } else {
3943
3944 if (request->input_buffer->acquire_fence != -1) {
3945 rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
3946 close(request->input_buffer->acquire_fence);
3947 if (rc != OK) {
3948 LOGE("input buffer sync wait failed %d", rc);
3949 pthread_mutex_unlock(&mMutex);
3950 return rc;
3951 }
3952 }
3953 }
3954
3955 if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
3956 mLastCustIntentFrmNum = frameNumber;
3957 }
3958 /* Update pending request list and pending buffers map */
3959 PendingRequestInfo pendingRequest;
3960 pendingRequestIterator latestRequest;
3961 pendingRequest.frame_number = frameNumber;
3962 pendingRequest.num_buffers = request->num_output_buffers;
3963 pendingRequest.request_id = request_id;
3964 pendingRequest.blob_request = blob_request;
3965 pendingRequest.timestamp = 0;
3966 pendingRequest.bUrgentReceived = 0;
3967 if (request->input_buffer) {
3968 pendingRequest.input_buffer =
3969 (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
3970 *(pendingRequest.input_buffer) = *(request->input_buffer);
3971 pInputBuffer = pendingRequest.input_buffer;
3972 } else {
3973 pendingRequest.input_buffer = NULL;
3974 pInputBuffer = NULL;
3975 }
3976
3977 pendingRequest.pipeline_depth = 0;
3978 pendingRequest.partial_result_cnt = 0;
3979 extractJpegMetadata(mCurJpegMeta, request);
3980 pendingRequest.jpegMetadata = mCurJpegMeta;
3981 pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
3982 pendingRequest.shutter_notified = false;
3983
3984 //extract capture intent
3985 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
3986 mCaptureIntent =
3987 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
3988 }
3989 pendingRequest.capture_intent = mCaptureIntent;
3990
3991 //extract CAC info
3992 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
3993 mCacMode =
3994 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
3995 }
3996 pendingRequest.fwkCacMode = mCacMode;
3997
3998 PendingBuffersInRequest bufsForCurRequest;
3999 bufsForCurRequest.frame_number = frameNumber;
4000 // Mark current timestamp for the new request
4001 bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
4002
4003 for (size_t i = 0; i < request->num_output_buffers; i++) {
4004 RequestedBufferInfo requestedBuf;
4005 memset(&requestedBuf, 0, sizeof(requestedBuf));
4006 requestedBuf.stream = request->output_buffers[i].stream;
4007 requestedBuf.buffer = NULL;
4008 pendingRequest.buffers.push_back(requestedBuf);
4009
4010 // Add to buffer handle the pending buffers list
4011 PendingBufferInfo bufferInfo;
4012 bufferInfo.buffer = request->output_buffers[i].buffer;
4013 bufferInfo.stream = request->output_buffers[i].stream;
4014 bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
4015 QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
4016 LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
4017 frameNumber, bufferInfo.buffer,
4018 channel->getStreamTypeMask(), bufferInfo.stream->format);
4019 }
4020 // Add this request packet into mPendingBuffersMap
4021 mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
4022 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
4023 mPendingBuffersMap.get_num_overall_buffers());
4024
4025 latestRequest = mPendingRequestsList.insert(
4026 mPendingRequestsList.end(), pendingRequest);
4027 if(mFlush) {
4028 LOGI("mFlush is true");
4029 pthread_mutex_unlock(&mMutex);
4030 return NO_ERROR;
4031 }
4032
4033 // Notify metadata channel we receive a request
4034 mMetadataChannel->request(NULL, frameNumber);
4035
4036 if(request->input_buffer != NULL){
4037 LOGD("Input request, frame_number %d", frameNumber);
4038 rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
4039 if (NO_ERROR != rc) {
4040 LOGE("fail to set reproc parameters");
4041 pthread_mutex_unlock(&mMutex);
4042 return rc;
4043 }
4044 }
4045
4046 // Call request on other streams
4047 uint32_t streams_need_metadata = 0;
4048 pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
4049 for (size_t i = 0; i < request->num_output_buffers; i++) {
4050 const camera3_stream_buffer_t& output = request->output_buffers[i];
4051 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
4052
4053 if (channel == NULL) {
4054 LOGW("invalid channel pointer for stream");
4055 continue;
4056 }
4057
4058 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
4059 LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
4060 output.buffer, request->input_buffer, frameNumber);
4061 if(request->input_buffer != NULL){
4062 rc = channel->request(output.buffer, frameNumber,
4063 pInputBuffer, &mReprocMeta);
4064 if (rc < 0) {
4065 LOGE("Fail to request on picture channel");
4066 pthread_mutex_unlock(&mMutex);
4067 return rc;
4068 }
4069 } else {
4070 LOGD("snapshot request with buffer %p, frame_number %d",
4071 output.buffer, frameNumber);
4072 if (!request->settings) {
4073 rc = channel->request(output.buffer, frameNumber,
4074 NULL, mPrevParameters);
4075 } else {
4076 rc = channel->request(output.buffer, frameNumber,
4077 NULL, mParameters);
4078 }
4079 if (rc < 0) {
4080 LOGE("Fail to request on picture channel");
4081 pthread_mutex_unlock(&mMutex);
4082 return rc;
4083 }
4084 pendingBufferIter->need_metadata = true;
4085 streams_need_metadata++;
4086 }
4087 } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
4088 bool needMetadata = false;
4089
4090 if (m_perfLock.isPerfLockTimedAcquired()) {
4091 if (m_perfLock.isTimerReset())
4092 {
4093 m_perfLock.lock_rel_timed();
4094 m_perfLock.lock_acq_timed(BURST_REPROCESS_PERF_TIME_OUT);
4095 }
4096 } else {
4097 m_perfLock.lock_acq_timed(BURST_REPROCESS_PERF_TIME_OUT);
4098 }
4099
4100 QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
4101 rc = yuvChannel->request(output.buffer, frameNumber,
4102 pInputBuffer,
4103 (pInputBuffer ? &mReprocMeta : mParameters), needMetadata);
4104 if (rc < 0) {
4105 LOGE("Fail to request on YUV channel");
4106 pthread_mutex_unlock(&mMutex);
4107 return rc;
4108 }
4109 pendingBufferIter->need_metadata = needMetadata;
4110 if (needMetadata)
4111 streams_need_metadata += 1;
4112 LOGD("calling YUV channel request, need_metadata is %d",
4113 needMetadata);
4114 } else {
4115 LOGD("request with buffer %p, frame_number %d",
4116 output.buffer, frameNumber);
4117 /* Set perf lock for API-2 zsl */
4118 if (IS_USAGE_ZSL(output.stream->usage)) {
4119 if (m_perfLock.isPerfLockTimedAcquired()) {
4120 if (m_perfLock.isTimerReset())
4121 {
4122 m_perfLock.lock_rel_timed();
4123 m_perfLock.lock_acq_timed(BURST_REPROCESS_PERF_TIME_OUT);
4124 }
4125 } else {
4126 m_perfLock.lock_acq_timed(BURST_REPROCESS_PERF_TIME_OUT);
4127 }
4128 }
4129
4130 rc = channel->request(output.buffer, frameNumber);
4131 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
4132 && mBatchSize) {
4133 mToBeQueuedVidBufs++;
4134 if (mToBeQueuedVidBufs == mBatchSize) {
4135 channel->queueBatchBuf();
4136 }
4137 }
4138 if (rc < 0) {
4139 LOGE("request failed");
4140 pthread_mutex_unlock(&mMutex);
4141 return rc;
4142 }
4143 }
4144 pendingBufferIter++;
4145 }
4146
4147 //If 2 streams have need_metadata set to true, fail the request, unless
4148 //we copy/reference count the metadata buffer
4149 if (streams_need_metadata > 1) {
4150 LOGE("not supporting request in which two streams requires"
4151 " 2 HAL metadata for reprocessing");
4152 pthread_mutex_unlock(&mMutex);
4153 return -EINVAL;
4154 }
4155
4156 if(request->input_buffer == NULL) {
4157 /* Set the parameters to backend:
4158 * - For every request in NORMAL MODE
4159 * - For every request in HFR mode during preview only case
4160 * - Once every batch in HFR mode during video recording
4161 */
4162 if (!mBatchSize ||
4163 (mBatchSize && !isVidBufRequested) ||
4164 (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
4165 LOGD("set_parms batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
4166 mBatchSize, isVidBufRequested,
4167 mToBeQueuedVidBufs);
4168 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4169 mParameters);
4170 if (rc < 0) {
4171 LOGE("set_parms failed");
4172 }
4173 /* reset to zero coz, the batch is queued */
4174 mToBeQueuedVidBufs = 0;
4175 mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
4176 }
4177 mPendingLiveRequest++;
4178 }
4179
4180 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
4181
4182 mState = STARTED;
4183 // Added a timed condition wait
4184 struct timespec ts;
4185 uint8_t isValidTimeout = 1;
4186 rc = clock_gettime(CLOCK_REALTIME, &ts);
4187 if (rc < 0) {
4188 isValidTimeout = 0;
4189 LOGE("Error reading the real time clock!!");
4190 }
4191 else {
4192 // Make timeout as 5 sec for request to be honored
4193 ts.tv_sec += 5;
4194 }
4195 //Block on conditional variable
4196 if (mBatchSize) {
4197 /* For HFR, more buffers are dequeued upfront to improve the performance */
4198 minInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
4199 maxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
4200 }
4201 if (m_perfLock.isPerfLockTimedAcquired() && m_perfLock.isTimerReset())
4202 m_perfLock.lock_rel_timed();
4203
4204 while ((mPendingLiveRequest >= minInFlightRequests) && !pInputBuffer &&
4205 (mState != ERROR) && (mState != DEINIT)) {
4206 if (!isValidTimeout) {
4207 LOGD("Blocking on conditional wait");
4208 pthread_cond_wait(&mRequestCond, &mMutex);
4209 }
4210 else {
4211 LOGD("Blocking on timed conditional wait");
4212 rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
4213 if (rc == ETIMEDOUT) {
4214 rc = -ENODEV;
4215 LOGE("Unblocked on timeout!!!!");
4216 break;
4217 }
4218 }
4219 LOGD("Unblocked");
4220 if (mWokenUpByDaemon) {
4221 mWokenUpByDaemon = false;
4222 if (mPendingLiveRequest < maxInFlightRequests)
4223 break;
4224 }
4225 }
4226 pthread_mutex_unlock(&mMutex);
4227
4228 return rc;
4229}
4230
/*===========================================================================
 * FUNCTION   : dump
 *
 * DESCRIPTION: Writes a human-readable snapshot of the HAL3 state (pending
 *              requests, pending buffer map, pending frame drop list) to the
 *              given file descriptor; triggered via dumpsys media.camera.
 *
 * PARAMETERS :
 *   @fd      : file descriptor to write the dump output to
 *
 * RETURN     : None
 *==========================================================================*/
4241void QCamera3HardwareInterface::dump(int fd)
4242{
4243 pthread_mutex_lock(&mMutex);
4244 dprintf(fd, "\n Camera HAL3 information Begin \n");
4245
4246 dprintf(fd, "\nNumber of pending requests: %zu \n",
4247 mPendingRequestsList.size());
4248 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
4249 dprintf(fd, " Frame | Number of Buffers | Req Id: | Blob Req | Input buffer present\n");
4250 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
4251 for(pendingRequestIterator i = mPendingRequestsList.begin();
4252 i != mPendingRequestsList.end(); i++) {
4253 dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
4254 i->frame_number, i->num_buffers, i->request_id, i->blob_request,
4255 i->input_buffer);
4256 }
4257 dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
4258 mPendingBuffersMap.get_num_overall_buffers());
4259 dprintf(fd, "-------+------------------\n");
4260 dprintf(fd, " Frame | Stream type mask \n");
4261 dprintf(fd, "-------+------------------\n");
4262 for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
4263 for(auto &j : req.mPendingBufferList) {
4264 QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
4265 dprintf(fd, " %5d | %11d \n",
4266 req.frame_number, channel->getStreamTypeMask());
4267 }
4268 }
4269 dprintf(fd, "-------+------------------\n");
4270
4271 dprintf(fd, "\nPending frame drop list: %zu\n",
4272 mPendingFrameDropList.size());
4273 dprintf(fd, "-------+-----------\n");
4274 dprintf(fd, " Frame | Stream ID \n");
4275 dprintf(fd, "-------+-----------\n");
4276 for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
4277 i != mPendingFrameDropList.end(); i++) {
4278 dprintf(fd, " %5d | %9d \n",
4279 i->frame_number, i->stream_ID);
4280 }
4281 dprintf(fd, "-------+-----------\n");
4282
4283 dprintf(fd, "\n Camera HAL3 information End \n");
4284
4285 /* use dumpsys media.camera as trigger to send update debug level event */
4286 mUpdateDebugLevel = true;
4287 pthread_mutex_unlock(&mMutex);
4288 return;
4289}
4290
4291/*===========================================================================
4292 * FUNCTION : flush
4293 *
4294 * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
4295 * conditionally restarts channels
4296 *
4297 * PARAMETERS :
4298 * @ restartChannels: re-start all channels
4299 *
4300 *
4301 * RETURN :
4302 * 0 on success
4303 * Error code on failure
4304 *==========================================================================*/
4305int QCamera3HardwareInterface::flush(bool restartChannels)
4306{
4307 KPI_ATRACE_CALL();
4308 int32_t rc = NO_ERROR;
4309
4310 LOGD("Unblocking Process Capture Request");
4311 pthread_mutex_lock(&mMutex);
4312 mFlush = true;
4313 pthread_mutex_unlock(&mMutex);
4314
4315 rc = stopAllChannels();
4316 // unlink of dualcam
4317 if (mIsDeviceLinked) {
4318 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
4319 pthread_mutex_lock(&gCamLock);
4320
4321 if (mIsMainCamera == 1) {
4322 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
4323 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
4324 // related session id should be session id of linked session
4325 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
4326 } else {
4327 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
4328 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
4329 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
4330 }
4331 pthread_mutex_unlock(&gCamLock);
4332
4333 rc = mCameraHandle->ops->sync_related_sensors(
4334 mCameraHandle->camera_handle, m_pRelCamSyncBuf);
4335 if (rc < 0) {
4336 LOGE("Dualcam: Unlink failed, but still proceed to close");
4337 }
4338 }
4339
4340 if (rc < 0) {
4341 LOGE("stopAllChannels failed");
4342 return rc;
4343 }
4344 if (mChannelHandle) {
4345 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
4346 mChannelHandle);
4347 }
4348
4349 // Reset bundle info
4350 rc = setBundleInfo();
4351 if (rc < 0) {
4352 LOGE("setBundleInfo failed %d", rc);
4353 return rc;
4354 }
4355
4356 // Mutex Lock
4357 pthread_mutex_lock(&mMutex);
4358
4359 // Unblock process_capture_request
4360 mPendingLiveRequest = 0;
4361 pthread_cond_signal(&mRequestCond);
4362
4363 rc = notifyErrorForPendingRequests();
4364 if (rc < 0) {
4365 LOGE("notifyErrorForPendingRequests failed");
4366 pthread_mutex_unlock(&mMutex);
4367 return rc;
4368 }
4369
4370 mFlush = false;
4371
4372 // Start the Streams/Channels
4373 if (restartChannels) {
4374 rc = startAllChannels();
4375 if (rc < 0) {
4376 LOGE("startAllChannels failed");
4377 pthread_mutex_unlock(&mMutex);
4378 return rc;
4379 }
4380 }
4381
4382 if (mChannelHandle) {
4383 mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
4384 mChannelHandle);
4385 if (rc < 0) {
4386 LOGE("start_channel failed");
4387 pthread_mutex_unlock(&mMutex);
4388 return rc;
4389 }
4390 }
4391
4392 pthread_mutex_unlock(&mMutex);
4393
4394 return 0;
4395}
4396
4397/*===========================================================================
4398 * FUNCTION : flushPerf
4399 *
4400 * DESCRIPTION: This is the performance optimization version of flush that does
4401 * not use stream off, rather flushes the system
4402 *
4403 * PARAMETERS :
4404 *
4405 *
4406 * RETURN : 0 : success
4407 * -EINVAL: input is malformed (device is not valid)
4408 * -ENODEV: if the device has encountered a serious error
4409 *==========================================================================*/
4410int QCamera3HardwareInterface::flushPerf()
4411{
4412 ATRACE_CALL();
4413 int32_t rc = 0;
4414 struct timespec timeout;
4415 bool timed_wait = false;
4416
4417 pthread_mutex_lock(&mMutex);
4418 mFlushPerf = true;
4419 mPendingBuffersMap.numPendingBufsAtFlush =
4420 mPendingBuffersMap.get_num_overall_buffers();
4421 LOGD("Calling flush. Wait for %d buffers to return",
4422 mPendingBuffersMap.numPendingBufsAtFlush);
4423
4424 /* send the flush event to the backend */
4425 rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
4426 if (rc < 0) {
4427 LOGE("Error in flush: IOCTL failure");
4428 mFlushPerf = false;
4429 pthread_mutex_unlock(&mMutex);
4430 return -ENODEV;
4431 }
4432
4433 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
4434 LOGD("No pending buffers in HAL, return flush");
4435 mFlushPerf = false;
4436 pthread_mutex_unlock(&mMutex);
4437 return rc;
4438 }
4439
4440 /* wait on a signal that buffers were received */
4441 rc = clock_gettime(CLOCK_REALTIME, &timeout);
4442 if (rc < 0) {
4443 LOGE("Error reading the real time clock, cannot use timed wait");
4444 } else {
4445 timeout.tv_sec += FLUSH_TIMEOUT;
4446 timed_wait = true;
4447 }
4448
4449 //Block on conditional variable
4450 while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
4451 LOGD("Waiting on mBuffersCond");
4452 if (!timed_wait) {
4453 rc = pthread_cond_wait(&mBuffersCond, &mMutex);
4454 if (rc != 0) {
4455 LOGE("pthread_cond_wait failed due to rc = %s",
4456 strerror(rc));
4457 break;
4458 }
4459 } else {
4460 rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
4461 if (rc != 0) {
4462 LOGE("pthread_cond_timedwait failed due to rc = %s",
4463 strerror(rc));
4464 break;
4465 }
4466 }
4467 }
4468 if (rc != 0) {
4469 mFlushPerf = false;
4470 pthread_mutex_unlock(&mMutex);
4471 return -ENODEV;
4472 }
4473
4474 LOGD("Received buffers, now safe to return them");
4475
4476 //make sure the channels handle flush
4477 //currently only required for the picture channel to release snapshot resources
4478 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4479 it != mStreamInfo.end(); it++) {
4480 QCamera3Channel *channel = (*it)->channel;
4481 if (channel) {
4482 rc = channel->flush();
4483 if (rc) {
4484 LOGE("Flushing the channels failed with error %d", rc);
4485 // even though the channel flush failed we need to continue and
4486 // return the buffers we have to the framework, however the return
4487 // value will be an error
4488 rc = -ENODEV;
4489 }
4490 }
4491 }
4492
4493 /* notify the frameworks and send errored results */
4494 rc = notifyErrorForPendingRequests();
4495 if (rc < 0) {
4496 LOGE("notifyErrorForPendingRequests failed");
4497 pthread_mutex_unlock(&mMutex);
4498 return rc;
4499 }
4500
4501 //unblock process_capture_request
4502 mPendingLiveRequest = 0;
4503 unblockRequestIfNecessary();
4504
4505 mFlushPerf = false;
4506 pthread_mutex_unlock(&mMutex);
4507 LOGD ("Flush Operation complete. rc = %d", rc);
4508 return rc;
4509}
4510
4511/*===========================================================================
4512 * FUNCTION : handleCameraDeviceError
4513 *
4514 * DESCRIPTION: This function calls internal flush and notifies the error to
4515 * framework and updates the state variable.
4516 *
4517 * PARAMETERS : None
4518 *
4519 * RETURN : NO_ERROR on Success
4520 * Error code on failure
4521 *==========================================================================*/
4522int32_t QCamera3HardwareInterface::handleCameraDeviceError()
4523{
4524 int32_t rc = NO_ERROR;
4525
4526 pthread_mutex_lock(&mMutex);
4527 if (mState != ERROR) {
4528 //if mState != ERROR, nothing to be done
4529 pthread_mutex_unlock(&mMutex);
4530 return NO_ERROR;
4531 }
4532 pthread_mutex_unlock(&mMutex);
4533
4534 rc = flush(false /* restart channels */);
4535 if (NO_ERROR != rc) {
4536 LOGE("internal flush to handle mState = ERROR failed");
4537 }
4538
4539 pthread_mutex_lock(&mMutex);
4540 mState = DEINIT;
4541 pthread_mutex_unlock(&mMutex);
4542
4543 camera3_notify_msg_t notify_msg;
4544 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
4545 notify_msg.type = CAMERA3_MSG_ERROR;
4546 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
4547 notify_msg.message.error.error_stream = NULL;
4548 notify_msg.message.error.frame_number = 0;
4549 mCallbackOps->notify(mCallbackOps, &notify_msg);
4550
4551 return rc;
4552}
4553
4554/*===========================================================================
4555 * FUNCTION : captureResultCb
4556 *
4557 * DESCRIPTION: Callback handler for all capture result
4558 * (streams, as well as metadata)
4559 *
4560 * PARAMETERS :
4561 * @metadata : metadata information
4562 * @buffer : actual gralloc buffer to be returned to frameworks.
4563 * NULL if metadata.
4564 *
4565 * RETURN : NONE
4566 *==========================================================================*/
4567void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
4568 camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
4569{
4570 if (metadata_buf) {
4571 if (mBatchSize) {
4572 handleBatchMetadata(metadata_buf,
4573 true /* free_and_bufdone_meta_buf */);
4574 } else { /* mBatchSize = 0 */
4575 hdrPlusPerfLock(metadata_buf);
4576 pthread_mutex_lock(&mMutex);
4577 handleMetadataWithLock(metadata_buf,
4578 true /* free_and_bufdone_meta_buf */);
4579 pthread_mutex_unlock(&mMutex);
4580 }
4581 } else if (isInputBuffer) {
4582 pthread_mutex_lock(&mMutex);
4583 handleInputBufferWithLock(frame_number);
4584 pthread_mutex_unlock(&mMutex);
4585 } else {
4586 pthread_mutex_lock(&mMutex);
4587 handleBufferWithLock(buffer, frame_number);
4588 pthread_mutex_unlock(&mMutex);
4589 }
4590 return;
4591}
4592
4593/*===========================================================================
4594 * FUNCTION : getReprocessibleOutputStreamId
4595 *
4596 * DESCRIPTION: Get source output stream id for the input reprocess stream
4597 * based on size and format, which would be the largest
4598 * output stream if an input stream exists.
4599 *
4600 * PARAMETERS :
4601 * @id : return the stream id if found
4602 *
4603 * RETURN : int32_t type of status
4604 * NO_ERROR -- success
4605 * none-zero failure code
4606 *==========================================================================*/
4607int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
4608{
4609 /* check if any output or bidirectional stream with the same size and format
4610 and return that stream */
4611 if ((mInputStreamInfo.dim.width > 0) &&
4612 (mInputStreamInfo.dim.height > 0)) {
4613 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4614 it != mStreamInfo.end(); it++) {
4615
4616 camera3_stream_t *stream = (*it)->stream;
4617 if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
4618 (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
4619 (stream->format == mInputStreamInfo.format)) {
4620 // Usage flag for an input stream and the source output stream
4621 // may be different.
4622 LOGD("Found reprocessible output stream! %p", *it);
4623 LOGD("input stream usage 0x%x, current stream usage 0x%x",
4624 stream->usage, mInputStreamInfo.usage);
4625
4626 QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
4627 if (channel != NULL && channel->mStreams[0]) {
4628 id = channel->mStreams[0]->getMyServerID();
4629 return NO_ERROR;
4630 }
4631 }
4632 }
4633 } else {
4634 LOGD("No input stream, so no reprocessible output stream");
4635 }
4636 return NAME_NOT_FOUND;
4637}
4638
4639/*===========================================================================
4640 * FUNCTION : lookupFwkName
4641 *
4642 * DESCRIPTION: In case the enum is not same in fwk and backend
4643 * make sure the parameter is correctly propogated
4644 *
4645 * PARAMETERS :
4646 * @arr : map between the two enums
4647 * @len : len of the map
4648 * @hal_name : name of the hal_parm to map
4649 *
4650 * RETURN : int type of status
4651 * fwk_name -- success
4652 * none-zero failure code
4653 *==========================================================================*/
4654template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
4655 size_t len, halType hal_name)
4656{
4657
4658 for (size_t i = 0; i < len; i++) {
4659 if (arr[i].hal_name == hal_name) {
4660 return arr[i].fwk_name;
4661 }
4662 }
4663
4664 /* Not able to find matching framework type is not necessarily
4665 * an error case. This happens when mm-camera supports more attributes
4666 * than the frameworks do */
4667 LOGH("Cannot find matching framework type");
4668 return NAME_NOT_FOUND;
4669}
4670
4671/*===========================================================================
4672 * FUNCTION : lookupHalName
4673 *
4674 * DESCRIPTION: In case the enum is not same in fwk and backend
4675 * make sure the parameter is correctly propogated
4676 *
4677 * PARAMETERS :
4678 * @arr : map between the two enums
4679 * @len : len of the map
4680 * @fwk_name : name of the hal_parm to map
4681 *
4682 * RETURN : int32_t type of status
4683 * hal_name -- success
4684 * none-zero failure code
4685 *==========================================================================*/
4686template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
4687 size_t len, fwkType fwk_name)
4688{
4689 for (size_t i = 0; i < len; i++) {
4690 if (arr[i].fwk_name == fwk_name) {
4691 return arr[i].hal_name;
4692 }
4693 }
4694
4695 LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
4696 return NAME_NOT_FOUND;
4697}
4698
4699/*===========================================================================
4700 * FUNCTION : lookupProp
4701 *
4702 * DESCRIPTION: lookup a value by its name
4703 *
4704 * PARAMETERS :
4705 * @arr : map between the two enums
4706 * @len : size of the map
4707 * @name : name to be looked up
4708 *
4709 * RETURN : Value if found
4710 * CAM_CDS_MODE_MAX if not found
4711 *==========================================================================*/
4712template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
4713 size_t len, const char *name)
4714{
4715 if (name) {
4716 for (size_t i = 0; i < len; i++) {
4717 if (!strcmp(arr[i].desc, name)) {
4718 return arr[i].val;
4719 }
4720 }
4721 }
4722 return CAM_CDS_MODE_MAX;
4723}
4724
4725/*===========================================================================
4726 *
4727 * DESCRIPTION:
4728 *
4729 * PARAMETERS :
4730 * @metadata : metadata information from callback
4731 * @timestamp: metadata buffer timestamp
4732 * @request_id: request id
4733 * @jpegMetadata: additional jpeg metadata
4734 * @pprocDone: whether internal offline postprocsesing is done
4735 *
4736 * RETURN : camera_metadata_t*
4737 * metadata in a format specified by fwk
4738 *==========================================================================*/
4739camera_metadata_t*
4740QCamera3HardwareInterface::translateFromHalMetadata(
4741 metadata_buffer_t *metadata,
4742 nsecs_t timestamp,
4743 int32_t request_id,
4744 const CameraMetadata& jpegMetadata,
4745 uint8_t pipeline_depth,
4746 uint8_t capture_intent,
4747 bool pprocDone,
4748 uint8_t fwk_cacMode)
4749{
4750 CameraMetadata camMetadata;
4751 camera_metadata_t *resultMetadata;
4752
4753 if (jpegMetadata.entryCount())
4754 camMetadata.append(jpegMetadata);
4755
4756 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
4757 camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
4758 camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pipeline_depth, 1);
4759 camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &capture_intent, 1);
4760
4761 IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
4762 int64_t fwk_frame_number = *frame_number;
4763 camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
4764 }
4765
4766 IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
4767 int32_t fps_range[2];
4768 fps_range[0] = (int32_t)float_range->min_fps;
4769 fps_range[1] = (int32_t)float_range->max_fps;
4770 camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
4771 fps_range, 2);
4772 LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
4773 fps_range[0], fps_range[1]);
4774 }
4775
4776 IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
4777 camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
4778 }
4779
4780 IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
4781 int val = (uint8_t)lookupFwkName(SCENE_MODES_MAP,
4782 METADATA_MAP_SIZE(SCENE_MODES_MAP),
4783 *sceneMode);
4784 if (NAME_NOT_FOUND != val) {
4785 uint8_t fwkSceneMode = (uint8_t)val;
4786 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
4787 LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
4788 fwkSceneMode);
4789 }
4790 }
4791
4792 IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
4793 uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
4794 camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
4795 }
4796
4797 IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
4798 uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
4799 camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
4800 }
4801
4802 IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
4803 uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
4804 camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
4805 }
4806
4807 IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
4808 CAM_INTF_META_EDGE_MODE, metadata) {
4809 camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
4810 }
4811
4812 IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
4813 uint8_t fwk_flashPower = (uint8_t) *flashPower;
4814 camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
4815 }
4816
4817 IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
4818 camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
4819 }
4820
4821 IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
4822 if (0 <= *flashState) {
4823 uint8_t fwk_flashState = (uint8_t) *flashState;
4824 if (!gCamCapability[mCameraId]->flash_available) {
4825 fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
4826 }
4827 camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
4828 }
4829 }
4830
4831 IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
4832 int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
4833 if (NAME_NOT_FOUND != val) {
4834 uint8_t fwk_flashMode = (uint8_t)val;
4835 camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
4836 }
4837 }
4838
4839 IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
4840 uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
4841 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
4842 }
4843
4844 IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
4845 camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
4846 }
4847
4848 IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
4849 camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
4850 }
4851
4852 IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
4853 camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
4854 }
4855
4856 IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
4857 uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
4858 camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
4859 }
4860
4861 IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
4862 uint8_t fwk_videoStab = (uint8_t) *videoStab;
4863 LOGD("fwk_videoStab = %d", fwk_videoStab);
4864 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
4865 } else {
4866 // Regardless of Video stab supports or not, CTS is expecting the EIS result to be non NULL
4867 // and so hardcoding the Video Stab result to OFF mode.
4868 uint8_t fwkVideoStabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
4869 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
4870 LOGD("%s: EIS result default to OFF mode", __func__);
4871 }
4872
4873 IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
4874 uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
4875 camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
4876 }
4877
4878 IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
4879 camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
4880 }
4881
4882 IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelSourcePattern,
4883 CAM_INTF_META_BLACK_LEVEL_SOURCE_PATTERN, metadata) {
4884
4885 LOGD("dynamicblackLevel = %f %f %f %f",
4886 blackLevelSourcePattern->cam_black_level[0],
4887 blackLevelSourcePattern->cam_black_level[1],
4888 blackLevelSourcePattern->cam_black_level[2],
4889 blackLevelSourcePattern->cam_black_level[3]);
4890 }
4891
4892 IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
4893 CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
4894 float fwk_blackLevelInd[4];
4895
4896 fwk_blackLevelInd[0] = blackLevelAppliedPattern->cam_black_level[0];
4897 fwk_blackLevelInd[1] = blackLevelAppliedPattern->cam_black_level[1];
4898 fwk_blackLevelInd[2] = blackLevelAppliedPattern->cam_black_level[2];
4899 fwk_blackLevelInd[3] = blackLevelAppliedPattern->cam_black_level[3];
4900
4901 LOGD("applied dynamicblackLevel = %f %f %f %f",
4902 blackLevelAppliedPattern->cam_black_level[0],
4903 blackLevelAppliedPattern->cam_black_level[1],
4904 blackLevelAppliedPattern->cam_black_level[2],
4905 blackLevelAppliedPattern->cam_black_level[3]);
4906 camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd, 4);
4907 camMetadata.update(NEXUS_EXPERIMENTAL_2015_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd, 4);
4908 }
4909
4910
4911 if (gCamCapability[mCameraId]->optical_black_region_count != 0 &&
4912 gCamCapability[mCameraId]->optical_black_region_count <= MAX_OPTICAL_BLACK_REGIONS) {
4913 int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
4914 for (size_t i = 0; i < gCamCapability[mCameraId]->optical_black_region_count * 4; i++) {
4915 opticalBlackRegions[i] = gCamCapability[mCameraId]->optical_black_regions[i];
4916 }
4917 camMetadata.update(NEXUS_EXPERIMENTAL_2015_SENSOR_INFO_OPTICALLY_SHIELDED_REGIONS,
4918 opticalBlackRegions, gCamCapability[mCameraId]->optical_black_region_count * 4);
4919 }
4920
4921 IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
4922 CAM_INTF_META_SCALER_CROP_REGION, metadata) {
4923 int32_t scalerCropRegion[4];
4924 scalerCropRegion[0] = hScalerCropRegion->left;
4925 scalerCropRegion[1] = hScalerCropRegion->top;
4926 scalerCropRegion[2] = hScalerCropRegion->width;
4927 scalerCropRegion[3] = hScalerCropRegion->height;
4928
4929 // Adjust crop region from sensor output coordinate system to active
4930 // array coordinate system.
4931 mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
4932 scalerCropRegion[2], scalerCropRegion[3]);
4933
4934 camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
4935 }
4936
4937 IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
4938 LOGD("sensorExpTime = %lld", *sensorExpTime);
4939 camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
4940 }
4941
4942 IF_META_AVAILABLE(int64_t, sensorFameDuration,
4943 CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
4944 LOGD("sensorFameDuration = %lld", *sensorFameDuration);
4945 camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
4946 }
4947
4948 IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
4949 CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
4950 LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
4951 camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
4952 sensorRollingShutterSkew, 1);
4953 }
4954
4955 IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
4956 LOGD("sensorSensitivity = %d", *sensorSensitivity);
4957 camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
4958
4959 //calculate the noise profile based on sensitivity
4960 double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
4961 double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
4962 double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
4963 for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
4964 noise_profile[i] = noise_profile_S;
4965 noise_profile[i+1] = noise_profile_O;
4966 }
4967 LOGD("noise model entry (S, O) is (%f, %f)",
4968 noise_profile_S, noise_profile_O);
4969 camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
4970 (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
4971 }
4972
4973 IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
4974 uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
4975 camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
4976 }
4977
4978 IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
4979 int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
4980 *faceDetectMode);
4981 if (NAME_NOT_FOUND != val) {
4982 uint8_t fwk_faceDetectMode = (uint8_t)val;
4983 camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
4984
4985 if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
4986 IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
4987 CAM_INTF_META_FACE_DETECTION, metadata) {
4988 uint8_t numFaces = MIN(
4989 faceDetectionInfo->num_faces_detected, MAX_ROI);
4990 int32_t faceIds[MAX_ROI];
4991 uint8_t faceScores[MAX_ROI];
4992 int32_t faceRectangles[MAX_ROI * 4];
4993 int32_t faceLandmarks[MAX_ROI * 6];
4994 size_t j = 0, k = 0;
4995
4996 for (size_t i = 0; i < numFaces; i++) {
4997 faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
4998 // Adjust crop region from sensor output coordinate system to active
4999 // array coordinate system.
5000 cam_rect_t& rect = faceDetectionInfo->faces[i].face_boundary;
5001 mCropRegionMapper.toActiveArray(rect.left, rect.top,
5002 rect.width, rect.height);
5003
5004 convertToRegions(faceDetectionInfo->faces[i].face_boundary,
5005 faceRectangles+j, -1);
5006
5007 j+= 4;
5008 }
5009 if (numFaces <= 0) {
5010 memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
5011 memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
5012 memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
5013 memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
5014 }
5015
5016 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
5017 numFaces);
5018 camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
5019 faceRectangles, numFaces * 4U);
5020 if (fwk_faceDetectMode ==
5021 ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
5022 IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
5023 CAM_INTF_META_FACE_LANDMARK, metadata) {
5024
5025 for (size_t i = 0; i < numFaces; i++) {
5026 // Map the co-ordinate sensor output coordinate system to active
5027 // array coordinate system.
5028 mCropRegionMapper.toActiveArray(
5029 landmarks->face_landmarks[i].left_eye_center.x,
5030 landmarks->face_landmarks[i].left_eye_center.y);
5031 mCropRegionMapper.toActiveArray(
5032 landmarks->face_landmarks[i].right_eye_center.x,
5033 landmarks->face_landmarks[i].right_eye_center.y);
5034 mCropRegionMapper.toActiveArray(
5035 landmarks->face_landmarks[i].mouth_center.x,
5036 landmarks->face_landmarks[i].mouth_center.y);
5037
5038 convertLandmarks(landmarks->face_landmarks[i], faceLandmarks+k);
5039 k+= 6;
5040 }
5041 }
5042
5043 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
5044 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
5045 faceLandmarks, numFaces * 6U);
5046 }
5047 }
5048 }
5049 }
5050 }
5051
5052 IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
5053 uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
5054 camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &fwk_histogramMode, 1);
5055 }
5056
5057 IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
5058 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
5059 uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
5060 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
5061 }
5062
5063 IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
5064 CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
5065 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
5066 CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
5067 }
5068
5069 IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
5070 CAM_INTF_META_LENS_SHADING_MAP, metadata) {
5071 size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
5072 CAM_MAX_SHADING_MAP_HEIGHT);
5073 size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
5074 CAM_MAX_SHADING_MAP_WIDTH);
5075 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
5076 lensShadingMap->lens_shading, 4U * map_width * map_height);
5077 }
5078
5079 IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
5080 uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
5081 camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
5082 }
5083
5084 IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
5085 //Populate CAM_INTF_META_TONEMAP_CURVES
5086 /* ch0 = G, ch 1 = B, ch 2 = R*/
5087 if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
5088 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
5089 tonemap->tonemap_points_cnt,
5090 CAM_MAX_TONEMAP_CURVE_SIZE);
5091 tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
5092 }
5093
5094 camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
5095 &tonemap->curves[0].tonemap_points[0][0],
5096 tonemap->tonemap_points_cnt * 2);
5097
5098 camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
5099 &tonemap->curves[1].tonemap_points[0][0],
5100 tonemap->tonemap_points_cnt * 2);
5101
5102 camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
5103 &tonemap->curves[2].tonemap_points[0][0],
5104 tonemap->tonemap_points_cnt * 2);
5105 }
5106
5107 IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
5108 CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
5109 camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
5110 CC_GAIN_MAX);
5111 }
5112
5113 IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
5114 CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
5115 camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
5116 (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
5117 CC_MATRIX_COLS * CC_MATRIX_ROWS);
5118 }
5119
5120 IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
5121 CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
5122 if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
5123 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
5124 toneCurve->tonemap_points_cnt,
5125 CAM_MAX_TONEMAP_CURVE_SIZE);
5126 toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
5127 }
5128 camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
5129 (float*)toneCurve->curve.tonemap_points,
5130 toneCurve->tonemap_points_cnt * 2);
5131 }
5132
5133 IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
5134 CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
5135 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
5136 predColorCorrectionGains->gains, 4);
5137 }
5138
5139 IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
5140 CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
5141 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
5142 (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
5143 CC_MATRIX_ROWS * CC_MATRIX_COLS);
5144 }
5145
5146 IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
5147 camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
5148 }
5149
5150 IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
5151 uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
5152 camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
5153 }
5154
5155 IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
5156 uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
5157 camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
5158 }
5159
5160 IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
5161 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
5162 *effectMode);
5163 if (NAME_NOT_FOUND != val) {
5164 uint8_t fwk_effectMode = (uint8_t)val;
5165 camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
5166 }
5167 }
5168
5169 IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
5170 CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
5171 int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
5172 METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
5173 if (NAME_NOT_FOUND != fwk_testPatternMode) {
5174 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
5175 }
5176 int32_t fwk_testPatternData[4];
5177 fwk_testPatternData[0] = testPatternData->r;
5178 fwk_testPatternData[3] = testPatternData->b;
5179 switch (gCamCapability[mCameraId]->color_arrangement) {
5180 case CAM_FILTER_ARRANGEMENT_RGGB:
5181 case CAM_FILTER_ARRANGEMENT_GRBG:
5182 fwk_testPatternData[1] = testPatternData->gr;
5183 fwk_testPatternData[2] = testPatternData->gb;
5184 break;
5185 case CAM_FILTER_ARRANGEMENT_GBRG:
5186 case CAM_FILTER_ARRANGEMENT_BGGR:
5187 fwk_testPatternData[2] = testPatternData->gr;
5188 fwk_testPatternData[1] = testPatternData->gb;
5189 break;
5190 default:
5191 LOGE("color arrangement %d is not supported",
5192 gCamCapability[mCameraId]->color_arrangement);
5193 break;
5194 }
5195 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
5196 }
5197
5198 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
5199 camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
5200 }
5201
5202 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
5203 String8 str((const char *)gps_methods);
5204 camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
5205 }
5206
5207 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
5208 camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
5209 }
5210
5211 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
5212 camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
5213 }
5214
5215 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
5216 uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
5217 camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
5218 }
5219
5220 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
5221 uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
5222 camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
5223 }
5224
5225 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
5226 int32_t fwk_thumb_size[2];
5227 fwk_thumb_size[0] = thumb_size->width;
5228 fwk_thumb_size[1] = thumb_size->height;
5229 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
5230 }
5231
5232 IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
5233 camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
5234 privateData,
5235 MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
5236 }
5237
5238 if (metadata->is_tuning_params_valid) {
5239 uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
5240 uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
5241 metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
5242
5243
5244 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
5245 sizeof(uint32_t));
5246 data += sizeof(uint32_t);
5247
5248 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
5249 sizeof(uint32_t));
5250 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
5251 data += sizeof(uint32_t);
5252
5253 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
5254 sizeof(uint32_t));
5255 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
5256 data += sizeof(uint32_t);
5257
5258 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
5259 sizeof(uint32_t));
5260 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
5261 data += sizeof(uint32_t);
5262
5263 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
5264 sizeof(uint32_t));
5265 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
5266 data += sizeof(uint32_t);
5267
5268 metadata->tuning_params.tuning_mod3_data_size = 0;
5269 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
5270 sizeof(uint32_t));
5271 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
5272 data += sizeof(uint32_t);
5273
5274 size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
5275 TUNING_SENSOR_DATA_MAX);
5276 memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
5277 count);
5278 data += count;
5279
5280 count = MIN(metadata->tuning_params.tuning_vfe_data_size,
5281 TUNING_VFE_DATA_MAX);
5282 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
5283 count);
5284 data += count;
5285
5286 count = MIN(metadata->tuning_params.tuning_cpp_data_size,
5287 TUNING_CPP_DATA_MAX);
5288 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
5289 count);
5290 data += count;
5291
5292 count = MIN(metadata->tuning_params.tuning_cac_data_size,
5293 TUNING_CAC_DATA_MAX);
5294 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
5295 count);
5296 data += count;
5297
5298 camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
5299 (int32_t *)(void *)tuning_meta_data_blob,
5300 (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
5301 }
5302
5303 IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
5304 CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
5305 camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
5306 (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
5307 NEUTRAL_COL_POINTS);
5308 }
5309
5310 IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
5311 uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
5312 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
5313 }
5314
5315 IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
5316 int32_t aeRegions[REGIONS_TUPLE_COUNT];
5317 // Adjust crop region from sensor output coordinate system to active
5318 // array coordinate system.
5319 mCropRegionMapper.toActiveArray(hAeRegions->rect.left, hAeRegions->rect.top,
5320 hAeRegions->rect.width, hAeRegions->rect.height);
5321
5322 convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
5323 camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
5324 REGIONS_TUPLE_COUNT);
5325 LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
5326 aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
5327 hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width,
5328 hAeRegions->rect.height);
5329 }
5330
5331 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
5332 uint8_t fwk_afState = (uint8_t) *afState;
5333 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
5334 LOGD("urgent Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
5335 }
5336
5337 IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
5338 camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
5339 }
5340
5341 IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
5342 camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
5343 }
5344
5345 IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
5346 uint8_t fwk_lensState = *lensState;
5347 camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
5348 }
5349
5350 IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
5351 /*af regions*/
5352 int32_t afRegions[REGIONS_TUPLE_COUNT];
5353 // Adjust crop region from sensor output coordinate system to active
5354 // array coordinate system.
5355 mCropRegionMapper.toActiveArray(hAfRegions->rect.left, hAfRegions->rect.top,
5356 hAfRegions->rect.width, hAfRegions->rect.height);
5357
5358 convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
5359 camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
5360 REGIONS_TUPLE_COUNT);
5361 LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
5362 afRegions[0], afRegions[1], afRegions[2], afRegions[3],
5363 hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width,
5364 hAfRegions->rect.height);
5365 }
5366
5367 IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
5368 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
5369 *hal_ab_mode);
5370 if (NAME_NOT_FOUND != val) {
5371 uint8_t fwk_ab_mode = (uint8_t)val;
5372 camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
5373 }
5374 }
5375
5376 IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
5377 int val = lookupFwkName(SCENE_MODES_MAP,
5378 METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
5379 if (NAME_NOT_FOUND != val) {
5380 uint8_t fwkBestshotMode = (uint8_t)val;
5381 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
5382 LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
5383 } else {
5384 LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
5385 }
5386 }
5387
5388 IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
5389 uint8_t fwk_mode = (uint8_t) *mode;
5390 camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
5391 }
5392
5393 /* Constant metadata values to be update*/
5394 uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
5395 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
5396
5397 uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
5398 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
5399
5400 int32_t hotPixelMap[2];
5401 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
5402
5403 // CDS
5404 IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
5405 camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
5406 }
5407
5408 // TNR
5409 IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
5410 uint8_t tnr_enable = tnr->denoise_enable;
5411 int32_t tnr_process_type = (int32_t)tnr->process_plates;
5412
5413 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
5414 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
5415 }
5416
5417 // Reprocess crop data
5418 IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
5419 uint8_t cnt = crop_data->num_of_streams;
5420 if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
5421 // mm-qcamera-daemon only posts crop_data for streams
5422 // not linked to pproc. So no valid crop metadata is not
5423 // necessarily an error case.
5424 LOGD("No valid crop metadata entries");
5425 } else {
5426 uint32_t reproc_stream_id;
5427 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
5428 LOGD("No reprocessible stream found, ignore crop data");
5429 } else {
5430 int rc = NO_ERROR;
5431 Vector<int32_t> roi_map;
5432 int32_t *crop = new int32_t[cnt*4];
5433 if (NULL == crop) {
5434 rc = NO_MEMORY;
5435 }
5436 if (NO_ERROR == rc) {
5437 int32_t streams_found = 0;
5438 for (size_t i = 0; i < cnt; i++) {
5439 if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
5440 if (pprocDone) {
5441 // HAL already does internal reprocessing,
5442 // either via reprocessing before JPEG encoding,
5443 // or offline postprocessing for pproc bypass case.
5444 crop[0] = 0;
5445 crop[1] = 0;
5446 crop[2] = mInputStreamInfo.dim.width;
5447 crop[3] = mInputStreamInfo.dim.height;
5448 } else {
5449 crop[0] = crop_data->crop_info[i].crop.left;
5450 crop[1] = crop_data->crop_info[i].crop.top;
5451 crop[2] = crop_data->crop_info[i].crop.width;
5452 crop[3] = crop_data->crop_info[i].crop.height;
5453 }
5454 roi_map.add(crop_data->crop_info[i].roi_map.left);
5455 roi_map.add(crop_data->crop_info[i].roi_map.top);
5456 roi_map.add(crop_data->crop_info[i].roi_map.width);
5457 roi_map.add(crop_data->crop_info[i].roi_map.height);
5458 streams_found++;
5459 LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
5460 crop[0], crop[1], crop[2], crop[3]);
5461 LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
5462 crop_data->crop_info[i].roi_map.left,
5463 crop_data->crop_info[i].roi_map.top,
5464 crop_data->crop_info[i].roi_map.width,
5465 crop_data->crop_info[i].roi_map.height);
5466 break;
5467
5468 }
5469 }
5470 camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
5471 &streams_found, 1);
5472 camMetadata.update(QCAMERA3_CROP_REPROCESS,
5473 crop, (size_t)(streams_found * 4));
5474 if (roi_map.array()) {
5475 camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
5476 roi_map.array(), roi_map.size());
5477 }
5478 }
5479 if (crop) {
5480 delete [] crop;
5481 }
5482 }
5483 }
5484 }
5485
5486 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
5487 // Regardless of CAC supports or not, CTS is expecting the CAC result to be non NULL and
5488 // so hardcoding the CAC result to OFF mode.
5489 uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
5490 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
5491 } else {
5492 IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
5493 int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
5494 *cacMode);
5495 if (NAME_NOT_FOUND != val) {
5496 uint8_t resultCacMode = (uint8_t)val;
5497 // check whether CAC result from CB is equal to Framework set CAC mode
5498 // If not equal then set the CAC mode came in corresponding request
5499 if (fwk_cacMode != resultCacMode) {
5500 resultCacMode = fwk_cacMode;
5501 }
5502 LOGD("fwk_cacMode=%d resultCacMode=%d", fwk_cacMode, resultCacMode);
5503 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
5504 } else {
5505 LOGE("Invalid CAC camera parameter: %d", *cacMode);
5506 }
5507 }
5508 }
5509
5510 // Post blob of cam_cds_data through vendor tag.
5511 IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
5512 uint8_t cnt = cdsInfo->num_of_streams;
5513 cam_cds_data_t cdsDataOverride;
5514 memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
5515 cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
5516 cdsDataOverride.num_of_streams = 1;
5517 if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
5518 uint32_t reproc_stream_id;
5519 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
5520 LOGD("No reprocessible stream found, ignore cds data");
5521 } else {
5522 for (size_t i = 0; i < cnt; i++) {
5523 if (cdsInfo->cds_info[i].stream_id ==
5524 reproc_stream_id) {
5525 cdsDataOverride.cds_info[0].cds_enable =
5526 cdsInfo->cds_info[i].cds_enable;
5527 break;
5528 }
5529 }
5530 }
5531 } else {
5532 LOGD("Invalid stream count %d in CDS_DATA", cnt);
5533 }
5534 camMetadata.update(QCAMERA3_CDS_INFO,
5535 (uint8_t *)&cdsDataOverride,
5536 sizeof(cam_cds_data_t));
5537 }
5538
5539 // Ldaf calibration data
5540 if (!mLdafCalibExist) {
5541 IF_META_AVAILABLE(uint32_t, ldafCalib,
5542 CAM_INTF_META_LDAF_EXIF, metadata) {
5543 mLdafCalibExist = true;
5544 mLdafCalib[0] = ldafCalib[0];
5545 mLdafCalib[1] = ldafCalib[1];
5546 LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
5547 ldafCalib[0], ldafCalib[1]);
5548 }
5549 }
5550
5551 // DDM debug data through vendor tag
5552 cam_ddm_info_t ddm_info;
5553 memset(&ddm_info, 0, sizeof(cam_ddm_info_t));
5554 IF_META_AVAILABLE(cam_stream_crop_info_t, sensorCropInfo,
5555 CAM_INTF_META_SNAP_CROP_INFO_SENSOR, metadata) {
5556 memcpy(&(ddm_info.sensor_crop_info), sensorCropInfo, sizeof(cam_stream_crop_info_t));
5557 }
5558 IF_META_AVAILABLE(cam_stream_crop_info_t, camifCropInfo,
5559 CAM_INTF_META_SNAP_CROP_INFO_CAMIF, metadata) {
5560 memcpy(&(ddm_info.camif_crop_info), camifCropInfo, sizeof(cam_stream_crop_info_t));
5561 }
5562 IF_META_AVAILABLE(cam_stream_crop_info_t, ispCropInfo,
5563 CAM_INTF_META_SNAP_CROP_INFO_ISP, metadata) {
5564 memcpy(&(ddm_info.isp_crop_info), ispCropInfo, sizeof(cam_stream_crop_info_t));
5565 }
5566 IF_META_AVAILABLE(cam_stream_crop_info_t, cppCropInfo,
5567 CAM_INTF_META_SNAP_CROP_INFO_CPP, metadata) {
5568 memcpy(&(ddm_info.cpp_crop_info), cppCropInfo, sizeof(cam_stream_crop_info_t));
5569 }
5570 IF_META_AVAILABLE(cam_focal_length_ratio_t, ratio,
5571 CAM_INTF_META_AF_FOCAL_LENGTH_RATIO, metadata) {
5572 memcpy(&(ddm_info.af_focal_length_ratio), ratio, sizeof(cam_focal_length_ratio_t));
5573 }
5574 IF_META_AVAILABLE(int32_t, flip, CAM_INTF_PARM_FLIP, metadata) {
5575 memcpy(&(ddm_info.pipeline_flip), flip, sizeof(int32_t));
5576 }
5577 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
5578 CAM_INTF_PARM_ROTATION, metadata) {
5579 memcpy(&(ddm_info.rotation_info), rotationInfo, sizeof(cam_rotation_info_t));
5580 }
5581 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_DDM_DATA_BLOB,
5582 (uint8_t *)&ddm_info, sizeof(cam_ddm_info_t));
5583
5584 resultMetadata = camMetadata.release();
5585 return resultMetadata;
5586}
5587
5588/*===========================================================================
5589 * FUNCTION : saveExifParams
5590 *
5591 * DESCRIPTION:
5592 *
5593 * PARAMETERS :
5594 * @metadata : metadata information from callback
5595 *
5596 * RETURN : none
5597 *
5598 *==========================================================================*/
void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
{
    // Caches the per-frame 3A/stats EXIF debug blobs from the HAL metadata
    // buffer into mExifParams.debug_params so they can later be embedded in
    // the JPEG EXIF debug section.  Each section is copied only when it is
    // present in this metadata buffer, and only if the debug_params storage
    // has been allocated (it may be NULL if debug data was never enabled).

    // AE (auto-exposure) debug section
    IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
            mExifParams.debug_params->ae_debug_params_valid = TRUE;
        }
    }
    // AWB (auto-white-balance) debug section
    IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
            mExifParams.debug_params->awb_debug_params_valid = TRUE;
        }
    }
    // AF (auto-focus) debug section
    IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
            mExifParams.debug_params->af_debug_params_valid = TRUE;
        }
    }
    // ASD (auto-scene-detection) debug section
    IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
            mExifParams.debug_params->asd_debug_params_valid = TRUE;
        }
    }
    // General stats buffer debug section
    IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
            mExifParams.debug_params->stats_debug_params_valid = TRUE;
        }
    }
    // Bayer-exposure stats (BE stats) debug section
    IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t,bestats_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_BESTATS, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
            mExifParams.debug_params->bestats_debug_params_valid = TRUE;
        }
    }
    // Bayer-histogram debug section
    IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_BHIST, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
            mExifParams.debug_params->bhist_debug_params_valid = TRUE;
        }
    }
    // 3A tuning info debug section
    IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_3A_TUNING, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
            mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
        }
    }
}
5658
5659/*===========================================================================
5660 * FUNCTION : get3AExifParams
5661 *
5662 * DESCRIPTION:
5663 *
5664 * PARAMETERS : none
5665 *
5666 *
5667 * RETURN : mm_jpeg_exif_params_t
5668 *
5669 *==========================================================================*/
mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
{
    // Returns (by value) the cached EXIF parameter set, including the
    // debug_params pointer populated by saveExifParams().
    return mExifParams;
}
5674
5675/*===========================================================================
5676 * FUNCTION : translateCbUrgentMetadataToResultMetadata
5677 *
5678 * DESCRIPTION:
5679 *
5680 * PARAMETERS :
5681 * @metadata : metadata information from callback
5682 *
5683 * RETURN : camera_metadata_t*
5684 * metadata in a format specified by fwk
5685 *==========================================================================*/
camera_metadata_t*
QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
                                 (metadata_buffer_t *metadata)
{
    // Translates the "urgent" (partial-result) subset of HAL metadata into a
    // framework camera_metadata_t: 3A states, triggers and modes that the
    // framework wants as early as possible.  Ownership of the returned
    // buffer is transferred to the caller via CameraMetadata::release().
    CameraMetadata camMetadata;
    camera_metadata_t *resultMetadata;


    // AWB state (e.g. searching/converged/locked), forwarded as uint8_t.
    IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
        uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
        camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
        LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
    }

    // AE precapture trigger plus its trigger id.
    IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
        camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
                &aecTrigger->trigger, 1);
        camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
                &aecTrigger->trigger_id, 1);
        LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
                 aecTrigger->trigger);
        LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
                aecTrigger->trigger_id);
    }

    // AE state (e.g. searching/converged/flash-required).
    IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
        uint8_t fwk_ae_state = (uint8_t) *ae_state;
        camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
        LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
    }

    // AF mode: HAL focus mode mapped to framework enum via FOCUS_MODES_MAP.
    IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
        int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
        if (NAME_NOT_FOUND != val) {
            uint8_t fwkAfMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
            LOGD("urgent Metadata : ANDROID_CONTROL_AF_MODE %d", val);
        } else {
            LOGH("urgent Metadata not found : ANDROID_CONTROL_AF_MODE %d",
                    val);
        }
    }

    // AF trigger plus its trigger id.
    IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
        camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
                &af_trigger->trigger, 1);
        LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
                 af_trigger->trigger);
        camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &af_trigger->trigger_id, 1);
        LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
                af_trigger->trigger_id);
    }

    // AWB mode: HAL white-balance value mapped via WHITE_BALANCE_MODES_MAP.
    IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
        int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
                METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
        if (NAME_NOT_FOUND != val) {
            uint8_t fwkWhiteBalanceMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
            LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
        } else {
            LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
        }
    }

    // Reconstruct ANDROID_CONTROL_AE_MODE from three separate HAL fields
    // (redeye reduction, LED/flash mode, AEC mode), checked in priority
    // order: redeye > auto/on flash > plain AE on/off.
    uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
    uint32_t aeMode = CAM_AE_MODE_MAX;
    int32_t flashMode = CAM_FLASH_MODE_MAX;
    int32_t redeye = -1;
    IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
        aeMode = *pAeMode;
    }
    IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
        flashMode = *pFlashMode;
    }
    IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
        redeye = *pRedeye;
    }

    if (1 == redeye) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
        // Auto/on flash maps to AE_MODE_ON_AUTO_FLASH / AE_MODE_ON_ALWAYS_FLASH.
        int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
                flashMode);
        if (NAME_NOT_FOUND != val) {
            fwk_aeMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
        } else {
            LOGE("Unsupported flash mode %d", flashMode);
        }
    } else if (aeMode == CAM_AE_MODE_ON) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else if (aeMode == CAM_AE_MODE_OFF) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else {
        // None of the three fields gave a usable value; AE mode is omitted
        // from this partial result.
        LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
              "flashMode:%d, aeMode:%u!!!",
                 redeye, flashMode, aeMode);
    }

    resultMetadata = camMetadata.release();
    return resultMetadata;
}
5792
5793/*===========================================================================
5794 * FUNCTION : dumpMetadataToFile
5795 *
5796 * DESCRIPTION: Dumps tuning metadata to file system
5797 *
5798 * PARAMETERS :
5799 * @meta : tuning metadata
5800 * @dumpFrameCount : current dump frame count
5801 * @enabled : Enable mask
5802 *
5803 *==========================================================================*/
5804void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
5805 uint32_t &dumpFrameCount,
5806 bool enabled,
5807 const char *type,
5808 uint32_t frameNumber)
5809{
5810 //Some sanity checks
5811 if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
5812 LOGE("Tuning sensor data size bigger than expected %d: %d",
5813 meta.tuning_sensor_data_size,
5814 TUNING_SENSOR_DATA_MAX);
5815 return;
5816 }
5817
5818 if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
5819 LOGE("Tuning VFE data size bigger than expected %d: %d",
5820 meta.tuning_vfe_data_size,
5821 TUNING_VFE_DATA_MAX);
5822 return;
5823 }
5824
5825 if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
5826 LOGE("Tuning CPP data size bigger than expected %d: %d",
5827 meta.tuning_cpp_data_size,
5828 TUNING_CPP_DATA_MAX);
5829 return;
5830 }
5831
5832 if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
5833 LOGE("Tuning CAC data size bigger than expected %d: %d",
5834 meta.tuning_cac_data_size,
5835 TUNING_CAC_DATA_MAX);
5836 return;
5837 }
5838 //
5839
5840 if(enabled){
5841 char timeBuf[FILENAME_MAX];
5842 char buf[FILENAME_MAX];
5843 memset(buf, 0, sizeof(buf));
5844 memset(timeBuf, 0, sizeof(timeBuf));
5845 time_t current_time;
5846 struct tm * timeinfo;
5847 time (&current_time);
5848 timeinfo = localtime (&current_time);
5849 if (timeinfo != NULL) {
5850 strftime (timeBuf, sizeof(timeBuf),
5851 QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
5852 }
5853 String8 filePath(timeBuf);
5854 snprintf(buf,
5855 sizeof(buf),
5856 "%dm_%s_%d.bin",
5857 dumpFrameCount,
5858 type,
5859 frameNumber);
5860 filePath.append(buf);
5861 int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
5862 if (file_fd >= 0) {
5863 ssize_t written_len = 0;
5864 meta.tuning_data_version = TUNING_DATA_VERSION;
5865 void *data = (void *)((uint8_t *)&meta.tuning_data_version);
5866 written_len += write(file_fd, data, sizeof(uint32_t));
5867 data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
5868 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
5869 written_len += write(file_fd, data, sizeof(uint32_t));
5870 data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
5871 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
5872 written_len += write(file_fd, data, sizeof(uint32_t));
5873 data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
5874 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
5875 written_len += write(file_fd, data, sizeof(uint32_t));
5876 data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
5877 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
5878 written_len += write(file_fd, data, sizeof(uint32_t));
5879 meta.tuning_mod3_data_size = 0;
5880 data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
5881 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
5882 written_len += write(file_fd, data, sizeof(uint32_t));
5883 size_t total_size = meta.tuning_sensor_data_size;
5884 data = (void *)((uint8_t *)&meta.data);
5885 written_len += write(file_fd, data, total_size);
5886 total_size = meta.tuning_vfe_data_size;
5887 data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
5888 written_len += write(file_fd, data, total_size);
5889 total_size = meta.tuning_cpp_data_size;
5890 data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
5891 written_len += write(file_fd, data, total_size);
5892 total_size = meta.tuning_cac_data_size;
5893 data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
5894 written_len += write(file_fd, data, total_size);
5895 close(file_fd);
5896 }else {
5897 LOGE("fail to open file for metadata dumping");
5898 }
5899 }
5900}
5901
5902/*===========================================================================
5903 * FUNCTION : cleanAndSortStreamInfo
5904 *
5905 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
5906 * and sort them such that raw stream is at the end of the list
5907 * This is a workaround for camera daemon constraint.
5908 *
5909 * PARAMETERS : None
5910 *
5911 *==========================================================================*/
5912void QCamera3HardwareInterface::cleanAndSortStreamInfo()
5913{
5914 List<stream_info_t *> newStreamInfo;
5915
5916 /*clean up invalid streams*/
5917 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
5918 it != mStreamInfo.end();) {
5919 if(((*it)->status) == INVALID){
5920 QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
5921 delete channel;
5922 free(*it);
5923 it = mStreamInfo.erase(it);
5924 } else {
5925 it++;
5926 }
5927 }
5928
5929 // Move preview/video/callback/snapshot streams into newList
5930 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5931 it != mStreamInfo.end();) {
5932 if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
5933 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
5934 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
5935 newStreamInfo.push_back(*it);
5936 it = mStreamInfo.erase(it);
5937 } else
5938 it++;
5939 }
5940 // Move raw streams into newList
5941 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5942 it != mStreamInfo.end();) {
5943 newStreamInfo.push_back(*it);
5944 it = mStreamInfo.erase(it);
5945 }
5946
5947 mStreamInfo = newStreamInfo;
5948}
5949
5950/*===========================================================================
5951 * FUNCTION : extractJpegMetadata
5952 *
5953 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
5954 * JPEG metadata is cached in HAL, and return as part of capture
5955 * result when metadata is returned from camera daemon.
5956 *
5957 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
5958 * @request: capture request
5959 *
5960 *==========================================================================*/
5961void QCamera3HardwareInterface::extractJpegMetadata(
5962 CameraMetadata& jpegMetadata,
5963 const camera3_capture_request_t *request)
5964{
5965 CameraMetadata frame_settings;
5966 frame_settings = request->settings;
5967
5968 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
5969 jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
5970 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
5971 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
5972
5973 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
5974 jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
5975 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
5976 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
5977
5978 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
5979 jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
5980 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
5981 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
5982
5983 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
5984 jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
5985 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
5986 frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
5987
5988 if (frame_settings.exists(ANDROID_JPEG_QUALITY))
5989 jpegMetadata.update(ANDROID_JPEG_QUALITY,
5990 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
5991 frame_settings.find(ANDROID_JPEG_QUALITY).count);
5992
5993 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
5994 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
5995 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
5996 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
5997
5998 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
5999 int32_t thumbnail_size[2];
6000 thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
6001 thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
6002 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
6003 int32_t orientation =
6004 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
6005 if ((orientation == 90) || (orientation == 270)) {
6006 //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
6007 int32_t temp;
6008 temp = thumbnail_size[0];
6009 thumbnail_size[0] = thumbnail_size[1];
6010 thumbnail_size[1] = temp;
6011 }
6012 }
6013 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
6014 thumbnail_size,
6015 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
6016 }
6017
6018}
6019
6020/*===========================================================================
6021 * FUNCTION : convertToRegions
6022 *
6023 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
6024 *
6025 * PARAMETERS :
6026 * @rect : cam_rect_t struct to convert
6027 * @region : int32_t destination array
6028 * @weight : if we are converting from cam_area_t, weight is valid
6029 * else weight = -1
6030 *
6031 *==========================================================================*/
6032void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
6033 int32_t *region, int weight)
6034{
6035 region[0] = rect.left;
6036 region[1] = rect.top;
6037 region[2] = rect.left + rect.width;
6038 region[3] = rect.top + rect.height;
6039 if (weight > -1) {
6040 region[4] = weight;
6041 }
6042}
6043
6044/*===========================================================================
6045 * FUNCTION : convertFromRegions
6046 *
 * DESCRIPTION: helper method to convert a framework region tuple
 *              [x_min, y_min, x_max, y_max, weight] into cam_area_t
 *
 * PARAMETERS :
 *   @roi      : destination cam_area_t filled from the metadata entry
 *   @settings : capture settings containing the region tag
 *   @tag      : metadata tag holding the region tuple
6054 *
6055 *==========================================================================*/
6056void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
6057 const camera_metadata_t *settings, uint32_t tag)
6058{
6059 CameraMetadata frame_settings;
6060 frame_settings = settings;
6061 int32_t x_min = frame_settings.find(tag).data.i32[0];
6062 int32_t y_min = frame_settings.find(tag).data.i32[1];
6063 int32_t x_max = frame_settings.find(tag).data.i32[2];
6064 int32_t y_max = frame_settings.find(tag).data.i32[3];
6065 roi.weight = frame_settings.find(tag).data.i32[4];
6066 roi.rect.left = x_min;
6067 roi.rect.top = y_min;
6068 roi.rect.width = x_max - x_min;
6069 roi.rect.height = y_max - y_min;
6070}
6071
6072/*===========================================================================
6073 * FUNCTION : resetIfNeededROI
6074 *
6075 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
6076 * crop region
6077 *
6078 * PARAMETERS :
6079 * @roi : cam_area_t struct to resize
6080 * @scalerCropRegion : cam_crop_region_t region to compare against
6081 *
6082 *
6083 *==========================================================================*/
6084bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
6085 const cam_crop_region_t* scalerCropRegion)
6086{
6087 int32_t roi_x_max = roi->rect.width + roi->rect.left;
6088 int32_t roi_y_max = roi->rect.height + roi->rect.top;
6089 int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
6090 int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
6091
6092 /* According to spec weight = 0 is used to indicate roi needs to be disabled
6093 * without having this check the calculations below to validate if the roi
6094 * is inside scalar crop region will fail resulting in the roi not being
6095 * reset causing algorithm to continue to use stale roi window
6096 */
6097 if (roi->weight == 0) {
6098 return true;
6099 }
6100
6101 if ((roi_x_max < scalerCropRegion->left) ||
6102 // right edge of roi window is left of scalar crop's left edge
6103 (roi_y_max < scalerCropRegion->top) ||
6104 // bottom edge of roi window is above scalar crop's top edge
6105 (roi->rect.left > crop_x_max) ||
6106 // left edge of roi window is beyond(right) of scalar crop's right edge
6107 (roi->rect.top > crop_y_max)){
6108 // top edge of roi windo is above scalar crop's top edge
6109 return false;
6110 }
6111 if (roi->rect.left < scalerCropRegion->left) {
6112 roi->rect.left = scalerCropRegion->left;
6113 }
6114 if (roi->rect.top < scalerCropRegion->top) {
6115 roi->rect.top = scalerCropRegion->top;
6116 }
6117 if (roi_x_max > crop_x_max) {
6118 roi_x_max = crop_x_max;
6119 }
6120 if (roi_y_max > crop_y_max) {
6121 roi_y_max = crop_y_max;
6122 }
6123 roi->rect.width = roi_x_max - roi->rect.left;
6124 roi->rect.height = roi_y_max - roi->rect.top;
6125 return true;
6126}
6127
6128/*===========================================================================
6129 * FUNCTION : convertLandmarks
6130 *
6131 * DESCRIPTION: helper method to extract the landmarks from face detection info
6132 *
6133 * PARAMETERS :
6134 * @landmark_data : input landmark data to be converted
6135 * @landmarks : int32_t destination array
6136 *
6137 *
6138 *==========================================================================*/
6139void QCamera3HardwareInterface::convertLandmarks(
6140 cam_face_landmarks_info_t landmark_data,
6141 int32_t *landmarks)
6142{
6143 landmarks[0] = (int32_t)landmark_data.left_eye_center.x;
6144 landmarks[1] = (int32_t)landmark_data.left_eye_center.y;
6145 landmarks[2] = (int32_t)landmark_data.right_eye_center.x;
6146 landmarks[3] = (int32_t)landmark_data.right_eye_center.y;
6147 landmarks[4] = (int32_t)landmark_data.mouth_center.x;
6148 landmarks[5] = (int32_t)landmark_data.mouth_center.y;
6149}
6150
6151#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
6152/*===========================================================================
6153 * FUNCTION : initCapabilities
6154 *
6155 * DESCRIPTION: initialize camera capabilities in static data struct
6156 *
6157 * PARAMETERS :
6158 * @cameraId : camera Id
6159 *
6160 * RETURN : int32_t type of status
6161 * NO_ERROR -- success
6162 * none-zero failure code
6163 *==========================================================================*/
6164int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
6165{
6166 int rc = 0;
6167 mm_camera_vtbl_t *cameraHandle = NULL;
6168 QCamera3HeapMemory *capabilityHeap = NULL;
6169
6170 rc = camera_open((uint8_t)cameraId, &cameraHandle);
6171 if (rc) {
6172 LOGE("camera_open failed. rc = %d", rc);
6173 goto open_failed;
6174 }
6175 if (!cameraHandle) {
6176 LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
6177 goto open_failed;
6178 }
6179
6180 capabilityHeap = new QCamera3HeapMemory(1);
6181 if (capabilityHeap == NULL) {
6182 LOGE("creation of capabilityHeap failed");
6183 goto heap_creation_failed;
6184 }
6185 /* Allocate memory for capability buffer */
6186 rc = capabilityHeap->allocate(sizeof(cam_capability_t));
6187 if(rc != OK) {
6188 LOGE("No memory for cappability");
6189 goto allocate_failed;
6190 }
6191
6192 /* Map memory for capability buffer */
6193 memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
6194 rc = cameraHandle->ops->map_buf(cameraHandle->camera_handle,
6195 CAM_MAPPING_BUF_TYPE_CAPABILITY,
6196 capabilityHeap->getFd(0),
6197 sizeof(cam_capability_t),
6198 capabilityHeap->getPtr(0));
6199 if(rc < 0) {
6200 LOGE("failed to map capability buffer");
6201 goto map_failed;
6202 }
6203
6204 /* Query Capability */
6205 rc = cameraHandle->ops->query_capability(cameraHandle->camera_handle);
6206 if(rc < 0) {
6207 LOGE("failed to query capability");
6208 goto query_failed;
6209 }
6210 gCamCapability[cameraId] = (cam_capability_t *)malloc(sizeof(cam_capability_t));
6211 if (!gCamCapability[cameraId]) {
6212 LOGE("out of memory");
6213 goto query_failed;
6214 }
6215 memcpy(gCamCapability[cameraId], DATA_PTR(capabilityHeap,0),
6216 sizeof(cam_capability_t));
6217
6218 int index;
6219 for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
6220 cam_analysis_info_t *p_analysis_info =
6221 &gCamCapability[cameraId]->analysis_info[index];
6222 p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
6223 p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
6224 }
6225 rc = 0;
6226
6227query_failed:
6228 cameraHandle->ops->unmap_buf(cameraHandle->camera_handle,
6229 CAM_MAPPING_BUF_TYPE_CAPABILITY);
6230map_failed:
6231 capabilityHeap->deallocate();
6232allocate_failed:
6233 delete capabilityHeap;
6234heap_creation_failed:
6235 cameraHandle->ops->close_camera(cameraHandle->camera_handle);
6236 cameraHandle = NULL;
6237open_failed:
6238 return rc;
6239}
6240
6241/*==========================================================================
 * FUNCTION   : get3AVersion
6243 *
6244 * DESCRIPTION: get the Q3A S/W version
6245 *
6246 * PARAMETERS :
6247 * @sw_version: Reference of Q3A structure which will hold version info upon
6248 * return
6249 *
6250 * RETURN : None
6251 *
6252 *==========================================================================*/
6253void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
6254{
6255 if(gCamCapability[mCameraId])
6256 sw_version = gCamCapability[mCameraId]->q3a_version;
6257 else
6258 LOGE("Capability structure NULL!");
6259}
6260
6261
6262/*===========================================================================
6263 * FUNCTION : initParameters
6264 *
6265 * DESCRIPTION: initialize camera parameters
6266 *
6267 * PARAMETERS :
6268 *
6269 * RETURN : int32_t type of status
6270 * NO_ERROR -- success
6271 * none-zero failure code
6272 *==========================================================================*/
6273int QCamera3HardwareInterface::initParameters()
6274{
6275 int rc = 0;
6276
6277 //Allocate Set Param Buffer
6278 mParamHeap = new QCamera3HeapMemory(1);
6279 rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
6280 if(rc != OK) {
6281 rc = NO_MEMORY;
6282 LOGE("Failed to allocate SETPARM Heap memory");
6283 delete mParamHeap;
6284 mParamHeap = NULL;
6285 return rc;
6286 }
6287
6288 //Map memory for parameters buffer
6289 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
6290 CAM_MAPPING_BUF_TYPE_PARM_BUF,
6291 mParamHeap->getFd(0),
6292 sizeof(metadata_buffer_t),
6293 (metadata_buffer_t *) DATA_PTR(mParamHeap,0));
6294 if(rc < 0) {
6295 LOGE("failed to map SETPARM buffer");
6296 rc = FAILED_TRANSACTION;
6297 mParamHeap->deallocate();
6298 delete mParamHeap;
6299 mParamHeap = NULL;
6300 return rc;
6301 }
6302
6303 mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
6304
6305 mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
6306 return rc;
6307}
6308
6309/*===========================================================================
6310 * FUNCTION : deinitParameters
6311 *
6312 * DESCRIPTION: de-initialize camera parameters
6313 *
6314 * PARAMETERS :
6315 *
6316 * RETURN : NONE
6317 *==========================================================================*/
6318void QCamera3HardwareInterface::deinitParameters()
6319{
6320 mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
6321 CAM_MAPPING_BUF_TYPE_PARM_BUF);
6322
6323 mParamHeap->deallocate();
6324 delete mParamHeap;
6325 mParamHeap = NULL;
6326
6327 mParameters = NULL;
6328
6329 free(mPrevParameters);
6330 mPrevParameters = NULL;
6331}
6332
6333/*===========================================================================
6334 * FUNCTION : calcMaxJpegSize
6335 *
6336 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
6337 *
6338 * PARAMETERS :
6339 *
6340 * RETURN : max_jpeg_size
6341 *==========================================================================*/
6342size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
6343{
6344 size_t max_jpeg_size = 0;
6345 size_t temp_width, temp_height;
6346 size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
6347 MAX_SIZES_CNT);
6348 for (size_t i = 0; i < count; i++) {
6349 temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
6350 temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
6351 if (temp_width * temp_height > max_jpeg_size ) {
6352 max_jpeg_size = temp_width * temp_height;
6353 }
6354 }
6355 max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
6356 return max_jpeg_size;
6357}
6358
6359/*===========================================================================
6360 * FUNCTION : getMaxRawSize
6361 *
6362 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
6363 *
6364 * PARAMETERS :
6365 *
6366 * RETURN : Largest supported Raw Dimension
6367 *==========================================================================*/
6368cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
6369{
6370 int max_width = 0;
6371 cam_dimension_t maxRawSize;
6372
6373 memset(&maxRawSize, 0, sizeof(cam_dimension_t));
6374 for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
6375 if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
6376 max_width = gCamCapability[camera_id]->raw_dim[i].width;
6377 maxRawSize = gCamCapability[camera_id]->raw_dim[i];
6378 }
6379 }
6380 return maxRawSize;
6381}
6382
6383
6384/*===========================================================================
6385 * FUNCTION : calcMaxJpegDim
6386 *
6387 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
6388 *
6389 * PARAMETERS :
6390 *
6391 * RETURN : max_jpeg_dim
6392 *==========================================================================*/
6393cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
6394{
6395 cam_dimension_t max_jpeg_dim;
6396 cam_dimension_t curr_jpeg_dim;
6397 max_jpeg_dim.width = 0;
6398 max_jpeg_dim.height = 0;
6399 curr_jpeg_dim.width = 0;
6400 curr_jpeg_dim.height = 0;
6401 for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
6402 curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
6403 curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
6404 if (curr_jpeg_dim.width * curr_jpeg_dim.height >
6405 max_jpeg_dim.width * max_jpeg_dim.height ) {
6406 max_jpeg_dim.width = curr_jpeg_dim.width;
6407 max_jpeg_dim.height = curr_jpeg_dim.height;
6408 }
6409 }
6410 return max_jpeg_dim;
6411}
6412
6413/*===========================================================================
6414 * FUNCTION : addStreamConfig
6415 *
6416 * DESCRIPTION: adds the stream configuration to the array
6417 *
6418 * PARAMETERS :
6419 * @available_stream_configs : pointer to stream configuration array
6420 * @scalar_format : scalar format
6421 * @dim : configuration dimension
6422 * @config_type : input or output configuration type
6423 *
6424 * RETURN : NONE
6425 *==========================================================================*/
6426void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
6427 int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
6428{
6429 available_stream_configs.add(scalar_format);
6430 available_stream_configs.add(dim.width);
6431 available_stream_configs.add(dim.height);
6432 available_stream_configs.add(config_type);
6433}
6434
6435/*===========================================================================
 * FUNCTION   : supportBurstCapture
6437 *
6438 * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
6439 *
6440 * PARAMETERS :
6441 * @cameraId : camera Id
6442 *
6443 * RETURN : true if camera supports BURST_CAPTURE
6444 * false otherwise
6445 *==========================================================================*/
6446bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
6447{
6448 const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
6449 const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
6450 const int32_t highResWidth = 3264;
6451 const int32_t highResHeight = 2448;
6452
6453 if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
6454 // Maximum resolution images cannot be captured at >= 10fps
6455 // -> not supporting BURST_CAPTURE
6456 return false;
6457 }
6458
6459 if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
6460 // Maximum resolution images can be captured at >= 20fps
6461 // --> supporting BURST_CAPTURE
6462 return true;
6463 }
6464
6465 // Find the smallest highRes resolution, or largest resolution if there is none
6466 size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
6467 MAX_SIZES_CNT);
6468 size_t highRes = 0;
6469 while ((highRes + 1 < totalCnt) &&
6470 (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
6471 gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
6472 highResWidth * highResHeight)) {
6473 highRes++;
6474 }
6475 if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
6476 return true;
6477 } else {
6478 return false;
6479 }
6480}
6481
6482/*===========================================================================
6483 * FUNCTION : initStaticMetadata
6484 *
6485 * DESCRIPTION: initialize the static metadata
6486 *
6487 * PARAMETERS :
6488 * @cameraId : camera Id
6489 *
6490 * RETURN : int32_t type of status
6491 * 0 -- success
6492 * non-zero failure code
6493 *==========================================================================*/
6494int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
6495{
6496 int rc = 0;
6497 CameraMetadata staticInfo;
6498 size_t count = 0;
6499 bool limitedDevice = false;
6500 char prop[PROPERTY_VALUE_MAX];
6501 bool supportBurst = false;
6502
6503 supportBurst = supportBurstCapture(cameraId);
6504
6505 /* If sensor is YUV sensor (no raw support) or if per-frame control is not
6506 * guaranteed or if min fps of max resolution is less than 20 fps, its
6507 * advertised as limited device*/
6508 limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
6509 (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
6510 (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
6511 !supportBurst;
6512
6513 uint8_t supportedHwLvl = limitedDevice ?
6514 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
6515 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
6516
6517 staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
6518 &supportedHwLvl, 1);
6519
6520 bool facingBack = false;
6521 if ((gCamCapability[cameraId]->position == CAM_POSITION_BACK) ||
6522 (gCamCapability[cameraId]->position == CAM_POSITION_BACK_AUX)) {
6523 facingBack = true;
6524 }
6525 /*HAL 3 only*/
6526 staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
6527 &gCamCapability[cameraId]->min_focus_distance, 1);
6528
6529 staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
6530 &gCamCapability[cameraId]->hyper_focal_distance, 1);
6531
6532 /*should be using focal lengths but sensor doesn't provide that info now*/
6533 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
6534 &gCamCapability[cameraId]->focal_length,
6535 1);
6536
6537 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
6538 gCamCapability[cameraId]->apertures,
6539 MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
6540
6541 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
6542 gCamCapability[cameraId]->filter_densities,
6543 MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
6544
6545
6546 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
6547 (uint8_t *)gCamCapability[cameraId]->optical_stab_modes,
6548 MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count));
6549
6550 int32_t lens_shading_map_size[] = {
6551 MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
6552 MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
6553 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
6554 lens_shading_map_size,
6555 sizeof(lens_shading_map_size)/sizeof(int32_t));
6556
6557 staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
6558 gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
6559
6560 staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
6561 gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
6562
6563 staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
6564 &gCamCapability[cameraId]->max_frame_duration, 1);
6565
6566 camera_metadata_rational baseGainFactor = {
6567 gCamCapability[cameraId]->base_gain_factor.numerator,
6568 gCamCapability[cameraId]->base_gain_factor.denominator};
6569 staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
6570 &baseGainFactor, 1);
6571
6572 staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
6573 (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
6574
6575 int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
6576 gCamCapability[cameraId]->pixel_array_size.height};
6577 staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
6578 pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
6579
6580 int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
6581 gCamCapability[cameraId]->active_array_size.top,
6582 gCamCapability[cameraId]->active_array_size.width,
6583 gCamCapability[cameraId]->active_array_size.height};
6584 staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
6585 active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
6586
6587 staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
6588 &gCamCapability[cameraId]->white_level, 1);
6589
6590 staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
6591 gCamCapability[cameraId]->black_level_pattern, BLACK_LEVEL_PATTERN_CNT);
6592
6593 staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
6594 &gCamCapability[cameraId]->flash_charge_duration, 1);
6595
6596 staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
6597 &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
6598
6599 uint8_t timestampSource = ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN;
6600 staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
6601 &timestampSource, 1);
6602
6603 staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
6604 &gCamCapability[cameraId]->histogram_size, 1);
6605
6606 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
6607 &gCamCapability[cameraId]->max_histogram_count, 1);
6608
6609 int32_t sharpness_map_size[] = {
6610 gCamCapability[cameraId]->sharpness_map_size.width,
6611 gCamCapability[cameraId]->sharpness_map_size.height};
6612
6613 staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
6614 sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
6615
6616 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
6617 &gCamCapability[cameraId]->max_sharpness_map_value, 1);
6618
6619 int32_t scalar_formats[] = {
6620 ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
6621 ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
6622 ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
6623 ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
6624 HAL_PIXEL_FORMAT_RAW10,
6625 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
6626 size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(int32_t);
6627 staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
6628 scalar_formats,
6629 scalar_formats_count);
6630
6631 int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
6632 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
6633 makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
6634 count, MAX_SIZES_CNT, available_processed_sizes);
6635 staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
6636 available_processed_sizes, count * 2);
6637
6638 int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
6639 count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
6640 makeTable(gCamCapability[cameraId]->raw_dim,
6641 count, MAX_SIZES_CNT, available_raw_sizes);
6642 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
6643 available_raw_sizes, count * 2);
6644
6645 int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
6646 count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
6647 makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
6648 count, MAX_SIZES_CNT, available_fps_ranges);
6649 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
6650 available_fps_ranges, count * 2);
6651
6652 camera_metadata_rational exposureCompensationStep = {
6653 gCamCapability[cameraId]->exp_compensation_step.numerator,
6654 gCamCapability[cameraId]->exp_compensation_step.denominator};
6655 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
6656 &exposureCompensationStep, 1);
6657
6658 Vector<uint8_t> availableVstabModes;
6659 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
6660 char eis_prop[PROPERTY_VALUE_MAX];
6661 memset(eis_prop, 0, sizeof(eis_prop));
6662 property_get("persist.camera.eis.enable", eis_prop, "0");
6663 uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
6664 if (facingBack && eis_prop_set) {
6665 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
6666 }
6667 staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
6668 availableVstabModes.array(), availableVstabModes.size());
6669
6670 /*HAL 1 and HAL 3 common*/
6671 uint32_t zoomSteps = gCamCapability[cameraId]->zoom_ratio_tbl_cnt;
6672 uint32_t maxZoomStep = gCamCapability[cameraId]->zoom_ratio_tbl[zoomSteps - 1];
6673 uint32_t minZoomStep = 100; //as per HAL1/API1 spec
6674 float maxZoom = maxZoomStep/minZoomStep;
6675 staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
6676 &maxZoom, 1);
6677
6678 uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
6679 staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
6680
6681 int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
6682 if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
6683 max3aRegions[2] = 0; /* AF not supported */
6684 staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
6685 max3aRegions, 3);
6686
6687 /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
6688 memset(prop, 0, sizeof(prop));
6689 property_get("persist.camera.facedetect", prop, "1");
6690 uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
6691 LOGD("Support face detection mode: %d",
6692 supportedFaceDetectMode);
6693
6694 int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
6695 Vector<uint8_t> availableFaceDetectModes;
6696 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
6697 if (supportedFaceDetectMode == 1) {
6698 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
6699 } else if (supportedFaceDetectMode == 2) {
6700 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
6701 } else if (supportedFaceDetectMode == 3) {
6702 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
6703 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
6704 } else {
6705 maxFaces = 0;
6706 }
6707 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
6708 availableFaceDetectModes.array(),
6709 availableFaceDetectModes.size());
6710 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
6711 (int32_t *)&maxFaces, 1);
6712
6713 int32_t exposureCompensationRange[] = {
6714 gCamCapability[cameraId]->exposure_compensation_min,
6715 gCamCapability[cameraId]->exposure_compensation_max};
6716 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
6717 exposureCompensationRange,
6718 sizeof(exposureCompensationRange)/sizeof(int32_t));
6719
6720 uint8_t lensFacing = (facingBack) ?
6721 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
6722 staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
6723
6724 staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
6725 available_thumbnail_sizes,
6726 sizeof(available_thumbnail_sizes)/sizeof(int32_t));
6727
6728 /*all sizes will be clubbed into this tag*/
6729 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
6730 /*android.scaler.availableStreamConfigurations*/
6731 Vector<int32_t> available_stream_configs;
6732 cam_dimension_t active_array_dim;
6733 active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
6734 active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
6735 /* Add input/output stream configurations for each scalar formats*/
6736 for (size_t j = 0; j < scalar_formats_count; j++) {
6737 switch (scalar_formats[j]) {
6738 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
6739 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
6740 case HAL_PIXEL_FORMAT_RAW10:
6741 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
6742 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
6743 addStreamConfig(available_stream_configs, scalar_formats[j],
6744 gCamCapability[cameraId]->raw_dim[i],
6745 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
6746 }
6747 break;
6748 case HAL_PIXEL_FORMAT_BLOB:
6749 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
6750 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
6751 addStreamConfig(available_stream_configs, scalar_formats[j],
6752 gCamCapability[cameraId]->picture_sizes_tbl[i],
6753 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
6754 }
6755 break;
6756 case HAL_PIXEL_FORMAT_YCbCr_420_888:
6757 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
6758 default:
6759 cam_dimension_t largest_picture_size;
6760 memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
6761 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
6762 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
6763 addStreamConfig(available_stream_configs, scalar_formats[j],
6764 gCamCapability[cameraId]->picture_sizes_tbl[i],
6765 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
6766 /* Book keep largest */
6767 if (gCamCapability[cameraId]->picture_sizes_tbl[i].width
6768 >= largest_picture_size.width &&
6769 gCamCapability[cameraId]->picture_sizes_tbl[i].height
6770 >= largest_picture_size.height)
6771 largest_picture_size = gCamCapability[cameraId]->picture_sizes_tbl[i];
6772 }
6773 /*For below 2 formats we also support i/p streams for reprocessing advertise those*/
6774 if (scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
6775 scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) {
6776 addStreamConfig(available_stream_configs, scalar_formats[j],
6777 largest_picture_size,
6778 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
6779 }
6780 break;
6781 }
6782 }
6783
6784 staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
6785 available_stream_configs.array(), available_stream_configs.size());
6786 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
6787 staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
6788
6789 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
6790 staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
6791
6792 /* android.scaler.availableMinFrameDurations */
6793 Vector<int64_t> available_min_durations;
6794 for (size_t j = 0; j < scalar_formats_count; j++) {
6795 switch (scalar_formats[j]) {
6796 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
6797 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
6798 case HAL_PIXEL_FORMAT_RAW10:
6799 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
6800 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
6801 available_min_durations.add(scalar_formats[j]);
6802 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
6803 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
6804 available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
6805 }
6806 break;
6807 default:
6808 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
6809 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
6810 available_min_durations.add(scalar_formats[j]);
6811 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
6812 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
6813 available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
6814 }
6815 break;
6816 }
6817 }
6818 staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
6819 available_min_durations.array(), available_min_durations.size());
6820
6821 Vector<int32_t> available_hfr_configs;
6822 for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
6823 int32_t fps = 0;
6824 switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
6825 case CAM_HFR_MODE_60FPS:
6826 fps = 60;
6827 break;
6828 case CAM_HFR_MODE_90FPS:
6829 fps = 90;
6830 break;
6831 case CAM_HFR_MODE_120FPS:
6832 fps = 120;
6833 break;
6834 case CAM_HFR_MODE_150FPS:
6835 fps = 150;
6836 break;
6837 case CAM_HFR_MODE_180FPS:
6838 fps = 180;
6839 break;
6840 case CAM_HFR_MODE_210FPS:
6841 fps = 210;
6842 break;
6843 case CAM_HFR_MODE_240FPS:
6844 fps = 240;
6845 break;
6846 case CAM_HFR_MODE_480FPS:
6847 fps = 480;
6848 break;
6849 case CAM_HFR_MODE_OFF:
6850 case CAM_HFR_MODE_MAX:
6851 default:
6852 break;
6853 }
6854
6855 /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
6856 if (fps >= MIN_FPS_FOR_BATCH_MODE) {
6857 /* For each HFR frame rate, need to advertise one variable fps range
6858 * and one fixed fps range per dimension. Eg: for 120 FPS, advertise [30, 120]
6859 * and [120, 120]. While camcorder preview alone is running [30, 120] is
6860 * set by the app. When video recording is started, [120, 120] is
6861 * set. This way sensor configuration does not change when recording
6862 * is started */
6863
6864 /* (width, height, fps_min, fps_max, batch_size_max) */
6865 for (size_t j = 0; j < gCamCapability[cameraId]->hfr_tbl[i].dim_cnt &&
6866 j < MAX_SIZES_CNT; j++) {
6867 available_hfr_configs.add(
6868 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
6869 available_hfr_configs.add(
6870 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
6871 available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
6872 available_hfr_configs.add(fps);
6873 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
6874
6875 /* (width, height, fps_min, fps_max, batch_size_max) */
6876 available_hfr_configs.add(
6877 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
6878 available_hfr_configs.add(
6879 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
6880 available_hfr_configs.add(fps);
6881 available_hfr_configs.add(fps);
6882 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
6883 }
6884 }
6885 }
6886 //Advertise HFR capability only if the property is set
6887 memset(prop, 0, sizeof(prop));
6888 property_get("persist.camera.hal3hfr.enable", prop, "1");
6889 uint8_t hfrEnable = (uint8_t)atoi(prop);
6890
6891 if(hfrEnable && available_hfr_configs.array()) {
6892 staticInfo.update(
6893 ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
6894 available_hfr_configs.array(), available_hfr_configs.size());
6895 }
6896
6897 int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
6898 staticInfo.update(ANDROID_JPEG_MAX_SIZE,
6899 &max_jpeg_size, 1);
6900
6901 uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
6902 size_t size = 0;
6903 count = CAM_EFFECT_MODE_MAX;
6904 count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
6905 for (size_t i = 0; i < count; i++) {
6906 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
6907 gCamCapability[cameraId]->supported_effects[i]);
6908 if (NAME_NOT_FOUND != val) {
6909 avail_effects[size] = (uint8_t)val;
6910 size++;
6911 }
6912 }
6913 staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
6914 avail_effects,
6915 size);
6916
6917 uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
6918 uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
6919 size_t supported_scene_modes_cnt = 0;
6920 count = CAM_SCENE_MODE_MAX;
6921 count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
6922 for (size_t i = 0; i < count; i++) {
6923 if (gCamCapability[cameraId]->supported_scene_modes[i] !=
6924 CAM_SCENE_MODE_OFF) {
6925 int val = lookupFwkName(SCENE_MODES_MAP,
6926 METADATA_MAP_SIZE(SCENE_MODES_MAP),
6927 gCamCapability[cameraId]->supported_scene_modes[i]);
6928 if (NAME_NOT_FOUND != val) {
6929 avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
6930 supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
6931 supported_scene_modes_cnt++;
6932 }
6933 }
6934 }
6935 staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
6936 avail_scene_modes,
6937 supported_scene_modes_cnt);
6938
6939 uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
6940 makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
6941 supported_scene_modes_cnt,
6942 CAM_SCENE_MODE_MAX,
6943 scene_mode_overrides,
6944 supported_indexes,
6945 cameraId);
6946
6947 if (supported_scene_modes_cnt == 0) {
6948 supported_scene_modes_cnt = 1;
6949 avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
6950 }
6951
6952 staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
6953 scene_mode_overrides, supported_scene_modes_cnt * 3);
6954
6955 uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
6956 ANDROID_CONTROL_MODE_AUTO,
6957 ANDROID_CONTROL_MODE_USE_SCENE_MODE};
6958 staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
6959 available_control_modes,
6960 3);
6961
6962 uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
6963 size = 0;
6964 count = CAM_ANTIBANDING_MODE_MAX;
6965 count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
6966 for (size_t i = 0; i < count; i++) {
6967 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
6968 gCamCapability[cameraId]->supported_antibandings[i]);
6969 if (NAME_NOT_FOUND != val) {
6970 avail_antibanding_modes[size] = (uint8_t)val;
6971 size++;
6972 }
6973
6974 }
6975 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
6976 avail_antibanding_modes,
6977 size);
6978
6979 uint8_t avail_abberation_modes[] = {
6980 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
6981 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
6982 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
6983 count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
6984 count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
6985 if (0 == count) {
6986 // If no aberration correction modes are available for a device, this advertise OFF mode
6987 size = 1;
6988 } else {
6989 // If count is not zero then atleast one among the FAST or HIGH quality is supported
6990 // So, advertize all 3 modes if atleast any one mode is supported as per the
6991 // new M requirement
6992 size = 3;
6993 }
6994 staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
6995 avail_abberation_modes,
6996 size);
6997
6998 uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
6999 size = 0;
7000 count = CAM_FOCUS_MODE_MAX;
7001 count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
7002 for (size_t i = 0; i < count; i++) {
7003 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
7004 gCamCapability[cameraId]->supported_focus_modes[i]);
7005 if (NAME_NOT_FOUND != val) {
7006 avail_af_modes[size] = (uint8_t)val;
7007 size++;
7008 }
7009 }
7010 staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
7011 avail_af_modes,
7012 size);
7013
7014 uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
7015 size = 0;
7016 count = CAM_WB_MODE_MAX;
7017 count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
7018 for (size_t i = 0; i < count; i++) {
7019 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
7020 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
7021 gCamCapability[cameraId]->supported_white_balances[i]);
7022 if (NAME_NOT_FOUND != val) {
7023 avail_awb_modes[size] = (uint8_t)val;
7024 size++;
7025 }
7026 }
7027 staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
7028 avail_awb_modes,
7029 size);
7030
7031 uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
7032 count = CAM_FLASH_FIRING_LEVEL_MAX;
7033 count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
7034 count);
7035 for (size_t i = 0; i < count; i++) {
7036 available_flash_levels[i] =
7037 gCamCapability[cameraId]->supported_firing_levels[i];
7038 }
7039 staticInfo.update(ANDROID_FLASH_FIRING_POWER,
7040 available_flash_levels, count);
7041
7042 uint8_t flashAvailable;
7043 if (gCamCapability[cameraId]->flash_available)
7044 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
7045 else
7046 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
7047 staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
7048 &flashAvailable, 1);
7049
7050 Vector<uint8_t> avail_ae_modes;
7051 count = CAM_AE_MODE_MAX;
7052 count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
7053 for (size_t i = 0; i < count; i++) {
7054 avail_ae_modes.add(gCamCapability[cameraId]->supported_ae_modes[i]);
7055 }
7056 if (flashAvailable) {
7057 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
7058 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
7059 }
7060 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
7061 avail_ae_modes.array(),
7062 avail_ae_modes.size());
7063
7064 int32_t sensitivity_range[2];
7065 sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
7066 sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
7067 staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
7068 sensitivity_range,
7069 sizeof(sensitivity_range) / sizeof(int32_t));
7070
7071 staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
7072 &gCamCapability[cameraId]->max_analog_sensitivity,
7073 1);
7074
7075 int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
7076 staticInfo.update(ANDROID_SENSOR_ORIENTATION,
7077 &sensor_orientation,
7078 1);
7079
7080 int32_t max_output_streams[] = {
7081 MAX_STALLING_STREAMS,
7082 MAX_PROCESSED_STREAMS,
7083 MAX_RAW_STREAMS};
7084 staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
7085 max_output_streams,
7086 sizeof(max_output_streams)/sizeof(max_output_streams[0]));
7087
7088 uint8_t avail_leds = 0;
7089 staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
7090 &avail_leds, 0);
7091
7092 uint8_t focus_dist_calibrated;
7093 int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
7094 gCamCapability[cameraId]->focus_dist_calibrated);
7095 if (NAME_NOT_FOUND != val) {
7096 focus_dist_calibrated = (uint8_t)val;
7097 staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
7098 &focus_dist_calibrated, 1);
7099 }
7100
7101 int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
7102 size = 0;
7103 count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
7104 MAX_TEST_PATTERN_CNT);
7105 for (size_t i = 0; i < count; i++) {
7106 int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
7107 gCamCapability[cameraId]->supported_test_pattern_modes[i]);
7108 if (NAME_NOT_FOUND != testpatternMode) {
7109 avail_testpattern_modes[size] = testpatternMode;
7110 size++;
7111 }
7112 }
7113 staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
7114 avail_testpattern_modes,
7115 size);
7116
7117 uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
7118 staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
7119 &max_pipeline_depth,
7120 1);
7121
7122 int32_t partial_result_count = PARTIAL_RESULT_COUNT;
7123 staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
7124 &partial_result_count,
7125 1);
7126
7127 int32_t max_stall_duration = MAX_REPROCESS_STALL;
7128 staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
7129
7130 Vector<uint8_t> available_capabilities;
7131 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
7132 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
7133 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
7134 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
7135 if (supportBurst) {
7136 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
7137 }
7138 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
7139 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
7140 if (hfrEnable && available_hfr_configs.array()) {
7141 available_capabilities.add(
7142 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
7143 }
7144
7145 if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
7146 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
7147 }
7148 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
7149 available_capabilities.array(),
7150 available_capabilities.size());
7151
7152 //aeLockAvailable to be set to true if capabilities has MANUAL_SENSOR or BURST_CAPTURE
7153 //Assumption is that all bayer cameras support MANUAL_SENSOR.
7154 uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
7155 ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
7156
7157 staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
7158 &aeLockAvailable, 1);
7159
7160 //awbLockAvailable to be set to true if capabilities has MANUAL_POST_PROCESSING or
7161 //BURST_CAPTURE. Assumption is that all bayer cameras support MANUAL_POST_PROCESSING.
7162 uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
7163 ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
7164
7165 staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
7166 &awbLockAvailable, 1);
7167
7168 int32_t max_input_streams = 1;
7169 staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
7170 &max_input_streams,
7171 1);
7172
7173 /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
7174 int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
7175 HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
7176 HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
7177 HAL_PIXEL_FORMAT_YCbCr_420_888};
7178 staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
7179 io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));
7180
7181 int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
7182 staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
7183 &max_latency,
7184 1);
7185
7186 uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
7187 ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
7188 staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
7189 available_hot_pixel_modes,
7190 sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));
7191
7192 uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
7193 ANDROID_SHADING_MODE_FAST,
7194 ANDROID_SHADING_MODE_HIGH_QUALITY};
7195 staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
7196 available_shading_modes,
7197 3);
7198
7199 uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
7200 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
7201 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
7202 available_lens_shading_map_modes,
7203 2);
7204
7205 uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
7206 ANDROID_EDGE_MODE_FAST,
7207 ANDROID_EDGE_MODE_HIGH_QUALITY,
7208 ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
7209 staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
7210 available_edge_modes,
7211 sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));
7212
7213 uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
7214 ANDROID_NOISE_REDUCTION_MODE_FAST,
7215 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
7216 ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
7217 ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
7218 staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
7219 available_noise_red_modes,
7220 sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));
7221
7222 uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
7223 ANDROID_TONEMAP_MODE_FAST,
7224 ANDROID_TONEMAP_MODE_HIGH_QUALITY};
7225 staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
7226 available_tonemap_modes,
7227 sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));
7228
7229 uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
7230 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
7231 available_hot_pixel_map_modes,
7232 sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
7233
7234 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
7235 gCamCapability[cameraId]->reference_illuminant1);
7236 if (NAME_NOT_FOUND != val) {
7237 uint8_t fwkReferenceIlluminant = (uint8_t)val;
7238 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
7239 }
7240
7241 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
7242 gCamCapability[cameraId]->reference_illuminant2);
7243 if (NAME_NOT_FOUND != val) {
7244 uint8_t fwkReferenceIlluminant = (uint8_t)val;
7245 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
7246 }
7247
7248 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
7249 (void *)gCamCapability[cameraId]->forward_matrix1,
7250 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
7251
7252 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
7253 (void *)gCamCapability[cameraId]->forward_matrix2,
7254 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
7255
7256 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
7257 (void *)gCamCapability[cameraId]->color_transform1,
7258 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
7259
7260 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
7261 (void *)gCamCapability[cameraId]->color_transform2,
7262 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
7263
7264 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
7265 (void *)gCamCapability[cameraId]->calibration_transform1,
7266 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
7267
7268 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
7269 (void *)gCamCapability[cameraId]->calibration_transform2,
7270 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
7271
7272 int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
7273 ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
7274 ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
7275 ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
7276 ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
7277 ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
7278 ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
7279 ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
7280 ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
7281 ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
7282 ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
7283 ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
7284 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
7285 ANDROID_JPEG_GPS_COORDINATES,
7286 ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
7287 ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
7288 ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
7289 ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
7290 ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
7291 ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
7292 ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
7293 ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
7294 ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
7295 ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
7296 ANDROID_STATISTICS_FACE_DETECT_MODE,
7297 ANDROID_STATISTICS_HISTOGRAM_MODE, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
7298 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
7299 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
7300 ANDROID_BLACK_LEVEL_LOCK };
7301
7302 size_t request_keys_cnt =
7303 sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
7304 Vector<int32_t> available_request_keys;
7305 available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
7306 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
7307 available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
7308 }
7309
7310 staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
7311 available_request_keys.array(), available_request_keys.size());
7312
7313 int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
7314 ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
7315 ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
7316 ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
7317 ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
7318 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
7319 ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
7320 ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
7321 ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
7322 ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
7323 ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
7324 ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
7325 ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
7326 ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
7327 ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
7328 ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
7329 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
7330 ANDROID_STATISTICS_FACE_DETECT_MODE, ANDROID_STATISTICS_HISTOGRAM_MODE,
7331 ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
7332 ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
7333 ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
7334 ANDROID_STATISTICS_FACE_SCORES};
7335 size_t result_keys_cnt =
7336 sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
7337
7338 Vector<int32_t> available_result_keys;
7339 available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
7340 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
7341 available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
7342 }
7343 if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
7344 available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
7345 available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
7346 }
7347 if (supportedFaceDetectMode == 1) {
7348 available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
7349 available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
7350 } else if ((supportedFaceDetectMode == 2) ||
7351 (supportedFaceDetectMode == 3)) {
7352 available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
7353 available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
7354 }
7355 staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
7356 available_result_keys.array(), available_result_keys.size());
7357
7358 int32_t available_characteristics_keys[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
7359 ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
7360 ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
7361 ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
7362 ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
7363 ANDROID_SCALER_CROPPING_TYPE,
7364 ANDROID_SYNC_MAX_LATENCY,
7365 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
7366 ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
7367 ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
7368 ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
7369 ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
7370 ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
7371 ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
7372 ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
7373 ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
7374 ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
7375 ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
7376 ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
7377 ANDROID_LENS_FACING,
7378 ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
7379 ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
7380 ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
7381 ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
7382 ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
7383 ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
7384 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
7385 /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
7386 ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
7387 ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
7388 ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
7389 ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
7390 ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
7391 ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
7392 ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
7393 ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
7394 ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
7395 ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
7396 ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
7397 ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
7398 ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
7399 ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
7400 ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
7401 ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
7402 ANDROID_EDGE_AVAILABLE_EDGE_MODES,
7403 ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
7404 ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
7405 ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
7406 ANDROID_TONEMAP_MAX_CURVE_POINTS,
7407 ANDROID_CONTROL_AVAILABLE_MODES,
7408 ANDROID_CONTROL_AE_LOCK_AVAILABLE,
7409 ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
7410 ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
7411 ANDROID_SHADING_AVAILABLE_MODES,
7412 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL };
7413 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
7414 available_characteristics_keys,
7415 sizeof(available_characteristics_keys)/sizeof(int32_t));
7416
7417 /*available stall durations depend on the hw + sw and will be different for different devices */
7418 /*have to add for raw after implementation*/
7419 int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
7420 size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
7421
7422 Vector<int64_t> available_stall_durations;
7423 for (uint32_t j = 0; j < stall_formats_count; j++) {
7424 if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
7425 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
7426 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
7427 available_stall_durations.add(stall_formats[j]);
7428 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
7429 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
7430 available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
7431 }
7432 } else {
7433 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
7434 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
7435 available_stall_durations.add(stall_formats[j]);
7436 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
7437 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
7438 available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
7439 }
7440 }
7441 }
7442 staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
7443 available_stall_durations.array(),
7444 available_stall_durations.size());
7445
7446 //QCAMERA3_OPAQUE_RAW
7447 uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
7448 cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
7449 switch (gCamCapability[cameraId]->opaque_raw_fmt) {
7450 case LEGACY_RAW:
7451 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
7452 fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
7453 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
7454 fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
7455 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
7456 fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
7457 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
7458 break;
7459 case MIPI_RAW:
7460 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
7461 fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
7462 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
7463 fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
7464 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
7465 fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
7466 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
7467 break;
7468 default:
7469 LOGE("unknown opaque_raw_format %d",
7470 gCamCapability[cameraId]->opaque_raw_fmt);
7471 break;
7472 }
7473 staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
7474
7475 Vector<int32_t> strides;
7476 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
7477 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
7478 cam_stream_buf_plane_info_t buf_planes;
7479 strides.add(gCamCapability[cameraId]->raw_dim[i].width);
7480 strides.add(gCamCapability[cameraId]->raw_dim[i].height);
7481 mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
7482 &gCamCapability[cameraId]->padding_info, &buf_planes);
7483 strides.add(buf_planes.plane_info.mp[0].stride);
7484 }
7485 staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
7486 strides.size());
7487
7488 staticInfo.update(QCAMERA3_DUALCAM_CALIB_META_DATA_BLOB,
7489 (const uint8_t*)&gCamCapability[cameraId]->related_cam_calibration,
7490 sizeof(gCamCapability[cameraId]->related_cam_calibration));
7491
7492 uint8_t isMonoOnly =
7493 (gCamCapability[cameraId]->color_arrangement == CAM_FILTER_ARRANGEMENT_Y);
7494 staticInfo.update(QCAMERA3_SENSOR_IS_MONO_ONLY,
7495 &isMonoOnly, 1);
7496
7497 gStaticMetadata[cameraId] = staticInfo.release();
7498 return rc;
7499}
7500
7501/*===========================================================================
7502 * FUNCTION : makeTable
7503 *
7504 * DESCRIPTION: make a table of sizes
7505 *
7506 * PARAMETERS :
7507 *
7508 *
7509 *==========================================================================*/
7510void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
7511 size_t max_size, int32_t *sizeTable)
7512{
7513 size_t j = 0;
7514 if (size > max_size) {
7515 size = max_size;
7516 }
7517 for (size_t i = 0; i < size; i++) {
7518 sizeTable[j] = dimTable[i].width;
7519 sizeTable[j+1] = dimTable[i].height;
7520 j+=2;
7521 }
7522}
7523
7524/*===========================================================================
7525 * FUNCTION : makeFPSTable
7526 *
7527 * DESCRIPTION: make a table of fps ranges
7528 *
7529 * PARAMETERS :
7530 *
7531 *==========================================================================*/
7532void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
7533 size_t max_size, int32_t *fpsRangesTable)
7534{
7535 size_t j = 0;
7536 if (size > max_size) {
7537 size = max_size;
7538 }
7539 for (size_t i = 0; i < size; i++) {
7540 fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
7541 fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
7542 j+=2;
7543 }
7544}
7545
7546/*===========================================================================
7547 * FUNCTION : makeOverridesList
7548 *
7549 * DESCRIPTION: make a list of scene mode overrides
7550 *
7551 * PARAMETERS :
7552 *
7553 *
7554 *==========================================================================*/
7555void QCamera3HardwareInterface::makeOverridesList(
7556 cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
7557 uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
7558{
7559 /*daemon will give a list of overrides for all scene modes.
7560 However we should send the fwk only the overrides for the scene modes
7561 supported by the framework*/
7562 size_t j = 0;
7563 if (size > max_size) {
7564 size = max_size;
7565 }
7566 size_t focus_count = CAM_FOCUS_MODE_MAX;
7567 focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
7568 focus_count);
7569 for (size_t i = 0; i < size; i++) {
7570 bool supt = false;
7571 size_t index = supported_indexes[i];
7572 overridesList[j] = gCamCapability[camera_id]->flash_available ?
7573 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
7574 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
7575 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
7576 overridesTable[index].awb_mode);
7577 if (NAME_NOT_FOUND != val) {
7578 overridesList[j+1] = (uint8_t)val;
7579 }
7580 uint8_t focus_override = overridesTable[index].af_mode;
7581 for (size_t k = 0; k < focus_count; k++) {
7582 if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
7583 supt = true;
7584 break;
7585 }
7586 }
7587 if (supt) {
7588 val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
7589 focus_override);
7590 if (NAME_NOT_FOUND != val) {
7591 overridesList[j+2] = (uint8_t)val;
7592 }
7593 } else {
7594 overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
7595 }
7596 j+=3;
7597 }
7598}
7599
7600/*===========================================================================
7601 * FUNCTION : filterJpegSizes
7602 *
7603 * DESCRIPTION: Returns the supported jpeg sizes based on the max dimension that
7604 * could be downscaled to
7605 *
7606 * PARAMETERS :
7607 *
7608 * RETURN : length of jpegSizes array
7609 *==========================================================================*/
7610
7611size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
7612 size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
7613 uint8_t downscale_factor)
7614{
7615 if (0 == downscale_factor) {
7616 downscale_factor = 1;
7617 }
7618
7619 int32_t min_width = active_array_size.width / downscale_factor;
7620 int32_t min_height = active_array_size.height / downscale_factor;
7621 size_t jpegSizesCnt = 0;
7622 if (processedSizesCnt > maxCount) {
7623 processedSizesCnt = maxCount;
7624 }
7625 for (size_t i = 0; i < processedSizesCnt; i+=2) {
7626 if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
7627 jpegSizes[jpegSizesCnt] = processedSizes[i];
7628 jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
7629 jpegSizesCnt += 2;
7630 }
7631 }
7632 return jpegSizesCnt;
7633}
7634
7635/*===========================================================================
7636 * FUNCTION : computeNoiseModelEntryS
7637 *
7638 * DESCRIPTION: function to map a given sensitivity to the S noise
7639 * model parameters in the DNG noise model.
7640 *
7641 * PARAMETERS : sens : the sensor sensitivity
7642 *
 * RETURN : S (sensor amplification) noise
7644 *
7645 *==========================================================================*/
7646double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
7647 double s = gCamCapability[mCameraId]->gradient_S * sens +
7648 gCamCapability[mCameraId]->offset_S;
7649 return ((s < 0.0) ? 0.0 : s);
7650}
7651
7652/*===========================================================================
7653 * FUNCTION : computeNoiseModelEntryO
7654 *
7655 * DESCRIPTION: function to map a given sensitivity to the O noise
7656 * model parameters in the DNG noise model.
7657 *
7658 * PARAMETERS : sens : the sensor sensitivity
7659 *
 * RETURN : O (sensor readout) noise
7661 *
7662 *==========================================================================*/
7663double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
7664 int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
7665 double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
7666 1.0 : (1.0 * sens / max_analog_sens);
7667 double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
7668 gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
7669 return ((o < 0.0) ? 0.0 : o);
7670}
7671
7672/*===========================================================================
7673 * FUNCTION : getSensorSensitivity
7674 *
7675 * DESCRIPTION: convert iso_mode to an integer value
7676 *
7677 * PARAMETERS : iso_mode : the iso_mode supported by sensor
7678 *
 * RETURN : sensitivity supported by sensor
7680 *
7681 *==========================================================================*/
7682int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
7683{
7684 int32_t sensitivity;
7685
7686 switch (iso_mode) {
7687 case CAM_ISO_MODE_100:
7688 sensitivity = 100;
7689 break;
7690 case CAM_ISO_MODE_200:
7691 sensitivity = 200;
7692 break;
7693 case CAM_ISO_MODE_400:
7694 sensitivity = 400;
7695 break;
7696 case CAM_ISO_MODE_800:
7697 sensitivity = 800;
7698 break;
7699 case CAM_ISO_MODE_1600:
7700 sensitivity = 1600;
7701 break;
7702 default:
7703 sensitivity = -1;
7704 break;
7705 }
7706 return sensitivity;
7707}
7708
7709/*===========================================================================
7710 * FUNCTION : getCamInfo
7711 *
7712 * DESCRIPTION: query camera capabilities
7713 *
7714 * PARAMETERS :
7715 * @cameraId : camera Id
7716 * @info : camera info struct to be filled in with camera capabilities
7717 *
7718 * RETURN : int type of status
7719 * NO_ERROR -- success
7720 * none-zero failure code
7721 *==========================================================================*/
int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
        struct camera_info *info)
{
    ATRACE_CALL();
    int rc = 0;

    // Capabilities and static metadata are built lazily, once per camera,
    // and cached in the global tables; gCamLock serializes access to them.
    pthread_mutex_lock(&gCamLock);
    if (NULL == gCamCapability[cameraId]) {
        rc = initCapabilities(cameraId);
        if (rc < 0) {
            // Unlock on every early return so the mutex is never leaked.
            pthread_mutex_unlock(&gCamLock);
            return rc;
        }
    }

    if (NULL == gStaticMetadata[cameraId]) {
        rc = initStaticMetadata(cameraId);
        if (rc < 0) {
            pthread_mutex_unlock(&gCamLock);
            return rc;
        }
    }

    // Collapse the backend position (including aux sensors) onto the two
    // facings the framework understands. An unknown position still fills in
    // the rest of *info but reports failure via rc.
    switch(gCamCapability[cameraId]->position) {
    case CAM_POSITION_BACK:
    case CAM_POSITION_BACK_AUX:
        info->facing = CAMERA_FACING_BACK;
        break;

    case CAM_POSITION_FRONT:
    case CAM_POSITION_FRONT_AUX:
        info->facing = CAMERA_FACING_FRONT;
        break;

    default:
        LOGE("Unknown position type %d for camera id:%d",
                gCamCapability[cameraId]->position, cameraId);
        rc = -1;
        break;
    }


    info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
    info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
    info->static_camera_characteristics = gStaticMetadata[cameraId];

    //For now assume both cameras can operate independently.
    info->conflicting_devices = NULL;
    info->conflicting_devices_length = 0;

    //resource cost is 100 * MIN(1.0, m/M),
    //where m is throughput requirement with maximum stream configuration
    //and M is CPP maximum throughput.
    // m is estimated as active-array pixels * max fps across all advertised
    // fps ranges, for the maximum number of processed streams.
    float max_fps = 0.0;
    for (uint32_t i = 0;
            i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
        if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
            max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
    }
    float ratio = 1.0 * MAX_PROCESSED_STREAMS *
            gCamCapability[cameraId]->active_array_size.width *
            gCamCapability[cameraId]->active_array_size.height * max_fps /
            gCamCapability[cameraId]->max_pixel_bandwidth;
    info->resource_cost = 100 * MIN(1.0, ratio);
    LOGI("camera %d resource cost is %d", cameraId,
            info->resource_cost);

    pthread_mutex_unlock(&gCamLock);
    return rc;
}
7792
7793/*===========================================================================
7794 * FUNCTION : translateCapabilityToMetadata
7795 *
7796 * DESCRIPTION: translate the capability into camera_metadata_t
7797 *
7798 * PARAMETERS : type of the request
7799 *
7800 *
7801 * RETURN : success: camera_metadata_t*
7802 * failure: NULL
7803 *
7804 *==========================================================================*/
7805camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
7806{
7807 if (mDefaultMetadata[type] != NULL) {
7808 return mDefaultMetadata[type];
7809 }
7810 //first time we are handling this request
7811 //fill up the metadata structure using the wrapper class
7812 CameraMetadata settings;
7813 //translate from cam_capability_t to camera_metadata_tag_t
7814 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
7815 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
7816 int32_t defaultRequestID = 0;
7817 settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
7818
7819 /* OIS disable */
7820 char ois_prop[PROPERTY_VALUE_MAX];
7821 memset(ois_prop, 0, sizeof(ois_prop));
7822 property_get("persist.camera.ois.disable", ois_prop, "0");
7823 uint8_t ois_disable = (uint8_t)atoi(ois_prop);
7824
7825 /* Force video to use OIS */
7826 char videoOisProp[PROPERTY_VALUE_MAX];
7827 memset(videoOisProp, 0, sizeof(videoOisProp));
7828 property_get("persist.camera.ois.video", videoOisProp, "1");
7829 uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
7830
7831 // EIS enable/disable
7832 char eis_prop[PROPERTY_VALUE_MAX];
7833 memset(eis_prop, 0, sizeof(eis_prop));
7834 property_get("persist.camera.eis.enable", eis_prop, "0");
7835 const uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
7836
7837 const bool facingBack = ((gCamCapability[mCameraId]->position == CAM_POSITION_BACK) ||
7838 (gCamCapability[mCameraId]->position == CAM_POSITION_BACK_AUX));
7839 // This is a bit hacky. EIS is enabled only when the above setprop
7840 // is set to non-zero value and on back camera (for 2015 Nexus).
7841 // Ideally, we should rely on m_bEisEnable, but we cannot guarantee
7842 // configureStream is called before this function. In other words,
7843 // we cannot guarantee the app will call configureStream before
7844 // calling createDefaultRequest.
7845 const bool eisEnabled = facingBack && eis_prop_set;
7846
7847 uint8_t controlIntent = 0;
7848 uint8_t focusMode;
7849 uint8_t vsMode;
7850 uint8_t optStabMode;
7851 uint8_t cacMode;
7852 uint8_t edge_mode;
7853 uint8_t noise_red_mode;
7854 uint8_t tonemap_mode;
7855 bool highQualityModeEntryAvailable = FALSE;
7856 bool fastModeEntryAvailable = FALSE;
7857 vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
7858 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
7859 switch (type) {
7860 case CAMERA3_TEMPLATE_PREVIEW:
7861 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
7862 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
7863 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
7864 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
7865 edge_mode = ANDROID_EDGE_MODE_FAST;
7866 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
7867 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
7868 break;
7869 case CAMERA3_TEMPLATE_STILL_CAPTURE:
7870 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
7871 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
7872 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
7873 edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
7874 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
7875 tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
7876 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7877 // Order of priority for default CAC is HIGH Quality -> FAST -> OFF
7878 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
7879 if (gCamCapability[mCameraId]->aberration_modes[i] ==
7880 CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
7881 highQualityModeEntryAvailable = TRUE;
7882 } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
7883 CAM_COLOR_CORRECTION_ABERRATION_FAST) {
7884 fastModeEntryAvailable = TRUE;
7885 }
7886 }
7887 if (highQualityModeEntryAvailable) {
7888 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
7889 } else if (fastModeEntryAvailable) {
7890 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
7891 }
7892 break;
7893 case CAMERA3_TEMPLATE_VIDEO_RECORD:
7894 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
7895 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
7896 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
7897 if (eisEnabled) {
7898 vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON;
7899 }
7900 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
7901 edge_mode = ANDROID_EDGE_MODE_FAST;
7902 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
7903 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
7904 if (forceVideoOis)
7905 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
7906 break;
7907 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
7908 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
7909 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
7910 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
7911 if (eisEnabled) {
7912 vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON;
7913 }
7914 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
7915 edge_mode = ANDROID_EDGE_MODE_FAST;
7916 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
7917 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
7918 if (forceVideoOis)
7919 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
7920 break;
7921 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
7922 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
7923 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
7924 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
7925 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
7926 edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
7927 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
7928 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
7929 break;
7930 case CAMERA3_TEMPLATE_MANUAL:
7931 edge_mode = ANDROID_EDGE_MODE_FAST;
7932 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
7933 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
7934 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
7935 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
7936 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
7937 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
7938 break;
7939 default:
7940 edge_mode = ANDROID_EDGE_MODE_FAST;
7941 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
7942 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
7943 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
7944 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
7945 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
7946 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
7947 break;
7948 }
7949 settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
7950 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
7951 settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
7952 if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
7953 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
7954 }
7955 settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
7956
7957 if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
7958 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
7959 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
7960 else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
7961 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
7962 || ois_disable)
7963 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
7964 settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
7965
7966 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
7967 &gCamCapability[mCameraId]->exposure_compensation_default, 1);
7968
7969 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
7970 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
7971
7972 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
7973 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
7974
7975 static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
7976 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
7977
7978 static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
7979 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
7980
7981 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
7982 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
7983
7984 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
7985 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
7986
7987 static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
7988 settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
7989
7990 /*flash*/
7991 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
7992 settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
7993
7994 static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
7995 settings.update(ANDROID_FLASH_FIRING_POWER,
7996 &flashFiringLevel, 1);
7997
7998 /* lens */
7999 float default_aperture = gCamCapability[mCameraId]->apertures[0];
8000 settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
8001
8002 if (gCamCapability[mCameraId]->filter_densities_count) {
8003 float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
8004 settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
8005 gCamCapability[mCameraId]->filter_densities_count);
8006 }
8007
8008 float default_focal_length = gCamCapability[mCameraId]->focal_length;
8009 settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
8010
8011 if (focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
8012 float default_focus_distance = 0;
8013 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &default_focus_distance, 1);
8014 }
8015
8016 static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
8017 settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
8018
8019 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
8020 settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
8021
8022 static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
8023 settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
8024
8025 /* face detection (default to OFF) */
8026 static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
8027 settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
8028
8029 static const uint8_t histogramMode = ANDROID_STATISTICS_HISTOGRAM_MODE_OFF;
8030 settings.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &histogramMode, 1);
8031
8032 static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
8033 settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
8034
8035 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
8036 settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
8037
8038 static const uint8_t lensShadingMode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
8039 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &lensShadingMode, 1);
8040
8041 static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
8042 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
8043
8044 /* Exposure time(Update the Min Exposure Time)*/
8045 int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
8046 settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
8047
8048 /* frame duration */
8049 static const int64_t default_frame_duration = NSEC_PER_33MSEC;
8050 settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
8051
8052 /* sensitivity */
8053 static const int32_t default_sensitivity = 100;
8054 settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
8055
8056 /*edge mode*/
8057 settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
8058
8059 /*noise reduction mode*/
8060 settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
8061
8062 /*color correction mode*/
8063 static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
8064 settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
8065
8066 /*transform matrix mode*/
8067 settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
8068
8069 int32_t scaler_crop_region[4];
8070 scaler_crop_region[0] = 0;
8071 scaler_crop_region[1] = 0;
8072 scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
8073 scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
8074 settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
8075
8076 static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
8077 settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
8078
8079 /*focus distance*/
8080 float focus_distance = 0.0;
8081 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
8082
8083 /*target fps range: use maximum range for picture, and maximum fixed range for video*/
8084 float max_range = 0.0;
8085 float max_fixed_fps = 0.0;
8086 int32_t fps_range[2] = {0, 0};
8087 for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
8088 i++) {
8089 float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
8090 gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
8091 if (type == CAMERA3_TEMPLATE_PREVIEW ||
8092 type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
8093 type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
8094 if (range > max_range) {
8095 fps_range[0] =
8096 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
8097 fps_range[1] =
8098 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
8099 max_range = range;
8100 }
8101 } else {
8102 if (range < 0.01 && max_fixed_fps <
8103 gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
8104 fps_range[0] =
8105 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
8106 fps_range[1] =
8107 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
8108 max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
8109 }
8110 }
8111 }
8112 settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
8113
8114 /*precapture trigger*/
8115 uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
8116 settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
8117
8118 /*af trigger*/
8119 uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
8120 settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
8121
8122 /* ae & af regions */
8123 int32_t active_region[] = {
8124 gCamCapability[mCameraId]->active_array_size.left,
8125 gCamCapability[mCameraId]->active_array_size.top,
8126 gCamCapability[mCameraId]->active_array_size.left +
8127 gCamCapability[mCameraId]->active_array_size.width,
8128 gCamCapability[mCameraId]->active_array_size.top +
8129 gCamCapability[mCameraId]->active_array_size.height,
8130 0};
8131 settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
8132 sizeof(active_region) / sizeof(active_region[0]));
8133 settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
8134 sizeof(active_region) / sizeof(active_region[0]));
8135
8136 /* black level lock */
8137 uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
8138 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
8139
8140 /* lens shading map mode */
8141 uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
8142 if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
8143 shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
8144 }
8145 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
8146
8147 //special defaults for manual template
8148 if (type == CAMERA3_TEMPLATE_MANUAL) {
8149 static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
8150 settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
8151
8152 static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
8153 settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
8154
8155 static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
8156 settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
8157
8158 static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
8159 settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
8160
8161 static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
8162 settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
8163
8164 static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
8165 settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
8166 }
8167
8168
8169 /* TNR
8170 * We'll use this location to determine which modes TNR will be set.
8171 * We will enable TNR to be on if either of the Preview/Video stream requires TNR
8172 * This is not to be confused with linking on a per stream basis that decision
8173 * is still on per-session basis and will be handled as part of config stream
8174 */
8175 uint8_t tnr_enable = 0;
8176
8177 if (m_bTnrPreview || m_bTnrVideo) {
8178
8179 switch (type) {
8180 case CAMERA3_TEMPLATE_VIDEO_RECORD:
8181 tnr_enable = 1;
8182 break;
8183
8184 default:
8185 tnr_enable = 0;
8186 break;
8187 }
8188
8189 int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
8190 settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
8191 settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
8192
8193 LOGD("TNR:%d with process plate %d for template:%d",
8194 tnr_enable, tnr_process_type, type);
8195 }
8196
8197 //Update Link tags to default
8198 int32_t sync_type = CAM_TYPE_STANDALONE;
8199 settings.update(QCAMERA3_DUALCAM_LINK_ENABLE, &sync_type, 1);
8200
8201 int32_t is_main = 0; //this doesn't matter as app should overwrite
8202 settings.update(QCAMERA3_DUALCAM_LINK_IS_MAIN, &is_main, 1);
8203
8204 settings.update(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID, &is_main, 1);
8205
8206 /* CDS default */
8207 char prop[PROPERTY_VALUE_MAX];
8208 memset(prop, 0, sizeof(prop));
8209 property_get("persist.camera.CDS", prop, "Auto");
8210 cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
8211 cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
8212 if (CAM_CDS_MODE_MAX == cds_mode) {
8213 cds_mode = CAM_CDS_MODE_AUTO;
8214 }
8215
8216 /* Disabling CDS in templates which have TNR enabled*/
8217 if (tnr_enable)
8218 cds_mode = CAM_CDS_MODE_OFF;
8219
8220 int32_t mode = cds_mode;
8221 settings.update(QCAMERA3_CDS_MODE, &mode, 1);
8222 mDefaultMetadata[type] = settings.release();
8223
8224 return mDefaultMetadata[type];
8225}
8226
8227/*===========================================================================
8228 * FUNCTION : setFrameParameters
8229 *
8230 * DESCRIPTION: set parameters per frame as requested in the metadata from
8231 * framework
8232 *
8233 * PARAMETERS :
8234 * @request : request that needs to be serviced
8235 * @streamID : Stream ID of all the requested streams
8236 * @blob_request: Whether this request is a blob request or not
8237 *
8238 * RETURN : success: NO_ERROR
8239 * failure:
8240 *==========================================================================*/
8241int QCamera3HardwareInterface::setFrameParameters(
8242 camera3_capture_request_t *request,
8243 cam_stream_ID_t streamID,
8244 int blob_request,
8245 uint32_t snapshotStreamId)
8246{
8247 /*translate from camera_metadata_t type to parm_type_t*/
8248 int rc = 0;
8249 int32_t hal_version = CAM_HAL_V3;
8250
8251 clear_metadata_buffer(mParameters);
8252 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
8253 LOGE("Failed to set hal version in the parameters");
8254 return BAD_VALUE;
8255 }
8256
8257 /*we need to update the frame number in the parameters*/
8258 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
8259 request->frame_number)) {
8260 LOGE("Failed to set the frame number in the parameters");
8261 return BAD_VALUE;
8262 }
8263
8264 /* Update stream id of all the requested buffers */
8265 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamID)) {
8266 LOGE("Failed to set stream type mask in the parameters");
8267 return BAD_VALUE;
8268 }
8269
8270 if (mUpdateDebugLevel) {
8271 uint32_t dummyDebugLevel = 0;
8272 /* The value of dummyDebugLevel is irrelavent. On
8273 * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, read debug property */
8274 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
8275 dummyDebugLevel)) {
8276 LOGE("Failed to set UPDATE_DEBUG_LEVEL");
8277 return BAD_VALUE;
8278 }
8279 mUpdateDebugLevel = false;
8280 }
8281
8282 if(request->settings != NULL){
8283 rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
8284 if (blob_request)
8285 memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
8286 }
8287
8288 return rc;
8289}
8290
8291/*===========================================================================
8292 * FUNCTION : setReprocParameters
8293 *
8294 * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
8295 * return it.
8296 *
8297 * PARAMETERS :
8298 * @request : request that needs to be serviced
8299 *
8300 * RETURN : success: NO_ERROR
8301 * failure:
8302 *==========================================================================*/
8303int32_t QCamera3HardwareInterface::setReprocParameters(
8304 camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
8305 uint32_t snapshotStreamId)
8306{
8307 /*translate from camera_metadata_t type to parm_type_t*/
8308 int rc = 0;
8309
8310 if (NULL == request->settings){
8311 LOGE("Reprocess settings cannot be NULL");
8312 return BAD_VALUE;
8313 }
8314
8315 if (NULL == reprocParam) {
8316 LOGE("Invalid reprocessing metadata buffer");
8317 return BAD_VALUE;
8318 }
8319 clear_metadata_buffer(reprocParam);
8320
8321 /*we need to update the frame number in the parameters*/
8322 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
8323 request->frame_number)) {
8324 LOGE("Failed to set the frame number in the parameters");
8325 return BAD_VALUE;
8326 }
8327
8328 rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
8329 if (rc < 0) {
8330 LOGE("Failed to translate reproc request");
8331 return rc;
8332 }
8333
8334 CameraMetadata frame_settings;
8335 frame_settings = request->settings;
8336 if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
8337 frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
8338 int32_t *crop_count =
8339 frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
8340 int32_t *crop_data =
8341 frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
8342 int32_t *roi_map =
8343 frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
8344 if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
8345 cam_crop_data_t crop_meta;
8346 memset(&crop_meta, 0, sizeof(cam_crop_data_t));
8347 crop_meta.num_of_streams = 1;
8348 crop_meta.crop_info[0].crop.left = crop_data[0];
8349 crop_meta.crop_info[0].crop.top = crop_data[1];
8350 crop_meta.crop_info[0].crop.width = crop_data[2];
8351 crop_meta.crop_info[0].crop.height = crop_data[3];
8352
8353 crop_meta.crop_info[0].roi_map.left =
8354 roi_map[0];
8355 crop_meta.crop_info[0].roi_map.top =
8356 roi_map[1];
8357 crop_meta.crop_info[0].roi_map.width =
8358 roi_map[2];
8359 crop_meta.crop_info[0].roi_map.height =
8360 roi_map[3];
8361
8362 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
8363 rc = BAD_VALUE;
8364 }
8365 LOGD("Found reprocess crop data for stream %p %dx%d, %dx%d",
8366 request->input_buffer->stream,
8367 crop_meta.crop_info[0].crop.left,
8368 crop_meta.crop_info[0].crop.top,
8369 crop_meta.crop_info[0].crop.width,
8370 crop_meta.crop_info[0].crop.height);
8371 LOGD("Found reprocess roi map data for stream %p %dx%d, %dx%d",
8372 request->input_buffer->stream,
8373 crop_meta.crop_info[0].roi_map.left,
8374 crop_meta.crop_info[0].roi_map.top,
8375 crop_meta.crop_info[0].roi_map.width,
8376 crop_meta.crop_info[0].roi_map.height);
8377 } else {
8378 LOGE("Invalid reprocess crop count %d!", *crop_count);
8379 }
8380 } else {
8381 LOGE("No crop data from matching output stream");
8382 }
8383
8384 /* These settings are not needed for regular requests so handle them specially for
8385 reprocess requests; information needed for EXIF tags */
8386 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
8387 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
8388 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
8389 if (NAME_NOT_FOUND != val) {
8390 uint32_t flashMode = (uint32_t)val;
8391 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
8392 rc = BAD_VALUE;
8393 }
8394 } else {
8395 LOGE("Could not map fwk flash mode %d to correct hal flash mode",
8396 frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
8397 }
8398 } else {
8399 LOGH("No flash mode in reprocess settings");
8400 }
8401
8402 if (frame_settings.exists(ANDROID_FLASH_STATE)) {
8403 int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
8404 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
8405 rc = BAD_VALUE;
8406 }
8407 } else {
8408 LOGH("No flash state in reprocess settings");
8409 }
8410
8411 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS)) {
8412 uint8_t *reprocessFlags =
8413 frame_settings.find(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS).data.u8;
8414 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_REPROCESS_FLAGS,
8415 *reprocessFlags)) {
8416 rc = BAD_VALUE;
8417 }
8418 }
8419
8420 // Add metadata which DDM needs
8421 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_DDM_DATA_BLOB)) {
8422 cam_ddm_info_t *ddm_info =
8423 (cam_ddm_info_t *)frame_settings.find
8424 (QCAMERA3_HAL_PRIVATEDATA_DDM_DATA_BLOB).data.u8;
8425 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_SENSOR,
8426 ddm_info->sensor_crop_info);
8427 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CAMIF,
8428 ddm_info->camif_crop_info);
8429 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_ISP,
8430 ddm_info->isp_crop_info);
8431 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CPP,
8432 ddm_info->cpp_crop_info);
8433 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_FOCAL_LENGTH_RATIO,
8434 ddm_info->af_focal_length_ratio);
8435 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_FLIP,
8436 ddm_info->pipeline_flip);
8437 /* If there is ANDROID_JPEG_ORIENTATION in frame setting,
8438 CAM_INTF_PARM_ROTATION metadata then has been added in
8439 translateToHalMetadata. HAL need to keep this new rotation
8440 metadata. Otherwise, the old rotation info saved in the vendor tag
8441 would be used */
8442 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
8443 CAM_INTF_PARM_ROTATION, reprocParam) {
8444 LOGD("CAM_INTF_PARM_ROTATION metadata is added in translateToHalMetadata");
8445 } else {
8446 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_ROTATION,
8447 ddm_info->rotation_info);
8448 }
8449
8450 }
8451
8452 /* Add additional JPEG cropping information. App add QCAMERA3_JPEG_ENCODE_CROP_RECT
8453 to ask for cropping and use ROI for downscale/upscale during HW JPEG encoding.
8454 roi.width and roi.height would be the final JPEG size.
8455 For now, HAL only checks this for reprocess request */
8456 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ENABLE) &&
8457 frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_RECT)) {
8458 uint8_t *enable =
8459 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ENABLE).data.u8;
8460 if (*enable == TRUE) {
8461 int32_t *crop_data =
8462 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_RECT).data.i32;
8463 cam_stream_crop_info_t crop_meta;
8464 memset(&crop_meta, 0, sizeof(cam_stream_crop_info_t));
8465 crop_meta.stream_id = 0;
8466 crop_meta.crop.left = crop_data[0];
8467 crop_meta.crop.top = crop_data[1];
8468 crop_meta.crop.width = crop_data[2];
8469 crop_meta.crop.height = crop_data[3];
8470 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ROI)) {
8471 int32_t *roi =
8472 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ROI).data.i32;
8473 crop_meta.roi_map.left =
8474 roi[0];
8475 crop_meta.roi_map.top =
8476 roi[1];
8477 crop_meta.roi_map.width =
8478 roi[2];
8479 crop_meta.roi_map.height =
8480 roi[3];
8481 }
8482 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_ENCODE_CROP,
8483 crop_meta);
8484 LOGH("Add JPEG encode crop left %d, top %d, width %d, height %d",
8485 crop_meta.crop.left, crop_meta.crop.top,
8486 crop_meta.crop.width, crop_meta.crop.height);
8487 LOGH("Add JPEG encode crop ROI left %d, top %d, width %d, height %d",
8488 crop_meta.roi_map.left, crop_meta.roi_map.top,
8489 crop_meta.roi_map.width, crop_meta.roi_map.height);
8490 }
8491 }
8492
8493 return rc;
8494}
8495
8496/*===========================================================================
8497 * FUNCTION : saveRequestSettings
8498 *
8499 * DESCRIPTION: Add any settings that might have changed to the request settings
8500 * and save the settings to be applied on the frame
8501 *
8502 * PARAMETERS :
8503 * @jpegMetadata : the extracted and/or modified jpeg metadata
8504 * @request : request with initial settings
8505 *
8506 * RETURN :
8507 * camera_metadata_t* : pointer to the saved request settings
8508 *==========================================================================*/
8509camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
8510 const CameraMetadata &jpegMetadata,
8511 camera3_capture_request_t *request)
8512{
8513 camera_metadata_t *resultMetadata;
8514 CameraMetadata camMetadata;
8515 camMetadata = request->settings;
8516
8517 if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
8518 int32_t thumbnail_size[2];
8519 thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
8520 thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
8521 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
8522 jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
8523 }
8524
8525 if (request->input_buffer != NULL) {
8526 uint8_t reprocessFlags = 1;
8527 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
8528 (uint8_t*)&reprocessFlags,
8529 sizeof(reprocessFlags));
8530 }
8531
8532 resultMetadata = camMetadata.release();
8533 return resultMetadata;
8534}
8535
8536/*===========================================================================
8537 * FUNCTION : setHalFpsRange
8538 *
8539 * DESCRIPTION: set FPS range parameter
8540 *
8541 *
8542 * PARAMETERS :
8543 * @settings : Metadata from framework
8544 * @hal_metadata: Metadata buffer
8545 *
8546 *
 * RETURN     : success: NO_ERROR
 *              failure: BAD_VALUE
8549 *==========================================================================*/
int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
        metadata_buffer_t *hal_metadata)
{
    int32_t rc = NO_ERROR;
    cam_fps_range_t fps_range;
    // NOTE: no exists() check here — the caller (translateToHalMetadata) only
    // invokes this function when ANDROID_CONTROL_AE_TARGET_FPS_RANGE is present.
    fps_range.min_fps = (float)
            settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
    fps_range.max_fps = (float)
            settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
    // Default: sensor/video fps range mirrors the AE target range; this is
    // overridden below for constrained high-speed (HFR) sessions.
    fps_range.video_min_fps = fps_range.min_fps;
    fps_range.video_max_fps = fps_range.max_fps;

    LOGD("aeTargetFpsRange fps: [%f %f]",
            fps_range.min_fps, fps_range.max_fps);
    /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
     * follows:
     * ---------------------------------------------------------------|
     *      Video stream is absent in configure_streams               |
     *    (Camcorder preview before the first video record)           |
     * ---------------------------------------------------------------|
     * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
     *                   |             |             | vid_min/max_fps|
     * ---------------------------------------------------------------|
     *        NO         |  [ 30, 240] |     240     |  [240, 240]    |
     *                   |-------------|-------------|----------------|
     *                   |  [240, 240] |     240     |  [240, 240]    |
     * ---------------------------------------------------------------|
     *     Video stream is present in configure_streams               |
     * ---------------------------------------------------------------|
     * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
     *                   |             |             | vid_min/max_fps|
     * ---------------------------------------------------------------|
     *        NO         |  [ 30, 240] |     240     |  [240, 240]    |
     * (camcorder prev   |-------------|-------------|----------------|
     *  after video rec  |  [240, 240] |     240     |  [240, 240]    |
     *  is stopped)      |             |             |                |
     * ---------------------------------------------------------------|
     *       YES         |  [ 30, 240] |     240     |  [240, 240]    |
     *                   |-------------|-------------|----------------|
     *                   |  [240, 240] |     240     |  [240, 240]    |
     * ---------------------------------------------------------------|
     * When Video stream is absent in configure_streams,
     * preview fps = sensor_fps / batchsize
     * Eg: for 240fps at batchSize 4, preview = 60fps
     *     for 120fps at batchSize 4, preview = 30fps
     *
     * When video stream is present in configure_streams, preview fps is as per
     * the ratio of preview buffers to video buffers requested in process
     * capture request
     */
    // Recomputed on every request; stays 0 unless HFR batching engages below.
    mBatchSize = 0;
    if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
        // HFR runs the sensor at a fixed rate: pin the whole range to max_fps.
        fps_range.min_fps = fps_range.video_max_fps;
        fps_range.video_min_fps = fps_range.video_max_fps;
        // Map the requested max fps to a discrete HFR mode enum; unsupported
        // rates fall through with no HFR entry added to the batch.
        int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
                fps_range.max_fps);
        if (NAME_NOT_FOUND != val) {
            cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
                return BAD_VALUE;
            }

            if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
                /* If batchmode is currently in progress and the fps changes,
                 * set the flag to restart the sensor */
                if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
                        (mHFRVideoFps != fps_range.max_fps)) {
                    mNeedSensorRestart = true;
                }
                mHFRVideoFps = fps_range.max_fps;
                // Batch enough frames per request to bring the effective
                // preview rate down to PREVIEW_FPS_FOR_HFR, capped by HW limit.
                mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
                if (mBatchSize > MAX_HFR_BATCH_SIZE) {
                    mBatchSize = MAX_HFR_BATCH_SIZE;
                }
            }
            LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);

        }
    } else {
        /* HFR mode is session param in backend/ISP. This should be reset when
         * in non-HFR mode  */
        cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
            return BAD_VALUE;
        }
    }
    // The (possibly HFR-adjusted) fps range itself always goes into the batch.
    if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
        return BAD_VALUE;
    }
    LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
            fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
    return rc;
}
8643
8644/*===========================================================================
8645 * FUNCTION : translateToHalMetadata
8646 *
8647 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
8648 *
8649 *
8650 * PARAMETERS :
8651 * @request : request sent from framework
8652 *
8653 *
 * RETURN     : success: NO_ERROR
 *              failure: BAD_VALUE
8656 *==========================================================================*/
8657int QCamera3HardwareInterface::translateToHalMetadata
8658 (const camera3_capture_request_t *request,
8659 metadata_buffer_t *hal_metadata,
8660 uint32_t snapshotStreamId)
8661{
8662 int rc = 0;
8663 CameraMetadata frame_settings;
8664 frame_settings = request->settings;
8665
    /* Do not change the order of the following list unless you know what you are
     * doing.
     * The order is laid out in such a way that parameters in the front of the table
     * may be used to override the parameters later in the table. Examples are:
     * 1. META_MODE should precede AEC/AWB/AF MODE
     * 2. AEC MODE should precede EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
     * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
     * 4. Any mode should precede its corresponding settings
     */
8675 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
8676 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
8677 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
8678 rc = BAD_VALUE;
8679 }
8680 rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
8681 if (rc != NO_ERROR) {
8682 LOGE("extractSceneMode failed");
8683 }
8684 }
8685
8686 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
8687 uint8_t fwk_aeMode =
8688 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
8689 uint8_t aeMode;
8690 int32_t redeye;
8691
8692 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
8693 aeMode = CAM_AE_MODE_OFF;
8694 } else {
8695 aeMode = CAM_AE_MODE_ON;
8696 }
8697 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
8698 redeye = 1;
8699 } else {
8700 redeye = 0;
8701 }
8702
8703 int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
8704 fwk_aeMode);
8705 if (NAME_NOT_FOUND != val) {
8706 int32_t flashMode = (int32_t)val;
8707 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
8708 }
8709
8710 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
8711 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
8712 rc = BAD_VALUE;
8713 }
8714 }
8715
8716 if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
8717 uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
8718 int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
8719 fwk_whiteLevel);
8720 if (NAME_NOT_FOUND != val) {
8721 uint8_t whiteLevel = (uint8_t)val;
8722 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
8723 rc = BAD_VALUE;
8724 }
8725 }
8726 }
8727
8728 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
8729 uint8_t fwk_cacMode =
8730 frame_settings.find(
8731 ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
8732 int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
8733 fwk_cacMode);
8734 if (NAME_NOT_FOUND != val) {
8735 cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
8736 bool entryAvailable = FALSE;
8737 // Check whether Frameworks set CAC mode is supported in device or not
8738 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
8739 if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
8740 entryAvailable = TRUE;
8741 break;
8742 }
8743 }
8744 LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
8745 // If entry not found then set the device supported mode instead of frameworks mode i.e,
8746 // Only HW ISP CAC + NO SW CAC : Advertise all 3 with High doing same as fast by ISP
8747 // NO HW ISP CAC + Only SW CAC : Advertise all 3 with Fast doing the same as OFF
8748 if (entryAvailable == FALSE) {
8749 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
8750 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
8751 } else {
8752 if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
8753 // High is not supported and so set the FAST as spec say's underlying
8754 // device implementation can be the same for both modes.
8755 cacMode = CAM_COLOR_CORRECTION_ABERRATION_FAST;
8756 } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
8757 // Fast is not supported and so we cannot set HIGH or FAST but choose OFF
8758 // in order to avoid the fps drop due to high quality
8759 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
8760 } else {
8761 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
8762 }
8763 }
8764 }
8765 LOGD("Final cacMode is %d", cacMode);
8766 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
8767 rc = BAD_VALUE;
8768 }
8769 } else {
8770 LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
8771 }
8772 }
8773
8774 if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
8775 uint8_t fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
8776 int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
8777 fwk_focusMode);
8778 if (NAME_NOT_FOUND != val) {
8779 uint8_t focusMode = (uint8_t)val;
8780 LOGD("set focus mode %d", focusMode);
8781 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
8782 rc = BAD_VALUE;
8783 }
8784 }
8785 }
8786
8787 if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) {
8788 float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
8789 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
8790 focalDistance)) {
8791 rc = BAD_VALUE;
8792 }
8793 }
8794
8795 if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
8796 uint8_t fwk_antibandingMode =
8797 frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
8798 int val = lookupHalName(ANTIBANDING_MODES_MAP,
8799 METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
8800 if (NAME_NOT_FOUND != val) {
8801 uint32_t hal_antibandingMode = (uint32_t)val;
8802 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
8803 hal_antibandingMode)) {
8804 rc = BAD_VALUE;
8805 }
8806 }
8807 }
8808
8809 if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
8810 int32_t expCompensation = frame_settings.find(
8811 ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
8812 if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
8813 expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
8814 if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
8815 expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
8816 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
8817 expCompensation)) {
8818 rc = BAD_VALUE;
8819 }
8820 }
8821
8822 if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
8823 uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
8824 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
8825 rc = BAD_VALUE;
8826 }
8827 }
8828 if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
8829 rc = setHalFpsRange(frame_settings, hal_metadata);
8830 if (rc != NO_ERROR) {
8831 LOGE("setHalFpsRange failed");
8832 }
8833 }
8834
8835 if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
8836 uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
8837 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
8838 rc = BAD_VALUE;
8839 }
8840 }
8841
8842 if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
8843 uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
8844 int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
8845 fwk_effectMode);
8846 if (NAME_NOT_FOUND != val) {
8847 uint8_t effectMode = (uint8_t)val;
8848 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
8849 rc = BAD_VALUE;
8850 }
8851 }
8852 }
8853
8854 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
8855 uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
8856 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
8857 colorCorrectMode)) {
8858 rc = BAD_VALUE;
8859 }
8860 }
8861
8862 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
8863 cam_color_correct_gains_t colorCorrectGains;
8864 for (size_t i = 0; i < CC_GAIN_MAX; i++) {
8865 colorCorrectGains.gains[i] =
8866 frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
8867 }
8868 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
8869 colorCorrectGains)) {
8870 rc = BAD_VALUE;
8871 }
8872 }
8873
8874 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
8875 cam_color_correct_matrix_t colorCorrectTransform;
8876 cam_rational_type_t transform_elem;
8877 size_t num = 0;
8878 for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
8879 for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
8880 transform_elem.numerator =
8881 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
8882 transform_elem.denominator =
8883 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
8884 colorCorrectTransform.transform_matrix[i][j] = transform_elem;
8885 num++;
8886 }
8887 }
8888 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
8889 colorCorrectTransform)) {
8890 rc = BAD_VALUE;
8891 }
8892 }
8893
8894 cam_trigger_t aecTrigger;
8895 aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
8896 aecTrigger.trigger_id = -1;
8897 if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
8898 frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
8899 aecTrigger.trigger =
8900 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
8901 aecTrigger.trigger_id =
8902 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
8903 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
8904 aecTrigger)) {
8905 rc = BAD_VALUE;
8906 }
8907 LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
8908 aecTrigger.trigger, aecTrigger.trigger_id);
8909 }
8910
8911 /*af_trigger must come with a trigger id*/
8912 if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
8913 frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
8914 cam_trigger_t af_trigger;
8915 af_trigger.trigger =
8916 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
8917 af_trigger.trigger_id =
8918 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
8919 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
8920 rc = BAD_VALUE;
8921 }
8922 LOGD("AfTrigger: %d AfTriggerID: %d",
8923 af_trigger.trigger, af_trigger.trigger_id);
8924 }
8925
8926 if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
8927 int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
8928 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
8929 rc = BAD_VALUE;
8930 }
8931 }
8932 if (frame_settings.exists(ANDROID_EDGE_MODE)) {
8933 cam_edge_application_t edge_application;
8934 edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
8935 if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
8936 edge_application.sharpness = 0;
8937 } else {
8938 edge_application.sharpness = gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
8939 }
8940 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
8941 rc = BAD_VALUE;
8942 }
8943 }
8944
8945 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
8946 int32_t respectFlashMode = 1;
8947 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
8948 uint8_t fwk_aeMode =
8949 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
8950 if (fwk_aeMode > ANDROID_CONTROL_AE_MODE_ON) {
8951 respectFlashMode = 0;
8952 LOGH("AE Mode controls flash, ignore android.flash.mode");
8953 }
8954 }
8955 if (respectFlashMode) {
8956 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
8957 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
8958 LOGH("flash mode after mapping %d", val);
8959 // To check: CAM_INTF_META_FLASH_MODE usage
8960 if (NAME_NOT_FOUND != val) {
8961 uint8_t flashMode = (uint8_t)val;
8962 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
8963 rc = BAD_VALUE;
8964 }
8965 }
8966 }
8967 }
8968
8969 if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
8970 uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
8971 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
8972 rc = BAD_VALUE;
8973 }
8974 }
8975
8976 if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
8977 int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
8978 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
8979 flashFiringTime)) {
8980 rc = BAD_VALUE;
8981 }
8982 }
8983
8984 if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
8985 uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
8986 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
8987 hotPixelMode)) {
8988 rc = BAD_VALUE;
8989 }
8990 }
8991
8992 if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
8993 float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
8994 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
8995 lensAperture)) {
8996 rc = BAD_VALUE;
8997 }
8998 }
8999
9000 if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
9001 float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
9002 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
9003 filterDensity)) {
9004 rc = BAD_VALUE;
9005 }
9006 }
9007
9008 if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
9009 float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
9010 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
9011 focalLength)) {
9012 rc = BAD_VALUE;
9013 }
9014 }
9015
9016 if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
9017 uint8_t optStabMode =
9018 frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
9019 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
9020 optStabMode)) {
9021 rc = BAD_VALUE;
9022 }
9023 }
9024
9025 if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
9026 uint8_t videoStabMode =
9027 frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
9028 LOGD("videoStabMode from APP = %d", videoStabMode);
9029 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_VIDEO_STAB_MODE,
9030 videoStabMode)) {
9031 rc = BAD_VALUE;
9032 }
9033 }
9034
9035
9036 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
9037 uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
9038 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
9039 noiseRedMode)) {
9040 rc = BAD_VALUE;
9041 }
9042 }
9043
9044 if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
9045 float reprocessEffectiveExposureFactor =
9046 frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
9047 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
9048 reprocessEffectiveExposureFactor)) {
9049 rc = BAD_VALUE;
9050 }
9051 }
9052
9053 cam_crop_region_t scalerCropRegion;
9054 bool scalerCropSet = false;
9055 if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
9056 scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
9057 scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
9058 scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
9059 scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
9060
9061 // Map coordinate system from active array to sensor output.
9062 mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
9063 scalerCropRegion.width, scalerCropRegion.height);
9064
9065 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
9066 scalerCropRegion)) {
9067 rc = BAD_VALUE;
9068 }
9069 scalerCropSet = true;
9070 }
9071
9072 if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
9073 int64_t sensorExpTime =
9074 frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
9075 LOGD("setting sensorExpTime %lld", sensorExpTime);
9076 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
9077 sensorExpTime)) {
9078 rc = BAD_VALUE;
9079 }
9080 }
9081
9082 if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
9083 int64_t sensorFrameDuration =
9084 frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
9085 int64_t minFrameDuration = getMinFrameDuration(request);
9086 sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
9087 if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
9088 sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
9089 LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
9090 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
9091 sensorFrameDuration)) {
9092 rc = BAD_VALUE;
9093 }
9094 }
9095
9096 if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
9097 int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
9098 if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
9099 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
9100 if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
9101 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
9102 LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
9103 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
9104 sensorSensitivity)) {
9105 rc = BAD_VALUE;
9106 }
9107 }
9108
9109 if (frame_settings.exists(ANDROID_SHADING_MODE)) {
9110 uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
9111 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
9112 rc = BAD_VALUE;
9113 }
9114 }
9115
9116 if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
9117 uint8_t fwk_facedetectMode =
9118 frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
9119
9120 int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
9121 fwk_facedetectMode);
9122
9123 if (NAME_NOT_FOUND != val) {
9124 uint8_t facedetectMode = (uint8_t)val;
9125 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
9126 facedetectMode)) {
9127 rc = BAD_VALUE;
9128 }
9129 }
9130 }
9131
9132 if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) {
9133 uint8_t histogramMode =
9134 frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0];
9135 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
9136 histogramMode)) {
9137 rc = BAD_VALUE;
9138 }
9139 }
9140
9141 if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
9142 uint8_t sharpnessMapMode =
9143 frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
9144 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
9145 sharpnessMapMode)) {
9146 rc = BAD_VALUE;
9147 }
9148 }
9149
9150 if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
9151 uint8_t tonemapMode =
9152 frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
9153 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
9154 rc = BAD_VALUE;
9155 }
9156 }
9157 /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
9158 /*All tonemap channels will have the same number of points*/
9159 if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
9160 frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
9161 frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
9162 cam_rgb_tonemap_curves tonemapCurves;
9163 tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
9164 if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
9165 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
9166 tonemapCurves.tonemap_points_cnt,
9167 CAM_MAX_TONEMAP_CURVE_SIZE);
9168 tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
9169 }
9170
9171 /* ch0 = G*/
9172 size_t point = 0;
9173 cam_tonemap_curve_t tonemapCurveGreen;
9174 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
9175 for (size_t j = 0; j < 2; j++) {
9176 tonemapCurveGreen.tonemap_points[i][j] =
9177 frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
9178 point++;
9179 }
9180 }
9181 tonemapCurves.curves[0] = tonemapCurveGreen;
9182
9183 /* ch 1 = B */
9184 point = 0;
9185 cam_tonemap_curve_t tonemapCurveBlue;
9186 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
9187 for (size_t j = 0; j < 2; j++) {
9188 tonemapCurveBlue.tonemap_points[i][j] =
9189 frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
9190 point++;
9191 }
9192 }
9193 tonemapCurves.curves[1] = tonemapCurveBlue;
9194
9195 /* ch 2 = R */
9196 point = 0;
9197 cam_tonemap_curve_t tonemapCurveRed;
9198 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
9199 for (size_t j = 0; j < 2; j++) {
9200 tonemapCurveRed.tonemap_points[i][j] =
9201 frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
9202 point++;
9203 }
9204 }
9205 tonemapCurves.curves[2] = tonemapCurveRed;
9206
9207 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
9208 tonemapCurves)) {
9209 rc = BAD_VALUE;
9210 }
9211 }
9212
9213 if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
9214 uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
9215 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
9216 captureIntent)) {
9217 rc = BAD_VALUE;
9218 }
9219 }
9220
9221 if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
9222 uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
9223 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
9224 blackLevelLock)) {
9225 rc = BAD_VALUE;
9226 }
9227 }
9228
9229 if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
9230 uint8_t lensShadingMapMode =
9231 frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
9232 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
9233 lensShadingMapMode)) {
9234 rc = BAD_VALUE;
9235 }
9236 }
9237
9238 if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
9239 cam_area_t roi;
9240 bool reset = true;
9241 convertFromRegions(roi, request->settings, ANDROID_CONTROL_AE_REGIONS);
9242
9243 // Map coordinate system from active array to sensor output.
9244 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
9245 roi.rect.height);
9246
9247 if (scalerCropSet) {
9248 reset = resetIfNeededROI(&roi, &scalerCropRegion);
9249 }
9250 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
9251 rc = BAD_VALUE;
9252 }
9253 }
9254
9255 if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
9256 cam_area_t roi;
9257 bool reset = true;
9258 convertFromRegions(roi, request->settings, ANDROID_CONTROL_AF_REGIONS);
9259
9260 // Map coordinate system from active array to sensor output.
9261 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
9262 roi.rect.height);
9263
9264 if (scalerCropSet) {
9265 reset = resetIfNeededROI(&roi, &scalerCropRegion);
9266 }
9267 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
9268 rc = BAD_VALUE;
9269 }
9270 }
9271
9272 // CDS for non-HFR non-video mode
9273 if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
9274 !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
9275 int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
9276 if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
9277 LOGE("Invalid CDS mode %d!", *fwk_cds);
9278 } else {
9279 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
9280 CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
9281 rc = BAD_VALUE;
9282 }
9283 }
9284 }
9285
9286 // TNR
9287 if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
9288 frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
9289 uint8_t b_TnrRequested = 0;
9290 cam_denoise_param_t tnr;
9291 tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
9292 tnr.process_plates =
9293 (cam_denoise_process_type_t)frame_settings.find(
9294 QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
9295 b_TnrRequested = tnr.denoise_enable;
9296 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
9297 rc = BAD_VALUE;
9298 }
9299 }
9300
9301 if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
9302 int32_t fwk_testPatternMode =
9303 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
9304 int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
9305 METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
9306
9307 if (NAME_NOT_FOUND != testPatternMode) {
9308 cam_test_pattern_data_t testPatternData;
9309 memset(&testPatternData, 0, sizeof(testPatternData));
9310 testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
9311 if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
9312 frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
9313 int32_t *fwk_testPatternData =
9314 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
9315 testPatternData.r = fwk_testPatternData[0];
9316 testPatternData.b = fwk_testPatternData[3];
9317 switch (gCamCapability[mCameraId]->color_arrangement) {
9318 case CAM_FILTER_ARRANGEMENT_RGGB:
9319 case CAM_FILTER_ARRANGEMENT_GRBG:
9320 testPatternData.gr = fwk_testPatternData[1];
9321 testPatternData.gb = fwk_testPatternData[2];
9322 break;
9323 case CAM_FILTER_ARRANGEMENT_GBRG:
9324 case CAM_FILTER_ARRANGEMENT_BGGR:
9325 testPatternData.gr = fwk_testPatternData[2];
9326 testPatternData.gb = fwk_testPatternData[1];
9327 break;
9328 default:
9329 LOGE("color arrangement %d is not supported",
9330 gCamCapability[mCameraId]->color_arrangement);
9331 break;
9332 }
9333 }
9334 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
9335 testPatternData)) {
9336 rc = BAD_VALUE;
9337 }
9338 } else {
9339 LOGE("Invalid framework sensor test pattern mode %d",
9340 fwk_testPatternMode);
9341 }
9342 }
9343
9344 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
9345 size_t count = 0;
9346 camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
9347 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
9348 gps_coords.data.d, gps_coords.count, count);
9349 if (gps_coords.count != count) {
9350 rc = BAD_VALUE;
9351 }
9352 }
9353
9354 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
9355 char gps_methods[GPS_PROCESSING_METHOD_SIZE];
9356 size_t count = 0;
9357 const char *gps_methods_src = (const char *)
9358 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
9359 memset(gps_methods, '\0', sizeof(gps_methods));
9360 strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
9361 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
9362 gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
9363 if (GPS_PROCESSING_METHOD_SIZE != count) {
9364 rc = BAD_VALUE;
9365 }
9366 }
9367
9368 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
9369 int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
9370 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
9371 gps_timestamp)) {
9372 rc = BAD_VALUE;
9373 }
9374 }
9375
9376 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
9377 int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
9378 cam_rotation_info_t rotation_info;
9379 if (orientation == 0) {
9380 rotation_info.rotation = ROTATE_0;
9381 } else if (orientation == 90) {
9382 rotation_info.rotation = ROTATE_90;
9383 } else if (orientation == 180) {
9384 rotation_info.rotation = ROTATE_180;
9385 } else if (orientation == 270) {
9386 rotation_info.rotation = ROTATE_270;
9387 }
9388 rotation_info.streamId = snapshotStreamId;
9389 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
9390 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
9391 rc = BAD_VALUE;
9392 }
9393 }
9394
9395 if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
9396 uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
9397 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
9398 rc = BAD_VALUE;
9399 }
9400 }
9401
9402 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
9403 uint32_t thumb_quality = (uint32_t)
9404 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
9405 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
9406 thumb_quality)) {
9407 rc = BAD_VALUE;
9408 }
9409 }
9410
9411 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
9412 cam_dimension_t dim;
9413 dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
9414 dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
9415 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
9416 rc = BAD_VALUE;
9417 }
9418 }
9419
9420 // Internal metadata
9421 if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
9422 size_t count = 0;
9423 camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
9424 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
9425 privatedata.data.i32, privatedata.count, count);
9426 if (privatedata.count != count) {
9427 rc = BAD_VALUE;
9428 }
9429 }
9430
9431 if (m_debug_avtimer || frame_settings.exists(QCAMERA3_USE_AV_TIMER)) {
9432 uint8_t* use_av_timer = NULL;
9433
9434 if (m_debug_avtimer){
9435 use_av_timer = &m_debug_avtimer;
9436 }
9437 else{
9438 use_av_timer =
9439 frame_settings.find(QCAMERA3_USE_AV_TIMER).data.u8;
9440 }
9441
9442 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
9443 rc = BAD_VALUE;
9444 }
9445 }
9446
9447 // EV step
9448 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
9449 gCamCapability[mCameraId]->exp_compensation_step)) {
9450 rc = BAD_VALUE;
9451 }
9452
9453 // CDS info
9454 if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
9455 cam_cds_data_t *cdsData = (cam_cds_data_t *)
9456 frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
9457
9458 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
9459 CAM_INTF_META_CDS_DATA, *cdsData)) {
9460 rc = BAD_VALUE;
9461 }
9462 }
9463
9464 return rc;
9465}
9466
9467/*===========================================================================
9468 * FUNCTION : captureResultCb
9469 *
9470 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
9471 *
9472 * PARAMETERS :
9473 * @frame : frame information from mm-camera-interface
9474 * @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
9475 * @userdata: userdata
9476 *
9477 * RETURN : NONE
9478 *==========================================================================*/
9479void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
9480 camera3_stream_buffer_t *buffer,
9481 uint32_t frame_number, bool isInputBuffer, void *userdata)
9482{
9483 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
9484 if (hw == NULL) {
9485 LOGE("Invalid hw %p", hw);
9486 return;
9487 }
9488
9489 hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
9490 return;
9491}
9492
9493
9494/*===========================================================================
9495 * FUNCTION : initialize
9496 *
9497 * DESCRIPTION: Pass framework callback pointers to HAL
9498 *
9499 * PARAMETERS :
9500 *
9501 *
9502 * RETURN : Success : 0
9503 * Failure: -ENODEV
9504 *==========================================================================*/
9505
9506int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
9507 const camera3_callback_ops_t *callback_ops)
9508{
9509 LOGD("E");
9510 QCamera3HardwareInterface *hw =
9511 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
9512 if (!hw) {
9513 LOGE("NULL camera device");
9514 return -ENODEV;
9515 }
9516
9517 int rc = hw->initialize(callback_ops);
9518 LOGD("X");
9519 return rc;
9520}
9521
9522/*===========================================================================
9523 * FUNCTION : configure_streams
9524 *
9525 * DESCRIPTION:
9526 *
9527 * PARAMETERS :
9528 *
9529 *
9530 * RETURN : Success: 0
9531 * Failure: -EINVAL (if stream configuration is invalid)
9532 * -ENODEV (fatal error)
9533 *==========================================================================*/
9534
9535int QCamera3HardwareInterface::configure_streams(
9536 const struct camera3_device *device,
9537 camera3_stream_configuration_t *stream_list)
9538{
9539 LOGD("E");
9540 QCamera3HardwareInterface *hw =
9541 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
9542 if (!hw) {
9543 LOGE("NULL camera device");
9544 return -ENODEV;
9545 }
9546 int rc = hw->configureStreams(stream_list);
9547 LOGD("X");
9548 return rc;
9549}
9550
9551/*===========================================================================
9552 * FUNCTION : construct_default_request_settings
9553 *
9554 * DESCRIPTION: Configure a settings buffer to meet the required use case
9555 *
9556 * PARAMETERS :
9557 *
9558 *
9559 * RETURN : Success: Return valid metadata
9560 * Failure: Return NULL
9561 *==========================================================================*/
9562const camera_metadata_t* QCamera3HardwareInterface::
9563 construct_default_request_settings(const struct camera3_device *device,
9564 int type)
9565{
9566
9567 LOGD("E");
9568 camera_metadata_t* fwk_metadata = NULL;
9569 QCamera3HardwareInterface *hw =
9570 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
9571 if (!hw) {
9572 LOGE("NULL camera device");
9573 return NULL;
9574 }
9575
9576 fwk_metadata = hw->translateCapabilityToMetadata(type);
9577
9578 LOGD("X");
9579 return fwk_metadata;
9580}
9581
9582/*===========================================================================
9583 * FUNCTION : process_capture_request
9584 *
9585 * DESCRIPTION:
9586 *
9587 * PARAMETERS :
9588 *
9589 *
9590 * RETURN :
9591 *==========================================================================*/
9592int QCamera3HardwareInterface::process_capture_request(
9593 const struct camera3_device *device,
9594 camera3_capture_request_t *request)
9595{
9596 LOGD("E");
9597 QCamera3HardwareInterface *hw =
9598 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
9599 if (!hw) {
9600 LOGE("NULL camera device");
9601 return -EINVAL;
9602 }
9603
9604 int rc = hw->processCaptureRequest(request);
9605 LOGD("X");
9606 return rc;
9607}
9608
9609/*===========================================================================
9610 * FUNCTION : dump
9611 *
9612 * DESCRIPTION:
9613 *
9614 * PARAMETERS :
9615 *
9616 *
9617 * RETURN :
9618 *==========================================================================*/
9619
9620void QCamera3HardwareInterface::dump(
9621 const struct camera3_device *device, int fd)
9622{
9623 /* Log level property is read when "adb shell dumpsys media.camera" is
9624 called so that the log level can be controlled without restarting
9625 the media server */
9626 getLogLevel();
9627
9628 LOGD("E");
9629 QCamera3HardwareInterface *hw =
9630 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
9631 if (!hw) {
9632 LOGE("NULL camera device");
9633 return;
9634 }
9635
9636 hw->dump(fd);
9637 LOGD("X");
9638 return;
9639}
9640
9641/*===========================================================================
9642 * FUNCTION : flush
9643 *
9644 * DESCRIPTION:
9645 *
9646 * PARAMETERS :
9647 *
9648 *
9649 * RETURN :
9650 *==========================================================================*/
9651
int QCamera3HardwareInterface::flush(
                const struct camera3_device *device)
{
    int rc;
    LOGD("E");
    QCamera3HardwareInterface *hw =
        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
    if (!hw) {
        LOGE("NULL camera device");
        return -EINVAL;
    }

    // mMutex guards only the state inspection; the flush itself runs
    // unlocked so in-flight callbacks can complete.
    pthread_mutex_lock(&hw->mMutex);
    // Validate current state
    switch (hw->mState) {
        case STARTED:
            /* valid state */
            break;

        case ERROR:
            // Device already in error: unlock first, then delegate to the
            // device-error handler and report a fatal error to the framework.
            pthread_mutex_unlock(&hw->mMutex);
            hw->handleCameraDeviceError();
            return -ENODEV;

        default:
            // Flush is a no-op unless streaming has actually started.
            LOGI("Flush returned during state %d", hw->mState);
            pthread_mutex_unlock(&hw->mMutex);
            return 0;
    }
    pthread_mutex_unlock(&hw->mMutex);

    // Stop and restart channels, returning errors for pending requests.
    rc = hw->flush(true /* restart channels */ );
    LOGD("X");
    return rc;
}
9687
9688/*===========================================================================
9689 * FUNCTION : close_camera_device
9690 *
9691 * DESCRIPTION:
9692 *
9693 * PARAMETERS :
9694 *
9695 *
9696 * RETURN :
9697 *==========================================================================*/
9698int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
9699{
9700 int ret = NO_ERROR;
9701 QCamera3HardwareInterface *hw =
9702 reinterpret_cast<QCamera3HardwareInterface *>(
9703 reinterpret_cast<camera3_device_t *>(device)->priv);
9704 if (!hw) {
9705 LOGE("NULL camera device");
9706 return BAD_VALUE;
9707 }
9708
9709 LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
9710 delete hw;
9711 LOGI("[KPI Perf]: X");
9712 return ret;
9713}
9714
9715/*===========================================================================
9716 * FUNCTION : getWaveletDenoiseProcessPlate
9717 *
9718 * DESCRIPTION: query wavelet denoise process plate
9719 *
9720 * PARAMETERS : None
9721 *
9722 * RETURN : WNR prcocess plate value
9723 *==========================================================================*/
9724cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
9725{
9726 char prop[PROPERTY_VALUE_MAX];
9727 memset(prop, 0, sizeof(prop));
9728 property_get("persist.denoise.process.plates", prop, "0");
9729 int processPlate = atoi(prop);
9730 switch(processPlate) {
9731 case 0:
9732 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
9733 case 1:
9734 return CAM_WAVELET_DENOISE_CBCR_ONLY;
9735 case 2:
9736 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
9737 case 3:
9738 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
9739 default:
9740 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
9741 }
9742}
9743
9744
9745/*===========================================================================
9746 * FUNCTION : getTemporalDenoiseProcessPlate
9747 *
9748 * DESCRIPTION: query temporal denoise process plate
9749 *
9750 * PARAMETERS : None
9751 *
9752 * RETURN : TNR prcocess plate value
9753 *==========================================================================*/
9754cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
9755{
9756 char prop[PROPERTY_VALUE_MAX];
9757 memset(prop, 0, sizeof(prop));
9758 property_get("persist.tnr.process.plates", prop, "0");
9759 int processPlate = atoi(prop);
9760 switch(processPlate) {
9761 case 0:
9762 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
9763 case 1:
9764 return CAM_WAVELET_DENOISE_CBCR_ONLY;
9765 case 2:
9766 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
9767 case 3:
9768 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
9769 default:
9770 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
9771 }
9772}
9773
9774
9775/*===========================================================================
9776 * FUNCTION : extractSceneMode
9777 *
9778 * DESCRIPTION: Extract scene mode from frameworks set metadata
9779 *
9780 * PARAMETERS :
9781 * @frame_settings: CameraMetadata reference
9782 * @metaMode: ANDROID_CONTORL_MODE
9783 * @hal_metadata: hal metadata structure
9784 *
9785 * RETURN : None
9786 *==========================================================================*/
9787int32_t QCamera3HardwareInterface::extractSceneMode(
9788 const CameraMetadata &frame_settings, uint8_t metaMode,
9789 metadata_buffer_t *hal_metadata)
9790{
9791 int32_t rc = NO_ERROR;
9792
9793 if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
9794 camera_metadata_ro_entry entry =
9795 frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
9796 if (0 == entry.count)
9797 return rc;
9798
9799 uint8_t fwk_sceneMode = entry.data.u8[0];
9800
9801 int val = lookupHalName(SCENE_MODES_MAP,
9802 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
9803 fwk_sceneMode);
9804 if (NAME_NOT_FOUND != val) {
9805 uint8_t sceneMode = (uint8_t)val;
9806 LOGD("sceneMode: %d", sceneMode);
9807 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
9808 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
9809 rc = BAD_VALUE;
9810 }
9811 }
9812 } else if ((ANDROID_CONTROL_MODE_OFF == metaMode) ||
9813 (ANDROID_CONTROL_MODE_AUTO == metaMode)) {
9814 uint8_t sceneMode = CAM_SCENE_MODE_OFF;
9815 LOGD("sceneMode: %d", sceneMode);
9816 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
9817 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
9818 rc = BAD_VALUE;
9819 }
9820 }
9821 return rc;
9822}
9823
9824/*===========================================================================
9825 * FUNCTION : needRotationReprocess
9826 *
9827 * DESCRIPTION: if rotation needs to be done by reprocess in pp
9828 *
9829 * PARAMETERS : none
9830 *
9831 * RETURN : true: needed
9832 * false: no need
9833 *==========================================================================*/
9834bool QCamera3HardwareInterface::needRotationReprocess()
9835{
9836 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
9837 // current rotation is not zero, and pp has the capability to process rotation
9838 LOGH("need do reprocess for rotation");
9839 return true;
9840 }
9841
9842 return false;
9843}
9844
9845/*===========================================================================
9846 * FUNCTION : needReprocess
9847 *
9848 * DESCRIPTION: if reprocess in needed
9849 *
9850 * PARAMETERS : none
9851 *
9852 * RETURN : true: needed
9853 * false: no need
9854 *==========================================================================*/
9855bool QCamera3HardwareInterface::needReprocess(cam_feature_mask_t postprocess_mask)
9856{
9857 if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
9858 // TODO: add for ZSL HDR later
9859 // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
9860 if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
9861 LOGH("need do reprocess for ZSL WNR or min PP reprocess");
9862 return true;
9863 } else {
9864 LOGH("already post processed frame");
9865 return false;
9866 }
9867 }
9868 return needRotationReprocess();
9869}
9870
9871/*===========================================================================
9872 * FUNCTION : needJpegExifRotation
9873 *
9874 * DESCRIPTION: if rotation from jpeg is needed
9875 *
9876 * PARAMETERS : none
9877 *
9878 * RETURN : true: needed
9879 * false: no need
9880 *==========================================================================*/
9881bool QCamera3HardwareInterface::needJpegExifRotation()
9882{
9883 /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
9884 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
9885 LOGD("Need use Jpeg EXIF Rotation");
9886 return true;
9887 }
9888 return false;
9889}
9890
9891/*===========================================================================
9892 * FUNCTION : addOfflineReprocChannel
9893 *
9894 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
9895 * coming from input channel
9896 *
9897 * PARAMETERS :
9898 * @config : reprocess configuration
9899 * @inputChHandle : pointer to the input (source) channel
9900 *
9901 *
9902 * RETURN : Ptr to the newly created channel obj. NULL if failed.
9903 *==========================================================================*/
9904QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
9905 const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
9906{
9907 int32_t rc = NO_ERROR;
9908 QCamera3ReprocessChannel *pChannel = NULL;
9909
9910 pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
9911 mChannelHandle, mCameraHandle->ops, captureResultCb, config.padding,
9912 CAM_QCOM_FEATURE_NONE, this, inputChHandle);
9913 if (NULL == pChannel) {
9914 LOGE("no mem for reprocess channel");
9915 return NULL;
9916 }
9917
9918 rc = pChannel->initialize(IS_TYPE_NONE);
9919 if (rc != NO_ERROR) {
9920 LOGE("init reprocess channel failed, ret = %d", rc);
9921 delete pChannel;
9922 return NULL;
9923 }
9924
9925 // pp feature config
9926 cam_pp_feature_config_t pp_config;
9927 memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
9928
9929 pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
9930 if (gCamCapability[mCameraId]->qcom_supported_feature_mask
9931 & CAM_QCOM_FEATURE_DSDN) {
9932 //Use CPP CDS incase h/w supports it.
9933 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
9934 pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
9935 }
9936 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
9937 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
9938 }
9939
9940 rc = pChannel->addReprocStreamsFromSource(pp_config,
9941 config,
9942 IS_TYPE_NONE,
9943 mMetadataChannel);
9944
9945 if (rc != NO_ERROR) {
9946 delete pChannel;
9947 return NULL;
9948 }
9949 return pChannel;
9950}
9951
9952/*===========================================================================
9953 * FUNCTION : getMobicatMask
9954 *
9955 * DESCRIPTION: returns mobicat mask
9956 *
9957 * PARAMETERS : none
9958 *
9959 * RETURN : mobicat mask
9960 *
9961 *==========================================================================*/
uint8_t QCamera3HardwareInterface::getMobicatMask()
{
    // Cached value written by setMobicat() from persist.camera.mobicat.
    return m_MobicatMask;
}
9966
9967/*===========================================================================
9968 * FUNCTION : setMobicat
9969 *
9970 * DESCRIPTION: set Mobicat on/off.
9971 *
9972 * PARAMETERS :
9973 * @params : none
9974 *
9975 * RETURN : int32_t type of status
9976 * NO_ERROR -- success
9977 * none-zero failure code
9978 *==========================================================================*/
9979int32_t QCamera3HardwareInterface::setMobicat()
9980{
9981 char value [PROPERTY_VALUE_MAX];
9982 property_get("persist.camera.mobicat", value, "0");
9983 int32_t ret = NO_ERROR;
9984 uint8_t enableMobi = (uint8_t)atoi(value);
9985
9986 if (enableMobi) {
9987 tune_cmd_t tune_cmd;
9988 tune_cmd.type = SET_RELOAD_CHROMATIX;
9989 tune_cmd.module = MODULE_ALL;
9990 tune_cmd.value = TRUE;
9991 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
9992 CAM_INTF_PARM_SET_VFE_COMMAND,
9993 tune_cmd);
9994
9995 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
9996 CAM_INTF_PARM_SET_PP_COMMAND,
9997 tune_cmd);
9998 }
9999 m_MobicatMask = enableMobi;
10000
10001 return ret;
10002}
10003
10004/*===========================================================================
10005* FUNCTION : getLogLevel
10006*
10007* DESCRIPTION: Reads the log level property into a variable
10008*
10009* PARAMETERS :
10010* None
10011*
10012* RETURN :
10013* None
10014*==========================================================================*/
10015void QCamera3HardwareInterface::getLogLevel()
10016{
10017 char prop[PROPERTY_VALUE_MAX];
10018 uint32_t globalLogLevel = 0;
10019
10020 property_get("persist.camera.hal.debug", prop, "0");
10021 int val = atoi(prop);
10022 if (0 <= val) {
10023 gCamHal3LogLevel = (uint32_t)val;
10024 }
10025
10026 property_get("persist.camera.kpi.debug", prop, "1");
10027 gKpiDebugLevel = atoi(prop);
10028
10029 property_get("persist.camera.global.debug", prop, "0");
10030 val = atoi(prop);
10031 if (0 <= val) {
10032 globalLogLevel = (uint32_t)val;
10033 }
10034
10035 /* Highest log level among hal.logs and global.logs is selected */
10036 if (gCamHal3LogLevel < globalLogLevel)
10037 gCamHal3LogLevel = globalLogLevel;
10038
10039 return;
10040}
10041
10042/*===========================================================================
10043 * FUNCTION : validateStreamRotations
10044 *
10045 * DESCRIPTION: Check if the rotations requested are supported
10046 *
10047 * PARAMETERS :
10048 * @stream_list : streams to be configured
10049 *
10050 * RETURN : NO_ERROR on success
10051 * -EINVAL on failure
10052 *
10053 *==========================================================================*/
10054int QCamera3HardwareInterface::validateStreamRotations(
10055 camera3_stream_configuration_t *streamList)
10056{
10057 int rc = NO_ERROR;
10058
10059 /*
10060 * Loop through all streams requested in configuration
10061 * Check if unsupported rotations have been requested on any of them
10062 */
10063 for (size_t j = 0; j < streamList->num_streams; j++){
10064 camera3_stream_t *newStream = streamList->streams[j];
10065
10066 bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
10067 bool isImplDef = (newStream->format ==
10068 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
10069 bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
10070 isImplDef);
10071
10072 if (isRotated && (!isImplDef || isZsl)) {
10073 LOGE("Error: Unsupported rotation of %d requested for stream"
10074 "type:%d and stream format:%d",
10075 newStream->rotation, newStream->stream_type,
10076 newStream->format);
10077 rc = -EINVAL;
10078 break;
10079 }
10080 }
10081
10082 return rc;
10083}
10084
10085/*===========================================================================
10086* FUNCTION : getFlashInfo
10087*
10088* DESCRIPTION: Retrieve information about whether the device has a flash.
10089*
10090* PARAMETERS :
10091* @cameraId : Camera id to query
10092* @hasFlash : Boolean indicating whether there is a flash device
10093* associated with given camera
10094* @flashNode : If a flash device exists, this will be its device node.
10095*
10096* RETURN :
10097* None
10098*==========================================================================*/
10099void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
10100 bool& hasFlash,
10101 char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
10102{
10103 cam_capability_t* camCapability = gCamCapability[cameraId];
10104 if (NULL == camCapability) {
10105 hasFlash = false;
10106 flashNode[0] = '\0';
10107 } else {
10108 hasFlash = camCapability->flash_available;
10109 strlcpy(flashNode,
10110 (char*)camCapability->flash_dev_name,
10111 QCAMERA_MAX_FILEPATH_LENGTH);
10112 }
10113}
10114
10115/*===========================================================================
10116* FUNCTION : getEepromVersionInfo
10117*
10118* DESCRIPTION: Retrieve version info of the sensor EEPROM data
10119*
10120* PARAMETERS : None
10121*
10122* RETURN : string describing EEPROM version
10123* "\0" if no such info available
10124*==========================================================================*/
const char *QCamera3HardwareInterface::getEepromVersionInfo()
{
    // Pointer into the static capability table; the string is "\0" when the
    // sensor provides no EEPROM version info.
    return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
}
10129
10130/*===========================================================================
10131* FUNCTION : getLdafCalib
10132*
10133* DESCRIPTION: Retrieve Laser AF calibration data
10134*
10135* PARAMETERS : None
10136*
10137* RETURN : Two uint32_t describing laser AF calibration data
10138* NULL if none is available.
10139*==========================================================================*/
10140const uint32_t *QCamera3HardwareInterface::getLdafCalib()
10141{
10142 if (mLdafCalibExist) {
10143 return &mLdafCalib[0];
10144 } else {
10145 return NULL;
10146 }
10147}
10148
10149/*===========================================================================
10150 * FUNCTION : dynamicUpdateMetaStreamInfo
10151 *
10152 * DESCRIPTION: This function:
10153 * (1) stops all the channels
10154 * (2) returns error on pending requests and buffers
10155 * (3) sends metastream_info in setparams
10156 * (4) starts all channels
10157 * This is useful when sensor has to be restarted to apply any
10158 * settings such as frame rate from a different sensor mode
10159 *
10160 * PARAMETERS : None
10161 *
10162 * RETURN : NO_ERROR on success
10163 * Error codes on failure
10164 *
10165 *==========================================================================*/
int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
{
    ATRACE_CALL();
    int rc = NO_ERROR;

    LOGD("E");

    // Step 1: stream-off every channel before touching sensor settings.
    rc = stopAllChannels();
    if (rc < 0) {
        LOGE("stopAllChannels failed");
        return rc;
    }

    // Step 2: fail all in-flight requests/buffers back to the framework.
    rc = notifyErrorForPendingRequests();
    if (rc < 0) {
        LOGE("notifyErrorForPendingRequests failed");
        return rc;
    }

    for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
        // NOTE(review): pp_mask is printed with %x — confirm
        // postprocess_mask[] is 32-bit; if it is cam_feature_mask_t
        // (64-bit) this should be %llx.
        LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x"
                "Format:%d",
                mStreamConfigInfo.type[i],
                mStreamConfigInfo.stream_sizes[i].width,
                mStreamConfigInfo.stream_sizes[i].height,
                mStreamConfigInfo.postprocess_mask[i],
                mStreamConfigInfo.format[i]);
    }

    /* Send meta stream info once again so that ISP can start */
    // Step 3: re-send the stream configuration so the new sensor mode
    // (e.g. a different frame rate) is picked up.
    ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
            CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
    rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
            mParameters);
    if (rc < 0) {
        // Non-fatal: continue restarting channels with the old sensor mode.
        LOGE("set Metastreaminfo failed. Sensor mode does not change");
    }

    // Step 4: stream-on everything again.
    rc = startAllChannels();
    if (rc < 0) {
        LOGE("startAllChannels failed");
        return rc;
    }

    LOGD("X");
    return rc;
}
10213
10214/*===========================================================================
10215 * FUNCTION : stopAllChannels
10216 *
10217 * DESCRIPTION: This function stops (equivalent to stream-off) all channels
10218 *
10219 * PARAMETERS : None
10220 *
10221 * RETURN : NO_ERROR on success
10222 * Error codes on failure
10223 *
10224 *==========================================================================*/
10225int32_t QCamera3HardwareInterface::stopAllChannels()
10226{
10227 int32_t rc = NO_ERROR;
10228
10229 LOGD("Stopping all channels");
10230 // Stop the Streams/Channels
10231 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
10232 it != mStreamInfo.end(); it++) {
10233 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
10234 if (channel) {
10235 channel->stop();
10236 }
10237 (*it)->status = INVALID;
10238 }
10239
10240 if (mSupportChannel) {
10241 mSupportChannel->stop();
10242 }
10243 if (mAnalysisChannel) {
10244 mAnalysisChannel->stop();
10245 }
10246 if (mRawDumpChannel) {
10247 mRawDumpChannel->stop();
10248 }
10249 if (mMetadataChannel) {
10250 /* If content of mStreamInfo is not 0, there is metadata stream */
10251 mMetadataChannel->stop();
10252 }
10253
10254 LOGD("All channels stopped");
10255 return rc;
10256}
10257
10258/*===========================================================================
10259 * FUNCTION : startAllChannels
10260 *
10261 * DESCRIPTION: This function starts (equivalent to stream-on) all channels
10262 *
10263 * PARAMETERS : None
10264 *
10265 * RETURN : NO_ERROR on success
10266 * Error codes on failure
10267 *
10268 *==========================================================================*/
10269int32_t QCamera3HardwareInterface::startAllChannels()
10270{
10271 int32_t rc = NO_ERROR;
10272
10273 LOGD("Start all channels ");
10274 // Start the Streams/Channels
10275 if (mMetadataChannel) {
10276 /* If content of mStreamInfo is not 0, there is metadata stream */
10277 rc = mMetadataChannel->start();
10278 if (rc < 0) {
10279 LOGE("META channel start failed");
10280 return rc;
10281 }
10282 }
10283 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
10284 it != mStreamInfo.end(); it++) {
10285 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
10286 if (channel) {
10287 rc = channel->start();
10288 if (rc < 0) {
10289 LOGE("channel start failed");
10290 return rc;
10291 }
10292 }
10293 }
10294 if (mAnalysisChannel) {
10295 mAnalysisChannel->start();
10296 }
10297 if (mSupportChannel) {
10298 rc = mSupportChannel->start();
10299 if (rc < 0) {
10300 LOGE("Support channel start failed");
10301 return rc;
10302 }
10303 }
10304 if (mRawDumpChannel) {
10305 rc = mRawDumpChannel->start();
10306 if (rc < 0) {
10307 LOGE("RAW dump channel start failed");
10308 return rc;
10309 }
10310 }
10311
10312 LOGD("All channels started");
10313 return rc;
10314}
10315
10316/*===========================================================================
10317 * FUNCTION : notifyErrorForPendingRequests
10318 *
10319 * DESCRIPTION: This function sends error for all the pending requests/buffers
10320 *
10321 * PARAMETERS : None
10322 *
10323 * RETURN : Error codes
10324 * NO_ERROR on success
10325 *
10326 *==========================================================================*/
10327int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
10328{
10329 int32_t rc = NO_ERROR;
10330 unsigned int frameNum = 0;
10331 camera3_capture_result_t result;
10332 camera3_stream_buffer_t *pStream_Buf = NULL;
10333
10334 memset(&result, 0, sizeof(camera3_capture_result_t));
10335
10336 if (mPendingRequestsList.size() > 0) {
10337 pendingRequestIterator i = mPendingRequestsList.begin();
10338 frameNum = i->frame_number;
10339 } else {
10340 /* There might still be pending buffers even though there are
10341 no pending requests. Setting the frameNum to MAX so that
10342 all the buffers with smaller frame numbers are returned */
10343 frameNum = UINT_MAX;
10344 }
10345
10346 LOGH("Oldest frame num on mPendingRequestsList = %u",
10347 frameNum);
10348
10349 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
10350 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); ) {
10351
10352 if (req->frame_number < frameNum) {
10353 // Send Error notify to frameworks for each buffer for which
10354 // metadata buffer is already sent
10355 LOGH("Sending ERROR BUFFER for frame %d for %d buffer(s)",
10356 req->frame_number, req->mPendingBufferList.size());
10357
10358 pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
10359 if (NULL == pStream_Buf) {
10360 LOGE("No memory for pending buffers array");
10361 return NO_MEMORY;
10362 }
10363 memset(pStream_Buf, 0,
10364 sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());
10365 result.result = NULL;
10366 result.frame_number = req->frame_number;
10367 result.num_output_buffers = req->mPendingBufferList.size();
10368 result.output_buffers = pStream_Buf;
10369
10370 size_t index = 0;
10371 for (auto info = req->mPendingBufferList.begin();
10372 info != req->mPendingBufferList.end(); ) {
10373
10374 camera3_notify_msg_t notify_msg;
10375 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
10376 notify_msg.type = CAMERA3_MSG_ERROR;
10377 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
10378 notify_msg.message.error.error_stream = info->stream;
10379 notify_msg.message.error.frame_number = req->frame_number;
10380 pStream_Buf[index].acquire_fence = -1;
10381 pStream_Buf[index].release_fence = -1;
10382 pStream_Buf[index].buffer = info->buffer;
10383 pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
10384 pStream_Buf[index].stream = info->stream;
10385 mCallbackOps->notify(mCallbackOps, &notify_msg);
10386 index++;
10387 // Remove buffer from list
10388 info = req->mPendingBufferList.erase(info);
10389 }
10390
10391 // Remove this request from Map
10392 LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
10393 req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
10394 req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);
10395
10396 mCallbackOps->process_capture_result(mCallbackOps, &result);
10397
10398 delete [] pStream_Buf;
10399 } else {
10400
10401 // Go through the pending requests info and send error request to framework
10402 pendingRequestIterator i = mPendingRequestsList.begin(); //make sure i is at the beginning
10403
10404 LOGH("Sending ERROR REQUEST for frame %d", req->frame_number);
10405
10406 // Send error notify to frameworks
10407 camera3_notify_msg_t notify_msg;
10408 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
10409 notify_msg.type = CAMERA3_MSG_ERROR;
10410 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
10411 notify_msg.message.error.error_stream = NULL;
10412 notify_msg.message.error.frame_number = req->frame_number;
10413 mCallbackOps->notify(mCallbackOps, &notify_msg);
10414
10415 pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
10416 if (NULL == pStream_Buf) {
10417 LOGE("No memory for pending buffers array");
10418 return NO_MEMORY;
10419 }
10420 memset(pStream_Buf, 0, sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());
10421
10422 result.result = NULL;
10423 result.frame_number = req->frame_number;
10424 result.input_buffer = i->input_buffer;
10425 result.num_output_buffers = req->mPendingBufferList.size();
10426 result.output_buffers = pStream_Buf;
10427
10428 size_t index = 0;
10429 for (auto info = req->mPendingBufferList.begin();
10430 info != req->mPendingBufferList.end(); ) {
10431 pStream_Buf[index].acquire_fence = -1;
10432 pStream_Buf[index].release_fence = -1;
10433 pStream_Buf[index].buffer = info->buffer;
10434 pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
10435 pStream_Buf[index].stream = info->stream;
10436 index++;
10437 // Remove buffer from list
10438 info = req->mPendingBufferList.erase(info);
10439 }
10440
10441 // Remove this request from Map
10442 LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
10443 req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
10444 req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);
10445
10446 mCallbackOps->process_capture_result(mCallbackOps, &result);
10447 delete [] pStream_Buf;
10448 i = erasePendingRequest(i);
10449 }
10450 }
10451
10452 /* Reset pending frame Drop list and requests list */
10453 mPendingFrameDropList.clear();
10454
10455 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
10456 req.mPendingBufferList.clear();
10457 }
10458 mPendingBuffersMap.mPendingBuffersInRequest.clear();
10459 mPendingReprocessResultList.clear();
10460 LOGH("Cleared all the pending buffers ");
10461
10462 return rc;
10463}
10464
10465bool QCamera3HardwareInterface::isOnEncoder(
10466 const cam_dimension_t max_viewfinder_size,
10467 uint32_t width, uint32_t height)
10468{
10469 return (width > (uint32_t)max_viewfinder_size.width ||
10470 height > (uint32_t)max_viewfinder_size.height);
10471}
10472
10473/*===========================================================================
10474 * FUNCTION : setBundleInfo
10475 *
10476 * DESCRIPTION: Set bundle info for all streams that are bundle.
10477 *
10478 * PARAMETERS : None
10479 *
10480 * RETURN : NO_ERROR on success
10481 * Error codes on failure
10482 *==========================================================================*/
10483int32_t QCamera3HardwareInterface::setBundleInfo()
10484{
10485 int32_t rc = NO_ERROR;
10486
10487 if (mChannelHandle) {
10488 cam_bundle_config_t bundleInfo;
10489 memset(&bundleInfo, 0, sizeof(bundleInfo));
10490 rc = mCameraHandle->ops->get_bundle_info(
10491 mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
10492 if (rc != NO_ERROR) {
10493 LOGE("get_bundle_info failed");
10494 return rc;
10495 }
10496 if (mAnalysisChannel) {
10497 mAnalysisChannel->setBundleInfo(bundleInfo);
10498 }
10499 if (mSupportChannel) {
10500 mSupportChannel->setBundleInfo(bundleInfo);
10501 }
10502 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
10503 it != mStreamInfo.end(); it++) {
10504 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
10505 channel->setBundleInfo(bundleInfo);
10506 }
10507 if (mRawDumpChannel) {
10508 mRawDumpChannel->setBundleInfo(bundleInfo);
10509 }
10510 }
10511
10512 return rc;
10513}
10514
10515/*===========================================================================
10516 * FUNCTION : get_num_overall_buffers
10517 *
10518 * DESCRIPTION: Estimate number of pending buffers across all requests.
10519 *
10520 * PARAMETERS : None
10521 *
10522 * RETURN : Number of overall pending buffers
10523 *
10524 *==========================================================================*/
10525uint32_t PendingBuffersMap::get_num_overall_buffers()
10526{
10527 uint32_t sum_buffers = 0;
10528 for (auto &req : mPendingBuffersInRequest) {
10529 sum_buffers += req.mPendingBufferList.size();
10530 }
10531 return sum_buffers;
10532}
10533
10534/*===========================================================================
10535 * FUNCTION : removeBuf
10536 *
10537 * DESCRIPTION: Remove a matching buffer from tracker.
10538 *
10539 * PARAMETERS : @buffer: image buffer for the callback
10540 *
10541 * RETURN : None
10542 *
10543 *==========================================================================*/
10544void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
10545{
10546 bool buffer_found = false;
10547 for (auto req = mPendingBuffersInRequest.begin();
10548 req != mPendingBuffersInRequest.end(); req++) {
10549 for (auto k = req->mPendingBufferList.begin();
10550 k != req->mPendingBufferList.end(); k++ ) {
10551 if (k->buffer == buffer) {
10552 LOGD("Frame %d: Found Frame buffer %p, take it out from mPendingBufferList",
10553 req->frame_number, buffer);
10554 k = req->mPendingBufferList.erase(k);
10555 if (req->mPendingBufferList.empty()) {
10556 // Remove this request from Map
10557 req = mPendingBuffersInRequest.erase(req);
10558 }
10559 buffer_found = true;
10560 break;
10561 }
10562 }
10563 if (buffer_found) {
10564 break;
10565 }
10566 }
10567 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
10568 get_num_overall_buffers());
10569}
10570
10571/*===========================================================================
10572 * FUNCTION : setPAAFSupport
10573 *
10574 * DESCRIPTION: Set the preview-assisted auto focus support bit in
10575 * feature mask according to stream type and filter
10576 * arrangement
10577 *
10578 * PARAMETERS : @feature_mask: current feature mask, which may be modified
10579 * @stream_type: stream type
10580 * @filter_arrangement: filter arrangement
10581 *
10582 * RETURN : None
10583 *==========================================================================*/
10584void QCamera3HardwareInterface::setPAAFSupport(
10585 cam_feature_mask_t& feature_mask,
10586 cam_stream_type_t stream_type,
10587 cam_color_filter_arrangement_t filter_arrangement)
10588{
10589 LOGD("feature_mask=0x%llx; stream_type=%d, filter_arrangement=%d",
10590 feature_mask, stream_type, filter_arrangement);
10591
10592 switch (filter_arrangement) {
10593 case CAM_FILTER_ARRANGEMENT_RGGB:
10594 case CAM_FILTER_ARRANGEMENT_GRBG:
10595 case CAM_FILTER_ARRANGEMENT_GBRG:
10596 case CAM_FILTER_ARRANGEMENT_BGGR:
10597 if ((stream_type == CAM_STREAM_TYPE_CALLBACK) ||
10598 (stream_type == CAM_STREAM_TYPE_PREVIEW) ||
10599 (stream_type == CAM_STREAM_TYPE_VIDEO)) {
10600 feature_mask |= CAM_QCOM_FEATURE_PAAF;
10601 }
10602 break;
10603 case CAM_FILTER_ARRANGEMENT_Y:
10604 if (stream_type == CAM_STREAM_TYPE_ANALYSIS) {
10605 feature_mask |= CAM_QCOM_FEATURE_PAAF;
10606 }
10607 break;
10608 default:
10609 break;
10610 }
10611}
10612
10613/*===========================================================================
10614* FUNCTION : getSensorMountAngle
10615*
10616* DESCRIPTION: Retrieve sensor mount angle
10617*
10618* PARAMETERS : None
10619*
10620* RETURN : sensor mount angle in uint32_t
10621*==========================================================================*/
10622uint32_t QCamera3HardwareInterface::getSensorMountAngle()
10623{
10624 return gCamCapability[mCameraId]->sensor_mount_angle;
10625}
10626
10627/*===========================================================================
10628* FUNCTION : getRelatedCalibrationData
10629*
10630* DESCRIPTION: Retrieve related system calibration data
10631*
10632* PARAMETERS : None
10633*
10634* RETURN : Pointer of related system calibration data
10635*==========================================================================*/
10636const cam_related_system_calibration_data_t *QCamera3HardwareInterface::getRelatedCalibrationData()
10637{
10638 return (const cam_related_system_calibration_data_t *)
10639 &(gCamCapability[mCameraId]->related_cam_calibration);
10640}
10641}; //end namespace qcamera