blob: 9892265b06519fbb188ac5b0156a5211642e9e5d [file] [log] [blame]
Thierry Strudel3d639192016-09-09 11:52:26 -07001/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
2*
3* Redistribution and use in source and binary forms, with or without
4* modification, are permitted provided that the following conditions are
5* met:
6* * Redistributions of source code must retain the above copyright
7* notice, this list of conditions and the following disclaimer.
8* * Redistributions in binary form must reproduce the above
9* copyright notice, this list of conditions and the following
10* disclaimer in the documentation and/or other materials provided
11* with the distribution.
12* * Neither the name of The Linux Foundation nor the names of its
13* contributors may be used to endorse or promote products derived
14* from this software without specific prior written permission.
15*
16* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27*
28*/
29
30#define LOG_TAG "QCamera3HWI"
31//#define LOG_NDEBUG 0
32
33#define __STDC_LIMIT_MACROS
34
35// To remove
36#include <cutils/properties.h>
37
38// System dependencies
39#include <dlfcn.h>
40#include <fcntl.h>
41#include <stdio.h>
42#include <stdlib.h>
43#include "utils/Timers.h"
44#include "sys/ioctl.h"
45#include <sync/sync.h>
46#include "gralloc_priv.h"
Thierry Strudele80ad7c2016-12-06 10:16:27 -080047#include <map>
Thierry Strudel3d639192016-09-09 11:52:26 -070048
49// Display dependencies
50#include "qdMetaData.h"
51
52// Camera dependencies
53#include "android/QCamera3External.h"
54#include "util/QCameraFlash.h"
55#include "QCamera3HWI.h"
56#include "QCamera3VendorTags.h"
57#include "QCameraTrace.h"
58
59extern "C" {
60#include "mm_camera_dbg.h"
61}
62
63using namespace android;
64
65namespace qcamera {
66
// Convenience accessor: pointer to buffer INDEX inside a memory object.
#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )

// Pipeline warm-up delay (frames) before results are expected — TODO confirm units with callers.
#define EMPTY_PIPELINE_DELAY 2
// Number of partial metadata results reported per capture request.
#define PARTIAL_RESULT_COUNT 2
#define FRAME_SKIP_DELAY 0

// Maximum representable values for 8/10/12-bit pixel data.
#define MAX_VALUE_8BIT ((1<<8)-1)
#define MAX_VALUE_10BIT ((1<<10)-1)
#define MAX_VALUE_12BIT ((1<<12)-1)

// 4K UHD video dimensions.
#define VIDEO_4K_WIDTH 3840
#define VIDEO_4K_HEIGHT 2160

// Largest stream size for which EIS (electronic image stabilization) is supported.
#define MAX_EIS_WIDTH 1920
#define MAX_EIS_HEIGHT 1080

// Per-configuration stream count limits.
#define MAX_RAW_STREAMS 1
#define MAX_STALLING_STREAMS 1
#define MAX_PROCESSED_STREAMS 3
/* Batch mode is enabled only if FPS set is equal to or greater than this */
#define MIN_FPS_FOR_BATCH_MODE (120)
#define PREVIEW_FPS_FOR_HFR (30)
#define DEFAULT_VIDEO_FPS (30.0)
#define TEMPLATE_MAX_PREVIEW_FPS (30.0)
#define MAX_HFR_BATCH_SIZE (8)
// Metering region tuple: xmin, ymin, xmax, ymax, weight.
#define REGIONS_TUPLE_COUNT 5
#define HDR_PLUS_PERF_TIME_OUT  (7000) // milliseconds
// Set a threshold for detection of missing buffers //seconds
#define MISSING_REQUEST_BUF_TIMEOUT 3
#define FLUSH_TIMEOUT 3
// Element count of a statically sized lookup table.
#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))

// Default post-processing feature superset applied to HAL3 streams.
#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3 ( CAM_QCOM_FEATURE_DENOISE2D |\
                                            CAM_QCOM_FEATURE_CROP |\
                                            CAM_QCOM_FEATURE_ROTATION |\
                                            CAM_QCOM_FEATURE_SHARPNESS |\
                                            CAM_QCOM_FEATURE_SCALE |\
                                            CAM_QCOM_FEATURE_CAC |\
                                            CAM_QCOM_FEATURE_CDS )
/* Per configuration size for static metadata length*/
#define PER_CONFIGURATION_SIZE_3 (3)

// Sentinel for waits that should block indefinitely.
#define TIMEOUT_NEVER -1

/* Face landmarks indices */
#define LEFT_EYE_X 0
#define LEFT_EYE_Y 1
#define RIGHT_EYE_X 2
#define RIGHT_EYE_Y 3
#define MOUTH_X 4
#define MOUTH_Y 5
#define TOTAL_LANDMARK_INDICES 6
119
// Per-sensor capability and static metadata caches, filled on first open.
cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
// Global lock guarding cross-session state (defined elsewhere).
extern pthread_mutex_t gCamLock;
// Runtime-adjustable HAL log verbosity.
volatile uint32_t gCamHal3LogLevel = 1;
// Count of currently open camera sessions (defined elsewhere).
extern uint8_t gNumCameraSessions;

// Property-string to CDS (chroma drop/downsampling) mode mapping.
const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
    {"On",  CAM_CDS_MODE_ON},
    {"Off", CAM_CDS_MODE_OFF},
    {"Auto",CAM_CDS_MODE_AUTO}
};
// Vendor video-HDR enum <-> backend mode mapping.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_video_hdr_mode_t,
        cam_video_hdr_mode_t> QCamera3HardwareInterface::VIDEO_HDR_MODES_MAP[] = {
    { QCAMERA3_VIDEO_HDR_MODE_OFF,  CAM_VIDEO_HDR_MODE_OFF },
    { QCAMERA3_VIDEO_HDR_MODE_ON,   CAM_VIDEO_HDR_MODE_ON }
};


// Vendor IR (infrared) mode enum <-> backend mode mapping.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_ir_mode_t,
        cam_ir_mode_type_t> QCamera3HardwareInterface::IR_MODES_MAP [] = {
    {QCAMERA3_IR_MODE_OFF,  CAM_IR_MODE_OFF},
    {QCAMERA3_IR_MODE_ON,   CAM_IR_MODE_ON},
    {QCAMERA3_IR_MODE_AUTO, CAM_IR_MODE_AUTO}
};
Thierry Strudel3d639192016-09-09 11:52:26 -0700146
// Android control.effectMode <-> backend effect mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_effect_mode_t,
        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,        CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};

// Android control.awbMode <-> backend white balance mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_awb_mode_t,
        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};

// Android control.sceneMode <-> backend scene mode.
// Note: STEADYPHOTO intentionally maps to the backend ANTISHAKE mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_scene_mode_t,
        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE}
};

// Android control.afMode <-> backend focus mode.
// AF_MODE_OFF is listed twice (OFF and FIXED); presumably the HAL->Android
// direction picks the first match — verify against the map traversal logic.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_af_mode_t,
        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

// Android colorCorrection.aberrationMode <-> backend CAC mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_color_correction_aberration_mode_t,
        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};

// Android control.aeAntibandingMode <-> backend antibanding mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_antibanding_mode_t,
        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

// Android control.aeMode <-> backend flash mode implied by that AE mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};

// Android flash.mode <-> backend flash mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_flash_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF  },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};

// Android statistics.faceDetectMode <-> backend face-detect mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_statistics_face_detect_mode_t,
        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF    },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL   }
};

// Android lens.info.focusDistanceCalibration <-> backend calibration state.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
            CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
            CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
            CAM_FOCUS_CALIBRATED }
};

// Android lens.state <-> backend AF lens state.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_state_t,
        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
    { ANDROID_LENS_STATE_STATIONARY, CAM_AF_LENS_STATE_STATIONARY},
    { ANDROID_LENS_STATE_MOVING,     CAM_AF_LENS_STATE_MOVING}
};
271
// Supported JPEG thumbnail sizes as flat (width, height) pairs;
// the leading 0,0 entry means "no thumbnail".
const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             240, 144,
                                             256, 144,
                                             240, 160,
                                             256, 154,
                                             240, 240,
                                             320, 240};

// Android sensor.testPatternMode <-> backend test pattern.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_test_pattern_mode_t,
        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,         CAM_TEST_PATTERN_OFF   },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR, CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,  CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,         CAM_TEST_PATTERN_PN9 },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1,     CAM_TEST_PATTERN_CUSTOM1},
};
291
/* Since there is no mapping for all the options some Android enum are not listed.
 * Also, the order in this list is important because while mapping from HAL to Android it will
 * traverse from lower to higher index which means that for HAL values that are map to different
 * Android values, the traverse logic will select the first one found.
 */
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_reference_illuminant1_t,
        cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT,            CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT,   CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A,             CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55,                    CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65,                    CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75,                    CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50,                    CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN,    CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT,               CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN,               CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER,           CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER,         CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE,                  CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT,  CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT,      CAM_AWB_COLD_FLO},
};

// Requested HFR frame rate (fps) <-> backend HFR mode.
const QCamera3HardwareInterface::QCameraMap<
        int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
    { 60,  CAM_HFR_MODE_60FPS},
    { 90,  CAM_HFR_MODE_90FPS},
    { 120, CAM_HFR_MODE_120FPS},
    { 150, CAM_HFR_MODE_150FPS},
    { 180, CAM_HFR_MODE_180FPS},
    { 210, CAM_HFR_MODE_210FPS},
    { 240, CAM_HFR_MODE_240FPS},
    { 480, CAM_HFR_MODE_480FPS},
};

// Vendor instant-AEC mode <-> backend AEC convergence type.
const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_instant_aec_mode_t,
        cam_aec_convergence_type> QCamera3HardwareInterface::INSTANT_AEC_MODES_MAP[] = {
    { QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE,     CAM_AEC_NORMAL_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_AGGRESSIVE_CONVERGENCE, CAM_AEC_AGGRESSIVE_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_FAST_CONVERGENCE,       CAM_AEC_FAST_CONVERGENCE},
};
// camera3 HAL device operation vtable handed to the framework.
// register_stream_buffers and get_metadata_vendor_tag_ops are deprecated
// in this HAL version and therefore left NULL.
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    .initialize                         = QCamera3HardwareInterface::initialize,
    .configure_streams                  = QCamera3HardwareInterface::configure_streams,
    .register_stream_buffers            = NULL,
    .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
    .process_capture_request            = QCamera3HardwareInterface::process_capture_request,
    .get_metadata_vendor_tag_ops        = NULL,
    .dump                               = QCamera3HardwareInterface::dump,
    .flush                              = QCamera3HardwareInterface::flush,
    .reserved                           = {0},
};

// initialise to some default value
// (0xDEADBEEF acts as a "no valid session" sentinel until openCamera fills it)
uint32_t QCamera3HardwareInterface::sessionId[] = {0xDEADBEEF, 0xDEADBEEF, 0xDEADBEEF};
351
/*===========================================================================
 * FUNCTION   : QCamera3HardwareInterface
 *
 * DESCRIPTION: constructor of QCamera3HardwareInterface. Initializes every
 *              member to a safe default, fills in the camera3_device_t vtable,
 *              reads debug/tuning system properties and probes the GPU library
 *              for the surface stride alignment. No hardware is opened here;
 *              that happens in openCamera().
 *
 * PARAMETERS :
 *   @cameraId  : camera ID
 *   @callbacks : framework module callbacks (stored, not invoked here)
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
        const camera_module_callbacks_t *callbacks)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mSupportChannel(NULL),
      mAnalysisChannel(NULL),
      mRawDumpChannel(NULL),
      mDummyBatchChannel(NULL),
      mPerfLockMgr(),
      mCommon(),
      mChannelHandle(0),
      mFirstConfiguration(true),
      mFlush(false),
      mFlushPerf(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mPrevParameters(NULL),
      m_bIsVideo(false),
      m_bIs4KVideo(false),
      m_bEisSupportedSize(false),
      m_bEisEnable(false),
      m_MobicatMask(0),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      mMetaFrameCount(0U),
      mUpdateDebugLevel(false),
      mCallbacks(callbacks),
      mCaptureIntent(0),
      mCacMode(0),
      /* DevCamDebug metadata internal m control*/
      mDevCamDebugMetaEnable(0),
      /* DevCamDebug metadata end */
      mBatchSize(0),
      mToBeQueuedVidBufs(0),
      mHFRVideoFps(DEFAULT_VIDEO_FPS),
      mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
      mFirstFrameNumberInBatch(0),
      mNeedSensorRestart(false),
      mPreviewStarted(false),
      mMinInFlightRequests(MIN_INFLIGHT_REQUESTS),
      mMaxInFlightRequests(MAX_INFLIGHT_REQUESTS),
      mInstantAEC(false),
      mResetInstantAEC(false),
      mInstantAECSettledFrameNumber(0),
      mAecSkipDisplayFrameBound(0),
      mInstantAecFrameIdxCount(0),
      mLdafCalibExist(false),
      mLastCustIntentFrmNum(-1),
      mState(CLOSED),
      mIsDeviceLinked(false),
      mIsMainCamera(true),
      mLinkedCameraId(0),
      m_pDualCamCmdHeap(NULL),
      m_pDualCamCmdPtr(NULL)
{
    getLogLevel();
    mCommon.init(gCamCapability[cameraId]);
    // Populate the camera3_device_t exposed to the framework.
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
#ifndef USE_HAL_3_3
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_4;
#else
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
#endif
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    pthread_cond_init(&mBuffersCond, NULL);

    pthread_cond_init(&mRequestCond, NULL);
    mPendingLiveRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

    // Getting system props of different kinds
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.raw.dump", prop, "0");
    mEnableRawDump = atoi(prop);
    property_get("persist.camera.hal3.force.hdr", prop, "0");
    mForceHdrSnapshot = atoi(prop);

    if (mEnableRawDump)
        LOGD("Raw dump from Camera HAL enabled");

    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
    memset(mLdafCalib, 0, sizeof(mLdafCalib));

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.preview", prop, "0");
    m_bTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    // Note: software TNR for preview defaults to enabled ("1"), unlike the others.
    property_get("persist.camera.swtnr.preview", prop, "1");
    m_bSwTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.video", prop, "0");
    m_bTnrVideo = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.avtimer.debug", prop, "0");
    m_debug_avtimer = (uint8_t)atoi(prop);
    LOGI("AV timer enabled: %d", m_debug_avtimer);

    //Load and read GPU library.
    // Query the Adreno GPU helper for the pixel alignment it requires so that
    // HAL-allocated surfaces match gralloc's stride; fall back to 32-pixel padding.
    lib_surface_utils = NULL;
    LINK_get_surface_pixel_alignment = NULL;
    mSurfaceStridePadding = CAM_PAD_TO_32;
    lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
    if (lib_surface_utils) {
        *(void **)&LINK_get_surface_pixel_alignment =
                dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
        if (LINK_get_surface_pixel_alignment) {
            mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
        }
        dlclose(lib_surface_utils);
    }
}
494
/*===========================================================================
 * FUNCTION   : ~QCamera3HardwareInterface
 *
 * DESCRIPTION: destructor of QCamera3HardwareInterface. Teardown order is
 *              deliberate: unlink dual-camera, stop ALL streams/channels,
 *              then delete them, send a final unconfigure to the backend,
 *              close the camera, and finally release pending-request memory
 *              and pthread primitives. Do not reorder without care.
 *
 * PARAMETERS : none
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    LOGD("E");

    int32_t rc = 0;

    // Disable power hint and enable the perf lock for close camera
    mPerfLockMgr.releasePerfLock(PERF_LOCK_POWERHINT_ENCODE);
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_CLOSE_CAMERA);

    // unlink of dualcam during close camera
    if (mIsDeviceLinked) {
        cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
                &m_pDualCamCmdPtr->bundle_info;
        m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
        m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
        // gCamLock guards the global sessionId[] table read below.
        pthread_mutex_lock(&gCamLock);

        if (mIsMainCamera == 1) {
            m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            // related session id should be session id of linked session
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        } else {
            m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        }
        pthread_mutex_unlock(&gCamLock);

        rc = mCameraHandle->ops->set_dual_cam_cmd(
                mCameraHandle->camera_handle);
        if (rc < 0) {
            // Best effort: unlink failure must not block camera close.
            LOGE("Dualcam: Unlink failed, but still proceed to close");
        }
    }

    /* We need to stop all streams before deleting any stream */
    if (mRawDumpChannel) {
        mRawDumpChannel->stop();
    }

    // NOTE: 'camera3_stream_t *' objects are already freed at
    //       this stage by the framework
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel) {
            channel->stop();
        }
    }
    if (mSupportChannel)
        mSupportChannel->stop();

    if (mAnalysisChannel) {
        mAnalysisChannel->stop();
    }
    if (mMetadataChannel) {
        mMetadataChannel->stop();
    }
    if (mChannelHandle) {
        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGD("stopping channel %d", mChannelHandle);
    }

    // Second pass: everything is stopped, now it is safe to delete.
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel)
            delete channel;
        free (*it);
    }
    if (mSupportChannel) {
        delete mSupportChannel;
        mSupportChannel = NULL;
    }

    if (mAnalysisChannel) {
        delete mAnalysisChannel;
        mAnalysisChannel = NULL;
    }
    if (mRawDumpChannel) {
        delete mRawDumpChannel;
        mRawDumpChannel = NULL;
    }
    if (mDummyBatchChannel) {
        delete mDummyBatchChannel;
        mDummyBatchChannel = NULL;
    }

    // mPictureChannel is only cleared here, not deleted — presumably owned
    // via mStreamInfo above; verify against configure_streams.
    mPictureChannel = NULL;

    if (mMetadataChannel) {
        delete mMetadataChannel;
        mMetadataChannel = NULL;
    }

    /* Clean up all channels */
    if (mCameraInitialized) {
        if(!mFirstConfiguration){
            //send the last unconfigure
            cam_stream_size_info_t stream_config_info;
            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
            stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
            stream_config_info.buffer_info.max_buffers =
                    m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
            clear_metadata_buffer(mParameters);
            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
                    stream_config_info);
            int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
            if (rc < 0) {
                LOGE("set_parms failed for unconfigure");
            }
        }
        deinitParameters();
    }

    if (mChannelHandle) {
        mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGH("deleting channel %d", mChannelHandle);
        mChannelHandle = 0;
    }

    if (mState != CLOSED)
        closeCamera();

    // Release all pending request/buffer bookkeeping and default templates.
    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        req.mPendingBufferList.clear();
    }
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    mPendingReprocessResultList.clear();
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end();) {
        i = erasePendingRequest(i);
    }
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    mPerfLockMgr.releasePerfLock(PERF_LOCK_CLOSE_CAMERA);

    pthread_cond_destroy(&mRequestCond);

    pthread_cond_destroy(&mBuffersCond);

    pthread_mutex_destroy(&mMutex);
    LOGD("X");
}
656
657/*===========================================================================
658 * FUNCTION : erasePendingRequest
659 *
660 * DESCRIPTION: function to erase a desired pending request after freeing any
661 * allocated memory
662 *
663 * PARAMETERS :
664 * @i : iterator pointing to pending request to be erased
665 *
666 * RETURN : iterator pointing to the next request
667 *==========================================================================*/
668QCamera3HardwareInterface::pendingRequestIterator
669 QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
670{
671 if (i->input_buffer != NULL) {
672 free(i->input_buffer);
673 i->input_buffer = NULL;
674 }
675 if (i->settings != NULL)
676 free_camera_metadata((camera_metadata_t*)i->settings);
677 return mPendingRequestsList.erase(i);
678}
679
680/*===========================================================================
681 * FUNCTION : camEvtHandle
682 *
683 * DESCRIPTION: Function registered to mm-camera-interface to handle events
684 *
685 * PARAMETERS :
686 * @camera_handle : interface layer camera handle
687 * @evt : ptr to event
688 * @user_data : user data ptr
689 *
690 * RETURN : none
691 *==========================================================================*/
692void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
693 mm_camera_event_t *evt,
694 void *user_data)
695{
696 QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
697 if (obj && evt) {
698 switch(evt->server_event_type) {
699 case CAM_EVENT_TYPE_DAEMON_DIED:
700 pthread_mutex_lock(&obj->mMutex);
701 obj->mState = ERROR;
702 pthread_mutex_unlock(&obj->mMutex);
703 LOGE("Fatal, camera daemon died");
704 break;
705
706 case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
707 LOGD("HAL got request pull from Daemon");
708 pthread_mutex_lock(&obj->mMutex);
709 obj->mWokenUpByDaemon = true;
710 obj->unblockRequestIfNecessary();
711 pthread_mutex_unlock(&obj->mMutex);
712 break;
713
714 default:
715 LOGW("Warning: Unhandled event %d",
716 evt->server_event_type);
717 break;
718 }
719 } else {
720 LOGE("NULL user_data/evt");
721 }
722}
723
724/*===========================================================================
725 * FUNCTION : openCamera
726 *
727 * DESCRIPTION: open camera
728 *
729 * PARAMETERS :
730 * @hw_device : double ptr for camera device struct
731 *
732 * RETURN : int32_t type of status
733 * NO_ERROR -- success
734 * none-zero failure code
735 *==========================================================================*/
736int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
737{
738 int rc = 0;
739 if (mState != CLOSED) {
740 *hw_device = NULL;
741 return PERMISSION_DENIED;
742 }
743
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800744 mPerfLockMgr.acquirePerfLock(PERF_LOCK_OPEN_CAMERA);
Thierry Strudel3d639192016-09-09 11:52:26 -0700745 LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
746 mCameraId);
747
748 rc = openCamera();
749 if (rc == 0) {
750 *hw_device = &mCameraDevice.common;
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800751 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -0700752 *hw_device = NULL;
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800753 }
Thierry Strudel3d639192016-09-09 11:52:26 -0700754
Thierry Strudel3d639192016-09-09 11:52:26 -0700755 LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
756 mCameraId, rc);
757
758 if (rc == NO_ERROR) {
759 mState = OPENED;
760 }
761 return rc;
762}
763
764/*===========================================================================
765 * FUNCTION : openCamera
766 *
767 * DESCRIPTION: open camera
768 *
769 * PARAMETERS : none
770 *
771 * RETURN : int32_t type of status
772 * NO_ERROR -- success
773 * none-zero failure code
774 *==========================================================================*/
775int QCamera3HardwareInterface::openCamera()
776{
777 int rc = 0;
778 char value[PROPERTY_VALUE_MAX];
779
Thierry Strudele80ad7c2016-12-06 10:16:27 -0800780 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_OPENCAMERA);
Thierry Strudel3d639192016-09-09 11:52:26 -0700781 if (mCameraHandle) {
782 LOGE("Failure: Camera already opened");
783 return ALREADY_EXISTS;
784 }
785
786 rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
787 if (rc < 0) {
788 LOGE("Failed to reserve flash for camera id: %d",
789 mCameraId);
790 return UNKNOWN_ERROR;
791 }
792
793 rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
794 if (rc) {
795 LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
796 return rc;
797 }
798
799 if (!mCameraHandle) {
800 LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
801 return -ENODEV;
802 }
803
804 rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
805 camEvtHandle, (void *)this);
806
807 if (rc < 0) {
808 LOGE("Error, failed to register event callback");
809 /* Not closing camera here since it is already handled in destructor */
810 return FAILED_TRANSACTION;
811 }
812
813 mExifParams.debug_params =
814 (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
815 if (mExifParams.debug_params) {
816 memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
817 } else {
818 LOGE("Out of Memory. Allocation failed for 3A debug exif params");
819 return NO_MEMORY;
820 }
821 mFirstConfiguration = true;
822
823 //Notify display HAL that a camera session is active.
824 //But avoid calling the same during bootup because camera service might open/close
825 //cameras at boot time during its initialization and display service will also internally
826 //wait for camera service to initialize first while calling this display API, resulting in a
827 //deadlock situation. Since boot time camera open/close calls are made only to fetch
828 //capabilities, no need of this display bw optimization.
829 //Use "service.bootanim.exit" property to know boot status.
830 property_get("service.bootanim.exit", value, "0");
831 if (atoi(value) == 1) {
832 pthread_mutex_lock(&gCamLock);
833 if (gNumCameraSessions++ == 0) {
834 setCameraLaunchStatus(true);
835 }
836 pthread_mutex_unlock(&gCamLock);
837 }
838
839 //fill the session id needed while linking dual cam
840 pthread_mutex_lock(&gCamLock);
841 rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
842 &sessionId[mCameraId]);
843 pthread_mutex_unlock(&gCamLock);
844
845 if (rc < 0) {
846 LOGE("Error, failed to get sessiion id");
847 return UNKNOWN_ERROR;
848 } else {
849 //Allocate related cam sync buffer
850 //this is needed for the payload that goes along with bundling cmd for related
851 //camera use cases
Thierry Strudel295a0ca2016-11-03 18:38:47 -0700852 m_pDualCamCmdHeap = new QCamera3HeapMemory(1);
853 rc = m_pDualCamCmdHeap->allocate(sizeof(cam_dual_camera_cmd_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -0700854 if(rc != OK) {
855 rc = NO_MEMORY;
856 LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
857 return NO_MEMORY;
858 }
859
860 //Map memory for related cam sync buffer
861 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
Thierry Strudel295a0ca2016-11-03 18:38:47 -0700862 CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF,
863 m_pDualCamCmdHeap->getFd(0),
864 sizeof(cam_dual_camera_cmd_info_t),
865 m_pDualCamCmdHeap->getPtr(0));
Thierry Strudel3d639192016-09-09 11:52:26 -0700866 if(rc < 0) {
867 LOGE("Dualcam: failed to map Related cam sync buffer");
868 rc = FAILED_TRANSACTION;
869 return NO_MEMORY;
870 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -0700871 m_pDualCamCmdPtr =
872 (cam_dual_camera_cmd_info_t*) DATA_PTR(m_pDualCamCmdHeap,0);
Thierry Strudel3d639192016-09-09 11:52:26 -0700873 }
874
875 LOGH("mCameraId=%d",mCameraId);
876
877 return NO_ERROR;
878}
879
880/*===========================================================================
881 * FUNCTION : closeCamera
882 *
883 * DESCRIPTION: close camera
884 *
885 * PARAMETERS : none
886 *
887 * RETURN : int32_t type of status
888 * NO_ERROR -- success
 *              non-zero failure code
890 *==========================================================================*/
int QCamera3HardwareInterface::closeCamera()
{
    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CLOSECAMERA);
    int rc = NO_ERROR;
    char value[PROPERTY_VALUE_MAX];

    LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
             mCameraId);

    // unmap memory for related cam sync buffer
    // NOTE: unmap must happen before the heap below is deallocated, and is
    // issued unconditionally even if the map step never ran (best-effort).
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF);
    // Release the dual-cam command heap allocated in openCamera().
    if (NULL != m_pDualCamCmdHeap) {
        m_pDualCamCmdHeap->deallocate();
        delete m_pDualCamCmdHeap;
        m_pDualCamCmdHeap = NULL;
        m_pDualCamCmdPtr = NULL;
    }

    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
    mCameraHandle = NULL;

    //reset session id to some invalid id
    pthread_mutex_lock(&gCamLock);
    sessionId[mCameraId] = 0xDEADBEEF;
    pthread_mutex_unlock(&gCamLock);

    //Notify display HAL that there is no active camera session
    //but avoid calling the same during bootup. Refer to openCamera
    //for more details.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        // Only the last session leaving flips the launch status off.
        if (--gNumCameraSessions == 0) {
            setCameraLaunchStatus(false);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    // Free the 3A debug exif params allocated in openCamera().
    if (mExifParams.debug_params) {
        free(mExifParams.debug_params);
        mExifParams.debug_params = NULL;
    }
    // Give the flash unit back to the torch HAL; failure is non-fatal.
    if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
        LOGW("Failed to release flash for camera id: %d",
                mCameraId);
    }
    mState = CLOSED;
    LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
             mCameraId, rc);
    return rc;
}
943
944/*===========================================================================
945 * FUNCTION : initialize
946 *
947 * DESCRIPTION: Initialize frameworks callback functions
948 *
949 * PARAMETERS :
950 * @callback_ops : callback function to frameworks
951 *
952 * RETURN :
953 *
954 *==========================================================================*/
955int QCamera3HardwareInterface::initialize(
956 const struct camera3_callback_ops *callback_ops)
957{
Thierry Strudele80ad7c2016-12-06 10:16:27 -0800958 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_INIT);
Thierry Strudel3d639192016-09-09 11:52:26 -0700959 int rc;
960
961 LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
962 pthread_mutex_lock(&mMutex);
963
964 // Validate current state
965 switch (mState) {
966 case OPENED:
967 /* valid state */
968 break;
969 default:
970 LOGE("Invalid state %d", mState);
971 rc = -ENODEV;
972 goto err1;
973 }
974
975 rc = initParameters();
976 if (rc < 0) {
977 LOGE("initParamters failed %d", rc);
978 goto err1;
979 }
980 mCallbackOps = callback_ops;
981
982 mChannelHandle = mCameraHandle->ops->add_channel(
983 mCameraHandle->camera_handle, NULL, NULL, this);
984 if (mChannelHandle == 0) {
985 LOGE("add_channel failed");
986 rc = -ENOMEM;
987 pthread_mutex_unlock(&mMutex);
988 return rc;
989 }
990
991 pthread_mutex_unlock(&mMutex);
992 mCameraInitialized = true;
993 mState = INITIALIZED;
994 LOGI("X");
995 return 0;
996
997err1:
998 pthread_mutex_unlock(&mMutex);
999 return rc;
1000}
1001
1002/*===========================================================================
1003 * FUNCTION : validateStreamDimensions
1004 *
1005 * DESCRIPTION: Check if the configuration requested are those advertised
1006 *
1007 * PARAMETERS :
1008 * @stream_list : streams to be configured
1009 *
1010 * RETURN :
1011 *
1012 *==========================================================================*/
1013int QCamera3HardwareInterface::validateStreamDimensions(
1014 camera3_stream_configuration_t *streamList)
1015{
1016 int rc = NO_ERROR;
1017 size_t count = 0;
1018
1019 camera3_stream_t *inputStream = NULL;
1020 /*
1021 * Loop through all streams to find input stream if it exists*
1022 */
1023 for (size_t i = 0; i< streamList->num_streams; i++) {
1024 if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
1025 if (inputStream != NULL) {
1026 LOGE("Error, Multiple input streams requested");
1027 return -EINVAL;
1028 }
1029 inputStream = streamList->streams[i];
1030 }
1031 }
1032 /*
1033 * Loop through all streams requested in configuration
1034 * Check if unsupported sizes have been requested on any of them
1035 */
1036 for (size_t j = 0; j < streamList->num_streams; j++) {
1037 bool sizeFound = false;
1038 camera3_stream_t *newStream = streamList->streams[j];
1039
1040 uint32_t rotatedHeight = newStream->height;
1041 uint32_t rotatedWidth = newStream->width;
1042 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
1043 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
1044 rotatedHeight = newStream->width;
1045 rotatedWidth = newStream->height;
1046 }
1047
1048 /*
1049 * Sizes are different for each type of stream format check against
1050 * appropriate table.
1051 */
1052 switch (newStream->format) {
1053 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
1054 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
1055 case HAL_PIXEL_FORMAT_RAW10:
1056 count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
1057 for (size_t i = 0; i < count; i++) {
1058 if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
1059 (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
1060 sizeFound = true;
1061 break;
1062 }
1063 }
1064 break;
1065 case HAL_PIXEL_FORMAT_BLOB:
1066 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
1067 /* Verify set size against generated sizes table */
1068 for (size_t i = 0; i < count; i++) {
1069 if (((int32_t)rotatedWidth ==
1070 gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
1071 ((int32_t)rotatedHeight ==
1072 gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
1073 sizeFound = true;
1074 break;
1075 }
1076 }
1077 break;
1078 case HAL_PIXEL_FORMAT_YCbCr_420_888:
1079 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1080 default:
1081 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
1082 || newStream->stream_type == CAMERA3_STREAM_INPUT
1083 || IS_USAGE_ZSL(newStream->usage)) {
1084 if (((int32_t)rotatedWidth ==
1085 gCamCapability[mCameraId]->active_array_size.width) &&
1086 ((int32_t)rotatedHeight ==
1087 gCamCapability[mCameraId]->active_array_size.height)) {
1088 sizeFound = true;
1089 break;
1090 }
1091 /* We could potentially break here to enforce ZSL stream
1092 * set from frameworks always is full active array size
1093 * but it is not clear from the spc if framework will always
1094 * follow that, also we have logic to override to full array
1095 * size, so keeping the logic lenient at the moment
1096 */
1097 }
1098 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
1099 MAX_SIZES_CNT);
1100 for (size_t i = 0; i < count; i++) {
1101 if (((int32_t)rotatedWidth ==
1102 gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
1103 ((int32_t)rotatedHeight ==
1104 gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
1105 sizeFound = true;
1106 break;
1107 }
1108 }
1109 break;
1110 } /* End of switch(newStream->format) */
1111
1112 /* We error out even if a single stream has unsupported size set */
1113 if (!sizeFound) {
1114 LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
1115 rotatedWidth, rotatedHeight, newStream->format,
1116 gCamCapability[mCameraId]->active_array_size.width,
1117 gCamCapability[mCameraId]->active_array_size.height);
1118 rc = -EINVAL;
1119 break;
1120 }
1121 } /* End of for each stream */
1122 return rc;
1123}
1124
1125/*==============================================================================
1126 * FUNCTION : isSupportChannelNeeded
1127 *
1128 * DESCRIPTION: Simple heuristic func to determine if support channels is needed
1129 *
1130 * PARAMETERS :
1131 * @stream_list : streams to be configured
1132 * @stream_config_info : the config info for streams to be configured
1133 *
 * RETURN     : Boolean true/false decision
1135 *
1136 *==========================================================================*/
1137bool QCamera3HardwareInterface::isSupportChannelNeeded(
1138 camera3_stream_configuration_t *streamList,
1139 cam_stream_size_info_t stream_config_info)
1140{
1141 uint32_t i;
1142 bool pprocRequested = false;
1143 /* Check for conditions where PProc pipeline does not have any streams*/
1144 for (i = 0; i < stream_config_info.num_streams; i++) {
1145 if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
1146 stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
1147 pprocRequested = true;
1148 break;
1149 }
1150 }
1151
1152 if (pprocRequested == false )
1153 return true;
1154
1155 /* Dummy stream needed if only raw or jpeg streams present */
1156 for (i = 0; i < streamList->num_streams; i++) {
1157 switch(streamList->streams[i]->format) {
1158 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1159 case HAL_PIXEL_FORMAT_RAW10:
1160 case HAL_PIXEL_FORMAT_RAW16:
1161 case HAL_PIXEL_FORMAT_BLOB:
1162 break;
1163 default:
1164 return false;
1165 }
1166 }
1167 return true;
1168}
1169
1170/*==============================================================================
1171 * FUNCTION : getSensorOutputSize
1172 *
 * DESCRIPTION: Get sensor output size based on current stream configuration
1174 *
1175 * PARAMETERS :
1176 * @sensor_dim : sensor output dimension (output)
1177 *
1178 * RETURN : int32_t type of status
1179 * NO_ERROR -- success
 *              non-zero failure code
1181 *
1182 *==========================================================================*/
1183int32_t QCamera3HardwareInterface::getSensorOutputSize(cam_dimension_t &sensor_dim)
1184{
1185 int32_t rc = NO_ERROR;
1186
1187 cam_dimension_t max_dim = {0, 0};
1188 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
1189 if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
1190 max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
1191 if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
1192 max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
1193 }
1194
1195 clear_metadata_buffer(mParameters);
1196
1197 rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
1198 max_dim);
1199 if (rc != NO_ERROR) {
1200 LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
1201 return rc;
1202 }
1203
1204 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
1205 if (rc != NO_ERROR) {
1206 LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
1207 return rc;
1208 }
1209
1210 clear_metadata_buffer(mParameters);
1211 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_RAW_DIMENSION);
1212
1213 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1214 mParameters);
1215 if (rc != NO_ERROR) {
1216 LOGE("Failed to get CAM_INTF_PARM_RAW_DIMENSION");
1217 return rc;
1218 }
1219
1220 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_RAW_DIMENSION, sensor_dim);
1221 LOGH("sensor output dimension = %d x %d", sensor_dim.width, sensor_dim.height);
1222
1223 return rc;
1224}
1225
1226/*==============================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07001227 * FUNCTION : addToPPFeatureMask
1228 *
1229 * DESCRIPTION: add additional features to pp feature mask based on
1230 * stream type and usecase
1231 *
1232 * PARAMETERS :
1233 * @stream_format : stream type for feature mask
1234 * @stream_idx : stream idx within postprocess_mask list to change
1235 *
1236 * RETURN : NULL
1237 *
1238 *==========================================================================*/
void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
        uint32_t stream_idx)
{
    char feature_mask_value[PROPERTY_VALUE_MAX];
    cam_feature_mask_t feature_mask;
    int args_converted;
    int property_len;

    /* Get feature mask from property */
#ifdef _LE_CAMERA_
    // LE builds default the property to SW TNR enabled.
    char swtnr_feature_mask_value[PROPERTY_VALUE_MAX];
    snprintf(swtnr_feature_mask_value, PROPERTY_VALUE_MAX, "%lld", CAM_QTI_FEATURE_SW_TNR);
    property_len = property_get("persist.camera.hal3.feature",
            feature_mask_value, swtnr_feature_mask_value);
#else
    property_len = property_get("persist.camera.hal3.feature",
            feature_mask_value, "0");
#endif
    // Accept either a hex ("0x...") or decimal property value.
    if ((property_len > 2) && (feature_mask_value[0] == '0') &&
            (feature_mask_value[1] == 'x')) {
        args_converted = sscanf(feature_mask_value, "0x%llx", &feature_mask);
    } else {
        args_converted = sscanf(feature_mask_value, "%lld", &feature_mask);
    }
    // Bail out without touching the stream mask if the property is malformed.
    if (1 != args_converted) {
        feature_mask = 0;
        LOGE("Wrong feature mask %s", feature_mask_value);
        return;
    }

    switch (stream_format) {
    case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
        /* Add LLVD to pp feature mask only if video hint is enabled */
        // SW TNR takes precedence over LLVD when both bits are set.
        if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
            mStreamConfigInfo.postprocess_mask[stream_idx]
                    |= CAM_QTI_FEATURE_SW_TNR;
            LOGH("Added SW TNR to pp feature mask");
        } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
            mStreamConfigInfo.postprocess_mask[stream_idx]
                    |= CAM_QCOM_FEATURE_LLVD;
            LOGH("Added LLVD SeeMore to pp feature mask");
        }
        // Staggered video HDR is added whenever the sensor advertises it,
        // independent of the persist property above.
        if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
                CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
            mStreamConfigInfo.postprocess_mask[stream_idx] |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
        }
        break;
    }
    default:
        // Other formats keep their postprocess mask unchanged.
        break;
    }
    LOGD("PP feature mask %llx",
            mStreamConfigInfo.postprocess_mask[stream_idx]);
}
1293
1294/*==============================================================================
1295 * FUNCTION : updateFpsInPreviewBuffer
1296 *
1297 * DESCRIPTION: update FPS information in preview buffer.
1298 *
1299 * PARAMETERS :
1300 * @metadata : pointer to metadata buffer
1301 * @frame_number: frame_number to look for in pending buffer list
1302 *
1303 * RETURN : None
1304 *
1305 *==========================================================================*/
1306void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
1307 uint32_t frame_number)
1308{
1309 // Mark all pending buffers for this particular request
1310 // with corresponding framerate information
1311 for (List<PendingBuffersInRequest>::iterator req =
1312 mPendingBuffersMap.mPendingBuffersInRequest.begin();
1313 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1314 for(List<PendingBufferInfo>::iterator j =
1315 req->mPendingBufferList.begin();
1316 j != req->mPendingBufferList.end(); j++) {
1317 QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
1318 if ((req->frame_number == frame_number) &&
1319 (channel->getStreamTypeMask() &
1320 (1U << CAM_STREAM_TYPE_PREVIEW))) {
1321 IF_META_AVAILABLE(cam_fps_range_t, float_range,
1322 CAM_INTF_PARM_FPS_RANGE, metadata) {
1323 typeof (MetaData_t::refreshrate) cameraFps = float_range->max_fps;
1324 struct private_handle_t *priv_handle =
1325 (struct private_handle_t *)(*(j->buffer));
1326 setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
1327 }
1328 }
1329 }
1330 }
1331}
1332
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001333/*==============================================================================
1334 * FUNCTION : updateTimeStampInPendingBuffers
1335 *
1336 * DESCRIPTION: update timestamp in display metadata for all pending buffers
1337 * of a frame number
1338 *
1339 * PARAMETERS :
1340 * @frame_number: frame_number. Timestamp will be set on pending buffers of this frame number
1341 * @timestamp : timestamp to be set
1342 *
1343 * RETURN : None
1344 *
1345 *==========================================================================*/
1346void QCamera3HardwareInterface::updateTimeStampInPendingBuffers(
1347 uint32_t frameNumber, nsecs_t timestamp)
1348{
1349 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
1350 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1351 if (req->frame_number != frameNumber)
1352 continue;
1353
1354 for (auto k = req->mPendingBufferList.begin();
1355 k != req->mPendingBufferList.end(); k++ ) {
1356 struct private_handle_t *priv_handle =
1357 (struct private_handle_t *) (*(k->buffer));
1358 setMetaData(priv_handle, SET_VT_TIMESTAMP, &timestamp);
1359 }
1360 }
1361 return;
1362}
1363
Thierry Strudel3d639192016-09-09 11:52:26 -07001364/*===========================================================================
1365 * FUNCTION : configureStreams
1366 *
1367 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1368 * and output streams.
1369 *
1370 * PARAMETERS :
1371 * @stream_list : streams to be configured
1372 *
1373 * RETURN :
1374 *
1375 *==========================================================================*/
1376int QCamera3HardwareInterface::configureStreams(
1377 camera3_stream_configuration_t *streamList)
1378{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001379 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS);
Thierry Strudel3d639192016-09-09 11:52:26 -07001380 int rc = 0;
1381
1382 // Acquire perfLock before configure streams
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001383 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001384 rc = configureStreamsPerfLocked(streamList);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001385 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001386
1387 return rc;
1388}
1389
1390/*===========================================================================
1391 * FUNCTION : configureStreamsPerfLocked
1392 *
1393 * DESCRIPTION: configureStreams while perfLock is held.
1394 *
1395 * PARAMETERS :
1396 * @stream_list : streams to be configured
1397 *
1398 * RETURN : int32_t type of status
1399 * NO_ERROR -- success
 *              non-zero failure code
1401 *==========================================================================*/
1402int QCamera3HardwareInterface::configureStreamsPerfLocked(
1403 camera3_stream_configuration_t *streamList)
1404{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001405 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS_PERF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07001406 int rc = 0;
1407
1408 // Sanity check stream_list
1409 if (streamList == NULL) {
1410 LOGE("NULL stream configuration");
1411 return BAD_VALUE;
1412 }
1413 if (streamList->streams == NULL) {
1414 LOGE("NULL stream list");
1415 return BAD_VALUE;
1416 }
1417
1418 if (streamList->num_streams < 1) {
1419 LOGE("Bad number of streams requested: %d",
1420 streamList->num_streams);
1421 return BAD_VALUE;
1422 }
1423
1424 if (streamList->num_streams >= MAX_NUM_STREAMS) {
1425 LOGE("Maximum number of streams %d exceeded: %d",
1426 MAX_NUM_STREAMS, streamList->num_streams);
1427 return BAD_VALUE;
1428 }
1429
1430 mOpMode = streamList->operation_mode;
1431 LOGD("mOpMode: %d", mOpMode);
1432
1433 /* first invalidate all the steams in the mStreamList
1434 * if they appear again, they will be validated */
1435 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1436 it != mStreamInfo.end(); it++) {
1437 QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1438 if (channel) {
1439 channel->stop();
1440 }
1441 (*it)->status = INVALID;
1442 }
1443
1444 if (mRawDumpChannel) {
1445 mRawDumpChannel->stop();
1446 delete mRawDumpChannel;
1447 mRawDumpChannel = NULL;
1448 }
1449
1450 if (mSupportChannel)
1451 mSupportChannel->stop();
1452
1453 if (mAnalysisChannel) {
1454 mAnalysisChannel->stop();
1455 }
1456 if (mMetadataChannel) {
1457 /* If content of mStreamInfo is not 0, there is metadata stream */
1458 mMetadataChannel->stop();
1459 }
1460 if (mChannelHandle) {
1461 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
1462 mChannelHandle);
1463 LOGD("stopping channel %d", mChannelHandle);
1464 }
1465
1466 pthread_mutex_lock(&mMutex);
1467
1468 // Check state
1469 switch (mState) {
1470 case INITIALIZED:
1471 case CONFIGURED:
1472 case STARTED:
1473 /* valid state */
1474 break;
1475 default:
1476 LOGE("Invalid state %d", mState);
1477 pthread_mutex_unlock(&mMutex);
1478 return -ENODEV;
1479 }
1480
1481 /* Check whether we have video stream */
1482 m_bIs4KVideo = false;
1483 m_bIsVideo = false;
1484 m_bEisSupportedSize = false;
1485 m_bTnrEnabled = false;
1486 bool isZsl = false;
1487 uint32_t videoWidth = 0U;
1488 uint32_t videoHeight = 0U;
1489 size_t rawStreamCnt = 0;
1490 size_t stallStreamCnt = 0;
1491 size_t processedStreamCnt = 0;
1492 // Number of streams on ISP encoder path
1493 size_t numStreamsOnEncoder = 0;
1494 size_t numYuv888OnEncoder = 0;
1495 bool bYuv888OverrideJpeg = false;
1496 cam_dimension_t largeYuv888Size = {0, 0};
1497 cam_dimension_t maxViewfinderSize = {0, 0};
1498 bool bJpegExceeds4K = false;
1499 bool bJpegOnEncoder = false;
1500 bool bUseCommonFeatureMask = false;
1501 cam_feature_mask_t commonFeatureMask = 0;
1502 bool bSmallJpegSize = false;
1503 uint32_t width_ratio;
1504 uint32_t height_ratio;
1505 maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1506 camera3_stream_t *inputStream = NULL;
1507 bool isJpeg = false;
1508 cam_dimension_t jpegSize = {0, 0};
Thierry Strudel9ec39c62016-12-28 11:30:05 -08001509 cam_dimension_t previewSize = {0, 0};
Thierry Strudel3d639192016-09-09 11:52:26 -07001510
1511 cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
1512
1513 /*EIS configuration*/
Thierry Strudel3d639192016-09-09 11:52:26 -07001514 bool oisSupported = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001515 uint8_t eis_prop_set;
1516 uint32_t maxEisWidth = 0;
1517 uint32_t maxEisHeight = 0;
1518
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001519 // Initialize all instant AEC related variables
1520 mInstantAEC = false;
1521 mResetInstantAEC = false;
1522 mInstantAECSettledFrameNumber = 0;
1523 mAecSkipDisplayFrameBound = 0;
1524 mInstantAecFrameIdxCount = 0;
1525
Thierry Strudel3d639192016-09-09 11:52:26 -07001526 memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1527
1528 size_t count = IS_TYPE_MAX;
1529 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1530 for (size_t i = 0; i < count; i++) {
1531 if ((gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001532 (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
1533 m_bEisSupported = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07001534 break;
1535 }
1536 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001537 count = CAM_OPT_STAB_MAX;
1538 count = MIN(gCamCapability[mCameraId]->optical_stab_modes_count, count);
1539 for (size_t i = 0; i < count; i++) {
1540 if (gCamCapability[mCameraId]->optical_stab_modes[i] == CAM_OPT_STAB_ON) {
1541 oisSupported = true;
1542 break;
1543 }
1544 }
1545
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001546 if (m_bEisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001547 maxEisWidth = MAX_EIS_WIDTH;
1548 maxEisHeight = MAX_EIS_HEIGHT;
1549 }
1550
1551 /* EIS setprop control */
1552 char eis_prop[PROPERTY_VALUE_MAX];
1553 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001554 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07001555 eis_prop_set = (uint8_t)atoi(eis_prop);
1556
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001557 m_bEisEnable = eis_prop_set && (!oisSupported && m_bEisSupported) &&
Thierry Strudel3d639192016-09-09 11:52:26 -07001558 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
1559
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001560 LOGD("m_bEisEnable: %d, eis_prop_set: %d, m_bEisSupported: %d, oisSupported:%d ",
1561 m_bEisEnable, eis_prop_set, m_bEisSupported, oisSupported);
1562
Thierry Strudel3d639192016-09-09 11:52:26 -07001563 /* stream configurations */
1564 for (size_t i = 0; i < streamList->num_streams; i++) {
1565 camera3_stream_t *newStream = streamList->streams[i];
1566 LOGI("stream[%d] type = %d, format = %d, width = %d, "
1567 "height = %d, rotation = %d, usage = 0x%x",
1568 i, newStream->stream_type, newStream->format,
1569 newStream->width, newStream->height, newStream->rotation,
1570 newStream->usage);
1571 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1572 newStream->stream_type == CAMERA3_STREAM_INPUT){
1573 isZsl = true;
1574 }
1575 if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1576 inputStream = newStream;
1577 }
1578
1579 if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
1580 isJpeg = true;
1581 jpegSize.width = newStream->width;
1582 jpegSize.height = newStream->height;
1583 if (newStream->width > VIDEO_4K_WIDTH ||
1584 newStream->height > VIDEO_4K_HEIGHT)
1585 bJpegExceeds4K = true;
1586 }
1587
1588 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1589 (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1590 m_bIsVideo = true;
1591 videoWidth = newStream->width;
1592 videoHeight = newStream->height;
1593 if ((VIDEO_4K_WIDTH <= newStream->width) &&
1594 (VIDEO_4K_HEIGHT <= newStream->height)) {
1595 m_bIs4KVideo = true;
1596 }
1597 m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
1598 (newStream->height <= maxEisHeight);
1599 }
1600 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1601 newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
1602 switch (newStream->format) {
1603 case HAL_PIXEL_FORMAT_BLOB:
1604 stallStreamCnt++;
1605 if (isOnEncoder(maxViewfinderSize, newStream->width,
1606 newStream->height)) {
1607 numStreamsOnEncoder++;
1608 bJpegOnEncoder = true;
1609 }
1610 width_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.width,
1611 newStream->width);
1612 height_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.height,
1613 newStream->height);;
1614 FATAL_IF(gCamCapability[mCameraId]->max_downscale_factor == 0,
1615 "FATAL: max_downscale_factor cannot be zero and so assert");
1616 if ( (width_ratio > gCamCapability[mCameraId]->max_downscale_factor) ||
1617 (height_ratio > gCamCapability[mCameraId]->max_downscale_factor)) {
1618 LOGH("Setting small jpeg size flag to true");
1619 bSmallJpegSize = true;
1620 }
1621 break;
1622 case HAL_PIXEL_FORMAT_RAW10:
1623 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1624 case HAL_PIXEL_FORMAT_RAW16:
1625 rawStreamCnt++;
1626 break;
1627 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1628 processedStreamCnt++;
1629 if (isOnEncoder(maxViewfinderSize, newStream->width,
1630 newStream->height)) {
1631 if (newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL &&
1632 !IS_USAGE_ZSL(newStream->usage)) {
1633 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1634 }
1635 numStreamsOnEncoder++;
1636 }
1637 break;
1638 case HAL_PIXEL_FORMAT_YCbCr_420_888:
1639 processedStreamCnt++;
1640 if (isOnEncoder(maxViewfinderSize, newStream->width,
1641 newStream->height)) {
1642 // If Yuv888 size is not greater than 4K, set feature mask
1643 // to SUPERSET so that it support concurrent request on
1644 // YUV and JPEG.
1645 if (newStream->width <= VIDEO_4K_WIDTH &&
1646 newStream->height <= VIDEO_4K_HEIGHT) {
1647 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1648 }
1649 numStreamsOnEncoder++;
1650 numYuv888OnEncoder++;
1651 largeYuv888Size.width = newStream->width;
1652 largeYuv888Size.height = newStream->height;
1653 }
1654 break;
1655 default:
1656 processedStreamCnt++;
1657 if (isOnEncoder(maxViewfinderSize, newStream->width,
1658 newStream->height)) {
1659 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1660 numStreamsOnEncoder++;
1661 }
1662 break;
1663 }
1664
1665 }
1666 }
1667
1668 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
1669 gCamCapability[mCameraId]->position == CAM_POSITION_FRONT_AUX ||
1670 !m_bIsVideo) {
1671 m_bEisEnable = false;
1672 }
1673
1674 /* Logic to enable/disable TNR based on specific config size/etc.*/
1675 if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
1676 ((videoWidth == 1920 && videoHeight == 1080) ||
1677 (videoWidth == 1280 && videoHeight == 720)) &&
1678 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
1679 m_bTnrEnabled = true;
1680
1681 /* Check if num_streams is sane */
1682 if (stallStreamCnt > MAX_STALLING_STREAMS ||
1683 rawStreamCnt > MAX_RAW_STREAMS ||
1684 processedStreamCnt > MAX_PROCESSED_STREAMS) {
1685 LOGE("Invalid stream configu: stall: %d, raw: %d, processed %d",
1686 stallStreamCnt, rawStreamCnt, processedStreamCnt);
1687 pthread_mutex_unlock(&mMutex);
1688 return -EINVAL;
1689 }
1690 /* Check whether we have zsl stream or 4k video case */
Thierry Strudel9ec39c62016-12-28 11:30:05 -08001691 if (isZsl && m_bIs4KVideo) {
1692 LOGE("Currently invalid configuration ZSL & 4K Video!");
Thierry Strudel3d639192016-09-09 11:52:26 -07001693 pthread_mutex_unlock(&mMutex);
1694 return -EINVAL;
1695 }
1696 /* Check if stream sizes are sane */
1697 if (numStreamsOnEncoder > 2) {
1698 LOGE("Number of streams on ISP encoder path exceeds limits of 2");
1699 pthread_mutex_unlock(&mMutex);
1700 return -EINVAL;
1701 } else if (1 < numStreamsOnEncoder){
1702 bUseCommonFeatureMask = true;
1703 LOGH("Multiple streams above max viewfinder size, common mask needed");
1704 }
1705
1706 /* Check if BLOB size is greater than 4k in 4k recording case */
1707 if (m_bIs4KVideo && bJpegExceeds4K) {
1708 LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
1709 pthread_mutex_unlock(&mMutex);
1710 return -EINVAL;
1711 }
1712
1713 // When JPEG and preview streams share VFE output, CPP will not apply CAC2
1714 // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
1715 // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
1716 // is not true. Otherwise testMandatoryOutputCombinations will fail with following
1717 // configurations:
1718 // {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
1719 // {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
1720 // (These two configurations will not have CAC2 enabled even in HQ modes.)
1721 if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
1722 ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
1723 __func__);
1724 pthread_mutex_unlock(&mMutex);
1725 return -EINVAL;
1726 }
1727
1728 // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
1729 // the YUV stream's size is greater or equal to the JPEG size, set common
1730 // postprocess mask to NONE, so that we can take advantage of postproc bypass.
1731 if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
1732 jpegSize.width, jpegSize.height) &&
1733 largeYuv888Size.width > jpegSize.width &&
1734 largeYuv888Size.height > jpegSize.height) {
1735 bYuv888OverrideJpeg = true;
1736 } else if (!isJpeg && numStreamsOnEncoder > 1) {
1737 commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1738 }
1739
1740 LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %llx",
1741 maxViewfinderSize.width, maxViewfinderSize.height, isZsl, bUseCommonFeatureMask,
1742 commonFeatureMask);
1743 LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
1744 numStreamsOnEncoder, processedStreamCnt, stallStreamCnt, bSmallJpegSize);
1745
1746 rc = validateStreamDimensions(streamList);
1747 if (rc == NO_ERROR) {
1748 rc = validateStreamRotations(streamList);
1749 }
1750 if (rc != NO_ERROR) {
1751 LOGE("Invalid stream configuration requested!");
1752 pthread_mutex_unlock(&mMutex);
1753 return rc;
1754 }
1755
1756 camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
1757 for (size_t i = 0; i < streamList->num_streams; i++) {
1758 camera3_stream_t *newStream = streamList->streams[i];
1759 LOGH("newStream type = %d, stream format = %d "
1760 "stream size : %d x %d, stream rotation = %d",
1761 newStream->stream_type, newStream->format,
1762 newStream->width, newStream->height, newStream->rotation);
1763 //if the stream is in the mStreamList validate it
1764 bool stream_exists = false;
1765 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
1766 it != mStreamInfo.end(); it++) {
1767 if ((*it)->stream == newStream) {
1768 QCamera3ProcessingChannel *channel =
1769 (QCamera3ProcessingChannel*)(*it)->stream->priv;
1770 stream_exists = true;
1771 if (channel)
1772 delete channel;
1773 (*it)->status = VALID;
1774 (*it)->stream->priv = NULL;
1775 (*it)->channel = NULL;
1776 }
1777 }
1778 if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
1779 //new stream
1780 stream_info_t* stream_info;
1781 stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
1782 if (!stream_info) {
1783 LOGE("Could not allocate stream info");
1784 rc = -ENOMEM;
1785 pthread_mutex_unlock(&mMutex);
1786 return rc;
1787 }
1788 stream_info->stream = newStream;
1789 stream_info->status = VALID;
1790 stream_info->channel = NULL;
1791 mStreamInfo.push_back(stream_info);
1792 }
1793 /* Covers Opaque ZSL and API1 F/W ZSL */
1794 if (IS_USAGE_ZSL(newStream->usage)
1795 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
1796 if (zslStream != NULL) {
1797 LOGE("Multiple input/reprocess streams requested!");
1798 pthread_mutex_unlock(&mMutex);
1799 return BAD_VALUE;
1800 }
1801 zslStream = newStream;
1802 }
1803 /* Covers YUV reprocess */
1804 if (inputStream != NULL) {
1805 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
1806 && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
1807 && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
1808 && inputStream->width == newStream->width
1809 && inputStream->height == newStream->height) {
1810 if (zslStream != NULL) {
1811 /* This scenario indicates multiple YUV streams with same size
1812 * as input stream have been requested, since zsl stream handle
1813 * is solely use for the purpose of overriding the size of streams
1814 * which share h/w streams we will just make a guess here as to
1815 * which of the stream is a ZSL stream, this will be refactored
1816 * once we make generic logic for streams sharing encoder output
1817 */
1818 LOGH("Warning, Multiple ip/reprocess streams requested!");
1819 }
1820 zslStream = newStream;
1821 }
1822 }
1823 }
1824
1825 /* If a zsl stream is set, we know that we have configured at least one input or
1826 bidirectional stream */
1827 if (NULL != zslStream) {
1828 mInputStreamInfo.dim.width = (int32_t)zslStream->width;
1829 mInputStreamInfo.dim.height = (int32_t)zslStream->height;
1830 mInputStreamInfo.format = zslStream->format;
1831 mInputStreamInfo.usage = zslStream->usage;
1832 LOGD("Input stream configured! %d x %d, format %d, usage %d",
1833 mInputStreamInfo.dim.width,
1834 mInputStreamInfo.dim.height,
1835 mInputStreamInfo.format, mInputStreamInfo.usage);
1836 }
1837
1838 cleanAndSortStreamInfo();
1839 if (mMetadataChannel) {
1840 delete mMetadataChannel;
1841 mMetadataChannel = NULL;
1842 }
1843 if (mSupportChannel) {
1844 delete mSupportChannel;
1845 mSupportChannel = NULL;
1846 }
1847
1848 if (mAnalysisChannel) {
1849 delete mAnalysisChannel;
1850 mAnalysisChannel = NULL;
1851 }
1852
1853 if (mDummyBatchChannel) {
1854 delete mDummyBatchChannel;
1855 mDummyBatchChannel = NULL;
1856 }
1857
1858 //Create metadata channel and initialize it
1859 cam_feature_mask_t metadataFeatureMask = CAM_QCOM_FEATURE_NONE;
1860 setPAAFSupport(metadataFeatureMask, CAM_STREAM_TYPE_METADATA,
1861 gCamCapability[mCameraId]->color_arrangement);
1862 mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
1863 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001864 setBufferErrorStatus, &padding_info, metadataFeatureMask, this);
Thierry Strudel3d639192016-09-09 11:52:26 -07001865 if (mMetadataChannel == NULL) {
1866 LOGE("failed to allocate metadata channel");
1867 rc = -ENOMEM;
1868 pthread_mutex_unlock(&mMutex);
1869 return rc;
1870 }
1871 rc = mMetadataChannel->initialize(IS_TYPE_NONE);
1872 if (rc < 0) {
1873 LOGE("metadata channel initialization failed");
1874 delete mMetadataChannel;
1875 mMetadataChannel = NULL;
1876 pthread_mutex_unlock(&mMutex);
1877 return rc;
1878 }
1879
Thierry Strudel3d639192016-09-09 11:52:26 -07001880 bool isRawStreamRequested = false;
1881 memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
1882 /* Allocate channel objects for the requested streams */
1883 for (size_t i = 0; i < streamList->num_streams; i++) {
1884 camera3_stream_t *newStream = streamList->streams[i];
1885 uint32_t stream_usage = newStream->usage;
1886 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
1887 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
1888 struct camera_info *p_info = NULL;
1889 pthread_mutex_lock(&gCamLock);
1890 p_info = get_cam_info(mCameraId, &mStreamConfigInfo.sync_type);
1891 pthread_mutex_unlock(&gCamLock);
1892 if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
1893 || IS_USAGE_ZSL(newStream->usage)) &&
1894 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
1895 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
1896 if (bUseCommonFeatureMask) {
1897 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1898 commonFeatureMask;
1899 } else {
1900 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1901 CAM_QCOM_FEATURE_NONE;
1902 }
1903
1904 } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
1905 LOGH("Input stream configured, reprocess config");
1906 } else {
1907 //for non zsl streams find out the format
1908 switch (newStream->format) {
1909 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
1910 {
1911 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1912 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1913 /* add additional features to pp feature mask */
1914 addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
1915 mStreamConfigInfo.num_streams);
1916
1917 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
1918 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
1919 CAM_STREAM_TYPE_VIDEO;
1920 if (m_bTnrEnabled && m_bTnrVideo) {
1921 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
1922 CAM_QCOM_FEATURE_CPP_TNR;
1923 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
1924 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
1925 ~CAM_QCOM_FEATURE_CDS;
1926 }
1927 } else {
1928 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
1929 CAM_STREAM_TYPE_PREVIEW;
1930 if (m_bTnrEnabled && m_bTnrPreview) {
1931 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
1932 CAM_QCOM_FEATURE_CPP_TNR;
1933 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
1934 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
1935 ~CAM_QCOM_FEATURE_CDS;
1936 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001937 if(!m_bSwTnrPreview) {
1938 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
1939 ~CAM_QTI_FEATURE_SW_TNR;
1940 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001941 padding_info.width_padding = mSurfaceStridePadding;
1942 padding_info.height_padding = CAM_PAD_TO_2;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08001943 previewSize.width = (int32_t)newStream->width;
1944 previewSize.height = (int32_t)newStream->height;
Thierry Strudel3d639192016-09-09 11:52:26 -07001945 }
1946 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
1947 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
1948 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
1949 newStream->height;
1950 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
1951 newStream->width;
1952 }
1953 }
1954 break;
1955 case HAL_PIXEL_FORMAT_YCbCr_420_888:
1956 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
1957 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
1958 if (bUseCommonFeatureMask)
1959 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1960 commonFeatureMask;
1961 else
1962 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1963 CAM_QCOM_FEATURE_NONE;
1964 } else {
1965 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1966 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1967 }
1968 break;
1969 case HAL_PIXEL_FORMAT_BLOB:
1970 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
1971 // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
1972 if ((m_bIs4KVideo && !isZsl) || (bSmallJpegSize && !isZsl)) {
1973 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1974 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1975 } else {
1976 if (bUseCommonFeatureMask &&
1977 isOnEncoder(maxViewfinderSize, newStream->width,
1978 newStream->height)) {
1979 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
1980 } else {
1981 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
1982 }
1983 }
1984 if (isZsl) {
1985 if (zslStream) {
1986 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
1987 (int32_t)zslStream->width;
1988 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
1989 (int32_t)zslStream->height;
1990 } else {
1991 LOGE("Error, No ZSL stream identified");
1992 pthread_mutex_unlock(&mMutex);
1993 return -EINVAL;
1994 }
1995 } else if (m_bIs4KVideo) {
1996 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)videoWidth;
1997 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)videoHeight;
1998 } else if (bYuv888OverrideJpeg) {
1999 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2000 (int32_t)largeYuv888Size.width;
2001 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2002 (int32_t)largeYuv888Size.height;
2003 }
2004 break;
2005 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2006 case HAL_PIXEL_FORMAT_RAW16:
2007 case HAL_PIXEL_FORMAT_RAW10:
2008 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2009 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2010 isRawStreamRequested = true;
2011 break;
2012 default:
2013 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
2014 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2015 break;
2016 }
2017 }
2018
2019 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2020 (cam_stream_type_t) mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2021 gCamCapability[mCameraId]->color_arrangement);
2022
2023 if (newStream->priv == NULL) {
2024 //New stream, construct channel
2025 switch (newStream->stream_type) {
2026 case CAMERA3_STREAM_INPUT:
2027 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
2028 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
2029 break;
2030 case CAMERA3_STREAM_BIDIRECTIONAL:
2031 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
2032 GRALLOC_USAGE_HW_CAMERA_WRITE;
2033 break;
2034 case CAMERA3_STREAM_OUTPUT:
2035 /* For video encoding stream, set read/write rarely
2036 * flag so that they may be set to un-cached */
2037 if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
2038 newStream->usage |=
2039 (GRALLOC_USAGE_SW_READ_RARELY |
2040 GRALLOC_USAGE_SW_WRITE_RARELY |
2041 GRALLOC_USAGE_HW_CAMERA_WRITE);
2042 else if (IS_USAGE_ZSL(newStream->usage))
2043 {
2044 LOGD("ZSL usage flag skipping");
2045 }
2046 else if (newStream == zslStream
2047 || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
2048 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
2049 } else
2050 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
2051 break;
2052 default:
2053 LOGE("Invalid stream_type %d", newStream->stream_type);
2054 break;
2055 }
2056
2057 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
2058 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
2059 QCamera3ProcessingChannel *channel = NULL;
2060 switch (newStream->format) {
2061 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2062 if ((newStream->usage &
2063 private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
2064 (streamList->operation_mode ==
2065 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2066 ) {
2067 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2068 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002069 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002070 this,
2071 newStream,
2072 (cam_stream_type_t)
2073 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2074 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2075 mMetadataChannel,
2076 0); //heap buffers are not required for HFR video channel
2077 if (channel == NULL) {
2078 LOGE("allocation of channel failed");
2079 pthread_mutex_unlock(&mMutex);
2080 return -ENOMEM;
2081 }
2082 //channel->getNumBuffers() will return 0 here so use
2083 //MAX_INFLIGH_HFR_REQUESTS
2084 newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
2085 newStream->priv = channel;
2086 LOGI("num video buffers in HFR mode: %d",
2087 MAX_INFLIGHT_HFR_REQUESTS);
2088 } else {
2089 /* Copy stream contents in HFR preview only case to create
2090 * dummy batch channel so that sensor streaming is in
2091 * HFR mode */
2092 if (!m_bIsVideo && (streamList->operation_mode ==
2093 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
2094 mDummyBatchStream = *newStream;
2095 }
2096 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2097 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002098 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002099 this,
2100 newStream,
2101 (cam_stream_type_t)
2102 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2103 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2104 mMetadataChannel,
2105 MAX_INFLIGHT_REQUESTS);
2106 if (channel == NULL) {
2107 LOGE("allocation of channel failed");
2108 pthread_mutex_unlock(&mMutex);
2109 return -ENOMEM;
2110 }
2111 newStream->max_buffers = channel->getNumBuffers();
2112 newStream->priv = channel;
2113 }
2114 break;
2115 case HAL_PIXEL_FORMAT_YCbCr_420_888: {
2116 channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
2117 mChannelHandle,
2118 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002119 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002120 this,
2121 newStream,
2122 (cam_stream_type_t)
2123 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2124 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2125 mMetadataChannel);
2126 if (channel == NULL) {
2127 LOGE("allocation of YUV channel failed");
2128 pthread_mutex_unlock(&mMutex);
2129 return -ENOMEM;
2130 }
2131 newStream->max_buffers = channel->getNumBuffers();
2132 newStream->priv = channel;
2133 break;
2134 }
2135 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2136 case HAL_PIXEL_FORMAT_RAW16:
2137 case HAL_PIXEL_FORMAT_RAW10:
2138 mRawChannel = new QCamera3RawChannel(
2139 mCameraHandle->camera_handle, mChannelHandle,
2140 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002141 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002142 this, newStream,
2143 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2144 mMetadataChannel,
2145 (newStream->format == HAL_PIXEL_FORMAT_RAW16));
2146 if (mRawChannel == NULL) {
2147 LOGE("allocation of raw channel failed");
2148 pthread_mutex_unlock(&mMutex);
2149 return -ENOMEM;
2150 }
2151 newStream->max_buffers = mRawChannel->getNumBuffers();
2152 newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
2153 break;
2154 case HAL_PIXEL_FORMAT_BLOB:
2155 // Max live snapshot inflight buffer is 1. This is to mitigate
2156 // frame drop issues for video snapshot. The more buffers being
2157 // allocated, the more frame drops there are.
2158 mPictureChannel = new QCamera3PicChannel(
2159 mCameraHandle->camera_handle, mChannelHandle,
2160 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002161 setBufferErrorStatus, &padding_info, this, newStream,
Thierry Strudel3d639192016-09-09 11:52:26 -07002162 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2163 m_bIs4KVideo, isZsl, mMetadataChannel,
2164 (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
2165 if (mPictureChannel == NULL) {
2166 LOGE("allocation of channel failed");
2167 pthread_mutex_unlock(&mMutex);
2168 return -ENOMEM;
2169 }
2170 newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
2171 newStream->max_buffers = mPictureChannel->getNumBuffers();
2172 mPictureChannel->overrideYuvSize(
2173 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
2174 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
2175 break;
2176
2177 default:
2178 LOGE("not a supported format 0x%x", newStream->format);
2179 break;
2180 }
2181 } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
2182 newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
2183 } else {
2184 LOGE("Error, Unknown stream type");
2185 pthread_mutex_unlock(&mMutex);
2186 return -EINVAL;
2187 }
2188
2189 QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
2190 if (channel != NULL && channel->isUBWCEnabled()) {
2191 cam_format_t fmt = channel->getStreamDefaultFormat(
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07002192 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2193 newStream->width, newStream->height);
Thierry Strudel3d639192016-09-09 11:52:26 -07002194 if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
2195 newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
2196 }
2197 }
2198
2199 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2200 it != mStreamInfo.end(); it++) {
2201 if ((*it)->stream == newStream) {
2202 (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
2203 break;
2204 }
2205 }
2206 } else {
2207 // Channel already exists for this stream
2208 // Do nothing for now
2209 }
2210 padding_info = gCamCapability[mCameraId]->padding_info;
2211
2212 /* Do not add entries for input stream in metastream info
2213 * since there is no real stream associated with it
2214 */
2215 if (newStream->stream_type != CAMERA3_STREAM_INPUT)
2216 mStreamConfigInfo.num_streams++;
2217 }
2218
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002219 // Create analysis stream all the time, even when h/w support is not available
2220 {
2221 cam_feature_mask_t analysisFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2222 setPAAFSupport(analysisFeatureMask, CAM_STREAM_TYPE_ANALYSIS,
2223 gCamCapability[mCameraId]->color_arrangement);
2224 cam_analysis_info_t analysisInfo;
2225 int32_t ret = NO_ERROR;
2226 ret = mCommon.getAnalysisInfo(
2227 FALSE,
2228 analysisFeatureMask,
2229 &analysisInfo);
2230 if (ret == NO_ERROR) {
2231 cam_dimension_t analysisDim;
2232 analysisDim = mCommon.getMatchingDimension(previewSize,
2233 analysisInfo.analysis_recommended_res);
2234
2235 mAnalysisChannel = new QCamera3SupportChannel(
2236 mCameraHandle->camera_handle,
2237 mChannelHandle,
2238 mCameraHandle->ops,
2239 &analysisInfo.analysis_padding_info,
2240 analysisFeatureMask,
2241 CAM_STREAM_TYPE_ANALYSIS,
2242 &analysisDim,
2243 (analysisInfo.analysis_format
2244 == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
2245 : CAM_FORMAT_YUV_420_NV21),
2246 analysisInfo.hw_analysis_supported,
2247 gCamCapability[mCameraId]->color_arrangement,
2248 this,
2249 0); // force buffer count to 0
2250 } else {
2251 LOGW("getAnalysisInfo failed, ret = %d", ret);
2252 }
2253 if (!mAnalysisChannel) {
2254 LOGW("Analysis channel cannot be created");
2255 }
2256 }
2257
Thierry Strudel3d639192016-09-09 11:52:26 -07002258 //RAW DUMP channel
2259 if (mEnableRawDump && isRawStreamRequested == false){
2260 cam_dimension_t rawDumpSize;
2261 rawDumpSize = getMaxRawSize(mCameraId);
2262 cam_feature_mask_t rawDumpFeatureMask = CAM_QCOM_FEATURE_NONE;
2263 setPAAFSupport(rawDumpFeatureMask,
2264 CAM_STREAM_TYPE_RAW,
2265 gCamCapability[mCameraId]->color_arrangement);
2266 mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
2267 mChannelHandle,
2268 mCameraHandle->ops,
2269 rawDumpSize,
2270 &padding_info,
2271 this, rawDumpFeatureMask);
2272 if (!mRawDumpChannel) {
2273 LOGE("Raw Dump channel cannot be created");
2274 pthread_mutex_unlock(&mMutex);
2275 return -ENOMEM;
2276 }
2277 }
2278
2279
2280 if (mAnalysisChannel) {
2281 cam_analysis_info_t analysisInfo;
2282 memset(&analysisInfo, 0, sizeof(cam_analysis_info_t));
2283 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2284 CAM_STREAM_TYPE_ANALYSIS;
2285 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2286 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2287 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2288 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2289 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002290 rc = mCommon.getAnalysisInfo(FALSE,
Thierry Strudel3d639192016-09-09 11:52:26 -07002291 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2292 &analysisInfo);
2293 if (rc != NO_ERROR) {
2294 LOGE("getAnalysisInfo failed, ret = %d", rc);
2295 pthread_mutex_unlock(&mMutex);
2296 return rc;
2297 }
2298 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002299 mCommon.getMatchingDimension(previewSize,
2300 analysisInfo.analysis_recommended_res);
Thierry Strudel3d639192016-09-09 11:52:26 -07002301 mStreamConfigInfo.num_streams++;
2302 }
2303
2304 if (isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
2305 cam_analysis_info_t supportInfo;
2306 memset(&supportInfo, 0, sizeof(cam_analysis_info_t));
2307 cam_feature_mask_t callbackFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2308 setPAAFSupport(callbackFeatureMask,
2309 CAM_STREAM_TYPE_CALLBACK,
2310 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002311 int32_t ret = NO_ERROR;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002312 ret = mCommon.getAnalysisInfo(FALSE, callbackFeatureMask, &supportInfo);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002313 if (ret != NO_ERROR) {
2314 /* Ignore the error for Mono camera
2315 * because the PAAF bit mask is only set
2316 * for CAM_STREAM_TYPE_ANALYSIS stream type
2317 */
2318 if (gCamCapability[mCameraId]->color_arrangement != CAM_FILTER_ARRANGEMENT_Y) {
2319 LOGW("getAnalysisInfo failed, ret = %d", ret);
2320 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002321 }
2322 mSupportChannel = new QCamera3SupportChannel(
2323 mCameraHandle->camera_handle,
2324 mChannelHandle,
2325 mCameraHandle->ops,
2326 &gCamCapability[mCameraId]->padding_info,
2327 callbackFeatureMask,
2328 CAM_STREAM_TYPE_CALLBACK,
2329 &QCamera3SupportChannel::kDim,
2330 CAM_FORMAT_YUV_420_NV21,
2331 supportInfo.hw_analysis_supported,
2332 gCamCapability[mCameraId]->color_arrangement,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002333 this, 0);
Thierry Strudel3d639192016-09-09 11:52:26 -07002334 if (!mSupportChannel) {
2335 LOGE("dummy channel cannot be created");
2336 pthread_mutex_unlock(&mMutex);
2337 return -ENOMEM;
2338 }
2339 }
2340
2341 if (mSupportChannel) {
2342 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2343 QCamera3SupportChannel::kDim;
2344 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2345 CAM_STREAM_TYPE_CALLBACK;
2346 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2347 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2348 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2349 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2350 gCamCapability[mCameraId]->color_arrangement);
2351 mStreamConfigInfo.num_streams++;
2352 }
2353
2354 if (mRawDumpChannel) {
2355 cam_dimension_t rawSize;
2356 rawSize = getMaxRawSize(mCameraId);
2357 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2358 rawSize;
2359 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2360 CAM_STREAM_TYPE_RAW;
2361 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2362 CAM_QCOM_FEATURE_NONE;
2363 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2364 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2365 gCamCapability[mCameraId]->color_arrangement);
2366 mStreamConfigInfo.num_streams++;
2367 }
2368 /* In HFR mode, if video stream is not added, create a dummy channel so that
2369 * ISP can create a batch mode even for preview only case. This channel is
2370 * never 'start'ed (no stream-on), it is only 'initialized' */
2371 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2372 !m_bIsVideo) {
2373 cam_feature_mask_t dummyFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2374 setPAAFSupport(dummyFeatureMask,
2375 CAM_STREAM_TYPE_VIDEO,
2376 gCamCapability[mCameraId]->color_arrangement);
2377 mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2378 mChannelHandle,
2379 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002380 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002381 this,
2382 &mDummyBatchStream,
2383 CAM_STREAM_TYPE_VIDEO,
2384 dummyFeatureMask,
2385 mMetadataChannel);
2386 if (NULL == mDummyBatchChannel) {
2387 LOGE("creation of mDummyBatchChannel failed."
2388 "Preview will use non-hfr sensor mode ");
2389 }
2390 }
2391 if (mDummyBatchChannel) {
2392 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2393 mDummyBatchStream.width;
2394 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2395 mDummyBatchStream.height;
2396 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2397 CAM_STREAM_TYPE_VIDEO;
2398 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2399 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2400 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2401 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2402 gCamCapability[mCameraId]->color_arrangement);
2403 mStreamConfigInfo.num_streams++;
2404 }
2405
2406 mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
2407 mStreamConfigInfo.buffer_info.max_buffers =
2408 m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
2409
2410 /* Initialize mPendingRequestInfo and mPendingBuffersMap */
2411 for (pendingRequestIterator i = mPendingRequestsList.begin();
2412 i != mPendingRequestsList.end();) {
2413 i = erasePendingRequest(i);
2414 }
2415 mPendingFrameDropList.clear();
2416 // Initialize/Reset the pending buffers list
2417 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
2418 req.mPendingBufferList.clear();
2419 }
2420 mPendingBuffersMap.mPendingBuffersInRequest.clear();
2421
2422 mPendingReprocessResultList.clear();
2423
2424 mCurJpegMeta.clear();
2425 //Get min frame duration for this streams configuration
2426 deriveMinFrameDuration();
2427
2428 // Update state
2429 mState = CONFIGURED;
2430
2431 pthread_mutex_unlock(&mMutex);
2432
2433 return rc;
2434}
2435
2436/*===========================================================================
2437 * FUNCTION : validateCaptureRequest
2438 *
2439 * DESCRIPTION: validate a capture request from camera service
2440 *
2441 * PARAMETERS :
2442 * @request : request from framework to process
2443 *
2444 * RETURN :
2445 *
2446 *==========================================================================*/
2447int QCamera3HardwareInterface::validateCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002448 camera3_capture_request_t *request,
2449 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07002450{
2451 ssize_t idx = 0;
2452 const camera3_stream_buffer_t *b;
2453 CameraMetadata meta;
2454
2455 /* Sanity check the request */
2456 if (request == NULL) {
2457 LOGE("NULL capture request");
2458 return BAD_VALUE;
2459 }
2460
2461 if ((request->settings == NULL) && (mState == CONFIGURED)) {
2462 /*settings cannot be null for the first request*/
2463 return BAD_VALUE;
2464 }
2465
2466 uint32_t frameNumber = request->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002467 if ((request->num_output_buffers < 1 || request->output_buffers == NULL)
2468 && (internallyRequestedStreams.size() == 0)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002469 LOGE("Request %d: No output buffers provided!",
2470 __FUNCTION__, frameNumber);
2471 return BAD_VALUE;
2472 }
2473 if (request->num_output_buffers >= MAX_NUM_STREAMS) {
2474 LOGE("Number of buffers %d equals or is greater than maximum number of streams!",
2475 request->num_output_buffers, MAX_NUM_STREAMS);
2476 return BAD_VALUE;
2477 }
2478 if (request->input_buffer != NULL) {
2479 b = request->input_buffer;
2480 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2481 LOGE("Request %d: Buffer %ld: Status not OK!",
2482 frameNumber, (long)idx);
2483 return BAD_VALUE;
2484 }
2485 if (b->release_fence != -1) {
2486 LOGE("Request %d: Buffer %ld: Has a release fence!",
2487 frameNumber, (long)idx);
2488 return BAD_VALUE;
2489 }
2490 if (b->buffer == NULL) {
2491 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
2492 frameNumber, (long)idx);
2493 return BAD_VALUE;
2494 }
2495 }
2496
2497 // Validate all buffers
2498 b = request->output_buffers;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002499 while (idx < (ssize_t)request->num_output_buffers) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002500 QCamera3ProcessingChannel *channel =
2501 static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
2502 if (channel == NULL) {
2503 LOGE("Request %d: Buffer %ld: Unconfigured stream!",
2504 frameNumber, (long)idx);
2505 return BAD_VALUE;
2506 }
2507 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2508 LOGE("Request %d: Buffer %ld: Status not OK!",
2509 frameNumber, (long)idx);
2510 return BAD_VALUE;
2511 }
2512 if (b->release_fence != -1) {
2513 LOGE("Request %d: Buffer %ld: Has a release fence!",
2514 frameNumber, (long)idx);
2515 return BAD_VALUE;
2516 }
2517 if (b->buffer == NULL) {
2518 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
2519 frameNumber, (long)idx);
2520 return BAD_VALUE;
2521 }
2522 if (*(b->buffer) == NULL) {
2523 LOGE("Request %d: Buffer %ld: NULL private handle!",
2524 frameNumber, (long)idx);
2525 return BAD_VALUE;
2526 }
2527 idx++;
2528 b = request->output_buffers + idx;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002529 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002530 return NO_ERROR;
2531}
2532
2533/*===========================================================================
2534 * FUNCTION : deriveMinFrameDuration
2535 *
 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
2537 * on currently configured streams.
2538 *
2539 * PARAMETERS : NONE
2540 *
2541 * RETURN : NONE
2542 *
2543 *==========================================================================*/
2544void QCamera3HardwareInterface::deriveMinFrameDuration()
2545{
2546 int32_t maxJpegDim, maxProcessedDim, maxRawDim;
2547
2548 maxJpegDim = 0;
2549 maxProcessedDim = 0;
2550 maxRawDim = 0;
2551
2552 // Figure out maximum jpeg, processed, and raw dimensions
2553 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
2554 it != mStreamInfo.end(); it++) {
2555
2556 // Input stream doesn't have valid stream_type
2557 if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
2558 continue;
2559
2560 int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
2561 if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
2562 if (dimension > maxJpegDim)
2563 maxJpegDim = dimension;
2564 } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
2565 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
2566 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
2567 if (dimension > maxRawDim)
2568 maxRawDim = dimension;
2569 } else {
2570 if (dimension > maxProcessedDim)
2571 maxProcessedDim = dimension;
2572 }
2573 }
2574
2575 size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
2576 MAX_SIZES_CNT);
2577
2578 //Assume all jpeg dimensions are in processed dimensions.
2579 if (maxJpegDim > maxProcessedDim)
2580 maxProcessedDim = maxJpegDim;
2581 //Find the smallest raw dimension that is greater or equal to jpeg dimension
2582 if (maxProcessedDim > maxRawDim) {
2583 maxRawDim = INT32_MAX;
2584
2585 for (size_t i = 0; i < count; i++) {
2586 int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
2587 gCamCapability[mCameraId]->raw_dim[i].height;
2588 if (dimension >= maxProcessedDim && dimension < maxRawDim)
2589 maxRawDim = dimension;
2590 }
2591 }
2592
2593 //Find minimum durations for processed, jpeg, and raw
2594 for (size_t i = 0; i < count; i++) {
2595 if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
2596 gCamCapability[mCameraId]->raw_dim[i].height) {
2597 mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
2598 break;
2599 }
2600 }
2601 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
2602 for (size_t i = 0; i < count; i++) {
2603 if (maxProcessedDim ==
2604 gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
2605 gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
2606 mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
2607 mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
2608 break;
2609 }
2610 }
2611}
2612
2613/*===========================================================================
2614 * FUNCTION : getMinFrameDuration
2615 *
 * DESCRIPTION: get minimum frame duration based on the current maximum frame
 *              durations and current request configuration.
 *
 * PARAMETERS : @request: request sent by the frameworks
 *
 * RETURN     : min frame duration for a particular request
2622 *
2623 *==========================================================================*/
2624int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
2625{
2626 bool hasJpegStream = false;
2627 bool hasRawStream = false;
2628 for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
2629 const camera3_stream_t *stream = request->output_buffers[i].stream;
2630 if (stream->format == HAL_PIXEL_FORMAT_BLOB)
2631 hasJpegStream = true;
2632 else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
2633 stream->format == HAL_PIXEL_FORMAT_RAW10 ||
2634 stream->format == HAL_PIXEL_FORMAT_RAW16)
2635 hasRawStream = true;
2636 }
2637
2638 if (!hasJpegStream)
2639 return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
2640 else
2641 return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
2642}
2643
2644/*===========================================================================
2645 * FUNCTION : handleBuffersDuringFlushLock
2646 *
2647 * DESCRIPTION: Account for buffers returned from back-end during flush
2648 * This function is executed while mMutex is held by the caller.
2649 *
2650 * PARAMETERS :
2651 * @buffer: image buffer for the callback
2652 *
2653 * RETURN :
2654 *==========================================================================*/
2655void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
2656{
2657 bool buffer_found = false;
2658 for (List<PendingBuffersInRequest>::iterator req =
2659 mPendingBuffersMap.mPendingBuffersInRequest.begin();
2660 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
2661 for (List<PendingBufferInfo>::iterator i =
2662 req->mPendingBufferList.begin();
2663 i != req->mPendingBufferList.end(); i++) {
2664 if (i->buffer == buffer->buffer) {
2665 mPendingBuffersMap.numPendingBufsAtFlush--;
2666 LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
2667 buffer->buffer, req->frame_number,
2668 mPendingBuffersMap.numPendingBufsAtFlush);
2669 buffer_found = true;
2670 break;
2671 }
2672 }
2673 if (buffer_found) {
2674 break;
2675 }
2676 }
2677 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
2678 //signal the flush()
2679 LOGD("All buffers returned to HAL. Continue flush");
2680 pthread_cond_signal(&mBuffersCond);
2681 }
2682}
2683
2684
2685/*===========================================================================
2686 * FUNCTION : handlePendingReprocResults
2687 *
2688 * DESCRIPTION: check and notify on any pending reprocess results
2689 *
2690 * PARAMETERS :
2691 * @frame_number : Pending request frame number
2692 *
2693 * RETURN : int32_t type of status
2694 * NO_ERROR -- success
2695 * none-zero failure code
2696 *==========================================================================*/
int32_t QCamera3HardwareInterface::handlePendingReprocResults(uint32_t frame_number)
{
    // Search the delayed-reprocess list for a result whose delivery was
    // deferred until this frame number became ready.
    for (List<PendingReprocessResult>::iterator j = mPendingReprocessResultList.begin();
            j != mPendingReprocessResultList.end(); j++) {
        if (j->frame_number == frame_number) {
            // Deliver the notify message that was buffered with the result.
            orchestrateNotify(&j->notify_msg);

            LOGD("Delayed reprocess notify %d",
                    frame_number);

            // Find the matching entry in the pending-requests list so the
            // capture result can be assembled from both records.
            for (pendingRequestIterator k = mPendingRequestsList.begin();
                    k != mPendingRequestsList.end(); k++) {

                if (k->frame_number == j->frame_number) {
                    LOGD("Found reprocess frame number %d in pending reprocess List "
                            "Take it out!!",
                            k->frame_number);

                    // Assemble the capture result: the output buffer comes
                    // from the delayed reprocess record (j), while the input
                    // buffer and settings come from the pending request (k).
                    camera3_capture_result result;
                    memset(&result, 0, sizeof(camera3_capture_result));
                    result.frame_number = frame_number;
                    result.num_output_buffers = 1;
                    result.output_buffers = &j->buffer;
                    result.input_buffer = k->input_buffer;
                    result.result = k->settings;
                    // Reprocess results are always complete (final partial).
                    result.partial_result = PARTIAL_RESULT_COUNT;
                    orchestrateResult(&result);

                    // Remove the satisfied request; iterator k is invalid
                    // after this, hence the immediate break.
                    erasePendingRequest(k);
                    break;
                }
            }
            // Remove the delivered reprocess record. Note: j is referenced
            // by result.output_buffers above, so erase must happen only
            // after orchestrateResult() has returned.
            mPendingReprocessResultList.erase(j);
            break;
        }
    }
    return NO_ERROR;
}
2735
2736/*===========================================================================
2737 * FUNCTION : handleBatchMetadata
2738 *
2739 * DESCRIPTION: Handles metadata buffer callback in batch mode
2740 *
2741 * PARAMETERS : @metadata_buf: metadata buffer
2742 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
2743 * the meta buf in this method
2744 *
2745 * RETURN :
2746 *
2747 *==========================================================================*/
void QCamera3HardwareInterface::handleBatchMetadata(
        mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BATCH_METADATA);

    if (NULL == metadata_buf) {
        LOGE("metadata_buf is NULL");
        return;
    }
    /* In batch mode, the metadata will contain the frame number and timestamp
     * of the last frame in the batch. Eg: a batch containing buffers from
     * request 5,6,7 and 8 will have frame number and timestamp corresponding
     * to 8. The earlier frames' numbers and timestamps are interpolated below.
     * multiple process_capture_requests => 1 set_param => 1 handleBatchMetadata
     * => multiple process_capture_results */
    metadata_buffer_t *metadata =
            (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
    int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
    uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
    uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
    uint32_t frame_number = 0, urgent_frame_number = 0;
    int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
    bool invalid_metadata = false;
    size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
    size_t loopCount = 1;

    // Pull the batch-terminating frame numbers / timestamp out of the
    // vendor metadata buffer; any missing field marks the whole buffer
    // invalid (but it is still forwarded once, see loop below).
    int32_t *p_frame_number_valid =
            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
    uint32_t *p_frame_number =
            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
    int64_t *p_capture_time =
            POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
    int32_t *p_urgent_frame_number_valid =
            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
    uint32_t *p_urgent_frame_number =
            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);

    if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
            (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
            (NULL == p_urgent_frame_number)) {
        LOGE("Invalid metadata");
        invalid_metadata = true;
    } else {
        frame_number_valid = *p_frame_number_valid;
        last_frame_number = *p_frame_number;
        last_frame_capture_time = *p_capture_time;
        urgent_frame_number_valid = *p_urgent_frame_number_valid;
        last_urgent_frame_number = *p_urgent_frame_number;
    }

    /* In batchmode, when no video buffers are requested, set_parms are sent
     * for every capture_request. The difference between consecutive urgent
     * frame numbers and frame numbers should be used to interpolate the
     * corresponding frame numbers and time stamps */
    pthread_mutex_lock(&mMutex);
    if (urgent_frame_number_valid) {
        // Map the batch's last urgent frame number back to the first frame
        // of the batch via mPendingBatchMap.
        ssize_t idx = mPendingBatchMap.indexOfKey(last_urgent_frame_number);
        if(idx < 0) {
            LOGE("Invalid urgent frame number received: %d. Irrecoverable error",
                last_urgent_frame_number);
            mState = ERROR;
            pthread_mutex_unlock(&mMutex);
            return;
        }
        first_urgent_frame_number = mPendingBatchMap.valueAt(idx);
        urgentFrameNumDiff = last_urgent_frame_number + 1 -
                first_urgent_frame_number;

        LOGD("urgent_frm: valid: %d frm_num: %d - %d",
                 urgent_frame_number_valid,
                first_urgent_frame_number, last_urgent_frame_number);
    }

    if (frame_number_valid) {
        // Same mapping for the regular frame number; the batch entry is
        // consumed (removeItem) once resolved.
        ssize_t idx = mPendingBatchMap.indexOfKey(last_frame_number);
        if(idx < 0) {
            LOGE("Invalid frame number received: %d. Irrecoverable error",
                last_frame_number);
            mState = ERROR;
            pthread_mutex_unlock(&mMutex);
            return;
        }
        first_frame_number = mPendingBatchMap.valueAt(idx);
        frameNumDiff = last_frame_number + 1 -
                first_frame_number;
        mPendingBatchMap.removeItem(last_frame_number);

        LOGD("frm: valid: %d frm_num: %d - %d",
                 frame_number_valid,
                first_frame_number, last_frame_number);

    }
    pthread_mutex_unlock(&mMutex);

    if (urgent_frame_number_valid || frame_number_valid) {
        // One iteration per interpolated frame; batches larger than
        // MAX_HFR_BATCH_SIZE are unexpected and only logged.
        loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
        if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
            LOGE("urgentFrameNumDiff: %d urgentFrameNum: %d",
                    urgentFrameNumDiff, last_urgent_frame_number);
        if (frameNumDiff > MAX_HFR_BATCH_SIZE)
            LOGE("frameNumDiff: %d frameNum: %d",
                    frameNumDiff, last_frame_number);
    }

    for (size_t i = 0; i < loopCount; i++) {
        /* handleMetadataWithLock is called even for invalid_metadata for
         * pipeline depth calculation */
        if (!invalid_metadata) {
            /* Infer frame number. Batch metadata contains frame number of the
             * last frame */
            if (urgent_frame_number_valid) {
                if (i < urgentFrameNumDiff) {
                    urgent_frame_number =
                            first_urgent_frame_number + i;
                    LOGD("inferred urgent frame_number: %d",
                            urgent_frame_number);
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                            CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
                } else {
                    /* This is to handle when urgentFrameNumDiff < frameNumDiff */
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                            CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
                }
            }

            /* Infer frame number. Batch metadata contains frame number of the
             * last frame */
            if (frame_number_valid) {
                if (i < frameNumDiff) {
                    frame_number = first_frame_number + i;
                    LOGD("inferred frame_number: %d", frame_number);
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                            CAM_INTF_META_FRAME_NUMBER, frame_number);
                } else {
                    /* This is to handle when urgentFrameNumDiff > frameNumDiff */
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                            CAM_INTF_META_FRAME_NUMBER_VALID, 0);
                }
            }

            if (last_frame_capture_time) {
                // Infer per-frame timestamps: space them evenly across the
                // batch at the HFR video frame rate, ending at the reported
                // (last-frame) capture time.
                first_frame_capture_time = last_frame_capture_time -
                        (((loopCount - 1) * NSEC_PER_SEC) / (double) mHFRVideoFps);
                capture_time =
                        first_frame_capture_time + (i * NSEC_PER_SEC / (double) mHFRVideoFps);
                ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                        CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
                LOGD("batch capture_time: %lld, capture_time: %lld",
                        last_frame_capture_time, capture_time);
            }
        }
        // Forward one (possibly re-stamped) metadata per batch entry. The
        // buffer is never freed inside the loop: buf-done happens once below.
        pthread_mutex_lock(&mMutex);
        handleMetadataWithLock(metadata_buf,
                false /* free_and_bufdone_meta_buf */,
                (i == 0) /* first metadata in the batch metadata */);
        pthread_mutex_unlock(&mMutex);
    }

    /* BufDone metadata buffer */
    if (free_and_bufdone_meta_buf) {
        mMetadataChannel->bufDone(metadata_buf);
        free(metadata_buf);
    }
}
2912
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002913void QCamera3HardwareInterface::notifyError(uint32_t frameNumber,
2914 camera3_error_msg_code_t errorCode)
2915{
2916 camera3_notify_msg_t notify_msg;
2917 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
2918 notify_msg.type = CAMERA3_MSG_ERROR;
2919 notify_msg.message.error.error_code = errorCode;
2920 notify_msg.message.error.error_stream = NULL;
2921 notify_msg.message.error.frame_number = frameNumber;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002922 orchestrateNotify(&notify_msg);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002923
2924 return;
2925}
Thierry Strudel3d639192016-09-09 11:52:26 -07002926/*===========================================================================
2927 * FUNCTION : handleMetadataWithLock
2928 *
2929 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
2930 *
2931 * PARAMETERS : @metadata_buf: metadata buffer
2932 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
2933 * the meta buf in this method
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002934 * @firstMetadataInBatch: Boolean to indicate whether this is the
2935 * first metadata in a batch. Valid only for batch mode
Thierry Strudel3d639192016-09-09 11:52:26 -07002936 *
2937 * RETURN :
2938 *
2939 *==========================================================================*/
2940void QCamera3HardwareInterface::handleMetadataWithLock(
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002941 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
2942 bool firstMetadataInBatch)
Thierry Strudel3d639192016-09-09 11:52:26 -07002943{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002944 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_METADATA_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07002945 if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
2946 //during flush do not send metadata from this thread
2947 LOGD("not sending metadata during flush or when mState is error");
2948 if (free_and_bufdone_meta_buf) {
2949 mMetadataChannel->bufDone(metadata_buf);
2950 free(metadata_buf);
2951 }
2952 return;
2953 }
2954
2955 //not in flush
2956 metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
2957 int32_t frame_number_valid, urgent_frame_number_valid;
2958 uint32_t frame_number, urgent_frame_number;
2959 int64_t capture_time;
2960 nsecs_t currentSysTime;
2961
2962 int32_t *p_frame_number_valid =
2963 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
2964 uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
2965 int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
2966 int32_t *p_urgent_frame_number_valid =
2967 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
2968 uint32_t *p_urgent_frame_number =
2969 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
2970 IF_META_AVAILABLE(cam_stream_ID_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
2971 metadata) {
2972 LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
2973 *p_frame_number_valid, *p_frame_number);
2974 }
2975
2976 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
2977 (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
2978 LOGE("Invalid metadata");
2979 if (free_and_bufdone_meta_buf) {
2980 mMetadataChannel->bufDone(metadata_buf);
2981 free(metadata_buf);
2982 }
2983 goto done_metadata;
2984 }
2985 frame_number_valid = *p_frame_number_valid;
2986 frame_number = *p_frame_number;
2987 capture_time = *p_capture_time;
2988 urgent_frame_number_valid = *p_urgent_frame_number_valid;
2989 urgent_frame_number = *p_urgent_frame_number;
2990 currentSysTime = systemTime(CLOCK_MONOTONIC);
2991
2992 // Detect if buffers from any requests are overdue
2993 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
2994 if ( (currentSysTime - req.timestamp) >
2995 s2ns(MISSING_REQUEST_BUF_TIMEOUT) ) {
2996 for (auto &missed : req.mPendingBufferList) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002997 assert(missed.stream->priv);
2998 if (missed.stream->priv) {
2999 QCamera3Channel *ch = (QCamera3Channel *)(missed.stream->priv);
3000 assert(ch->mStreams[0]);
3001 if (ch->mStreams[0]) {
3002 LOGE("Cancel missing frame = %d, buffer = %p,"
3003 "stream type = %d, stream format = %d",
3004 req.frame_number, missed.buffer,
3005 ch->mStreams[0]->getMyType(), missed.stream->format);
3006 ch->timeoutFrame(req.frame_number);
3007 }
3008 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003009 }
3010 }
3011 }
3012 //Partial result on process_capture_result for timestamp
3013 if (urgent_frame_number_valid) {
3014 LOGD("valid urgent frame_number = %u, capture_time = %lld",
3015 urgent_frame_number, capture_time);
3016
3017 //Recieved an urgent Frame Number, handle it
3018 //using partial results
3019 for (pendingRequestIterator i =
3020 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3021 LOGD("Iterator Frame = %d urgent frame = %d",
3022 i->frame_number, urgent_frame_number);
3023
3024 if ((!i->input_buffer) && (i->frame_number < urgent_frame_number) &&
3025 (i->partial_result_cnt == 0)) {
3026 LOGE("Error: HAL missed urgent metadata for frame number %d",
3027 i->frame_number);
3028 }
3029
3030 if (i->frame_number == urgent_frame_number &&
3031 i->bUrgentReceived == 0) {
3032
3033 camera3_capture_result_t result;
3034 memset(&result, 0, sizeof(camera3_capture_result_t));
3035
3036 i->partial_result_cnt++;
3037 i->bUrgentReceived = 1;
3038 // Extract 3A metadata
3039 result.result =
3040 translateCbUrgentMetadataToResultMetadata(metadata);
3041 // Populate metadata result
3042 result.frame_number = urgent_frame_number;
3043 result.num_output_buffers = 0;
3044 result.output_buffers = NULL;
3045 result.partial_result = i->partial_result_cnt;
3046
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003047 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07003048 LOGD("urgent frame_number = %u, capture_time = %lld",
3049 result.frame_number, capture_time);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003050 if (mResetInstantAEC && mInstantAECSettledFrameNumber == 0) {
3051 // Instant AEC settled for this frame.
3052 LOGH("instant AEC settled for frame number %d", urgent_frame_number);
3053 mInstantAECSettledFrameNumber = urgent_frame_number;
3054 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003055 free_camera_metadata((camera_metadata_t *)result.result);
3056 break;
3057 }
3058 }
3059 }
3060
3061 if (!frame_number_valid) {
3062 LOGD("Not a valid normal frame number, used as SOF only");
3063 if (free_and_bufdone_meta_buf) {
3064 mMetadataChannel->bufDone(metadata_buf);
3065 free(metadata_buf);
3066 }
3067 goto done_metadata;
3068 }
3069 LOGH("valid frame_number = %u, capture_time = %lld",
3070 frame_number, capture_time);
3071
3072 for (pendingRequestIterator i = mPendingRequestsList.begin();
3073 i != mPendingRequestsList.end() && i->frame_number <= frame_number;) {
3074 // Flush out all entries with less or equal frame numbers.
3075
3076 camera3_capture_result_t result;
3077 memset(&result, 0, sizeof(camera3_capture_result_t));
3078
3079 LOGD("frame_number in the list is %u", i->frame_number);
3080 i->partial_result_cnt++;
3081 result.partial_result = i->partial_result_cnt;
3082
3083 // Check whether any stream buffer corresponding to this is dropped or not
3084 // If dropped, then send the ERROR_BUFFER for the corresponding stream
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003085 // OR check if instant AEC is enabled, then need to drop frames untill AEC is settled.
3086 if (p_cam_frame_drop ||
3087 (mInstantAEC || i->frame_number < mInstantAECSettledFrameNumber)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003088 /* Clear notify_msg structure */
3089 camera3_notify_msg_t notify_msg;
3090 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3091 for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
3092 j != i->buffers.end(); j++) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003093 bool dropFrame = false;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003094 QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel *)j->stream->priv;
3095 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003096 if (p_cam_frame_drop) {
3097 for (uint32_t k = 0; k < p_cam_frame_drop->num_streams; k++) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003098 if (streamID == p_cam_frame_drop->stream_request[k].streamID) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003099 // Got the stream ID for drop frame.
3100 dropFrame = true;
3101 break;
3102 }
3103 }
3104 } else {
3105 // This is instant AEC case.
3106 // For instant AEC drop the stream untill AEC is settled.
3107 dropFrame = true;
3108 }
3109 if (dropFrame) {
3110 // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
3111 if (p_cam_frame_drop) {
3112 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003113 LOGE("Start of reporting error frame#=%u, streamID=%u",
3114 i->frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003115 } else {
3116 // For instant AEC, inform frame drop and frame number
3117 LOGH("Start of reporting error frame#=%u for instant AEC, streamID=%u, "
3118 "AEC settled frame number = %u",
3119 i->frame_number, streamID, mInstantAECSettledFrameNumber);
3120 }
3121 notify_msg.type = CAMERA3_MSG_ERROR;
3122 notify_msg.message.error.frame_number = i->frame_number;
3123 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
3124 notify_msg.message.error.error_stream = j->stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003125 orchestrateNotify(&notify_msg);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003126 if (p_cam_frame_drop) {
3127 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003128 LOGE("End of reporting error frame#=%u, streamID=%u",
3129 i->frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003130 } else {
3131 // For instant AEC, inform frame drop and frame number
3132 LOGH("End of reporting error frame#=%u for instant AEC, streamID=%u, "
3133 "AEC settled frame number = %u",
3134 i->frame_number, streamID, mInstantAECSettledFrameNumber);
3135 }
3136 PendingFrameDropInfo PendingFrameDrop;
3137 PendingFrameDrop.frame_number=i->frame_number;
3138 PendingFrameDrop.stream_ID = streamID;
3139 // Add the Frame drop info to mPendingFrameDropList
3140 mPendingFrameDropList.push_back(PendingFrameDrop);
3141 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003142 }
3143 }
3144
3145 // Send empty metadata with already filled buffers for dropped metadata
3146 // and send valid metadata with already filled buffers for current metadata
3147 /* we could hit this case when we either
3148 * 1. have a pending reprocess request or
3149 * 2. miss a metadata buffer callback */
3150 if (i->frame_number < frame_number) {
3151 if (i->input_buffer) {
3152 /* this will be handled in handleInputBufferWithLock */
3153 i++;
3154 continue;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003155 } else {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003156
3157 mPendingLiveRequest--;
3158
3159 CameraMetadata dummyMetadata;
3160 dummyMetadata.update(ANDROID_REQUEST_ID, &(i->request_id), 1);
3161 result.result = dummyMetadata.release();
3162
3163 notifyError(i->frame_number, CAMERA3_MSG_ERROR_RESULT);
Thierry Strudel3d639192016-09-09 11:52:26 -07003164 }
3165 } else {
3166 mPendingLiveRequest--;
3167 /* Clear notify_msg structure */
3168 camera3_notify_msg_t notify_msg;
3169 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3170
3171 // Send shutter notify to frameworks
3172 notify_msg.type = CAMERA3_MSG_SHUTTER;
3173 notify_msg.message.shutter.frame_number = i->frame_number;
3174 notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003175 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07003176
3177 i->timestamp = capture_time;
3178
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003179 /* Set the timestamp in display metadata so that clients aware of
3180 private_handle such as VT can use this un-modified timestamps.
3181 Camera framework is unaware of this timestamp and cannot change this */
3182 updateTimeStampInPendingBuffers(i->frame_number, i->timestamp);
3183
Thierry Strudel3d639192016-09-09 11:52:26 -07003184 // Find channel requiring metadata, meaning internal offline postprocess
3185 // is needed.
3186 //TODO: for now, we don't support two streams requiring metadata at the same time.
3187 // (because we are not making copies, and metadata buffer is not reference counted.
3188 bool internalPproc = false;
3189 for (pendingBufferIterator iter = i->buffers.begin();
3190 iter != i->buffers.end(); iter++) {
3191 if (iter->need_metadata) {
3192 internalPproc = true;
3193 QCamera3ProcessingChannel *channel =
3194 (QCamera3ProcessingChannel *)iter->stream->priv;
3195 channel->queueReprocMetadata(metadata_buf);
3196 break;
3197 }
3198 }
3199
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003200 for (auto itr = i->internalRequestList.begin();
3201 itr != i->internalRequestList.end(); itr++) {
3202 if (itr->need_metadata) {
3203 internalPproc = true;
3204 QCamera3ProcessingChannel *channel =
3205 (QCamera3ProcessingChannel *)itr->stream->priv;
3206 channel->queueReprocMetadata(metadata_buf);
3207 break;
3208 }
3209 }
3210
3211
Thierry Strudel3d639192016-09-09 11:52:26 -07003212 result.result = translateFromHalMetadata(metadata,
3213 i->timestamp, i->request_id, i->jpegMetadata, i->pipeline_depth,
Samuel Ha68ba5172016-12-15 18:41:12 -08003214 i->capture_intent,
3215 /* DevCamDebug metadata translateFromHalMetadata function call*/
3216 i->DevCamDebug_meta_enable,
3217 /* DevCamDebug metadata end */
3218 internalPproc, i->fwkCacMode,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003219 firstMetadataInBatch);
Thierry Strudel3d639192016-09-09 11:52:26 -07003220
3221 saveExifParams(metadata);
3222
3223 if (i->blob_request) {
3224 {
3225 //Dump tuning metadata if enabled and available
3226 char prop[PROPERTY_VALUE_MAX];
3227 memset(prop, 0, sizeof(prop));
3228 property_get("persist.camera.dumpmetadata", prop, "0");
3229 int32_t enabled = atoi(prop);
3230 if (enabled && metadata->is_tuning_params_valid) {
3231 dumpMetadataToFile(metadata->tuning_params,
3232 mMetaFrameCount,
3233 enabled,
3234 "Snapshot",
3235 frame_number);
3236 }
3237 }
3238 }
3239
3240 if (!internalPproc) {
3241 LOGD("couldn't find need_metadata for this metadata");
3242 // Return metadata buffer
3243 if (free_and_bufdone_meta_buf) {
3244 mMetadataChannel->bufDone(metadata_buf);
3245 free(metadata_buf);
3246 }
3247 }
3248 }
3249 if (!result.result) {
3250 LOGE("metadata is NULL");
3251 }
3252 result.frame_number = i->frame_number;
3253 result.input_buffer = i->input_buffer;
3254 result.num_output_buffers = 0;
3255 result.output_buffers = NULL;
3256 for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
3257 j != i->buffers.end(); j++) {
3258 if (j->buffer) {
3259 result.num_output_buffers++;
3260 }
3261 }
3262
3263 updateFpsInPreviewBuffer(metadata, i->frame_number);
3264
3265 if (result.num_output_buffers > 0) {
3266 camera3_stream_buffer_t *result_buffers =
3267 new camera3_stream_buffer_t[result.num_output_buffers];
3268 if (result_buffers != NULL) {
3269 size_t result_buffers_idx = 0;
3270 for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
3271 j != i->buffers.end(); j++) {
3272 if (j->buffer) {
3273 for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
3274 m != mPendingFrameDropList.end(); m++) {
3275 QCamera3Channel *channel = (QCamera3Channel *)j->buffer->stream->priv;
3276 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
3277 if((m->stream_ID == streamID) && (m->frame_number==frame_number)) {
3278 j->buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
3279 LOGE("Stream STATUS_ERROR frame_number=%u, streamID=%u",
3280 frame_number, streamID);
3281 m = mPendingFrameDropList.erase(m);
3282 break;
3283 }
3284 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003285 j->buffer->status |= mPendingBuffersMap.getBufErrStatus(j->buffer->buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -07003286 mPendingBuffersMap.removeBuf(j->buffer->buffer);
3287 result_buffers[result_buffers_idx++] = *(j->buffer);
3288 free(j->buffer);
3289 j->buffer = NULL;
3290 }
3291 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003292
Thierry Strudel3d639192016-09-09 11:52:26 -07003293 result.output_buffers = result_buffers;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003294 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07003295 LOGD("meta frame_number = %u, capture_time = %lld",
3296 result.frame_number, i->timestamp);
3297 free_camera_metadata((camera_metadata_t *)result.result);
3298 delete[] result_buffers;
3299 }else {
3300 LOGE("Fatal error: out of memory");
3301 }
3302 } else {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003303 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07003304 LOGD("meta frame_number = %u, capture_time = %lld",
3305 result.frame_number, i->timestamp);
3306 free_camera_metadata((camera_metadata_t *)result.result);
3307 }
3308
3309 i = erasePendingRequest(i);
3310
3311 if (!mPendingReprocessResultList.empty()) {
3312 handlePendingReprocResults(frame_number + 1);
3313 }
3314 }
3315
3316done_metadata:
3317 for (pendingRequestIterator i = mPendingRequestsList.begin();
3318 i != mPendingRequestsList.end() ;i++) {
3319 i->pipeline_depth++;
3320 }
3321 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
3322 unblockRequestIfNecessary();
3323}
3324
3325/*===========================================================================
3326 * FUNCTION : hdrPlusPerfLock
3327 *
3328 * DESCRIPTION: perf lock for HDR+ using custom intent
3329 *
3330 * PARAMETERS : @metadata_buf: Metadata super_buf pointer
3331 *
3332 * RETURN : None
3333 *
3334 *==========================================================================*/
3335void QCamera3HardwareInterface::hdrPlusPerfLock(
3336 mm_camera_super_buf_t *metadata_buf)
3337{
3338 if (NULL == metadata_buf) {
3339 LOGE("metadata_buf is NULL");
3340 return;
3341 }
3342 metadata_buffer_t *metadata =
3343 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3344 int32_t *p_frame_number_valid =
3345 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3346 uint32_t *p_frame_number =
3347 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3348
3349 if (p_frame_number_valid == NULL || p_frame_number == NULL) {
3350 LOGE("%s: Invalid metadata", __func__);
3351 return;
3352 }
3353
3354 //acquire perf lock for 5 sec after the last HDR frame is captured
3355 if ((p_frame_number_valid != NULL) && *p_frame_number_valid) {
3356 if ((p_frame_number != NULL) &&
3357 (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003358 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT, HDR_PLUS_PERF_TIME_OUT);
Thierry Strudel3d639192016-09-09 11:52:26 -07003359 }
3360 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003361}
3362
/*===========================================================================
 * FUNCTION   : handleInputBufferWithLock
 *
 * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
 *              Locates the pending (reprocess) request for the given frame,
 *              sends the shutter notify (once), waits on the input buffer's
 *              release fence, then returns the settings and input buffer to
 *              the framework and erases the pending entry.
 *
 * PARAMETERS : @frame_number: frame number of the input buffer
 *
 * RETURN     : None
 *
 *==========================================================================*/
void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_IN_BUF_LKD);
    // Linear scan for the pending request matching this frame number.
    pendingRequestIterator i = mPendingRequestsList.begin();
    while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
        i++;
    }
    if (i != mPendingRequestsList.end() && i->input_buffer) {
        //found the right request
        if (!i->shutter_notified) {
            CameraMetadata settings;
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
            nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
            // Prefer the sensor timestamp from the input request settings;
            // fall back to the current monotonic time if it is absent.
            if(i->settings) {
                settings = i->settings;
                if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
                    capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
                } else {
                    LOGE("No timestamp in input settings! Using current one.");
                }
            } else {
                LOGE("Input settings missing!");
            }

            // Shutter notify must precede the capture result for this frame.
            notify_msg.type = CAMERA3_MSG_SHUTTER;
            notify_msg.message.shutter.frame_number = frame_number;
            notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
            orchestrateNotify(&notify_msg);
            i->shutter_notified = true;
            LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
                        i->frame_number, notify_msg.message.shutter.timestamp);
        }

        // Wait for (and then close) the input buffer's release fence before
        // handing the buffer back to the framework.
        if (i->input_buffer->release_fence != -1) {
            int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
            close(i->input_buffer->release_fence);
            if (rc != OK) {
                LOGE("input buffer sync wait failed %d", rc);
            }
        }

        // Return the final result: settings as the (full) result metadata
        // plus the consumed input buffer; no output buffers in this path.
        camera3_capture_result result;
        memset(&result, 0, sizeof(camera3_capture_result));
        result.frame_number = frame_number;
        result.result = i->settings;
        result.input_buffer = i->input_buffer;
        result.partial_result = PARTIAL_RESULT_COUNT;

        orchestrateResult(&result);
        LOGD("Input request metadata and input buffer frame_number = %u",
                        i->frame_number);
        i = erasePendingRequest(i);
    } else {
        LOGE("Could not find input request for frame number %d", frame_number);
    }
}
3430
/*===========================================================================
 * FUNCTION   : handleBufferWithLock
 *
 * DESCRIPTION: Handles image buffer callback with mMutex lock held. Either
 *              returns the buffer directly to the framework (no matching
 *              pending request), completes a reprocess request (pending
 *              request with an input buffer), or caches the buffer against
 *              its pending request until the metadata arrives.
 *
 * PARAMETERS : @buffer: image buffer for the callback
 *              @frame_number: frame number of the image buffer
 *
 * RETURN     : None
 *
 *==========================================================================*/
void QCamera3HardwareInterface::handleBufferWithLock(
        camera3_stream_buffer_t *buffer, uint32_t frame_number)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BUF_LKD);

    // A completed BLOB (JPEG) buffer means the snapshot is done; drop the
    // snapshot perf lock.
    if (buffer->stream->format == HAL_PIXEL_FORMAT_BLOB) {
        mPerfLockMgr.releasePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
    }

    /* Nothing to be done during error state */
    if ((ERROR == mState) || (DEINIT == mState)) {
        return;
    }
    if (mFlushPerf) {
        handleBuffersDuringFlushLock(buffer);
        return;
    }
    //not in flush
    // If the frame number doesn't exist in the pending request list,
    // directly send the buffer to the frameworks, and update pending buffers map
    // Otherwise, book-keep the buffer.
    pendingRequestIterator i = mPendingRequestsList.begin();
    while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
        i++;
    }
    if (i == mPendingRequestsList.end()) {
        // Verify all pending requests frame_numbers are greater
        for (pendingRequestIterator j = mPendingRequestsList.begin();
                j != mPendingRequestsList.end(); j++) {
            if ((j->frame_number < frame_number) && !(j->input_buffer)) {
                LOGW("Error: pending live frame number %d is smaller than %d",
                        j->frame_number, frame_number);
            }
        }
        // Buffer-only result: metadata was already sent for this frame.
        camera3_capture_result_t result;
        memset(&result, 0, sizeof(camera3_capture_result_t));
        result.result = NULL;
        result.frame_number = frame_number;
        result.num_output_buffers = 1;
        result.partial_result = 0;
        // If this (stream, frame) pair was flagged as dropped, mark the
        // buffer as an error and remove the drop-list entry.
        for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
                m != mPendingFrameDropList.end(); m++) {
            QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
            uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
            if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
                buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
                LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
                        frame_number, streamID);
                m = mPendingFrameDropList.erase(m);
                break;
            }
        }
        // Merge in any error status recorded against this buffer handle.
        buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
        result.output_buffers = buffer;
        LOGH("result frame_number = %d, buffer = %p",
                frame_number, buffer->buffer);

        mPendingBuffersMap.removeBuf(buffer->buffer);

        orchestrateResult(&result);
    } else {
        if (i->input_buffer) {
            // Reprocess request: build the shutter notify from the input
            // settings' sensor timestamp (falling back to current time).
            CameraMetadata settings;
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
            nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
            if(i->settings) {
                settings = i->settings;
                if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
                    capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
                } else {
                    LOGW("No timestamp in input settings! Using current one.");
                }
            } else {
                LOGE("Input settings missing!");
            }

            notify_msg.type = CAMERA3_MSG_SHUTTER;
            notify_msg.message.shutter.frame_number = frame_number;
            notify_msg.message.shutter.timestamp = (uint64_t)capture_time;

            // Wait on, then close, the input buffer's release fence before
            // returning it to the framework.
            if (i->input_buffer->release_fence != -1) {
                int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
                close(i->input_buffer->release_fence);
                if (rc != OK) {
                    LOGE("input buffer sync wait failed %d", rc);
                }
            }
            buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
            mPendingBuffersMap.removeBuf(buffer->buffer);

            // Full result in one shot: settings, the consumed input buffer,
            // and the single output buffer. Notify precedes result.
            camera3_capture_result result;
            memset(&result, 0, sizeof(camera3_capture_result));
            result.frame_number = frame_number;
            result.result = i->settings;
            result.input_buffer = i->input_buffer;
            result.num_output_buffers = 1;
            result.output_buffers = buffer;
            result.partial_result = PARTIAL_RESULT_COUNT;

            orchestrateNotify(&notify_msg);
            orchestrateResult(&result);
            LOGD("Notify reprocess now %d!", frame_number);
            i = erasePendingRequest(i);
        } else {
            // Metadata for this frame has not arrived yet: cache a copy of
            // the buffer in the pending request; it is freed when the
            // request completes.
            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                    j != i->buffers.end(); j++) {
                if (j->stream == buffer->stream) {
                    if (j->buffer != NULL) {
                        LOGE("Error: buffer is already set");
                    } else {
                        j->buffer = (camera3_stream_buffer_t *)malloc(
                                sizeof(camera3_stream_buffer_t));
                        *(j->buffer) = *buffer;
                        LOGH("cache buffer %p at result frame_number %u",
                                buffer->buffer, frame_number);
                    }
                }
            }
        }
    }

    // First preview buffer out: drop the startup perf locks and switch to
    // the steady-state preview power hint.
    if (mPreviewStarted == false) {
        QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
        if ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask()) {
            mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
            mPerfLockMgr.releasePerfLock(PERF_LOCK_OPEN_CAMERA);
            mPreviewStarted = true;

            // Set power hint for preview
            mPerfLockMgr.acquirePerfLock(PERF_LOCK_POWERHINT_ENCODE, 0);
        }
    }
}
3576
/*===========================================================================
 * FUNCTION   : unblockRequestIfNecessary
 *
 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
 *              that mMutex is held when this function is called.
 *
 * PARAMETERS : None
 *
 * RETURN     : None
 *
 *==========================================================================*/
void QCamera3HardwareInterface::unblockRequestIfNecessary()
{
    // Unblock process_capture_request
    // A single signal (rather than broadcast) wakes the one thread waiting
    // on mRequestCond; the caller already holds mMutex.
    pthread_cond_signal(&mRequestCond);
}
3593
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003594/*===========================================================================
3595 * FUNCTION : isHdrSnapshotRequest
3596 *
3597 * DESCRIPTION: Function to determine if the request is for a HDR snapshot
3598 *
3599 * PARAMETERS : camera3 request structure
3600 *
3601 * RETURN : boolean decision variable
3602 *
3603 *==========================================================================*/
3604bool QCamera3HardwareInterface::isHdrSnapshotRequest(camera3_capture_request *request)
3605{
3606 if (request == NULL) {
3607 LOGE("Invalid request handle");
3608 assert(0);
3609 return false;
3610 }
3611
3612 if (!mForceHdrSnapshot) {
3613 CameraMetadata frame_settings;
3614 frame_settings = request->settings;
3615
3616 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
3617 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
3618 if (metaMode != ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
3619 return false;
3620 }
3621 } else {
3622 return false;
3623 }
3624
3625 if (frame_settings.exists(ANDROID_CONTROL_SCENE_MODE)) {
3626 uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
3627 if (fwk_sceneMode != ANDROID_CONTROL_SCENE_MODE_HDR) {
3628 return false;
3629 }
3630 } else {
3631 return false;
3632 }
3633 }
3634
3635 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
3636 if (request->output_buffers[i].stream->format
3637 == HAL_PIXEL_FORMAT_BLOB) {
3638 return true;
3639 }
3640 }
3641
3642 return false;
3643}
3644/*===========================================================================
3645 * FUNCTION : orchestrateRequest
3646 *
3647 * DESCRIPTION: Orchestrates a capture request from camera service
3648 *
3649 * PARAMETERS :
3650 * @request : request from framework to process
3651 *
3652 * RETURN : Error status codes
3653 *
3654 *==========================================================================*/
3655int32_t QCamera3HardwareInterface::orchestrateRequest(
3656 camera3_capture_request_t *request)
3657{
3658
3659 uint32_t originalFrameNumber = request->frame_number;
3660 uint32_t originalOutputCount = request->num_output_buffers;
3661 const camera_metadata_t *original_settings = request->settings;
3662 List<InternalRequest> internallyRequestedStreams;
3663 List<InternalRequest> emptyInternalList;
3664
3665 if (isHdrSnapshotRequest(request) && request->input_buffer == NULL) {
3666 LOGD("Framework requested:%d buffers in HDR snapshot", request->num_output_buffers);
3667 uint32_t internalFrameNumber;
3668 CameraMetadata modified_meta;
3669
3670
3671 /* Add Blob channel to list of internally requested streams */
3672 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
3673 if (request->output_buffers[i].stream->format
3674 == HAL_PIXEL_FORMAT_BLOB) {
3675 InternalRequest streamRequested;
3676 streamRequested.meteringOnly = 1;
3677 streamRequested.need_metadata = 0;
3678 streamRequested.stream = request->output_buffers[i].stream;
3679 internallyRequestedStreams.push_back(streamRequested);
3680 }
3681 }
3682 request->num_output_buffers = 0;
3683 auto itr = internallyRequestedStreams.begin();
3684
3685 /* Modify setting to set compensation */
3686 modified_meta = request->settings;
3687 int32_t expCompensation = GB_HDR_HALF_STEP_EV;
3688 uint8_t aeLock = 1;
3689 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
3690 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
3691 camera_metadata_t *modified_settings = modified_meta.release();
3692 request->settings = modified_settings;
3693
3694 /* Capture Settling & -2x frame */
3695 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
3696 request->frame_number = internalFrameNumber;
3697 processCaptureRequest(request, internallyRequestedStreams);
3698
3699 request->num_output_buffers = originalOutputCount;
3700 _orchestrationDb.allocStoreInternalFrameNumber(originalFrameNumber, internalFrameNumber);
3701 request->frame_number = internalFrameNumber;
3702 processCaptureRequest(request, emptyInternalList);
3703 request->num_output_buffers = 0;
3704
3705 modified_meta = modified_settings;
3706 expCompensation = 0;
3707 aeLock = 1;
3708 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
3709 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
3710 modified_settings = modified_meta.release();
3711 request->settings = modified_settings;
3712
3713 /* Capture Settling & 0X frame */
3714
3715 itr = internallyRequestedStreams.begin();
3716 if (itr == internallyRequestedStreams.end()) {
3717 LOGE("Error Internally Requested Stream list is empty");
3718 assert(0);
3719 } else {
3720 itr->need_metadata = 0;
3721 itr->meteringOnly = 1;
3722 }
3723
3724 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
3725 request->frame_number = internalFrameNumber;
3726 processCaptureRequest(request, internallyRequestedStreams);
3727
3728 itr = internallyRequestedStreams.begin();
3729 if (itr == internallyRequestedStreams.end()) {
3730 ALOGE("Error Internally Requested Stream list is empty");
3731 assert(0);
3732 } else {
3733 itr->need_metadata = 1;
3734 itr->meteringOnly = 0;
3735 }
3736
3737 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
3738 request->frame_number = internalFrameNumber;
3739 processCaptureRequest(request, internallyRequestedStreams);
3740
3741 /* Capture 2X frame*/
3742 modified_meta = modified_settings;
3743 expCompensation = GB_HDR_2X_STEP_EV;
3744 aeLock = 1;
3745 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
3746 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
3747 modified_settings = modified_meta.release();
3748 request->settings = modified_settings;
3749
3750 itr = internallyRequestedStreams.begin();
3751 if (itr == internallyRequestedStreams.end()) {
3752 ALOGE("Error Internally Requested Stream list is empty");
3753 assert(0);
3754 } else {
3755 itr->need_metadata = 0;
3756 itr->meteringOnly = 1;
3757 }
3758 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
3759 request->frame_number = internalFrameNumber;
3760 processCaptureRequest(request, internallyRequestedStreams);
3761
3762 itr = internallyRequestedStreams.begin();
3763 if (itr == internallyRequestedStreams.end()) {
3764 ALOGE("Error Internally Requested Stream list is empty");
3765 assert(0);
3766 } else {
3767 itr->need_metadata = 1;
3768 itr->meteringOnly = 0;
3769 }
3770
3771 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
3772 request->frame_number = internalFrameNumber;
3773 processCaptureRequest(request, internallyRequestedStreams);
3774
3775
3776 /* Capture 2X on original streaming config*/
3777 internallyRequestedStreams.clear();
3778
3779 /* Restore original settings pointer */
3780 request->settings = original_settings;
3781 } else {
3782 uint32_t internalFrameNumber;
3783 _orchestrationDb.allocStoreInternalFrameNumber(request->frame_number, internalFrameNumber);
3784 request->frame_number = internalFrameNumber;
3785 return processCaptureRequest(request, internallyRequestedStreams);
3786 }
3787
3788 return NO_ERROR;
3789}
3790
3791/*===========================================================================
3792 * FUNCTION : orchestrateResult
3793 *
3794 * DESCRIPTION: Orchestrates a capture result to camera service
3795 *
3796 * PARAMETERS :
3797 * @request : request from framework to process
3798 *
3799 * RETURN :
3800 *
3801 *==========================================================================*/
3802void QCamera3HardwareInterface::orchestrateResult(
3803 camera3_capture_result_t *result)
3804{
3805 uint32_t frameworkFrameNumber;
3806 int32_t rc = _orchestrationDb.getFrameworkFrameNumber(result->frame_number,
3807 frameworkFrameNumber);
3808 if (rc != NO_ERROR) {
3809 LOGE("Cannot find translated frameworkFrameNumber");
3810 assert(0);
3811 } else {
3812 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
3813 LOGD("CAM_DEBUG Internal Request drop the result");
3814 } else {
3815 result->frame_number = frameworkFrameNumber;
3816 mCallbackOps->process_capture_result(mCallbackOps, result);
3817 }
3818 }
3819}
3820
3821/*===========================================================================
3822 * FUNCTION : orchestrateNotify
3823 *
3824 * DESCRIPTION: Orchestrates a notify to camera service
3825 *
3826 * PARAMETERS :
3827 * @request : request from framework to process
3828 *
3829 * RETURN :
3830 *
3831 *==========================================================================*/
3832void QCamera3HardwareInterface::orchestrateNotify(camera3_notify_msg_t *notify_msg)
3833{
3834 uint32_t frameworkFrameNumber;
3835 uint32_t internalFrameNumber = notify_msg->message.shutter.frame_number;
3836 int32_t rc = _orchestrationDb.getFrameworkFrameNumber(internalFrameNumber,
3837 frameworkFrameNumber);
3838 if (rc != NO_ERROR) {
3839 LOGE("Cannot find translated frameworkFrameNumber");
3840 assert(0);
3841 } else {
3842 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
3843 LOGE("CAM_DEBUG Internal Request drop the notifyCb");
3844 } else {
3845 notify_msg->message.shutter.frame_number = frameworkFrameNumber;
3846 mCallbackOps->notify(mCallbackOps, notify_msg);
3847 }
3848 }
3849}
3850
3851/*===========================================================================
3852 * FUNCTION : FrameNumberRegistry
3853 *
3854 * DESCRIPTION: Constructor
3855 *
3856 * PARAMETERS :
3857 *
3858 * RETURN :
3859 *
3860 *==========================================================================*/
3861FrameNumberRegistry::FrameNumberRegistry()
3862{
3863 _nextFreeInternalNumber = INTERNAL_FRAME_STARTING_NUMBER;
3864}
3865
/*===========================================================================
 * FUNCTION   : ~FrameNumberRegistry
 *
 * DESCRIPTION: Destructor. No explicit cleanup needed; members release
 *              their own resources.
 *
 * PARAMETERS : None
 *
 * RETURN     : None
 *
 *==========================================================================*/
FrameNumberRegistry::~FrameNumberRegistry()
{
}
3879
3880/*===========================================================================
3881 * FUNCTION : PurgeOldEntriesLocked
3882 *
3883 * DESCRIPTION: Maintainance function to trigger LRU cleanup mechanism
3884 *
3885 * PARAMETERS :
3886 *
3887 * RETURN : NONE
3888 *
3889 *==========================================================================*/
3890void FrameNumberRegistry::purgeOldEntriesLocked()
3891{
3892 while (_register.begin() != _register.end()) {
3893 auto itr = _register.begin();
3894 if (itr->first < (_nextFreeInternalNumber - FRAME_REGISTER_LRU_SIZE)) {
3895 _register.erase(itr);
3896 } else {
3897 return;
3898 }
3899 }
3900}
3901
3902/*===========================================================================
3903 * FUNCTION : allocStoreInternalFrameNumber
3904 *
3905 * DESCRIPTION: Method to note down a framework request and associate a new
3906 * internal request number against it
3907 *
3908 * PARAMETERS :
3909 * @fFrameNumber: Identifier given by framework
3910 * @internalFN : Output parameter which will have the newly generated internal
3911 * entry
3912 *
3913 * RETURN : Error code
3914 *
3915 *==========================================================================*/
3916int32_t FrameNumberRegistry::allocStoreInternalFrameNumber(uint32_t frameworkFrameNumber,
3917 uint32_t &internalFrameNumber)
3918{
3919 Mutex::Autolock lock(mRegistryLock);
3920 internalFrameNumber = _nextFreeInternalNumber++;
3921 LOGD("Storing ff#:%d, with internal:%d", frameworkFrameNumber, internalFrameNumber);
3922 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, frameworkFrameNumber));
3923 purgeOldEntriesLocked();
3924 return NO_ERROR;
3925}
3926
3927/*===========================================================================
3928 * FUNCTION : generateStoreInternalFrameNumber
3929 *
3930 * DESCRIPTION: Method to associate a new internal request number independent
3931 * of any associate with framework requests
3932 *
3933 * PARAMETERS :
3934 * @internalFrame#: Output parameter which will have the newly generated internal
3935 *
3936 *
3937 * RETURN : Error code
3938 *
3939 *==========================================================================*/
3940int32_t FrameNumberRegistry::generateStoreInternalFrameNumber(uint32_t &internalFrameNumber)
3941{
3942 Mutex::Autolock lock(mRegistryLock);
3943 internalFrameNumber = _nextFreeInternalNumber++;
3944 LOGD("Generated internal framenumber:%d", internalFrameNumber);
3945 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, EMPTY_FRAMEWORK_FRAME_NUMBER));
3946 purgeOldEntriesLocked();
3947 return NO_ERROR;
3948}
3949
3950/*===========================================================================
3951 * FUNCTION : getFrameworkFrameNumber
3952 *
3953 * DESCRIPTION: Method to query the framework framenumber given an internal #
3954 *
3955 * PARAMETERS :
3956 * @internalFrame#: Internal reference
3957 * @frameworkframenumber: Output parameter holding framework frame entry
3958 *
3959 * RETURN : Error code
3960 *
3961 *==========================================================================*/
3962int32_t FrameNumberRegistry::getFrameworkFrameNumber(uint32_t internalFrameNumber,
3963 uint32_t &frameworkFrameNumber)
3964{
3965 Mutex::Autolock lock(mRegistryLock);
3966 auto itr = _register.find(internalFrameNumber);
3967 if (itr == _register.end()) {
3968 LOGE("CAM_DEBUG: Cannot find internal#: %d", internalFrameNumber);
3969 return -ENOENT;
3970 }
3971
3972 frameworkFrameNumber = itr->second;
3973 purgeOldEntriesLocked();
3974 return NO_ERROR;
3975}
Thierry Strudel3d639192016-09-09 11:52:26 -07003976
3977/*===========================================================================
3978 * FUNCTION : processCaptureRequest
3979 *
3980 * DESCRIPTION: process a capture request from camera service
3981 *
3982 * PARAMETERS :
3983 * @request : request from framework to process
3984 *
3985 * RETURN :
3986 *
3987 *==========================================================================*/
3988int QCamera3HardwareInterface::processCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003989 camera3_capture_request_t *request,
3990 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07003991{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003992 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PROC_CAP_REQ);
Thierry Strudel3d639192016-09-09 11:52:26 -07003993 int rc = NO_ERROR;
3994 int32_t request_id;
3995 CameraMetadata meta;
Thierry Strudel3d639192016-09-09 11:52:26 -07003996 bool isVidBufRequested = false;
3997 camera3_stream_buffer_t *pInputBuffer = NULL;
3998
3999 pthread_mutex_lock(&mMutex);
4000
4001 // Validate current state
4002 switch (mState) {
4003 case CONFIGURED:
4004 case STARTED:
4005 /* valid state */
4006 break;
4007
4008 case ERROR:
4009 pthread_mutex_unlock(&mMutex);
4010 handleCameraDeviceError();
4011 return -ENODEV;
4012
4013 default:
4014 LOGE("Invalid state %d", mState);
4015 pthread_mutex_unlock(&mMutex);
4016 return -ENODEV;
4017 }
4018
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004019 rc = validateCaptureRequest(request, internallyRequestedStreams);
Thierry Strudel3d639192016-09-09 11:52:26 -07004020 if (rc != NO_ERROR) {
4021 LOGE("incoming request is not valid");
4022 pthread_mutex_unlock(&mMutex);
4023 return rc;
4024 }
4025
4026 meta = request->settings;
4027
4028 // For first capture request, send capture intent, and
4029 // stream on all streams
4030 if (mState == CONFIGURED) {
4031 // send an unconfigure to the backend so that the isp
4032 // resources are deallocated
4033 if (!mFirstConfiguration) {
4034 cam_stream_size_info_t stream_config_info;
4035 int32_t hal_version = CAM_HAL_V3;
4036 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
4037 stream_config_info.buffer_info.min_buffers =
4038 MIN_INFLIGHT_REQUESTS;
4039 stream_config_info.buffer_info.max_buffers =
4040 m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
4041 clear_metadata_buffer(mParameters);
4042 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4043 CAM_INTF_PARM_HAL_VERSION, hal_version);
4044 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4045 CAM_INTF_META_STREAM_INFO, stream_config_info);
4046 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4047 mParameters);
4048 if (rc < 0) {
4049 LOGE("set_parms for unconfigure failed");
4050 pthread_mutex_unlock(&mMutex);
4051 return rc;
4052 }
4053 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004054 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07004055 /* get eis information for stream configuration */
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004056 cam_is_type_t isTypeVideo, isTypePreview, is_type=IS_TYPE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07004057 char is_type_value[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004058 property_get("persist.camera.is_type", is_type_value, "4");
4059 isTypeVideo = static_cast<cam_is_type_t>(atoi(is_type_value));
4060 // Make default value for preview IS_TYPE as IS_TYPE_EIS_2_0
4061 property_get("persist.camera.is_type_preview", is_type_value, "4");
4062 isTypePreview = static_cast<cam_is_type_t>(atoi(is_type_value));
4063 LOGD("isTypeVideo: %d isTypePreview: %d", isTypeVideo, isTypePreview);
Thierry Strudel3d639192016-09-09 11:52:26 -07004064
4065 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4066 int32_t hal_version = CAM_HAL_V3;
4067 uint8_t captureIntent =
4068 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4069 mCaptureIntent = captureIntent;
4070 clear_metadata_buffer(mParameters);
4071 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
4072 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
4073 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004074 if (mFirstConfiguration) {
4075 // configure instant AEC
4076 // Instant AEC is a session based parameter and it is needed only
4077 // once per complete session after open camera.
4078 // i.e. This is set only once for the first capture request, after open camera.
4079 setInstantAEC(meta);
4080 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004081 uint8_t fwkVideoStabMode=0;
4082 if (meta.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
4083 fwkVideoStabMode = meta.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
4084 }
4085
4086 // If EIS setprop is enabled & if first capture setting has EIS enabled then only
4087 // turn it on for video/preview
4088 bool setEis = m_bEisEnable && fwkVideoStabMode && m_bEisSupportedSize &&
4089 (isTypeVideo >= IS_TYPE_EIS_2_0);
Thierry Strudel3d639192016-09-09 11:52:26 -07004090 int32_t vsMode;
4091 vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
4092 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
4093 rc = BAD_VALUE;
4094 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004095 LOGD("setEis %d", setEis);
4096 bool eis3Supported = false;
4097 size_t count = IS_TYPE_MAX;
4098 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
4099 for (size_t i = 0; i < count; i++) {
4100 if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0) {
4101 eis3Supported = true;
4102 break;
4103 }
4104 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004105
4106 //IS type will be 0 unless EIS is supported. If EIS is supported
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004107 //it could either be 4 or 5 depending on the stream and video size
Thierry Strudel3d639192016-09-09 11:52:26 -07004108 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4109 if (setEis) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004110 if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_PREVIEW) {
4111 is_type = isTypePreview;
4112 } else if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_VIDEO ) {
4113 if ( (isTypeVideo == IS_TYPE_EIS_3_0) && (eis3Supported == FALSE) ) {
4114 LOGW(" EIS_3.0 is not supported and so setting EIS_2.0");
Thierry Strudel3d639192016-09-09 11:52:26 -07004115 is_type = IS_TYPE_EIS_2_0;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004116 } else {
4117 is_type = isTypeVideo;
Thierry Strudel3d639192016-09-09 11:52:26 -07004118 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004119 } else {
4120 is_type = IS_TYPE_NONE;
4121 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004122 mStreamConfigInfo.is_type[i] = is_type;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004123 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004124 mStreamConfigInfo.is_type[i] = IS_TYPE_NONE;
4125 }
4126 }
4127
4128 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4129 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
4130
4131 int32_t tintless_value = 1;
4132 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4133 CAM_INTF_PARM_TINTLESS, tintless_value);
4134 //Disable CDS for HFR mode or if DIS/EIS is on.
4135 //CDS is a session parameter in the backend/ISP, so need to be set/reset
4136 //after every configure_stream
4137 if ((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
4138 (m_bIsVideo)) {
4139 int32_t cds = CAM_CDS_MODE_OFF;
4140 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4141 CAM_INTF_PARM_CDS_MODE, cds))
4142 LOGE("Failed to disable CDS for HFR mode");
4143
4144 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004145
4146 if (m_debug_avtimer || meta.exists(QCAMERA3_USE_AV_TIMER)) {
4147 uint8_t* use_av_timer = NULL;
4148
4149 if (m_debug_avtimer){
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004150 LOGI(" Enabling AV timer through setprop");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004151 use_av_timer = &m_debug_avtimer;
4152 }
4153 else{
4154 use_av_timer =
4155 meta.find(QCAMERA3_USE_AV_TIMER).data.u8;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004156 if (use_av_timer) {
4157 LOGI("Enabling AV timer through Metadata: use_av_timer: %d", *use_av_timer);
4158 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004159 }
4160
4161 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
4162 rc = BAD_VALUE;
4163 }
4164 }
4165
Thierry Strudel3d639192016-09-09 11:52:26 -07004166 setMobicat();
4167
4168 /* Set fps and hfr mode while sending meta stream info so that sensor
4169 * can configure appropriate streaming mode */
4170 mHFRVideoFps = DEFAULT_VIDEO_FPS;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004171 mMinInFlightRequests = MIN_INFLIGHT_REQUESTS;
4172 mMaxInFlightRequests = MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004173 if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
4174 rc = setHalFpsRange(meta, mParameters);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004175 if (rc == NO_ERROR) {
4176 int32_t max_fps =
4177 (int32_t) meta.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
4178 if (max_fps == 60) {
4179 mMinInFlightRequests = MIN_INFLIGHT_60FPS_REQUESTS;
4180 }
4181 /* For HFR, more buffers are dequeued upfront to improve the performance */
4182 if (mBatchSize) {
4183 mMinInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
4184 mMaxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
4185 }
4186 }
4187 else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004188 LOGE("setHalFpsRange failed");
4189 }
4190 }
4191 if (meta.exists(ANDROID_CONTROL_MODE)) {
4192 uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
4193 rc = extractSceneMode(meta, metaMode, mParameters);
4194 if (rc != NO_ERROR) {
4195 LOGE("extractSceneMode failed");
4196 }
4197 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004198 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07004199
Thierry Strudel04e026f2016-10-10 11:27:36 -07004200 if (meta.exists(QCAMERA3_VIDEO_HDR_MODE)) {
4201 cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
4202 meta.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
4203 rc = setVideoHdrMode(mParameters, vhdr);
4204 if (rc != NO_ERROR) {
4205 LOGE("setVideoHDR is failed");
4206 }
4207 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004208
Thierry Strudel3d639192016-09-09 11:52:26 -07004209 //TODO: validate the arguments, HSV scenemode should have only the
4210 //advertised fps ranges
4211
4212 /*set the capture intent, hal version, tintless, stream info,
4213 *and disenable parameters to the backend*/
4214 LOGD("set_parms META_STREAM_INFO " );
4215 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4216 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x "
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004217 "Format:%d is_type: %d",
Thierry Strudel3d639192016-09-09 11:52:26 -07004218 mStreamConfigInfo.type[i],
4219 mStreamConfigInfo.stream_sizes[i].width,
4220 mStreamConfigInfo.stream_sizes[i].height,
4221 mStreamConfigInfo.postprocess_mask[i],
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004222 mStreamConfigInfo.format[i],
4223 mStreamConfigInfo.is_type[i]);
Thierry Strudel3d639192016-09-09 11:52:26 -07004224 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004225
Thierry Strudel3d639192016-09-09 11:52:26 -07004226 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4227 mParameters);
4228 if (rc < 0) {
4229 LOGE("set_parms failed for hal version, stream info");
4230 }
4231
4232 cam_dimension_t sensor_dim;
4233 memset(&sensor_dim, 0, sizeof(sensor_dim));
4234 rc = getSensorOutputSize(sensor_dim);
4235 if (rc != NO_ERROR) {
4236 LOGE("Failed to get sensor output size");
4237 pthread_mutex_unlock(&mMutex);
4238 goto error_exit;
4239 }
4240
4241 mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
4242 gCamCapability[mCameraId]->active_array_size.height,
4243 sensor_dim.width, sensor_dim.height);
4244
4245 /* Set batchmode before initializing channel. Since registerBuffer
4246 * internally initializes some of the channels, better set batchmode
4247 * even before first register buffer */
4248 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4249 it != mStreamInfo.end(); it++) {
4250 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
4251 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
4252 && mBatchSize) {
4253 rc = channel->setBatchSize(mBatchSize);
4254 //Disable per frame map unmap for HFR/batchmode case
4255 rc |= channel->setPerFrameMapUnmap(false);
4256 if (NO_ERROR != rc) {
4257 LOGE("Channel init failed %d", rc);
4258 pthread_mutex_unlock(&mMutex);
4259 goto error_exit;
4260 }
4261 }
4262 }
4263
4264 //First initialize all streams
4265 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4266 it != mStreamInfo.end(); it++) {
4267 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
4268 if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
4269 ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004270 setEis) {
4271 for (size_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4272 if ( (1U << mStreamConfigInfo.type[i]) == channel->getStreamTypeMask() ) {
4273 is_type = mStreamConfigInfo.is_type[i];
4274 break;
4275 }
4276 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004277 rc = channel->initialize(is_type);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004278 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004279 rc = channel->initialize(IS_TYPE_NONE);
4280 }
4281 if (NO_ERROR != rc) {
4282 LOGE("Channel initialization failed %d", rc);
4283 pthread_mutex_unlock(&mMutex);
4284 goto error_exit;
4285 }
4286 }
4287
4288 if (mRawDumpChannel) {
4289 rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
4290 if (rc != NO_ERROR) {
4291 LOGE("Error: Raw Dump Channel init failed");
4292 pthread_mutex_unlock(&mMutex);
4293 goto error_exit;
4294 }
4295 }
4296 if (mSupportChannel) {
4297 rc = mSupportChannel->initialize(IS_TYPE_NONE);
4298 if (rc < 0) {
4299 LOGE("Support channel initialization failed");
4300 pthread_mutex_unlock(&mMutex);
4301 goto error_exit;
4302 }
4303 }
4304 if (mAnalysisChannel) {
4305 rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
4306 if (rc < 0) {
4307 LOGE("Analysis channel initialization failed");
4308 pthread_mutex_unlock(&mMutex);
4309 goto error_exit;
4310 }
4311 }
4312 if (mDummyBatchChannel) {
4313 rc = mDummyBatchChannel->setBatchSize(mBatchSize);
4314 if (rc < 0) {
4315 LOGE("mDummyBatchChannel setBatchSize failed");
4316 pthread_mutex_unlock(&mMutex);
4317 goto error_exit;
4318 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004319 rc = mDummyBatchChannel->initialize(IS_TYPE_NONE);
Thierry Strudel3d639192016-09-09 11:52:26 -07004320 if (rc < 0) {
4321 LOGE("mDummyBatchChannel initialization failed");
4322 pthread_mutex_unlock(&mMutex);
4323 goto error_exit;
4324 }
4325 }
4326
4327 // Set bundle info
4328 rc = setBundleInfo();
4329 if (rc < 0) {
4330 LOGE("setBundleInfo failed %d", rc);
4331 pthread_mutex_unlock(&mMutex);
4332 goto error_exit;
4333 }
4334
4335 //update settings from app here
4336 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
4337 mIsDeviceLinked = meta.find(QCAMERA3_DUALCAM_LINK_ENABLE).data.u8[0];
4338 LOGH("Dualcam: setting On=%d id =%d", mIsDeviceLinked, mCameraId);
4339 }
4340 if (meta.exists(QCAMERA3_DUALCAM_LINK_IS_MAIN)) {
4341 mIsMainCamera = meta.find(QCAMERA3_DUALCAM_LINK_IS_MAIN).data.u8[0];
4342 LOGH("Dualcam: Is this main camera = %d id =%d", mIsMainCamera, mCameraId);
4343 }
4344 if (meta.exists(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID)) {
4345 mLinkedCameraId = meta.find(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID).data.u8[0];
4346 LOGH("Dualcam: Linked camera Id %d id =%d", mLinkedCameraId, mCameraId);
4347
4348 if ( (mLinkedCameraId >= MM_CAMERA_MAX_NUM_SENSORS) &&
4349 (mLinkedCameraId != mCameraId) ) {
4350 LOGE("Dualcam: mLinkedCameraId %d is invalid, current cam id = %d",
4351 mLinkedCameraId, mCameraId);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004352 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07004353 goto error_exit;
4354 }
4355 }
4356
4357 // add bundle related cameras
4358 LOGH("%s: Dualcam: id =%d, mIsDeviceLinked=%d", __func__,mCameraId, mIsDeviceLinked);
4359 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004360 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
4361 &m_pDualCamCmdPtr->bundle_info;
4362 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07004363 if (mIsDeviceLinked)
4364 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_ON;
4365 else
4366 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
4367
4368 pthread_mutex_lock(&gCamLock);
4369
4370 if (sessionId[mLinkedCameraId] == 0xDEADBEEF) {
4371 LOGE("Dualcam: Invalid Session Id ");
4372 pthread_mutex_unlock(&gCamLock);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004373 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07004374 goto error_exit;
4375 }
4376
4377 if (mIsMainCamera == 1) {
4378 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
4379 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07004380 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004381 m_pRelCamSyncBuf->cam_role = CAM_ROLE_BAYER;
Thierry Strudel3d639192016-09-09 11:52:26 -07004382 // related session id should be session id of linked session
4383 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
4384 } else {
4385 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
4386 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07004387 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004388 m_pRelCamSyncBuf->cam_role = CAM_ROLE_MONO;
Thierry Strudel3d639192016-09-09 11:52:26 -07004389 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
4390 }
4391 pthread_mutex_unlock(&gCamLock);
4392
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004393 rc = mCameraHandle->ops->set_dual_cam_cmd(
4394 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07004395 if (rc < 0) {
4396 LOGE("Dualcam: link failed");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004397 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07004398 goto error_exit;
4399 }
4400 }
4401
4402 //Then start them.
4403 LOGH("Start META Channel");
4404 rc = mMetadataChannel->start();
4405 if (rc < 0) {
4406 LOGE("META channel start failed");
4407 pthread_mutex_unlock(&mMutex);
4408 goto error_exit;
4409 }
4410
4411 if (mAnalysisChannel) {
4412 rc = mAnalysisChannel->start();
4413 if (rc < 0) {
4414 LOGE("Analysis channel start failed");
4415 mMetadataChannel->stop();
4416 pthread_mutex_unlock(&mMutex);
4417 goto error_exit;
4418 }
4419 }
4420
4421 if (mSupportChannel) {
4422 rc = mSupportChannel->start();
4423 if (rc < 0) {
4424 LOGE("Support channel start failed");
4425 mMetadataChannel->stop();
4426 /* Although support and analysis are mutually exclusive today
4427 adding it in anycase for future proofing */
4428 if (mAnalysisChannel) {
4429 mAnalysisChannel->stop();
4430 }
4431 pthread_mutex_unlock(&mMutex);
4432 goto error_exit;
4433 }
4434 }
4435 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4436 it != mStreamInfo.end(); it++) {
4437 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
4438 LOGH("Start Processing Channel mask=%d",
4439 channel->getStreamTypeMask());
4440 rc = channel->start();
4441 if (rc < 0) {
4442 LOGE("channel start failed");
4443 pthread_mutex_unlock(&mMutex);
4444 goto error_exit;
4445 }
4446 }
4447
4448 if (mRawDumpChannel) {
4449 LOGD("Starting raw dump stream");
4450 rc = mRawDumpChannel->start();
4451 if (rc != NO_ERROR) {
4452 LOGE("Error Starting Raw Dump Channel");
4453 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4454 it != mStreamInfo.end(); it++) {
4455 QCamera3Channel *channel =
4456 (QCamera3Channel *)(*it)->stream->priv;
4457 LOGH("Stopping Processing Channel mask=%d",
4458 channel->getStreamTypeMask());
4459 channel->stop();
4460 }
4461 if (mSupportChannel)
4462 mSupportChannel->stop();
4463 if (mAnalysisChannel) {
4464 mAnalysisChannel->stop();
4465 }
4466 mMetadataChannel->stop();
4467 pthread_mutex_unlock(&mMutex);
4468 goto error_exit;
4469 }
4470 }
4471
4472 if (mChannelHandle) {
4473
4474 rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
4475 mChannelHandle);
4476 if (rc != NO_ERROR) {
4477 LOGE("start_channel failed %d", rc);
4478 pthread_mutex_unlock(&mMutex);
4479 goto error_exit;
4480 }
4481 }
4482
4483 goto no_error;
4484error_exit:
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004485 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07004486 return rc;
4487no_error:
Thierry Strudel3d639192016-09-09 11:52:26 -07004488 mWokenUpByDaemon = false;
4489 mPendingLiveRequest = 0;
4490 mFirstConfiguration = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07004491 }
4492
4493 uint32_t frameNumber = request->frame_number;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004494 cam_stream_ID_t streamsArray;
Thierry Strudel3d639192016-09-09 11:52:26 -07004495
4496 if (mFlushPerf) {
4497 //we cannot accept any requests during flush
4498 LOGE("process_capture_request cannot proceed during flush");
4499 pthread_mutex_unlock(&mMutex);
4500 return NO_ERROR; //should return an error
4501 }
4502
4503 if (meta.exists(ANDROID_REQUEST_ID)) {
4504 request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
4505 mCurrentRequestId = request_id;
4506 LOGD("Received request with id: %d", request_id);
4507 } else if (mState == CONFIGURED || mCurrentRequestId == -1){
4508 LOGE("Unable to find request id field, \
4509 & no previous id available");
4510 pthread_mutex_unlock(&mMutex);
4511 return NAME_NOT_FOUND;
4512 } else {
4513 LOGD("Re-using old request id");
4514 request_id = mCurrentRequestId;
4515 }
4516
4517 LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
4518 request->num_output_buffers,
4519 request->input_buffer,
4520 frameNumber);
4521 // Acquire all request buffers first
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004522 streamsArray.num_streams = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07004523 int blob_request = 0;
4524 uint32_t snapshotStreamId = 0;
4525 for (size_t i = 0; i < request->num_output_buffers; i++) {
4526 const camera3_stream_buffer_t& output = request->output_buffers[i];
4527 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
4528
4529 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004530 //FIXME??:Call function to store local copy of jpeg data for encode params.
Thierry Strudel3d639192016-09-09 11:52:26 -07004531 blob_request = 1;
4532 snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
4533 }
4534
4535 if (output.acquire_fence != -1) {
4536 rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
4537 close(output.acquire_fence);
4538 if (rc != OK) {
4539 LOGE("sync wait failed %d", rc);
4540 pthread_mutex_unlock(&mMutex);
4541 return rc;
4542 }
4543 }
4544
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004545 streamsArray.stream_request[streamsArray.num_streams++].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07004546 channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel3d639192016-09-09 11:52:26 -07004547
4548 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
4549 isVidBufRequested = true;
4550 }
4551 }
4552
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004553 //FIXME: Add checks to ensure to dups in validateCaptureRequest
4554 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
4555 itr++) {
4556 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
4557 streamsArray.stream_request[streamsArray.num_streams++].streamID =
4558 channel->getStreamID(channel->getStreamTypeMask());
4559
4560 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
4561 isVidBufRequested = true;
4562 }
4563 }
4564
Thierry Strudel3d639192016-09-09 11:52:26 -07004565 if (blob_request) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004566 KPI_ATRACE_CAMSCOPE_INT("SNAPSHOT", CAMSCOPE_HAL3_SNAPSHOT, 1);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004567 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
Thierry Strudel3d639192016-09-09 11:52:26 -07004568 }
4569 if (blob_request && mRawDumpChannel) {
4570 LOGD("Trigger Raw based on blob request if Raw dump is enabled");
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004571 streamsArray.stream_request[streamsArray.num_streams].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07004572 mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004573 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
Thierry Strudel3d639192016-09-09 11:52:26 -07004574 }
4575
4576 if(request->input_buffer == NULL) {
4577 /* Parse the settings:
4578 * - For every request in NORMAL MODE
4579 * - For every request in HFR mode during preview only case
4580 * - For first request of every batch in HFR mode during video
4581 * recording. In batchmode the same settings except frame number is
4582 * repeated in each request of the batch.
4583 */
4584 if (!mBatchSize ||
4585 (mBatchSize && !isVidBufRequested) ||
4586 (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004587 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
Thierry Strudel3d639192016-09-09 11:52:26 -07004588 if (rc < 0) {
4589 LOGE("fail to set frame parameters");
4590 pthread_mutex_unlock(&mMutex);
4591 return rc;
4592 }
4593 }
4594 /* For batchMode HFR, setFrameParameters is not called for every
4595 * request. But only frame number of the latest request is parsed.
4596 * Keep track of first and last frame numbers in a batch so that
4597 * metadata for the frame numbers of batch can be duplicated in
4598 * handleBatchMetadta */
4599 if (mBatchSize) {
4600 if (!mToBeQueuedVidBufs) {
4601 //start of the batch
4602 mFirstFrameNumberInBatch = request->frame_number;
4603 }
4604 if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4605 CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
4606 LOGE("Failed to set the frame number in the parameters");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004607 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07004608 return BAD_VALUE;
4609 }
4610 }
4611 if (mNeedSensorRestart) {
4612 /* Unlock the mutex as restartSensor waits on the channels to be
4613 * stopped, which in turn calls stream callback functions -
4614 * handleBufferWithLock and handleMetadataWithLock */
4615 pthread_mutex_unlock(&mMutex);
4616 rc = dynamicUpdateMetaStreamInfo();
4617 if (rc != NO_ERROR) {
4618 LOGE("Restarting the sensor failed");
4619 return BAD_VALUE;
4620 }
4621 mNeedSensorRestart = false;
4622 pthread_mutex_lock(&mMutex);
4623 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004624 if(mResetInstantAEC) {
4625 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4626 CAM_INTF_PARM_INSTANT_AEC, (uint8_t)CAM_AEC_NORMAL_CONVERGENCE);
4627 mResetInstantAEC = false;
4628 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004629 } else {
4630
4631 if (request->input_buffer->acquire_fence != -1) {
4632 rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
4633 close(request->input_buffer->acquire_fence);
4634 if (rc != OK) {
4635 LOGE("input buffer sync wait failed %d", rc);
4636 pthread_mutex_unlock(&mMutex);
4637 return rc;
4638 }
4639 }
4640 }
4641
4642 if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
4643 mLastCustIntentFrmNum = frameNumber;
4644 }
4645 /* Update pending request list and pending buffers map */
4646 PendingRequestInfo pendingRequest;
4647 pendingRequestIterator latestRequest;
4648 pendingRequest.frame_number = frameNumber;
4649 pendingRequest.num_buffers = request->num_output_buffers;
4650 pendingRequest.request_id = request_id;
4651 pendingRequest.blob_request = blob_request;
4652 pendingRequest.timestamp = 0;
4653 pendingRequest.bUrgentReceived = 0;
4654 if (request->input_buffer) {
4655 pendingRequest.input_buffer =
4656 (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
4657 *(pendingRequest.input_buffer) = *(request->input_buffer);
4658 pInputBuffer = pendingRequest.input_buffer;
4659 } else {
4660 pendingRequest.input_buffer = NULL;
4661 pInputBuffer = NULL;
4662 }
4663
4664 pendingRequest.pipeline_depth = 0;
4665 pendingRequest.partial_result_cnt = 0;
4666 extractJpegMetadata(mCurJpegMeta, request);
4667 pendingRequest.jpegMetadata = mCurJpegMeta;
4668 pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
4669 pendingRequest.shutter_notified = false;
4670
4671 //extract capture intent
4672 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4673 mCaptureIntent =
4674 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4675 }
4676 pendingRequest.capture_intent = mCaptureIntent;
Samuel Ha68ba5172016-12-15 18:41:12 -08004677 /* DevCamDebug metadata processCaptureRequest */
4678 if (meta.exists(DEVCAMDEBUG_META_ENABLE)) {
4679 mDevCamDebugMetaEnable =
4680 meta.find(DEVCAMDEBUG_META_ENABLE).data.u8[0];
4681 }
4682 pendingRequest.DevCamDebug_meta_enable = mDevCamDebugMetaEnable;
4683 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07004684
4685 //extract CAC info
4686 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
4687 mCacMode =
4688 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
4689 }
4690 pendingRequest.fwkCacMode = mCacMode;
4691
4692 PendingBuffersInRequest bufsForCurRequest;
4693 bufsForCurRequest.frame_number = frameNumber;
4694 // Mark current timestamp for the new request
4695 bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
4696
4697 for (size_t i = 0; i < request->num_output_buffers; i++) {
4698 RequestedBufferInfo requestedBuf;
4699 memset(&requestedBuf, 0, sizeof(requestedBuf));
4700 requestedBuf.stream = request->output_buffers[i].stream;
4701 requestedBuf.buffer = NULL;
4702 pendingRequest.buffers.push_back(requestedBuf);
4703
4704 // Add to buffer handle the pending buffers list
4705 PendingBufferInfo bufferInfo;
4706 bufferInfo.buffer = request->output_buffers[i].buffer;
4707 bufferInfo.stream = request->output_buffers[i].stream;
4708 bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
4709 QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
4710 LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
4711 frameNumber, bufferInfo.buffer,
4712 channel->getStreamTypeMask(), bufferInfo.stream->format);
4713 }
4714 // Add this request packet into mPendingBuffersMap
4715 mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
4716 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
4717 mPendingBuffersMap.get_num_overall_buffers());
4718
4719 latestRequest = mPendingRequestsList.insert(
4720 mPendingRequestsList.end(), pendingRequest);
4721 if(mFlush) {
4722 LOGI("mFlush is true");
4723 pthread_mutex_unlock(&mMutex);
4724 return NO_ERROR;
4725 }
4726
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004727 int indexUsed;
Thierry Strudel3d639192016-09-09 11:52:26 -07004728 // Notify metadata channel we receive a request
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004729 mMetadataChannel->request(NULL, frameNumber, indexUsed);
Thierry Strudel3d639192016-09-09 11:52:26 -07004730
4731 if(request->input_buffer != NULL){
4732 LOGD("Input request, frame_number %d", frameNumber);
4733 rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
4734 if (NO_ERROR != rc) {
4735 LOGE("fail to set reproc parameters");
4736 pthread_mutex_unlock(&mMutex);
4737 return rc;
4738 }
4739 }
4740
4741 // Call request on other streams
4742 uint32_t streams_need_metadata = 0;
4743 pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
4744 for (size_t i = 0; i < request->num_output_buffers; i++) {
4745 const camera3_stream_buffer_t& output = request->output_buffers[i];
4746 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
4747
4748 if (channel == NULL) {
4749 LOGW("invalid channel pointer for stream");
4750 continue;
4751 }
4752
4753 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
4754 LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
4755 output.buffer, request->input_buffer, frameNumber);
4756 if(request->input_buffer != NULL){
4757 rc = channel->request(output.buffer, frameNumber,
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004758 pInputBuffer, &mReprocMeta, indexUsed, false, false);
Thierry Strudel3d639192016-09-09 11:52:26 -07004759 if (rc < 0) {
4760 LOGE("Fail to request on picture channel");
4761 pthread_mutex_unlock(&mMutex);
4762 return rc;
4763 }
4764 } else {
4765 LOGD("snapshot request with buffer %p, frame_number %d",
4766 output.buffer, frameNumber);
4767 if (!request->settings) {
4768 rc = channel->request(output.buffer, frameNumber,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004769 NULL, mPrevParameters, indexUsed);
Thierry Strudel3d639192016-09-09 11:52:26 -07004770 } else {
4771 rc = channel->request(output.buffer, frameNumber,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004772 NULL, mParameters, indexUsed);
Thierry Strudel3d639192016-09-09 11:52:26 -07004773 }
4774 if (rc < 0) {
4775 LOGE("Fail to request on picture channel");
4776 pthread_mutex_unlock(&mMutex);
4777 return rc;
4778 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004779
4780 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
4781 uint32_t j = 0;
4782 for (j = 0; j < streamsArray.num_streams; j++) {
4783 if (streamsArray.stream_request[j].streamID == streamId) {
4784 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
4785 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
4786 else
4787 streamsArray.stream_request[j].buf_index = indexUsed;
4788 break;
4789 }
4790 }
4791 if (j == streamsArray.num_streams) {
4792 LOGE("Did not find matching stream to update index");
4793 assert(0);
4794 }
4795
Thierry Strudel3d639192016-09-09 11:52:26 -07004796 pendingBufferIter->need_metadata = true;
4797 streams_need_metadata++;
4798 }
4799 } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
4800 bool needMetadata = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07004801 QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
4802 rc = yuvChannel->request(output.buffer, frameNumber,
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004803 pInputBuffer, (pInputBuffer ? &mReprocMeta : mParameters),
4804 needMetadata, indexUsed, false, false);
Thierry Strudel3d639192016-09-09 11:52:26 -07004805 if (rc < 0) {
4806 LOGE("Fail to request on YUV channel");
4807 pthread_mutex_unlock(&mMutex);
4808 return rc;
4809 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004810
4811 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
4812 uint32_t j = 0;
4813 for (j = 0; j < streamsArray.num_streams; j++) {
4814 if (streamsArray.stream_request[j].streamID == streamId) {
4815 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
4816 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
4817 else
4818 streamsArray.stream_request[j].buf_index = indexUsed;
4819 break;
4820 }
4821 }
4822 if (j == streamsArray.num_streams) {
4823 LOGE("Did not find matching stream to update index");
4824 assert(0);
4825 }
4826
Thierry Strudel3d639192016-09-09 11:52:26 -07004827 pendingBufferIter->need_metadata = needMetadata;
4828 if (needMetadata)
4829 streams_need_metadata += 1;
4830 LOGD("calling YUV channel request, need_metadata is %d",
4831 needMetadata);
4832 } else {
4833 LOGD("request with buffer %p, frame_number %d",
4834 output.buffer, frameNumber);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004835
4836 rc = channel->request(output.buffer, frameNumber, indexUsed);
4837
4838 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
4839 uint32_t j = 0;
4840 for (j = 0; j < streamsArray.num_streams; j++) {
4841 if (streamsArray.stream_request[j].streamID == streamId) {
4842 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
4843 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
4844 else
4845 streamsArray.stream_request[j].buf_index = indexUsed;
4846 break;
4847 }
4848 }
4849 if (j == streamsArray.num_streams) {
4850 LOGE("Did not find matching stream to update index");
4851 assert(0);
4852 }
4853
Thierry Strudel3d639192016-09-09 11:52:26 -07004854 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
4855 && mBatchSize) {
4856 mToBeQueuedVidBufs++;
4857 if (mToBeQueuedVidBufs == mBatchSize) {
4858 channel->queueBatchBuf();
4859 }
4860 }
4861 if (rc < 0) {
4862 LOGE("request failed");
4863 pthread_mutex_unlock(&mMutex);
4864 return rc;
4865 }
4866 }
4867 pendingBufferIter++;
4868 }
4869
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004870 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
4871 itr++) {
4872 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
4873
4874 if (channel == NULL) {
4875 LOGE("invalid channel pointer for stream");
4876 assert(0);
4877 return BAD_VALUE;
4878 }
4879
4880 InternalRequest requestedStream;
4881 requestedStream = (*itr);
4882
4883
4884 if ((*itr).stream->format == HAL_PIXEL_FORMAT_BLOB) {
4885 LOGD("snapshot request internally input buffer %p, frame_number %d",
4886 request->input_buffer, frameNumber);
4887 if(request->input_buffer != NULL){
4888 rc = channel->request(NULL, frameNumber,
4889 pInputBuffer, &mReprocMeta, indexUsed, true, requestedStream.meteringOnly);
4890 if (rc < 0) {
4891 LOGE("Fail to request on picture channel");
4892 pthread_mutex_unlock(&mMutex);
4893 return rc;
4894 }
4895 } else {
4896 LOGD("snapshot request with frame_number %d", frameNumber);
4897 if (!request->settings) {
4898 rc = channel->request(NULL, frameNumber,
4899 NULL, mPrevParameters, indexUsed, true, requestedStream.meteringOnly);
4900 } else {
4901 rc = channel->request(NULL, frameNumber,
4902 NULL, mParameters, indexUsed, true, requestedStream.meteringOnly);
4903 }
4904 if (rc < 0) {
4905 LOGE("Fail to request on picture channel");
4906 pthread_mutex_unlock(&mMutex);
4907 return rc;
4908 }
4909
4910 if ((*itr).meteringOnly != 1) {
4911 requestedStream.need_metadata = 1;
4912 streams_need_metadata++;
4913 }
4914 }
4915
4916 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
4917 uint32_t j = 0;
4918 for (j = 0; j < streamsArray.num_streams; j++) {
4919 if (streamsArray.stream_request[j].streamID == streamId) {
4920 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
4921 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
4922 else
4923 streamsArray.stream_request[j].buf_index = indexUsed;
4924 break;
4925 }
4926 }
4927 if (j == streamsArray.num_streams) {
4928 LOGE("Did not find matching stream to update index");
4929 assert(0);
4930 }
4931
4932 } else {
4933 LOGE("Internal requests not supported on this stream type");
4934 assert(0);
4935 return INVALID_OPERATION;
4936 }
4937 latestRequest->internalRequestList.push_back(requestedStream);
4938 }
4939
Thierry Strudel3d639192016-09-09 11:52:26 -07004940 //If 2 streams have need_metadata set to true, fail the request, unless
4941 //we copy/reference count the metadata buffer
4942 if (streams_need_metadata > 1) {
4943 LOGE("not supporting request in which two streams requires"
4944 " 2 HAL metadata for reprocessing");
4945 pthread_mutex_unlock(&mMutex);
4946 return -EINVAL;
4947 }
4948
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004949 if (request->input_buffer == NULL) {
Thierry Strudel3d639192016-09-09 11:52:26 -07004950 /* Set the parameters to backend:
4951 * - For every request in NORMAL MODE
4952 * - For every request in HFR mode during preview only case
4953 * - Once every batch in HFR mode during video recording
4954 */
4955 if (!mBatchSize ||
4956 (mBatchSize && !isVidBufRequested) ||
4957 (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
4958 LOGD("set_parms batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
4959 mBatchSize, isVidBufRequested,
4960 mToBeQueuedVidBufs);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004961
4962 if(mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize)) {
4963 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
4964 uint32_t m = 0;
4965 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
4966 if (streamsArray.stream_request[k].streamID ==
4967 mBatchedStreamsArray.stream_request[m].streamID)
4968 break;
4969 }
4970 if (m == mBatchedStreamsArray.num_streams) {
4971 mBatchedStreamsArray.stream_request\
4972 [mBatchedStreamsArray.num_streams].streamID =
4973 streamsArray.stream_request[k].streamID;
4974 mBatchedStreamsArray.stream_request\
4975 [mBatchedStreamsArray.num_streams].buf_index =
4976 streamsArray.stream_request[k].buf_index;
4977 mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
4978 }
4979 }
4980 streamsArray = mBatchedStreamsArray;
4981 }
4982 /* Update stream id of all the requested buffers */
4983 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
4984 LOGE("Failed to set stream type mask in the parameters");
4985 return BAD_VALUE;
4986 }
4987
Thierry Strudel3d639192016-09-09 11:52:26 -07004988 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4989 mParameters);
4990 if (rc < 0) {
4991 LOGE("set_parms failed");
4992 }
4993 /* reset to zero coz, the batch is queued */
4994 mToBeQueuedVidBufs = 0;
4995 mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004996 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
4997 } else if (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs != mBatchSize)) {
4998 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
4999 uint32_t m = 0;
5000 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5001 if (streamsArray.stream_request[k].streamID ==
5002 mBatchedStreamsArray.stream_request[m].streamID)
5003 break;
5004 }
5005 if (m == mBatchedStreamsArray.num_streams) {
5006 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].streamID =
5007 streamsArray.stream_request[k].streamID;
5008 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].buf_index =
5009 streamsArray.stream_request[k].buf_index;
5010 mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
5011 }
5012 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005013 }
5014 mPendingLiveRequest++;
5015 }
5016
5017 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
5018
5019 mState = STARTED;
5020 // Added a timed condition wait
5021 struct timespec ts;
5022 uint8_t isValidTimeout = 1;
5023 rc = clock_gettime(CLOCK_REALTIME, &ts);
5024 if (rc < 0) {
5025 isValidTimeout = 0;
5026 LOGE("Error reading the real time clock!!");
5027 }
5028 else {
5029 // Make timeout as 5 sec for request to be honored
5030 ts.tv_sec += 5;
5031 }
5032 //Block on conditional variable
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005033 while ((mPendingLiveRequest >= mMinInFlightRequests) && !pInputBuffer &&
Thierry Strudel3d639192016-09-09 11:52:26 -07005034 (mState != ERROR) && (mState != DEINIT)) {
5035 if (!isValidTimeout) {
5036 LOGD("Blocking on conditional wait");
5037 pthread_cond_wait(&mRequestCond, &mMutex);
5038 }
5039 else {
5040 LOGD("Blocking on timed conditional wait");
5041 rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
5042 if (rc == ETIMEDOUT) {
5043 rc = -ENODEV;
5044 LOGE("Unblocked on timeout!!!!");
5045 break;
5046 }
5047 }
5048 LOGD("Unblocked");
5049 if (mWokenUpByDaemon) {
5050 mWokenUpByDaemon = false;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005051 if (mPendingLiveRequest < mMaxInFlightRequests)
Thierry Strudel3d639192016-09-09 11:52:26 -07005052 break;
5053 }
5054 }
5055 pthread_mutex_unlock(&mMutex);
5056
5057 return rc;
5058}
5059
5060/*===========================================================================
5061 * FUNCTION : dump
5062 *
5063 * DESCRIPTION:
5064 *
5065 * PARAMETERS :
5066 *
5067 *
5068 * RETURN :
5069 *==========================================================================*/
5070void QCamera3HardwareInterface::dump(int fd)
5071{
5072 pthread_mutex_lock(&mMutex);
5073 dprintf(fd, "\n Camera HAL3 information Begin \n");
5074
5075 dprintf(fd, "\nNumber of pending requests: %zu \n",
5076 mPendingRequestsList.size());
5077 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
5078 dprintf(fd, " Frame | Number of Buffers | Req Id: | Blob Req | Input buffer present\n");
5079 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
5080 for(pendingRequestIterator i = mPendingRequestsList.begin();
5081 i != mPendingRequestsList.end(); i++) {
5082 dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
5083 i->frame_number, i->num_buffers, i->request_id, i->blob_request,
5084 i->input_buffer);
5085 }
5086 dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
5087 mPendingBuffersMap.get_num_overall_buffers());
5088 dprintf(fd, "-------+------------------\n");
5089 dprintf(fd, " Frame | Stream type mask \n");
5090 dprintf(fd, "-------+------------------\n");
5091 for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
5092 for(auto &j : req.mPendingBufferList) {
5093 QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
5094 dprintf(fd, " %5d | %11d \n",
5095 req.frame_number, channel->getStreamTypeMask());
5096 }
5097 }
5098 dprintf(fd, "-------+------------------\n");
5099
5100 dprintf(fd, "\nPending frame drop list: %zu\n",
5101 mPendingFrameDropList.size());
5102 dprintf(fd, "-------+-----------\n");
5103 dprintf(fd, " Frame | Stream ID \n");
5104 dprintf(fd, "-------+-----------\n");
5105 for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
5106 i != mPendingFrameDropList.end(); i++) {
5107 dprintf(fd, " %5d | %9d \n",
5108 i->frame_number, i->stream_ID);
5109 }
5110 dprintf(fd, "-------+-----------\n");
5111
5112 dprintf(fd, "\n Camera HAL3 information End \n");
5113
5114 /* use dumpsys media.camera as trigger to send update debug level event */
5115 mUpdateDebugLevel = true;
5116 pthread_mutex_unlock(&mMutex);
5117 return;
5118}
5119
5120/*===========================================================================
5121 * FUNCTION : flush
5122 *
5123 * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
5124 * conditionally restarts channels
5125 *
5126 * PARAMETERS :
5127 * @ restartChannels: re-start all channels
5128 *
5129 *
5130 * RETURN :
5131 * 0 on success
5132 * Error code on failure
5133 *==========================================================================*/
5134int QCamera3HardwareInterface::flush(bool restartChannels)
5135{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08005136 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07005137 int32_t rc = NO_ERROR;
5138
5139 LOGD("Unblocking Process Capture Request");
5140 pthread_mutex_lock(&mMutex);
5141 mFlush = true;
5142 pthread_mutex_unlock(&mMutex);
5143
5144 rc = stopAllChannels();
5145 // unlink of dualcam
5146 if (mIsDeviceLinked) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005147 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
5148 &m_pDualCamCmdPtr->bundle_info;
5149 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005150 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
5151 pthread_mutex_lock(&gCamLock);
5152
5153 if (mIsMainCamera == 1) {
5154 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
5155 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005156 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07005157 // related session id should be session id of linked session
5158 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5159 } else {
5160 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
5161 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005162 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07005163 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5164 }
5165 pthread_mutex_unlock(&gCamLock);
5166
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005167 rc = mCameraHandle->ops->set_dual_cam_cmd(
5168 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07005169 if (rc < 0) {
5170 LOGE("Dualcam: Unlink failed, but still proceed to close");
5171 }
5172 }
5173
5174 if (rc < 0) {
5175 LOGE("stopAllChannels failed");
5176 return rc;
5177 }
5178 if (mChannelHandle) {
5179 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
5180 mChannelHandle);
5181 }
5182
5183 // Reset bundle info
5184 rc = setBundleInfo();
5185 if (rc < 0) {
5186 LOGE("setBundleInfo failed %d", rc);
5187 return rc;
5188 }
5189
5190 // Mutex Lock
5191 pthread_mutex_lock(&mMutex);
5192
5193 // Unblock process_capture_request
5194 mPendingLiveRequest = 0;
5195 pthread_cond_signal(&mRequestCond);
5196
5197 rc = notifyErrorForPendingRequests();
5198 if (rc < 0) {
5199 LOGE("notifyErrorForPendingRequests failed");
5200 pthread_mutex_unlock(&mMutex);
5201 return rc;
5202 }
5203
5204 mFlush = false;
5205
5206 // Start the Streams/Channels
5207 if (restartChannels) {
5208 rc = startAllChannels();
5209 if (rc < 0) {
5210 LOGE("startAllChannels failed");
5211 pthread_mutex_unlock(&mMutex);
5212 return rc;
5213 }
5214 }
5215
5216 if (mChannelHandle) {
5217 mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
5218 mChannelHandle);
5219 if (rc < 0) {
5220 LOGE("start_channel failed");
5221 pthread_mutex_unlock(&mMutex);
5222 return rc;
5223 }
5224 }
5225
5226 pthread_mutex_unlock(&mMutex);
5227
5228 return 0;
5229}
5230
/*===========================================================================
 * FUNCTION   : flushPerf
 *
 * DESCRIPTION: This is the performance optimization version of flush that does
 *              not use stream off, rather flushes the system. It sends a flush
 *              IOCTL to the backend, then blocks (with a FLUSH_TIMEOUT bound
 *              when the clock is readable) until all pending buffers have been
 *              returned, and finally errors out the pending requests.
 *
 * PARAMETERS : None
 *
 * RETURN     : 0 : success
 *              -EINVAL: input is malformed (device is not valid)
 *              -ENODEV: if the device has encountered a serious error
 *==========================================================================*/
int QCamera3HardwareInterface::flushPerf()
{
    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
    int32_t rc = 0;
    struct timespec timeout;
    bool timed_wait = false;

    pthread_mutex_lock(&mMutex);
    mFlushPerf = true;
    // Snapshot the number of outstanding buffers; the wait loop below runs
    // until this counter (decremented on the buffer-return path) hits zero.
    mPendingBuffersMap.numPendingBufsAtFlush =
        mPendingBuffersMap.get_num_overall_buffers();
    LOGD("Calling flush. Wait for %d buffers to return",
        mPendingBuffersMap.numPendingBufsAtFlush);

    /* send the flush event to the backend */
    rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
    if (rc < 0) {
        LOGE("Error in flush: IOCTL failure");
        mFlushPerf = false;
        pthread_mutex_unlock(&mMutex);
        return -ENODEV;
    }

    // Nothing outstanding -- no need to wait at all.
    if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
        LOGD("No pending buffers in HAL, return flush");
        mFlushPerf = false;
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    /* wait on a signal that buffers were received */
    // CLOCK_REALTIME is required here because pthread_cond_timedwait expects
    // an absolute realtime deadline by default. If the clock read fails we
    // degrade to an unbounded wait rather than failing the flush.
    rc = clock_gettime(CLOCK_REALTIME, &timeout);
    if (rc < 0) {
        LOGE("Error reading the real time clock, cannot use timed wait");
    } else {
        timeout.tv_sec += FLUSH_TIMEOUT;
        timed_wait = true;
    }

    //Block on conditional variable
    // mMutex is atomically released while waiting and re-acquired on wakeup,
    // so the buffer-return path can update numPendingBufsAtFlush and signal
    // mBuffersCond.
    while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
        LOGD("Waiting on mBuffersCond");
        if (!timed_wait) {
            rc = pthread_cond_wait(&mBuffersCond, &mMutex);
            if (rc != 0) {
                LOGE("pthread_cond_wait failed due to rc = %s",
                        strerror(rc));
                break;
            }
        } else {
            rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
            if (rc != 0) {
                // Includes ETIMEDOUT: buffers did not all come back in time.
                LOGE("pthread_cond_timedwait failed due to rc = %s",
                            strerror(rc));
                break;
            }
        }
    }
    if (rc != 0) {
        mFlushPerf = false;
        pthread_mutex_unlock(&mMutex);
        return -ENODEV;
    }

    LOGD("Received buffers, now safe to return them");

    //make sure the channels handle flush
    //currently only required for the picture channel to release snapshot resources
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3Channel *channel = (*it)->channel;
        if (channel) {
            rc = channel->flush();
            if (rc) {
               LOGE("Flushing the channels failed with error %d", rc);
               // even though the channel flush failed we need to continue and
               // return the buffers we have to the framework, however the return
               // value will be an error
               rc = -ENODEV;
            }
        }
    }

    /* notify the frameworks and send errored results */
    rc = notifyErrorForPendingRequests();
    if (rc < 0) {
        LOGE("notifyErrorForPendingRequests failed");
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    //unblock process_capture_request
    mPendingLiveRequest = 0;
    unblockRequestIfNecessary();

    mFlushPerf = false;
    pthread_mutex_unlock(&mMutex);
    LOGD ("Flush Operation complete. rc = %d", rc);
    return rc;
}
5344
5345/*===========================================================================
5346 * FUNCTION : handleCameraDeviceError
5347 *
5348 * DESCRIPTION: This function calls internal flush and notifies the error to
5349 * framework and updates the state variable.
5350 *
5351 * PARAMETERS : None
5352 *
5353 * RETURN : NO_ERROR on Success
5354 * Error code on failure
5355 *==========================================================================*/
5356int32_t QCamera3HardwareInterface::handleCameraDeviceError()
5357{
5358 int32_t rc = NO_ERROR;
5359
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005360 {
5361 Mutex::Autolock lock(mFlushLock);
5362 pthread_mutex_lock(&mMutex);
5363 if (mState != ERROR) {
5364 //if mState != ERROR, nothing to be done
5365 pthread_mutex_unlock(&mMutex);
5366 return NO_ERROR;
5367 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005368 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005369
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005370 rc = flush(false /* restart channels */);
5371 if (NO_ERROR != rc) {
5372 LOGE("internal flush to handle mState = ERROR failed");
5373 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005374
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005375 pthread_mutex_lock(&mMutex);
5376 mState = DEINIT;
5377 pthread_mutex_unlock(&mMutex);
5378 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005379
5380 camera3_notify_msg_t notify_msg;
5381 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
5382 notify_msg.type = CAMERA3_MSG_ERROR;
5383 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
5384 notify_msg.message.error.error_stream = NULL;
5385 notify_msg.message.error.frame_number = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005386 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07005387
5388 return rc;
5389}
5390
5391/*===========================================================================
5392 * FUNCTION : captureResultCb
5393 *
5394 * DESCRIPTION: Callback handler for all capture result
5395 * (streams, as well as metadata)
5396 *
5397 * PARAMETERS :
5398 * @metadata : metadata information
5399 * @buffer : actual gralloc buffer to be returned to frameworks.
5400 * NULL if metadata.
5401 *
5402 * RETURN : NONE
5403 *==========================================================================*/
5404void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
5405 camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
5406{
5407 if (metadata_buf) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005408 pthread_mutex_lock(&mMutex);
5409 uint8_t batchSize = mBatchSize;
5410 pthread_mutex_unlock(&mMutex);
5411 if (batchSize) {
Thierry Strudel3d639192016-09-09 11:52:26 -07005412 handleBatchMetadata(metadata_buf,
5413 true /* free_and_bufdone_meta_buf */);
5414 } else { /* mBatchSize = 0 */
5415 hdrPlusPerfLock(metadata_buf);
5416 pthread_mutex_lock(&mMutex);
5417 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005418 true /* free_and_bufdone_meta_buf */,
5419 false /* first frame of batch metadata */ );
Thierry Strudel3d639192016-09-09 11:52:26 -07005420 pthread_mutex_unlock(&mMutex);
5421 }
5422 } else if (isInputBuffer) {
5423 pthread_mutex_lock(&mMutex);
5424 handleInputBufferWithLock(frame_number);
5425 pthread_mutex_unlock(&mMutex);
5426 } else {
5427 pthread_mutex_lock(&mMutex);
5428 handleBufferWithLock(buffer, frame_number);
5429 pthread_mutex_unlock(&mMutex);
5430 }
5431 return;
5432}
5433
5434/*===========================================================================
5435 * FUNCTION : getReprocessibleOutputStreamId
5436 *
5437 * DESCRIPTION: Get source output stream id for the input reprocess stream
5438 * based on size and format, which would be the largest
5439 * output stream if an input stream exists.
5440 *
5441 * PARAMETERS :
5442 * @id : return the stream id if found
5443 *
5444 * RETURN : int32_t type of status
5445 * NO_ERROR -- success
5446 * none-zero failure code
5447 *==========================================================================*/
5448int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
5449{
5450 /* check if any output or bidirectional stream with the same size and format
5451 and return that stream */
5452 if ((mInputStreamInfo.dim.width > 0) &&
5453 (mInputStreamInfo.dim.height > 0)) {
5454 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5455 it != mStreamInfo.end(); it++) {
5456
5457 camera3_stream_t *stream = (*it)->stream;
5458 if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
5459 (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
5460 (stream->format == mInputStreamInfo.format)) {
5461 // Usage flag for an input stream and the source output stream
5462 // may be different.
5463 LOGD("Found reprocessible output stream! %p", *it);
5464 LOGD("input stream usage 0x%x, current stream usage 0x%x",
5465 stream->usage, mInputStreamInfo.usage);
5466
5467 QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
5468 if (channel != NULL && channel->mStreams[0]) {
5469 id = channel->mStreams[0]->getMyServerID();
5470 return NO_ERROR;
5471 }
5472 }
5473 }
5474 } else {
5475 LOGD("No input stream, so no reprocessible output stream");
5476 }
5477 return NAME_NOT_FOUND;
5478}
5479
5480/*===========================================================================
5481 * FUNCTION : lookupFwkName
5482 *
5483 * DESCRIPTION: In case the enum is not same in fwk and backend
5484 * make sure the parameter is correctly propogated
5485 *
5486 * PARAMETERS :
5487 * @arr : map between the two enums
5488 * @len : len of the map
5489 * @hal_name : name of the hal_parm to map
5490 *
5491 * RETURN : int type of status
5492 * fwk_name -- success
5493 * none-zero failure code
5494 *==========================================================================*/
5495template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
5496 size_t len, halType hal_name)
5497{
5498
5499 for (size_t i = 0; i < len; i++) {
5500 if (arr[i].hal_name == hal_name) {
5501 return arr[i].fwk_name;
5502 }
5503 }
5504
5505 /* Not able to find matching framework type is not necessarily
5506 * an error case. This happens when mm-camera supports more attributes
5507 * than the frameworks do */
5508 LOGH("Cannot find matching framework type");
5509 return NAME_NOT_FOUND;
5510}
5511
5512/*===========================================================================
5513 * FUNCTION : lookupHalName
5514 *
5515 * DESCRIPTION: In case the enum is not same in fwk and backend
5516 * make sure the parameter is correctly propogated
5517 *
5518 * PARAMETERS :
5519 * @arr : map between the two enums
5520 * @len : len of the map
5521 * @fwk_name : name of the hal_parm to map
5522 *
5523 * RETURN : int32_t type of status
5524 * hal_name -- success
5525 * none-zero failure code
5526 *==========================================================================*/
5527template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
5528 size_t len, fwkType fwk_name)
5529{
5530 for (size_t i = 0; i < len; i++) {
5531 if (arr[i].fwk_name == fwk_name) {
5532 return arr[i].hal_name;
5533 }
5534 }
5535
5536 LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
5537 return NAME_NOT_FOUND;
5538}
5539
5540/*===========================================================================
5541 * FUNCTION : lookupProp
5542 *
5543 * DESCRIPTION: lookup a value by its name
5544 *
5545 * PARAMETERS :
5546 * @arr : map between the two enums
5547 * @len : size of the map
5548 * @name : name to be looked up
5549 *
5550 * RETURN : Value if found
5551 * CAM_CDS_MODE_MAX if not found
5552 *==========================================================================*/
5553template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
5554 size_t len, const char *name)
5555{
5556 if (name) {
5557 for (size_t i = 0; i < len; i++) {
5558 if (!strcmp(arr[i].desc, name)) {
5559 return arr[i].val;
5560 }
5561 }
5562 }
5563 return CAM_CDS_MODE_MAX;
5564}
5565
5566/*===========================================================================
5567 *
5568 * DESCRIPTION:
5569 *
5570 * PARAMETERS :
5571 * @metadata : metadata information from callback
5572 * @timestamp: metadata buffer timestamp
5573 * @request_id: request id
5574 * @jpegMetadata: additional jpeg metadata
Samuel Ha68ba5172016-12-15 18:41:12 -08005575 * @DevCamDebug_meta_enable: enable DevCamDebug meta
5576 * // DevCamDebug metadata end
Thierry Strudel3d639192016-09-09 11:52:26 -07005577 * @pprocDone: whether internal offline postprocsesing is done
5578 *
5579 * RETURN : camera_metadata_t*
5580 * metadata in a format specified by fwk
5581 *==========================================================================*/
5582camera_metadata_t*
5583QCamera3HardwareInterface::translateFromHalMetadata(
5584 metadata_buffer_t *metadata,
5585 nsecs_t timestamp,
5586 int32_t request_id,
5587 const CameraMetadata& jpegMetadata,
5588 uint8_t pipeline_depth,
5589 uint8_t capture_intent,
Samuel Ha68ba5172016-12-15 18:41:12 -08005590 /* DevCamDebug metadata translateFromHalMetadata argument */
5591 uint8_t DevCamDebug_meta_enable,
5592 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07005593 bool pprocDone,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005594 uint8_t fwk_cacMode,
5595 bool firstMetadataInBatch)
Thierry Strudel3d639192016-09-09 11:52:26 -07005596{
5597 CameraMetadata camMetadata;
5598 camera_metadata_t *resultMetadata;
5599
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005600 if (mBatchSize && !firstMetadataInBatch) {
5601 /* In batch mode, use cached metadata from the first metadata
5602 in the batch */
5603 camMetadata.clear();
5604 camMetadata = mCachedMetadata;
5605 }
5606
Thierry Strudel3d639192016-09-09 11:52:26 -07005607 if (jpegMetadata.entryCount())
5608 camMetadata.append(jpegMetadata);
5609
5610 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
5611 camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
5612 camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pipeline_depth, 1);
5613 camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &capture_intent, 1);
Samuel Ha68ba5172016-12-15 18:41:12 -08005614 if (mBatchSize == 0) {
5615 // DevCamDebug metadata translateFromHalMetadata. Only update this one for non-HFR mode
5616 camMetadata.update(DEVCAMDEBUG_META_ENABLE, &DevCamDebug_meta_enable, 1);
5617 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005618
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005619 if (mBatchSize && !firstMetadataInBatch) {
5620 /* In batch mode, use cached metadata instead of parsing metadata buffer again */
5621 resultMetadata = camMetadata.release();
5622 return resultMetadata;
5623 }
5624
Samuel Ha68ba5172016-12-15 18:41:12 -08005625 // atrace_begin(ATRACE_TAG_ALWAYS, "DevCamDebugInfo");
5626 // Only update DevCameraDebug metadta conditionally: non-HFR mode and it is enabled.
5627 if (mBatchSize == 0 && DevCamDebug_meta_enable != 0) {
5628 // DevCamDebug metadata translateFromHalMetadata AF
5629 IF_META_AVAILABLE(int32_t, DevCamDebug_af_lens_position,
5630 CAM_INTF_META_DEV_CAM_AF_LENS_POSITION, metadata) {
5631 int32_t fwk_DevCamDebug_af_lens_position = *DevCamDebug_af_lens_position;
5632 camMetadata.update(DEVCAMDEBUG_AF_LENS_POSITION, &fwk_DevCamDebug_af_lens_position, 1);
5633 }
5634 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_confidence,
5635 CAM_INTF_META_DEV_CAM_AF_TOF_CONFIDENCE, metadata) {
5636 int32_t fwk_DevCamDebug_af_tof_confidence = *DevCamDebug_af_tof_confidence;
5637 camMetadata.update(DEVCAMDEBUG_AF_TOF_CONFIDENCE, &fwk_DevCamDebug_af_tof_confidence, 1);
5638 }
5639 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_distance,
5640 CAM_INTF_META_DEV_CAM_AF_TOF_DISTANCE, metadata) {
5641 int32_t fwk_DevCamDebug_af_tof_distance = *DevCamDebug_af_tof_distance;
5642 camMetadata.update(DEVCAMDEBUG_AF_TOF_DISTANCE, &fwk_DevCamDebug_af_tof_distance, 1);
5643 }
5644 IF_META_AVAILABLE(int32_t, DevCamDebug_af_luma,
5645 CAM_INTF_META_DEV_CAM_AF_LUMA, metadata) {
5646 int32_t fwk_DevCamDebug_af_luma = *DevCamDebug_af_luma;
5647 camMetadata.update(DEVCAMDEBUG_AF_LUMA, &fwk_DevCamDebug_af_luma, 1);
5648 }
5649 IF_META_AVAILABLE(int32_t, DevCamDebug_af_haf_state,
5650 CAM_INTF_META_DEV_CAM_AF_HAF_STATE, metadata) {
5651 int32_t fwk_DevCamDebug_af_haf_state = *DevCamDebug_af_haf_state;
5652 camMetadata.update(DEVCAMDEBUG_AF_HAF_STATE, &fwk_DevCamDebug_af_haf_state, 1);
5653 }
5654 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_target_pos,
5655 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_TARGET_POS, metadata) {
5656 int32_t fwk_DevCamDebug_af_monitor_pdaf_target_pos =
5657 *DevCamDebug_af_monitor_pdaf_target_pos;
5658 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
5659 &fwk_DevCamDebug_af_monitor_pdaf_target_pos, 1);
5660 }
5661 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_confidence,
5662 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_CONFIDENCE, metadata) {
5663 int32_t fwk_DevCamDebug_af_monitor_pdaf_confidence =
5664 *DevCamDebug_af_monitor_pdaf_confidence;
5665 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
5666 &fwk_DevCamDebug_af_monitor_pdaf_confidence, 1);
5667 }
5668 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_refocus,
5669 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_REFOCUS, metadata) {
5670 int32_t fwk_DevCamDebug_af_monitor_pdaf_refocus = *DevCamDebug_af_monitor_pdaf_refocus;
5671 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
5672 &fwk_DevCamDebug_af_monitor_pdaf_refocus, 1);
5673 }
5674 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_target_pos,
5675 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_TARGET_POS, metadata) {
5676 int32_t fwk_DevCamDebug_af_monitor_tof_target_pos =
5677 *DevCamDebug_af_monitor_tof_target_pos;
5678 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
5679 &fwk_DevCamDebug_af_monitor_tof_target_pos, 1);
5680 }
5681 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_confidence,
5682 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_CONFIDENCE, metadata) {
5683 int32_t fwk_DevCamDebug_af_monitor_tof_confidence =
5684 *DevCamDebug_af_monitor_tof_confidence;
5685 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
5686 &fwk_DevCamDebug_af_monitor_tof_confidence, 1);
5687 }
5688 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_refocus,
5689 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_REFOCUS, metadata) {
5690 int32_t fwk_DevCamDebug_af_monitor_tof_refocus = *DevCamDebug_af_monitor_tof_refocus;
5691 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
5692 &fwk_DevCamDebug_af_monitor_tof_refocus, 1);
5693 }
5694 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_type_select,
5695 CAM_INTF_META_DEV_CAM_AF_MONITOR_TYPE_SELECT, metadata) {
5696 int32_t fwk_DevCamDebug_af_monitor_type_select = *DevCamDebug_af_monitor_type_select;
5697 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
5698 &fwk_DevCamDebug_af_monitor_type_select, 1);
5699 }
5700 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_refocus,
5701 CAM_INTF_META_DEV_CAM_AF_MONITOR_REFOCUS, metadata) {
5702 int32_t fwk_DevCamDebug_af_monitor_refocus = *DevCamDebug_af_monitor_refocus;
5703 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_REFOCUS,
5704 &fwk_DevCamDebug_af_monitor_refocus, 1);
5705 }
5706 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_target_pos,
5707 CAM_INTF_META_DEV_CAM_AF_MONITOR_TARGET_POS, metadata) {
5708 int32_t fwk_DevCamDebug_af_monitor_target_pos = *DevCamDebug_af_monitor_target_pos;
5709 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
5710 &fwk_DevCamDebug_af_monitor_target_pos, 1);
5711 }
5712 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_target_pos,
5713 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_TARGET_POS, metadata) {
5714 int32_t fwk_DevCamDebug_af_search_pdaf_target_pos =
5715 *DevCamDebug_af_search_pdaf_target_pos;
5716 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
5717 &fwk_DevCamDebug_af_search_pdaf_target_pos, 1);
5718 }
5719 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_next_pos,
5720 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEXT_POS, metadata) {
5721 int32_t fwk_DevCamDebug_af_search_pdaf_next_pos = *DevCamDebug_af_search_pdaf_next_pos;
5722 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
5723 &fwk_DevCamDebug_af_search_pdaf_next_pos, 1);
5724 }
5725 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_near_pos,
5726 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEAR_POS, metadata) {
5727 int32_t fwk_DevCamDebug_af_search_pdaf_near_pos = *DevCamDebug_af_search_pdaf_near_pos;
5728 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
5729 &fwk_DevCamDebug_af_search_pdaf_near_pos, 1);
5730 }
5731 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_far_pos,
5732 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_FAR_POS, metadata) {
5733 int32_t fwk_DevCamDebug_af_search_pdaf_far_pos = *DevCamDebug_af_search_pdaf_far_pos;
5734 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
5735 &fwk_DevCamDebug_af_search_pdaf_far_pos, 1);
5736 }
5737 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_confidence,
5738 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_CONFIDENCE, metadata) {
5739 int32_t fwk_DevCamDebug_af_search_pdaf_confidence = *DevCamDebug_af_search_pdaf_confidence;
5740 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
5741 &fwk_DevCamDebug_af_search_pdaf_confidence, 1);
5742 }
5743 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_target_pos,
5744 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_TARGET_POS, metadata) {
5745 int32_t fwk_DevCamDebug_af_search_tof_target_pos =
5746 *DevCamDebug_af_search_tof_target_pos;
5747 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
5748 &fwk_DevCamDebug_af_search_tof_target_pos, 1);
5749 }
5750 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_next_pos,
5751 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEXT_POS, metadata) {
5752 int32_t fwk_DevCamDebug_af_search_tof_next_pos = *DevCamDebug_af_search_tof_next_pos;
5753 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
5754 &fwk_DevCamDebug_af_search_tof_next_pos, 1);
5755 }
5756 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_near_pos,
5757 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEAR_POS, metadata) {
5758 int32_t fwk_DevCamDebug_af_search_tof_near_pos = *DevCamDebug_af_search_tof_near_pos;
5759 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
5760 &fwk_DevCamDebug_af_search_tof_near_pos, 1);
5761 }
5762 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_far_pos,
5763 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_FAR_POS, metadata) {
5764 int32_t fwk_DevCamDebug_af_search_tof_far_pos = *DevCamDebug_af_search_tof_far_pos;
5765 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
5766 &fwk_DevCamDebug_af_search_tof_far_pos, 1);
5767 }
5768 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_confidence,
5769 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_CONFIDENCE, metadata) {
5770 int32_t fwk_DevCamDebug_af_search_tof_confidence = *DevCamDebug_af_search_tof_confidence;
5771 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
5772 &fwk_DevCamDebug_af_search_tof_confidence, 1);
5773 }
5774 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_type_select,
5775 CAM_INTF_META_DEV_CAM_AF_SEARCH_TYPE_SELECT, metadata) {
5776 int32_t fwk_DevCamDebug_af_search_type_select = *DevCamDebug_af_search_type_select;
5777 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
5778 &fwk_DevCamDebug_af_search_type_select, 1);
5779 }
5780 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_next_pos,
5781 CAM_INTF_META_DEV_CAM_AF_SEARCH_NEXT_POS, metadata) {
5782 int32_t fwk_DevCamDebug_af_search_next_pos = *DevCamDebug_af_search_next_pos;
5783 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
5784 &fwk_DevCamDebug_af_search_next_pos, 1);
5785 }
5786 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_target_pos,
5787 CAM_INTF_META_DEV_CAM_AF_SEARCH_TARGET_POS, metadata) {
5788 int32_t fwk_DevCamDebug_af_search_target_pos = *DevCamDebug_af_search_target_pos;
5789 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
5790 &fwk_DevCamDebug_af_search_target_pos, 1);
5791 }
5792 // DevCamDebug metadata translateFromHalMetadata AEC
5793 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_target_luma,
5794 CAM_INTF_META_DEV_CAM_AEC_TARGET_LUMA, metadata) {
5795 int32_t fwk_DevCamDebug_aec_target_luma = *DevCamDebug_aec_target_luma;
5796 camMetadata.update(DEVCAMDEBUG_AEC_TARGET_LUMA, &fwk_DevCamDebug_aec_target_luma, 1);
5797 }
5798 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_comp_luma,
5799 CAM_INTF_META_DEV_CAM_AEC_COMP_LUMA, metadata) {
5800 int32_t fwk_DevCamDebug_aec_comp_luma = *DevCamDebug_aec_comp_luma;
5801 camMetadata.update(DEVCAMDEBUG_AEC_COMP_LUMA, &fwk_DevCamDebug_aec_comp_luma, 1);
5802 }
5803 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_avg_luma,
5804 CAM_INTF_META_DEV_CAM_AEC_AVG_LUMA, metadata) {
5805 int32_t fwk_DevCamDebug_aec_avg_luma = *DevCamDebug_aec_avg_luma;
5806 camMetadata.update(DEVCAMDEBUG_AEC_AVG_LUMA, &fwk_DevCamDebug_aec_avg_luma, 1);
5807 }
5808 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_cur_luma,
5809 CAM_INTF_META_DEV_CAM_AEC_CUR_LUMA, metadata) {
5810 int32_t fwk_DevCamDebug_aec_cur_luma = *DevCamDebug_aec_cur_luma;
5811 camMetadata.update(DEVCAMDEBUG_AEC_CUR_LUMA, &fwk_DevCamDebug_aec_cur_luma, 1);
5812 }
5813 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_linecount,
5814 CAM_INTF_META_DEV_CAM_AEC_LINECOUNT, metadata) {
5815 int32_t fwk_DevCamDebug_aec_linecount = *DevCamDebug_aec_linecount;
5816 camMetadata.update(DEVCAMDEBUG_AEC_LINECOUNT, &fwk_DevCamDebug_aec_linecount, 1);
5817 }
5818 IF_META_AVAILABLE(float, DevCamDebug_aec_real_gain,
5819 CAM_INTF_META_DEV_CAM_AEC_REAL_GAIN, metadata) {
5820 float fwk_DevCamDebug_aec_real_gain = *DevCamDebug_aec_real_gain;
5821 camMetadata.update(DEVCAMDEBUG_AEC_REAL_GAIN, &fwk_DevCamDebug_aec_real_gain, 1);
5822 }
5823 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_exp_index,
5824 CAM_INTF_META_DEV_CAM_AEC_EXP_INDEX, metadata) {
5825 int32_t fwk_DevCamDebug_aec_exp_index = *DevCamDebug_aec_exp_index;
5826 camMetadata.update(DEVCAMDEBUG_AEC_EXP_INDEX, &fwk_DevCamDebug_aec_exp_index, 1);
5827 }
5828 IF_META_AVAILABLE(float, DevCamDebug_aec_lux_idx,
5829 CAM_INTF_META_DEV_CAM_AEC_LUX_IDX, metadata) {
5830 float fwk_DevCamDebug_aec_lux_idx = *DevCamDebug_aec_lux_idx;
5831 camMetadata.update(DEVCAMDEBUG_AEC_LUX_IDX, &fwk_DevCamDebug_aec_lux_idx, 1);
5832 }
5833 // DevCamDebug metadata translateFromHalMetadata AWB
5834 IF_META_AVAILABLE(float, DevCamDebug_awb_r_gain,
5835 CAM_INTF_META_DEV_CAM_AWB_R_GAIN, metadata) {
5836 float fwk_DevCamDebug_awb_r_gain = *DevCamDebug_awb_r_gain;
5837 camMetadata.update(DEVCAMDEBUG_AWB_R_GAIN, &fwk_DevCamDebug_awb_r_gain, 1);
5838 }
5839 IF_META_AVAILABLE(float, DevCamDebug_awb_g_gain,
5840 CAM_INTF_META_DEV_CAM_AWB_G_GAIN, metadata) {
5841 float fwk_DevCamDebug_awb_g_gain = *DevCamDebug_awb_g_gain;
5842 camMetadata.update(DEVCAMDEBUG_AWB_G_GAIN, &fwk_DevCamDebug_awb_g_gain, 1);
5843 }
5844 IF_META_AVAILABLE(float, DevCamDebug_awb_b_gain,
5845 CAM_INTF_META_DEV_CAM_AWB_B_GAIN, metadata) {
5846 float fwk_DevCamDebug_awb_b_gain = *DevCamDebug_awb_b_gain;
5847 camMetadata.update(DEVCAMDEBUG_AWB_B_GAIN, &fwk_DevCamDebug_awb_b_gain, 1);
5848 }
5849 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_cct,
5850 CAM_INTF_META_DEV_CAM_AWB_CCT, metadata) {
5851 int32_t fwk_DevCamDebug_awb_cct = *DevCamDebug_awb_cct;
5852 camMetadata.update(DEVCAMDEBUG_AWB_CCT, &fwk_DevCamDebug_awb_cct, 1);
5853 }
5854 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_decision,
5855 CAM_INTF_META_DEV_CAM_AWB_DECISION, metadata) {
5856 int32_t fwk_DevCamDebug_awb_decision = *DevCamDebug_awb_decision;
5857 camMetadata.update(DEVCAMDEBUG_AWB_DECISION, &fwk_DevCamDebug_awb_decision, 1);
5858 }
5859 }
5860 // atrace_end(ATRACE_TAG_ALWAYS);
5861
Thierry Strudel3d639192016-09-09 11:52:26 -07005862 IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
5863 int64_t fwk_frame_number = *frame_number;
5864 camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
5865 }
5866
5867 IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
5868 int32_t fps_range[2];
5869 fps_range[0] = (int32_t)float_range->min_fps;
5870 fps_range[1] = (int32_t)float_range->max_fps;
5871 camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
5872 fps_range, 2);
5873 LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
5874 fps_range[0], fps_range[1]);
5875 }
5876
5877 IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
5878 camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
5879 }
5880
5881 IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
5882 int val = (uint8_t)lookupFwkName(SCENE_MODES_MAP,
5883 METADATA_MAP_SIZE(SCENE_MODES_MAP),
5884 *sceneMode);
5885 if (NAME_NOT_FOUND != val) {
5886 uint8_t fwkSceneMode = (uint8_t)val;
5887 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
5888 LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
5889 fwkSceneMode);
5890 }
5891 }
5892
5893 IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
5894 uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
5895 camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
5896 }
5897
5898 IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
5899 uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
5900 camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
5901 }
5902
5903 IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
5904 uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
5905 camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
5906 }
5907
5908 IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
5909 CAM_INTF_META_EDGE_MODE, metadata) {
5910 camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
5911 }
5912
5913 IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
5914 uint8_t fwk_flashPower = (uint8_t) *flashPower;
5915 camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
5916 }
5917
5918 IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
5919 camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
5920 }
5921
5922 IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
5923 if (0 <= *flashState) {
5924 uint8_t fwk_flashState = (uint8_t) *flashState;
5925 if (!gCamCapability[mCameraId]->flash_available) {
5926 fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
5927 }
5928 camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
5929 }
5930 }
5931
5932 IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
5933 int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
5934 if (NAME_NOT_FOUND != val) {
5935 uint8_t fwk_flashMode = (uint8_t)val;
5936 camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
5937 }
5938 }
5939
5940 IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
5941 uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
5942 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
5943 }
5944
5945 IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
5946 camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
5947 }
5948
5949 IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
5950 camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
5951 }
5952
5953 IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
5954 camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
5955 }
5956
5957 IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
5958 uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
5959 camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
5960 }
5961
5962 IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
5963 uint8_t fwk_videoStab = (uint8_t) *videoStab;
5964 LOGD("fwk_videoStab = %d", fwk_videoStab);
5965 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
5966 } else {
5967 // Regardless of Video stab supports or not, CTS is expecting the EIS result to be non NULL
5968 // and so hardcoding the Video Stab result to OFF mode.
5969 uint8_t fwkVideoStabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
5970 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005971 LOGD("EIS result default to OFF mode");
Thierry Strudel3d639192016-09-09 11:52:26 -07005972 }
5973
5974 IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
5975 uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
5976 camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
5977 }
5978
5979 IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
5980 camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
5981 }
5982
5983 IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelSourcePattern,
5984 CAM_INTF_META_BLACK_LEVEL_SOURCE_PATTERN, metadata) {
5985
5986 LOGD("dynamicblackLevel = %f %f %f %f",
5987 blackLevelSourcePattern->cam_black_level[0],
5988 blackLevelSourcePattern->cam_black_level[1],
5989 blackLevelSourcePattern->cam_black_level[2],
5990 blackLevelSourcePattern->cam_black_level[3]);
5991 }
5992
5993 IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
5994 CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
5995 float fwk_blackLevelInd[4];
5996
5997 fwk_blackLevelInd[0] = blackLevelAppliedPattern->cam_black_level[0];
5998 fwk_blackLevelInd[1] = blackLevelAppliedPattern->cam_black_level[1];
5999 fwk_blackLevelInd[2] = blackLevelAppliedPattern->cam_black_level[2];
6000 fwk_blackLevelInd[3] = blackLevelAppliedPattern->cam_black_level[3];
6001
6002 LOGD("applied dynamicblackLevel = %f %f %f %f",
6003 blackLevelAppliedPattern->cam_black_level[0],
6004 blackLevelAppliedPattern->cam_black_level[1],
6005 blackLevelAppliedPattern->cam_black_level[2],
6006 blackLevelAppliedPattern->cam_black_level[3]);
6007 camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd, 4);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006008
6009#ifndef USE_HAL_3_3
6010 // Update the ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
6011 // Need convert the internal 16 bit depth to sensor 10 bit sensor raw
6012 // depth space.
6013 fwk_blackLevelInd[0] /= 64.0;
6014 fwk_blackLevelInd[1] /= 64.0;
6015 fwk_blackLevelInd[2] /= 64.0;
6016 fwk_blackLevelInd[3] /= 64.0;
6017 camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd, 4);
6018#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07006019 }
6020
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006021#ifndef USE_HAL_3_3
6022 // Fixed whitelevel is used by ISP/Sensor
6023 camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
6024 &gCamCapability[mCameraId]->white_level, 1);
6025#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07006026
6027 IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
6028 CAM_INTF_META_SCALER_CROP_REGION, metadata) {
6029 int32_t scalerCropRegion[4];
6030 scalerCropRegion[0] = hScalerCropRegion->left;
6031 scalerCropRegion[1] = hScalerCropRegion->top;
6032 scalerCropRegion[2] = hScalerCropRegion->width;
6033 scalerCropRegion[3] = hScalerCropRegion->height;
6034
6035 // Adjust crop region from sensor output coordinate system to active
6036 // array coordinate system.
6037 mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
6038 scalerCropRegion[2], scalerCropRegion[3]);
6039
6040 camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
6041 }
6042
6043 IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
6044 LOGD("sensorExpTime = %lld", *sensorExpTime);
6045 camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
6046 }
6047
6048 IF_META_AVAILABLE(int64_t, sensorFameDuration,
6049 CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
6050 LOGD("sensorFameDuration = %lld", *sensorFameDuration);
6051 camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
6052 }
6053
6054 IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
6055 CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
6056 LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
6057 camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
6058 sensorRollingShutterSkew, 1);
6059 }
6060
6061 IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
6062 LOGD("sensorSensitivity = %d", *sensorSensitivity);
6063 camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
6064
6065 //calculate the noise profile based on sensitivity
6066 double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
6067 double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
6068 double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
6069 for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
6070 noise_profile[i] = noise_profile_S;
6071 noise_profile[i+1] = noise_profile_O;
6072 }
6073 LOGD("noise model entry (S, O) is (%f, %f)",
6074 noise_profile_S, noise_profile_O);
6075 camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
6076 (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
6077 }
6078
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006079#ifndef USE_HAL_3_3
6080 IF_META_AVAILABLE(int32_t, ispSensitivity, CAM_INTF_META_ISP_SENSITIVITY, metadata) {
6081 int32_t fwk_ispSensitivity = (int32_t) *ispSensitivity;
6082 camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &fwk_ispSensitivity, 1);
6083 }
6084#endif
6085
Thierry Strudel3d639192016-09-09 11:52:26 -07006086 IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
6087 uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
6088 camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
6089 }
6090
6091 IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
6092 int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
6093 *faceDetectMode);
6094 if (NAME_NOT_FOUND != val) {
6095 uint8_t fwk_faceDetectMode = (uint8_t)val;
6096 camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
6097
6098 if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
6099 IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
6100 CAM_INTF_META_FACE_DETECTION, metadata) {
6101 uint8_t numFaces = MIN(
6102 faceDetectionInfo->num_faces_detected, MAX_ROI);
6103 int32_t faceIds[MAX_ROI];
6104 uint8_t faceScores[MAX_ROI];
6105 int32_t faceRectangles[MAX_ROI * 4];
6106 int32_t faceLandmarks[MAX_ROI * 6];
6107 size_t j = 0, k = 0;
6108
6109 for (size_t i = 0; i < numFaces; i++) {
6110 faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
6111 // Adjust crop region from sensor output coordinate system to active
6112 // array coordinate system.
6113 cam_rect_t& rect = faceDetectionInfo->faces[i].face_boundary;
6114 mCropRegionMapper.toActiveArray(rect.left, rect.top,
6115 rect.width, rect.height);
6116
6117 convertToRegions(faceDetectionInfo->faces[i].face_boundary,
6118 faceRectangles+j, -1);
6119
6120 j+= 4;
6121 }
6122 if (numFaces <= 0) {
6123 memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
6124 memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
6125 memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
6126 memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
6127 }
6128
6129 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
6130 numFaces);
6131 camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
6132 faceRectangles, numFaces * 4U);
6133 if (fwk_faceDetectMode ==
6134 ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
6135 IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
6136 CAM_INTF_META_FACE_LANDMARK, metadata) {
6137
6138 for (size_t i = 0; i < numFaces; i++) {
6139 // Map the co-ordinate sensor output coordinate system to active
6140 // array coordinate system.
6141 mCropRegionMapper.toActiveArray(
6142 landmarks->face_landmarks[i].left_eye_center.x,
6143 landmarks->face_landmarks[i].left_eye_center.y);
6144 mCropRegionMapper.toActiveArray(
6145 landmarks->face_landmarks[i].right_eye_center.x,
6146 landmarks->face_landmarks[i].right_eye_center.y);
6147 mCropRegionMapper.toActiveArray(
6148 landmarks->face_landmarks[i].mouth_center.x,
6149 landmarks->face_landmarks[i].mouth_center.y);
6150
6151 convertLandmarks(landmarks->face_landmarks[i], faceLandmarks+k);
Thierry Strudel04e026f2016-10-10 11:27:36 -07006152 k+= TOTAL_LANDMARK_INDICES;
6153 }
6154 } else {
6155 for (size_t i = 0; i < numFaces; i++) {
6156 setInvalidLandmarks(faceLandmarks+k);
6157 k+= TOTAL_LANDMARK_INDICES;
Thierry Strudel3d639192016-09-09 11:52:26 -07006158 }
6159 }
6160
6161 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
6162 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
6163 faceLandmarks, numFaces * 6U);
6164 }
6165 }
6166 }
6167 }
6168 }
6169
6170 IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
6171 uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
6172 camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &fwk_histogramMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006173
6174 if (fwk_histogramMode == ANDROID_STATISTICS_HISTOGRAM_MODE_ON) {
6175 IF_META_AVAILABLE(cam_hist_stats_t, stats_data, CAM_INTF_META_HISTOGRAM, metadata) {
6176 // process histogram statistics info
6177 uint32_t hist_buf[3][CAM_HISTOGRAM_STATS_SIZE];
6178 uint32_t hist_size = sizeof(cam_histogram_data_t::hist_buf);
6179 cam_histogram_data_t rHistData, gHistData, bHistData;
6180 memset(&rHistData, 0, sizeof(rHistData));
6181 memset(&gHistData, 0, sizeof(gHistData));
6182 memset(&bHistData, 0, sizeof(bHistData));
6183
6184 switch (stats_data->type) {
6185 case CAM_HISTOGRAM_TYPE_BAYER:
6186 switch (stats_data->bayer_stats.data_type) {
6187 case CAM_STATS_CHANNEL_GR:
6188 rHistData = gHistData = bHistData = stats_data->bayer_stats.gr_stats;
6189 break;
6190 case CAM_STATS_CHANNEL_GB:
6191 rHistData = gHistData = bHistData = stats_data->bayer_stats.gb_stats;
6192 break;
6193 case CAM_STATS_CHANNEL_B:
6194 rHistData = gHistData = bHistData = stats_data->bayer_stats.b_stats;
6195 break;
6196 case CAM_STATS_CHANNEL_ALL:
6197 rHistData = stats_data->bayer_stats.r_stats;
6198 //Framework expects only 3 channels. So, for now,
6199 //use gb stats for G channel.
6200 gHistData = stats_data->bayer_stats.gb_stats;
6201 bHistData = stats_data->bayer_stats.b_stats;
6202 break;
6203 case CAM_STATS_CHANNEL_Y:
6204 case CAM_STATS_CHANNEL_R:
6205 default:
6206 rHistData = gHistData = bHistData = stats_data->bayer_stats.r_stats;
6207 break;
6208 }
6209 break;
6210 case CAM_HISTOGRAM_TYPE_YUV:
6211 rHistData = gHistData = bHistData = stats_data->yuv_stats;
6212 break;
6213 }
6214
6215 memcpy(hist_buf, rHistData.hist_buf, hist_size);
6216 memcpy(hist_buf[1], gHistData.hist_buf, hist_size);
6217 memcpy(hist_buf[2], bHistData.hist_buf, hist_size);
6218
6219 camMetadata.update(ANDROID_STATISTICS_HISTOGRAM, (int32_t*)hist_buf, hist_size*3);
6220 }
6221 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006222 }
6223
6224 IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
6225 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
6226 uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
6227 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
6228 }
6229
6230 IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
6231 CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
6232 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
6233 CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
6234 }
6235
6236 IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
6237 CAM_INTF_META_LENS_SHADING_MAP, metadata) {
6238 size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
6239 CAM_MAX_SHADING_MAP_HEIGHT);
6240 size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
6241 CAM_MAX_SHADING_MAP_WIDTH);
6242 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
6243 lensShadingMap->lens_shading, 4U * map_width * map_height);
6244 }
6245
6246 IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
6247 uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
6248 camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
6249 }
6250
6251 IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
6252 //Populate CAM_INTF_META_TONEMAP_CURVES
6253 /* ch0 = G, ch 1 = B, ch 2 = R*/
6254 if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
6255 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
6256 tonemap->tonemap_points_cnt,
6257 CAM_MAX_TONEMAP_CURVE_SIZE);
6258 tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
6259 }
6260
6261 camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
6262 &tonemap->curves[0].tonemap_points[0][0],
6263 tonemap->tonemap_points_cnt * 2);
6264
6265 camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
6266 &tonemap->curves[1].tonemap_points[0][0],
6267 tonemap->tonemap_points_cnt * 2);
6268
6269 camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
6270 &tonemap->curves[2].tonemap_points[0][0],
6271 tonemap->tonemap_points_cnt * 2);
6272 }
6273
6274 IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
6275 CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
6276 camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
6277 CC_GAIN_MAX);
6278 }
6279
6280 IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
6281 CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
6282 camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
6283 (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
6284 CC_MATRIX_COLS * CC_MATRIX_ROWS);
6285 }
6286
6287 IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
6288 CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
6289 if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
6290 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
6291 toneCurve->tonemap_points_cnt,
6292 CAM_MAX_TONEMAP_CURVE_SIZE);
6293 toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
6294 }
6295 camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
6296 (float*)toneCurve->curve.tonemap_points,
6297 toneCurve->tonemap_points_cnt * 2);
6298 }
6299
6300 IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
6301 CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
6302 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
6303 predColorCorrectionGains->gains, 4);
6304 }
6305
6306 IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
6307 CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
6308 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
6309 (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
6310 CC_MATRIX_ROWS * CC_MATRIX_COLS);
6311 }
6312
6313 IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
6314 camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
6315 }
6316
6317 IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
6318 uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
6319 camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
6320 }
6321
6322 IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
6323 uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
6324 camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
6325 }
6326
6327 IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
6328 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
6329 *effectMode);
6330 if (NAME_NOT_FOUND != val) {
6331 uint8_t fwk_effectMode = (uint8_t)val;
6332 camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
6333 }
6334 }
6335
6336 IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
6337 CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
6338 int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
6339 METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
6340 if (NAME_NOT_FOUND != fwk_testPatternMode) {
6341 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
6342 }
6343 int32_t fwk_testPatternData[4];
6344 fwk_testPatternData[0] = testPatternData->r;
6345 fwk_testPatternData[3] = testPatternData->b;
6346 switch (gCamCapability[mCameraId]->color_arrangement) {
6347 case CAM_FILTER_ARRANGEMENT_RGGB:
6348 case CAM_FILTER_ARRANGEMENT_GRBG:
6349 fwk_testPatternData[1] = testPatternData->gr;
6350 fwk_testPatternData[2] = testPatternData->gb;
6351 break;
6352 case CAM_FILTER_ARRANGEMENT_GBRG:
6353 case CAM_FILTER_ARRANGEMENT_BGGR:
6354 fwk_testPatternData[2] = testPatternData->gr;
6355 fwk_testPatternData[1] = testPatternData->gb;
6356 break;
6357 default:
6358 LOGE("color arrangement %d is not supported",
6359 gCamCapability[mCameraId]->color_arrangement);
6360 break;
6361 }
6362 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
6363 }
6364
6365 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
6366 camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
6367 }
6368
6369 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
6370 String8 str((const char *)gps_methods);
6371 camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
6372 }
6373
6374 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
6375 camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
6376 }
6377
6378 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
6379 camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
6380 }
6381
6382 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
6383 uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
6384 camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
6385 }
6386
6387 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
6388 uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
6389 camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
6390 }
6391
6392 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
6393 int32_t fwk_thumb_size[2];
6394 fwk_thumb_size[0] = thumb_size->width;
6395 fwk_thumb_size[1] = thumb_size->height;
6396 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
6397 }
6398
6399 IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
6400 camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
6401 privateData,
6402 MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
6403 }
6404
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006405 IF_META_AVAILABLE(int32_t, meteringMode, CAM_INTF_PARM_AEC_ALGO_TYPE, metadata) {
6406 camMetadata.update(QCAMERA3_EXPOSURE_METERING_MODE,
6407 meteringMode, 1);
6408 }
6409
Thierry Strudel3d639192016-09-09 11:52:26 -07006410 if (metadata->is_tuning_params_valid) {
6411 uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
6412 uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
6413 metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
6414
6415
6416 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
6417 sizeof(uint32_t));
6418 data += sizeof(uint32_t);
6419
6420 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
6421 sizeof(uint32_t));
6422 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
6423 data += sizeof(uint32_t);
6424
6425 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
6426 sizeof(uint32_t));
6427 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
6428 data += sizeof(uint32_t);
6429
6430 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
6431 sizeof(uint32_t));
6432 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
6433 data += sizeof(uint32_t);
6434
6435 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
6436 sizeof(uint32_t));
6437 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
6438 data += sizeof(uint32_t);
6439
6440 metadata->tuning_params.tuning_mod3_data_size = 0;
6441 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
6442 sizeof(uint32_t));
6443 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
6444 data += sizeof(uint32_t);
6445
6446 size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
6447 TUNING_SENSOR_DATA_MAX);
6448 memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
6449 count);
6450 data += count;
6451
6452 count = MIN(metadata->tuning_params.tuning_vfe_data_size,
6453 TUNING_VFE_DATA_MAX);
6454 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
6455 count);
6456 data += count;
6457
6458 count = MIN(metadata->tuning_params.tuning_cpp_data_size,
6459 TUNING_CPP_DATA_MAX);
6460 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
6461 count);
6462 data += count;
6463
6464 count = MIN(metadata->tuning_params.tuning_cac_data_size,
6465 TUNING_CAC_DATA_MAX);
6466 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
6467 count);
6468 data += count;
6469
6470 camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
6471 (int32_t *)(void *)tuning_meta_data_blob,
6472 (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
6473 }
6474
6475 IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
6476 CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
6477 camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
6478 (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
6479 NEUTRAL_COL_POINTS);
6480 }
6481
6482 IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
6483 uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
6484 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
6485 }
6486
6487 IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
6488 int32_t aeRegions[REGIONS_TUPLE_COUNT];
6489 // Adjust crop region from sensor output coordinate system to active
6490 // array coordinate system.
6491 mCropRegionMapper.toActiveArray(hAeRegions->rect.left, hAeRegions->rect.top,
6492 hAeRegions->rect.width, hAeRegions->rect.height);
6493
6494 convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
6495 camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
6496 REGIONS_TUPLE_COUNT);
6497 LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
6498 aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
6499 hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width,
6500 hAeRegions->rect.height);
6501 }
6502
6503 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
6504 uint8_t fwk_afState = (uint8_t) *afState;
6505 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
6506 LOGD("urgent Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
6507 }
6508
6509 IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
6510 camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
6511 }
6512
6513 IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
6514 camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
6515 }
6516
6517 IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
6518 uint8_t fwk_lensState = *lensState;
6519 camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
6520 }
6521
6522 IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
6523 /*af regions*/
6524 int32_t afRegions[REGIONS_TUPLE_COUNT];
6525 // Adjust crop region from sensor output coordinate system to active
6526 // array coordinate system.
6527 mCropRegionMapper.toActiveArray(hAfRegions->rect.left, hAfRegions->rect.top,
6528 hAfRegions->rect.width, hAfRegions->rect.height);
6529
6530 convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
6531 camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
6532 REGIONS_TUPLE_COUNT);
6533 LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
6534 afRegions[0], afRegions[1], afRegions[2], afRegions[3],
6535 hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width,
6536 hAfRegions->rect.height);
6537 }
6538
6539 IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
6540 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
6541 *hal_ab_mode);
6542 if (NAME_NOT_FOUND != val) {
6543 uint8_t fwk_ab_mode = (uint8_t)val;
6544 camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
6545 }
6546 }
6547
6548 IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
6549 int val = lookupFwkName(SCENE_MODES_MAP,
6550 METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
6551 if (NAME_NOT_FOUND != val) {
6552 uint8_t fwkBestshotMode = (uint8_t)val;
6553 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
6554 LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
6555 } else {
6556 LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
6557 }
6558 }
6559
6560 IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
6561 uint8_t fwk_mode = (uint8_t) *mode;
6562 camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
6563 }
6564
    /* Constant metadata values to be updated */
6566 uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
6567 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
6568
6569 uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
6570 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
6571
6572 int32_t hotPixelMap[2];
6573 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
6574
6575 // CDS
6576 IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
6577 camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
6578 }
6579
Thierry Strudel04e026f2016-10-10 11:27:36 -07006580 IF_META_AVAILABLE(cam_sensor_hdr_type_t, vhdr, CAM_INTF_PARM_SENSOR_HDR, metadata) {
6581 int32_t fwk_hdr;
6582 if(*vhdr == CAM_SENSOR_HDR_OFF) {
6583 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_OFF;
6584 } else {
6585 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_ON;
6586 }
6587 camMetadata.update(QCAMERA3_VIDEO_HDR_MODE, &fwk_hdr, 1);
6588 }
6589
6590 IF_META_AVAILABLE(cam_ir_mode_type_t, ir, CAM_INTF_META_IR_MODE, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07006591 int32_t fwk_ir = (int32_t) *ir;
6592 camMetadata.update(QCAMERA3_IR_MODE, &fwk_ir, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -07006593 }
6594
Thierry Strudel269c81a2016-10-12 12:13:59 -07006595 // AEC SPEED
6596 IF_META_AVAILABLE(float, aec, CAM_INTF_META_AEC_CONVERGENCE_SPEED, metadata) {
6597 camMetadata.update(QCAMERA3_AEC_CONVERGENCE_SPEED, aec, 1);
6598 }
6599
6600 // AWB SPEED
6601 IF_META_AVAILABLE(float, awb, CAM_INTF_META_AWB_CONVERGENCE_SPEED, metadata) {
6602 camMetadata.update(QCAMERA3_AWB_CONVERGENCE_SPEED, awb, 1);
6603 }
6604
Thierry Strudel3d639192016-09-09 11:52:26 -07006605 // TNR
6606 IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
6607 uint8_t tnr_enable = tnr->denoise_enable;
6608 int32_t tnr_process_type = (int32_t)tnr->process_plates;
6609
6610 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
6611 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
6612 }
6613
6614 // Reprocess crop data
6615 IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
6616 uint8_t cnt = crop_data->num_of_streams;
6617 if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
6618 // mm-qcamera-daemon only posts crop_data for streams
            // not linked to pproc, so the absence of valid crop
            // metadata is not necessarily an error case.
6621 LOGD("No valid crop metadata entries");
6622 } else {
6623 uint32_t reproc_stream_id;
6624 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
6625 LOGD("No reprocessible stream found, ignore crop data");
6626 } else {
6627 int rc = NO_ERROR;
6628 Vector<int32_t> roi_map;
6629 int32_t *crop = new int32_t[cnt*4];
6630 if (NULL == crop) {
6631 rc = NO_MEMORY;
6632 }
6633 if (NO_ERROR == rc) {
6634 int32_t streams_found = 0;
6635 for (size_t i = 0; i < cnt; i++) {
6636 if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
6637 if (pprocDone) {
6638 // HAL already does internal reprocessing,
6639 // either via reprocessing before JPEG encoding,
6640 // or offline postprocessing for pproc bypass case.
6641 crop[0] = 0;
6642 crop[1] = 0;
6643 crop[2] = mInputStreamInfo.dim.width;
6644 crop[3] = mInputStreamInfo.dim.height;
6645 } else {
6646 crop[0] = crop_data->crop_info[i].crop.left;
6647 crop[1] = crop_data->crop_info[i].crop.top;
6648 crop[2] = crop_data->crop_info[i].crop.width;
6649 crop[3] = crop_data->crop_info[i].crop.height;
6650 }
6651 roi_map.add(crop_data->crop_info[i].roi_map.left);
6652 roi_map.add(crop_data->crop_info[i].roi_map.top);
6653 roi_map.add(crop_data->crop_info[i].roi_map.width);
6654 roi_map.add(crop_data->crop_info[i].roi_map.height);
6655 streams_found++;
6656 LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
6657 crop[0], crop[1], crop[2], crop[3]);
6658 LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
6659 crop_data->crop_info[i].roi_map.left,
6660 crop_data->crop_info[i].roi_map.top,
6661 crop_data->crop_info[i].roi_map.width,
6662 crop_data->crop_info[i].roi_map.height);
6663 break;
6664
6665 }
6666 }
6667 camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
6668 &streams_found, 1);
6669 camMetadata.update(QCAMERA3_CROP_REPROCESS,
6670 crop, (size_t)(streams_found * 4));
6671 if (roi_map.array()) {
6672 camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
6673 roi_map.array(), roi_map.size());
6674 }
6675 }
6676 if (crop) {
6677 delete [] crop;
6678 }
6679 }
6680 }
6681 }
6682
6683 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
6684 // Regardless of CAC supports or not, CTS is expecting the CAC result to be non NULL and
6685 // so hardcoding the CAC result to OFF mode.
6686 uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
6687 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
6688 } else {
6689 IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
6690 int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
6691 *cacMode);
6692 if (NAME_NOT_FOUND != val) {
6693 uint8_t resultCacMode = (uint8_t)val;
6694 // check whether CAC result from CB is equal to Framework set CAC mode
6695 // If not equal then set the CAC mode came in corresponding request
6696 if (fwk_cacMode != resultCacMode) {
6697 resultCacMode = fwk_cacMode;
6698 }
6699 LOGD("fwk_cacMode=%d resultCacMode=%d", fwk_cacMode, resultCacMode);
6700 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
6701 } else {
6702 LOGE("Invalid CAC camera parameter: %d", *cacMode);
6703 }
6704 }
6705 }
6706
6707 // Post blob of cam_cds_data through vendor tag.
6708 IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
6709 uint8_t cnt = cdsInfo->num_of_streams;
6710 cam_cds_data_t cdsDataOverride;
6711 memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
6712 cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
6713 cdsDataOverride.num_of_streams = 1;
6714 if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
6715 uint32_t reproc_stream_id;
6716 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
6717 LOGD("No reprocessible stream found, ignore cds data");
6718 } else {
6719 for (size_t i = 0; i < cnt; i++) {
6720 if (cdsInfo->cds_info[i].stream_id ==
6721 reproc_stream_id) {
6722 cdsDataOverride.cds_info[0].cds_enable =
6723 cdsInfo->cds_info[i].cds_enable;
6724 break;
6725 }
6726 }
6727 }
6728 } else {
6729 LOGD("Invalid stream count %d in CDS_DATA", cnt);
6730 }
6731 camMetadata.update(QCAMERA3_CDS_INFO,
6732 (uint8_t *)&cdsDataOverride,
6733 sizeof(cam_cds_data_t));
6734 }
6735
6736 // Ldaf calibration data
6737 if (!mLdafCalibExist) {
6738 IF_META_AVAILABLE(uint32_t, ldafCalib,
6739 CAM_INTF_META_LDAF_EXIF, metadata) {
6740 mLdafCalibExist = true;
6741 mLdafCalib[0] = ldafCalib[0];
6742 mLdafCalib[1] = ldafCalib[1];
6743 LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
6744 ldafCalib[0], ldafCalib[1]);
6745 }
6746 }
6747
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07006748 // Reprocess and DDM debug data through vendor tag
6749 cam_reprocess_info_t repro_info;
6750 memset(&repro_info, 0, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07006751 IF_META_AVAILABLE(cam_stream_crop_info_t, sensorCropInfo,
6752 CAM_INTF_META_SNAP_CROP_INFO_SENSOR, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07006753 memcpy(&(repro_info.sensor_crop_info), sensorCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07006754 }
6755 IF_META_AVAILABLE(cam_stream_crop_info_t, camifCropInfo,
6756 CAM_INTF_META_SNAP_CROP_INFO_CAMIF, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07006757 memcpy(&(repro_info.camif_crop_info), camifCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07006758 }
6759 IF_META_AVAILABLE(cam_stream_crop_info_t, ispCropInfo,
6760 CAM_INTF_META_SNAP_CROP_INFO_ISP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07006761 memcpy(&(repro_info.isp_crop_info), ispCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07006762 }
6763 IF_META_AVAILABLE(cam_stream_crop_info_t, cppCropInfo,
6764 CAM_INTF_META_SNAP_CROP_INFO_CPP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07006765 memcpy(&(repro_info.cpp_crop_info), cppCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07006766 }
6767 IF_META_AVAILABLE(cam_focal_length_ratio_t, ratio,
6768 CAM_INTF_META_AF_FOCAL_LENGTH_RATIO, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07006769 memcpy(&(repro_info.af_focal_length_ratio), ratio, sizeof(cam_focal_length_ratio_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07006770 }
6771 IF_META_AVAILABLE(int32_t, flip, CAM_INTF_PARM_FLIP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07006772 memcpy(&(repro_info.pipeline_flip), flip, sizeof(int32_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07006773 }
6774 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
6775 CAM_INTF_PARM_ROTATION, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07006776 memcpy(&(repro_info.rotation_info), rotationInfo, sizeof(cam_rotation_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07006777 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07006778 IF_META_AVAILABLE(cam_area_t, afRoi, CAM_INTF_META_AF_ROI, metadata) {
6779 memcpy(&(repro_info.af_roi), afRoi, sizeof(cam_area_t));
6780 }
6781 IF_META_AVAILABLE(cam_dyn_img_data_t, dynMask, CAM_INTF_META_IMG_DYN_FEAT, metadata) {
6782 memcpy(&(repro_info.dyn_mask), dynMask, sizeof(cam_dyn_img_data_t));
6783 }
6784 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB,
6785 (uint8_t *)&repro_info, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07006786
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006787 // INSTANT AEC MODE
6788 IF_META_AVAILABLE(uint8_t, instant_aec_mode,
6789 CAM_INTF_PARM_INSTANT_AEC, metadata) {
6790 camMetadata.update(QCAMERA3_INSTANT_AEC_MODE, instant_aec_mode, 1);
6791 }
6792
Shuzhen Wange763e802016-03-31 10:24:29 -07006793 // AF scene change
6794 IF_META_AVAILABLE(uint8_t, afSceneChange, CAM_INTF_META_AF_SCENE_CHANGE, metadata) {
6795 camMetadata.update(NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE, afSceneChange, 1);
6796 }
6797
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006798 /* In batch mode, cache the first metadata in the batch */
6799 if (mBatchSize && firstMetadataInBatch) {
6800 mCachedMetadata.clear();
6801 mCachedMetadata = camMetadata;
6802 }
6803
Thierry Strudel3d639192016-09-09 11:52:26 -07006804 resultMetadata = camMetadata.release();
6805 return resultMetadata;
6806}
6807
6808/*===========================================================================
6809 * FUNCTION : saveExifParams
6810 *
 * DESCRIPTION: Cache the EXIF debug parameters (AE/AWB/AF/ASD/stats/BE
 *              stats/bayer histogram/3A tuning) carried in the HAL
 *              metadata into mExifParams.debug_params.
6812 *
6813 * PARAMETERS :
6814 * @metadata : metadata information from callback
6815 *
6816 * RETURN : none
6817 *
6818 *==========================================================================*/
6819void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
6820{
6821 IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
6822 CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
6823 if (mExifParams.debug_params) {
6824 mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
6825 mExifParams.debug_params->ae_debug_params_valid = TRUE;
6826 }
6827 }
6828 IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
6829 CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
6830 if (mExifParams.debug_params) {
6831 mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
6832 mExifParams.debug_params->awb_debug_params_valid = TRUE;
6833 }
6834 }
6835 IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
6836 CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
6837 if (mExifParams.debug_params) {
6838 mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
6839 mExifParams.debug_params->af_debug_params_valid = TRUE;
6840 }
6841 }
6842 IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
6843 CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
6844 if (mExifParams.debug_params) {
6845 mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
6846 mExifParams.debug_params->asd_debug_params_valid = TRUE;
6847 }
6848 }
6849 IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
6850 CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
6851 if (mExifParams.debug_params) {
6852 mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
6853 mExifParams.debug_params->stats_debug_params_valid = TRUE;
6854 }
6855 }
6856 IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t,bestats_exif_debug_params,
6857 CAM_INTF_META_EXIF_DEBUG_BESTATS, metadata) {
6858 if (mExifParams.debug_params) {
6859 mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
6860 mExifParams.debug_params->bestats_debug_params_valid = TRUE;
6861 }
6862 }
6863 IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
6864 CAM_INTF_META_EXIF_DEBUG_BHIST, metadata) {
6865 if (mExifParams.debug_params) {
6866 mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
6867 mExifParams.debug_params->bhist_debug_params_valid = TRUE;
6868 }
6869 }
6870 IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
6871 CAM_INTF_META_EXIF_DEBUG_3A_TUNING, metadata) {
6872 if (mExifParams.debug_params) {
6873 mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
6874 mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
6875 }
6876 }
6877}
6878
6879/*===========================================================================
6880 * FUNCTION : get3AExifParams
6881 *
 * DESCRIPTION: Accessor returning a copy of the cached 3A EXIF parameter
 *              set populated by saveExifParams().
6883 *
6884 * PARAMETERS : none
6885 *
6886 *
6887 * RETURN : mm_jpeg_exif_params_t
6888 *
6889 *==========================================================================*/
mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
{
    // Accessor: returns (by value) the cached 3A EXIF parameter set
    // most recently populated by saveExifParams().
    return mExifParams;
}
6894
6895/*===========================================================================
6896 * FUNCTION : translateCbUrgentMetadataToResultMetadata
6897 *
 * DESCRIPTION: Translate the urgent (partial result) 3A metadata from the
 *              HAL callback into framework result metadata.
6899 *
6900 * PARAMETERS :
6901 * @metadata : metadata information from callback
6902 *
6903 * RETURN : camera_metadata_t*
6904 * metadata in a format specified by fwk
6905 *==========================================================================*/
camera_metadata_t*
QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
                                 (metadata_buffer_t *metadata)
{
    // Translates the "urgent" (partial-result) subset of HAL metadata --
    // 3A states, modes and triggers -- into framework result metadata.
    // Also advances the instant-AEC settle/reset bookkeeping on this
    // object as a side effect. Caller owns the returned buffer.
    CameraMetadata camMetadata;
    camera_metadata_t *resultMetadata;


    // AWB state (searching / converged / locked ...)
    IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
        uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
        camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
        LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
    }

    // AE precapture trigger and its id are published as a pair
    IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
        camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
                &aecTrigger->trigger, 1);
        camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
                &aecTrigger->trigger_id, 1);
        LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
                 aecTrigger->trigger);
        LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
                aecTrigger->trigger_id);
    }

    // AE state
    IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
        uint8_t fwk_ae_state = (uint8_t) *ae_state;
        camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
        LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
    }

    // AF mode: HAL focus mode mapped through FOCUS_MODES_MAP
    IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
        int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
        if (NAME_NOT_FOUND != val) {
            uint8_t fwkAfMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
            LOGD("urgent Metadata : ANDROID_CONTROL_AF_MODE %d", val);
        } else {
            LOGH("urgent Metadata not found : ANDROID_CONTROL_AF_MODE %d",
                    val);
        }
    }

    // AF trigger and its id
    IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
        camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
                &af_trigger->trigger, 1);
        LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
                 af_trigger->trigger);
        camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &af_trigger->trigger_id, 1);
        LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
                af_trigger->trigger_id);
    }

    // AWB mode mapped through WHITE_BALANCE_MODES_MAP
    IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
        int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
                METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
        if (NAME_NOT_FOUND != val) {
            uint8_t fwkWhiteBalanceMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
            LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
        } else {
            LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
        }
    }

    // ANDROID_CONTROL_AE_MODE is deduced from three independent HAL
    // fields: red-eye reduction, LED/flash mode and basic AE on/off.
    // Defaults below mark each field "unavailable" until read.
    uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
    uint32_t aeMode = CAM_AE_MODE_MAX;
    int32_t flashMode = CAM_FLASH_MODE_MAX;
    int32_t redeye = -1;
    IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
        aeMode = *pAeMode;
    }
    IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
        flashMode = *pFlashMode;
    }
    IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
        redeye = *pRedeye;
    }

    // Priority order: red-eye > auto/on flash > plain AE on/off.
    if (1 == redeye) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
        int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
                flashMode);
        if (NAME_NOT_FOUND != val) {
            fwk_aeMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
        } else {
            LOGE("Unsupported flash mode %d", flashMode);
        }
    } else if (aeMode == CAM_AE_MODE_ON) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else if (aeMode == CAM_AE_MODE_OFF) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else {
        // None of the three fields gave a usable value; no AE_MODE entry
        // is published for this result.
        LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
              "flashMode:%d, aeMode:%u!!!",
                 redeye, flashMode, aeMode);
    }
    // Instant AEC: count frames until AEC settles (or the skip bound is
    // hit), then clear mInstantAEC and arm mResetInstantAEC so normal
    // AEC behavior resumes.
    if (mInstantAEC) {
        // Increment frame index count until the instant-AEC bound is reached.
        mInstantAecFrameIdxCount++;
        IF_META_AVAILABLE(cam_3a_params_t, ae_params,
                CAM_INTF_META_AEC_INFO, metadata) {
            LOGH("ae_params->settled = %d",ae_params->settled);
            // If AEC settled, or if number of frames reached bound value,
            // should reset instant AEC.
            if (ae_params->settled ||
                    (mInstantAecFrameIdxCount > mAecSkipDisplayFrameBound)) {
                LOGH("AEC settled or Frames reached instantAEC bound, resetting instantAEC");
                mInstantAEC = false;
                mResetInstantAEC = true;
                mInstantAecFrameIdxCount = 0;
            }
        }
    }
    resultMetadata = camMetadata.release();
    return resultMetadata;
}
7028
7029/*===========================================================================
7030 * FUNCTION : dumpMetadataToFile
7031 *
7032 * DESCRIPTION: Dumps tuning metadata to file system
7033 *
7034 * PARAMETERS :
7035 * @meta : tuning metadata
7036 * @dumpFrameCount : current dump frame count
7037 * @enabled : Enable mask
7038 *
7039 *==========================================================================*/
void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
                                                   uint32_t &dumpFrameCount,
                                                   bool enabled,
                                                   const char *type,
                                                   uint32_t frameNumber)
{
    //Some sanity checks
    // Reject any section that exceeds its fixed maximum so the offset-based
    // payload writes further below can never run past meta.data.
    if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
        LOGE("Tuning sensor data size bigger than expected %d: %d",
              meta.tuning_sensor_data_size,
              TUNING_SENSOR_DATA_MAX);
        return;
    }

    if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
        LOGE("Tuning VFE data size bigger than expected %d: %d",
              meta.tuning_vfe_data_size,
              TUNING_VFE_DATA_MAX);
        return;
    }

    if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
        LOGE("Tuning CPP data size bigger than expected %d: %d",
              meta.tuning_cpp_data_size,
              TUNING_CPP_DATA_MAX);
        return;
    }

    if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
        LOGE("Tuning CAC data size bigger than expected %d: %d",
              meta.tuning_cac_data_size,
              TUNING_CAC_DATA_MAX);
        return;
    }
    //

    if(enabled){
        // Dump file name: <QCAMERA_DUMP_FRM_LOCATION><YYYYmmddHHMMSS><count>m_<type>_<frame>.bin
        // NOTE(review): dumpFrameCount is taken by reference but never
        // incremented here — presumably the caller maintains the counter; confirm.
        char timeBuf[FILENAME_MAX];
        char buf[FILENAME_MAX];
        memset(buf, 0, sizeof(buf));
        memset(timeBuf, 0, sizeof(timeBuf));
        time_t current_time;
        struct tm * timeinfo;
        time (&current_time);
        // NOTE(review): localtime() is not thread-safe; assumes metadata dumps
        // are serialized by the caller — confirm.
        timeinfo = localtime (&current_time);
        // If localtime() fails, timeBuf stays empty and the file lands in the
        // process working directory instead of the dump location.
        if (timeinfo != NULL) {
            strftime (timeBuf, sizeof(timeBuf),
                    QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
        }
        String8 filePath(timeBuf);
        snprintf(buf,
                sizeof(buf),
                "%dm_%s_%d.bin",
                dumpFrameCount,
                type,
                frameNumber);
        filePath.append(buf);
        int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
        if (file_fd >= 0) {
            ssize_t written_len = 0;
            // File layout: version word, then the five section-size words
            // (sensor, VFE, CPP, CAC, mod3 — mod3 is forced to 0), then the
            // four payloads read from their fixed offsets inside meta.data.
            // NOTE(review): written_len accumulates write() results (which may
            // be -1 on error) but is never checked — best-effort dump.
            meta.tuning_data_version = TUNING_DATA_VERSION;
            void *data = (void *)((uint8_t *)&meta.tuning_data_version);
            written_len += write(file_fd, data, sizeof(uint32_t));
            data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
            LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
            written_len += write(file_fd, data, sizeof(uint32_t));
            data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
            LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
            written_len += write(file_fd, data, sizeof(uint32_t));
            data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
            LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
            written_len += write(file_fd, data, sizeof(uint32_t));
            data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
            LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
            written_len += write(file_fd, data, sizeof(uint32_t));
            meta.tuning_mod3_data_size = 0;
            data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
            LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
            written_len += write(file_fd, data, sizeof(uint32_t));
            size_t total_size = meta.tuning_sensor_data_size;
            data = (void *)((uint8_t *)&meta.data);
            written_len += write(file_fd, data, total_size);
            total_size = meta.tuning_vfe_data_size;
            data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
            written_len += write(file_fd, data, total_size);
            total_size = meta.tuning_cpp_data_size;
            data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
            written_len += write(file_fd, data, total_size);
            total_size = meta.tuning_cac_data_size;
            data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
            written_len += write(file_fd, data, total_size);
            close(file_fd);
        }else {
            LOGE("fail to open file for metadata dumping");
        }
    }
}
7137
7138/*===========================================================================
7139 * FUNCTION : cleanAndSortStreamInfo
7140 *
7141 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
7142 * and sort them such that raw stream is at the end of the list
7143 * This is a workaround for camera daemon constraint.
7144 *
7145 * PARAMETERS : None
7146 *
7147 *==========================================================================*/
7148void QCamera3HardwareInterface::cleanAndSortStreamInfo()
7149{
7150 List<stream_info_t *> newStreamInfo;
7151
7152 /*clean up invalid streams*/
7153 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
7154 it != mStreamInfo.end();) {
7155 if(((*it)->status) == INVALID){
7156 QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
7157 delete channel;
7158 free(*it);
7159 it = mStreamInfo.erase(it);
7160 } else {
7161 it++;
7162 }
7163 }
7164
7165 // Move preview/video/callback/snapshot streams into newList
7166 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
7167 it != mStreamInfo.end();) {
7168 if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
7169 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
7170 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
7171 newStreamInfo.push_back(*it);
7172 it = mStreamInfo.erase(it);
7173 } else
7174 it++;
7175 }
7176 // Move raw streams into newList
7177 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
7178 it != mStreamInfo.end();) {
7179 newStreamInfo.push_back(*it);
7180 it = mStreamInfo.erase(it);
7181 }
7182
7183 mStreamInfo = newStreamInfo;
7184}
7185
7186/*===========================================================================
7187 * FUNCTION : extractJpegMetadata
7188 *
7189 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
7190 * JPEG metadata is cached in HAL, and return as part of capture
7191 * result when metadata is returned from camera daemon.
7192 *
7193 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
7194 * @request: capture request
7195 *
7196 *==========================================================================*/
7197void QCamera3HardwareInterface::extractJpegMetadata(
7198 CameraMetadata& jpegMetadata,
7199 const camera3_capture_request_t *request)
7200{
7201 CameraMetadata frame_settings;
7202 frame_settings = request->settings;
7203
7204 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
7205 jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
7206 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
7207 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
7208
7209 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
7210 jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
7211 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
7212 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
7213
7214 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
7215 jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
7216 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
7217 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
7218
7219 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
7220 jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
7221 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
7222 frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
7223
7224 if (frame_settings.exists(ANDROID_JPEG_QUALITY))
7225 jpegMetadata.update(ANDROID_JPEG_QUALITY,
7226 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
7227 frame_settings.find(ANDROID_JPEG_QUALITY).count);
7228
7229 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
7230 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
7231 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
7232 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
7233
7234 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
7235 int32_t thumbnail_size[2];
7236 thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
7237 thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
7238 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
7239 int32_t orientation =
7240 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007241 if ((!needJpegExifRotation()) && ((orientation == 90) || (orientation == 270))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07007242 //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
7243 int32_t temp;
7244 temp = thumbnail_size[0];
7245 thumbnail_size[0] = thumbnail_size[1];
7246 thumbnail_size[1] = temp;
7247 }
7248 }
7249 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
7250 thumbnail_size,
7251 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
7252 }
7253
7254}
7255
7256/*===========================================================================
7257 * FUNCTION : convertToRegions
7258 *
7259 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
7260 *
7261 * PARAMETERS :
7262 * @rect : cam_rect_t struct to convert
7263 * @region : int32_t destination array
7264 * @weight : if we are converting from cam_area_t, weight is valid
7265 * else weight = -1
7266 *
7267 *==========================================================================*/
7268void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
7269 int32_t *region, int weight)
7270{
7271 region[0] = rect.left;
7272 region[1] = rect.top;
7273 region[2] = rect.left + rect.width;
7274 region[3] = rect.top + rect.height;
7275 if (weight > -1) {
7276 region[4] = weight;
7277 }
7278}
7279
7280/*===========================================================================
7281 * FUNCTION : convertFromRegions
7282 *
7283 * DESCRIPTION: helper method to convert from array to cam_rect_t
7284 *
7285 * PARAMETERS :
7286 * @rect : cam_rect_t struct to convert
7287 * @region : int32_t destination array
7288 * @weight : if we are converting from cam_area_t, weight is valid
7289 * else weight = -1
7290 *
7291 *==========================================================================*/
7292void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
7293 const camera_metadata_t *settings, uint32_t tag)
7294{
7295 CameraMetadata frame_settings;
7296 frame_settings = settings;
7297 int32_t x_min = frame_settings.find(tag).data.i32[0];
7298 int32_t y_min = frame_settings.find(tag).data.i32[1];
7299 int32_t x_max = frame_settings.find(tag).data.i32[2];
7300 int32_t y_max = frame_settings.find(tag).data.i32[3];
7301 roi.weight = frame_settings.find(tag).data.i32[4];
7302 roi.rect.left = x_min;
7303 roi.rect.top = y_min;
7304 roi.rect.width = x_max - x_min;
7305 roi.rect.height = y_max - y_min;
7306}
7307
7308/*===========================================================================
7309 * FUNCTION : resetIfNeededROI
7310 *
7311 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
7312 * crop region
7313 *
7314 * PARAMETERS :
7315 * @roi : cam_area_t struct to resize
7316 * @scalerCropRegion : cam_crop_region_t region to compare against
7317 *
7318 *
7319 *==========================================================================*/
7320bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
7321 const cam_crop_region_t* scalerCropRegion)
7322{
7323 int32_t roi_x_max = roi->rect.width + roi->rect.left;
7324 int32_t roi_y_max = roi->rect.height + roi->rect.top;
7325 int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
7326 int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
7327
7328 /* According to spec weight = 0 is used to indicate roi needs to be disabled
7329 * without having this check the calculations below to validate if the roi
7330 * is inside scalar crop region will fail resulting in the roi not being
7331 * reset causing algorithm to continue to use stale roi window
7332 */
7333 if (roi->weight == 0) {
7334 return true;
7335 }
7336
7337 if ((roi_x_max < scalerCropRegion->left) ||
7338 // right edge of roi window is left of scalar crop's left edge
7339 (roi_y_max < scalerCropRegion->top) ||
7340 // bottom edge of roi window is above scalar crop's top edge
7341 (roi->rect.left > crop_x_max) ||
7342 // left edge of roi window is beyond(right) of scalar crop's right edge
7343 (roi->rect.top > crop_y_max)){
7344 // top edge of roi windo is above scalar crop's top edge
7345 return false;
7346 }
7347 if (roi->rect.left < scalerCropRegion->left) {
7348 roi->rect.left = scalerCropRegion->left;
7349 }
7350 if (roi->rect.top < scalerCropRegion->top) {
7351 roi->rect.top = scalerCropRegion->top;
7352 }
7353 if (roi_x_max > crop_x_max) {
7354 roi_x_max = crop_x_max;
7355 }
7356 if (roi_y_max > crop_y_max) {
7357 roi_y_max = crop_y_max;
7358 }
7359 roi->rect.width = roi_x_max - roi->rect.left;
7360 roi->rect.height = roi_y_max - roi->rect.top;
7361 return true;
7362}
7363
7364/*===========================================================================
7365 * FUNCTION : convertLandmarks
7366 *
7367 * DESCRIPTION: helper method to extract the landmarks from face detection info
7368 *
7369 * PARAMETERS :
7370 * @landmark_data : input landmark data to be converted
7371 * @landmarks : int32_t destination array
7372 *
7373 *
7374 *==========================================================================*/
7375void QCamera3HardwareInterface::convertLandmarks(
7376 cam_face_landmarks_info_t landmark_data,
7377 int32_t *landmarks)
7378{
Thierry Strudel04e026f2016-10-10 11:27:36 -07007379 if (landmark_data.is_left_eye_valid) {
7380 landmarks[LEFT_EYE_X] = (int32_t)landmark_data.left_eye_center.x;
7381 landmarks[LEFT_EYE_Y] = (int32_t)landmark_data.left_eye_center.y;
7382 } else {
7383 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
7384 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
7385 }
7386
7387 if (landmark_data.is_right_eye_valid) {
7388 landmarks[RIGHT_EYE_X] = (int32_t)landmark_data.right_eye_center.x;
7389 landmarks[RIGHT_EYE_Y] = (int32_t)landmark_data.right_eye_center.y;
7390 } else {
7391 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
7392 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
7393 }
7394
7395 if (landmark_data.is_mouth_valid) {
7396 landmarks[MOUTH_X] = (int32_t)landmark_data.mouth_center.x;
7397 landmarks[MOUTH_Y] = (int32_t)landmark_data.mouth_center.y;
7398 } else {
7399 landmarks[MOUTH_X] = FACE_INVALID_POINT;
7400 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
7401 }
7402}
7403
7404/*===========================================================================
7405 * FUNCTION : setInvalidLandmarks
7406 *
7407 * DESCRIPTION: helper method to set invalid landmarks
7408 *
7409 * PARAMETERS :
7410 * @landmarks : int32_t destination array
7411 *
7412 *
7413 *==========================================================================*/
7414void QCamera3HardwareInterface::setInvalidLandmarks(
7415 int32_t *landmarks)
7416{
7417 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
7418 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
7419 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
7420 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
7421 landmarks[MOUTH_X] = FACE_INVALID_POINT;
7422 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
Thierry Strudel3d639192016-09-09 11:52:26 -07007423}
7424
7425#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007426
7427/*===========================================================================
7428 * FUNCTION : getCapabilities
7429 *
7430 * DESCRIPTION: query camera capability from back-end
7431 *
7432 * PARAMETERS :
7433 * @ops : mm-interface ops structure
7434 * @cam_handle : camera handle for which we need capability
7435 *
7436 * RETURN : ptr type of capability structure
7437 * capability for success
7438 * NULL for failure
7439 *==========================================================================*/
cam_capability_t *QCamera3HardwareInterface::getCapabilities(mm_camera_ops_t *ops,
        uint32_t cam_handle)
{
    int rc = NO_ERROR;
    QCamera3HeapMemory *capabilityHeap = NULL;
    // cap_ptr is the malloc'd copy handed to the caller, who owns it.
    cam_capability_t *cap_ptr = NULL;

    if (ops == NULL) {
        LOGE("Invalid arguments");
        return NULL;
    }

    capabilityHeap = new QCamera3HeapMemory(1);
    if (capabilityHeap == NULL) {
        LOGE("creation of capabilityHeap failed");
        return NULL;
    }

    /* Allocate memory for capability buffer */
    rc = capabilityHeap->allocate(sizeof(cam_capability_t));
    if(rc != OK) {
        LOGE("No memory for cappability");
        goto allocate_failed;
    }

    /* Map memory for capability buffer */
    // Zero the heap buffer before the backend fills it via the mapped fd.
    memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));

    rc = ops->map_buf(cam_handle,
            CAM_MAPPING_BUF_TYPE_CAPABILITY, capabilityHeap->getFd(0),
            sizeof(cam_capability_t), capabilityHeap->getPtr(0));
    if(rc < 0) {
        LOGE("failed to map capability buffer");
        rc = FAILED_TRANSACTION;
        goto map_failed;
    }

    /* Query Capability */
    // The backend writes the capability data into the buffer mapped above.
    rc = ops->query_capability(cam_handle);
    if(rc < 0) {
        LOGE("failed to query capability");
        rc = FAILED_TRANSACTION;
        goto query_failed;
    }

    // Copy out of the shared heap buffer so the result outlives the mapping.
    cap_ptr = (cam_capability_t *)malloc(sizeof(cam_capability_t));
    if (cap_ptr == NULL) {
        LOGE("out of memory");
        rc = NO_MEMORY;
        goto query_failed;
    }

    memset(cap_ptr, 0, sizeof(cam_capability_t));
    memcpy(cap_ptr, DATA_PTR(capabilityHeap, 0), sizeof(cam_capability_t));

    // Clear analysis-stream padding offsets in the copied capability.
    int index;
    for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
        cam_analysis_info_t *p_analysis_info = &cap_ptr->analysis_info[index];
        p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
        p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
    }

    // Cleanup ladder: labels intentionally fall through so teardown runs in
    // reverse order of setup (unmap -> deallocate -> delete). On the success
    // path rc == NO_ERROR, so the final check returns cap_ptr.
query_failed:
    ops->unmap_buf(cam_handle, CAM_MAPPING_BUF_TYPE_CAPABILITY);
map_failed:
    capabilityHeap->deallocate();
allocate_failed:
    delete capabilityHeap;

    if (rc != NO_ERROR) {
        return NULL;
    } else {
        return cap_ptr;
    }
}
7515
Thierry Strudel3d639192016-09-09 11:52:26 -07007516/*===========================================================================
7517 * FUNCTION : initCapabilities
7518 *
7519 * DESCRIPTION: initialize camera capabilities in static data struct
7520 *
7521 * PARAMETERS :
7522 * @cameraId : camera Id
7523 *
7524 * RETURN : int32_t type of status
7525 * NO_ERROR -- success
7526 * none-zero failure code
7527 *==========================================================================*/
7528int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
7529{
7530 int rc = 0;
7531 mm_camera_vtbl_t *cameraHandle = NULL;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007532 uint32_t handle = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07007533
7534 rc = camera_open((uint8_t)cameraId, &cameraHandle);
7535 if (rc) {
7536 LOGE("camera_open failed. rc = %d", rc);
7537 goto open_failed;
7538 }
7539 if (!cameraHandle) {
7540 LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
7541 goto open_failed;
7542 }
7543
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007544 handle = get_main_camera_handle(cameraHandle->camera_handle);
7545 gCamCapability[cameraId] = getCapabilities(cameraHandle->ops, handle);
7546 if (gCamCapability[cameraId] == NULL) {
7547 rc = FAILED_TRANSACTION;
7548 goto failed_op;
Thierry Strudel3d639192016-09-09 11:52:26 -07007549 }
7550
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007551 gCamCapability[cameraId]->camera_index = cameraId;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007552 if (is_dual_camera_by_idx(cameraId)) {
7553 handle = get_aux_camera_handle(cameraHandle->camera_handle);
7554 gCamCapability[cameraId]->aux_cam_cap =
7555 getCapabilities(cameraHandle->ops, handle);
7556 if (gCamCapability[cameraId]->aux_cam_cap == NULL) {
7557 rc = FAILED_TRANSACTION;
7558 free(gCamCapability[cameraId]);
7559 goto failed_op;
7560 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08007561
7562 // Copy the main camera capability to main_cam_cap struct
7563 gCamCapability[cameraId]->main_cam_cap =
7564 (cam_capability_t *)malloc(sizeof(cam_capability_t));
7565 if (gCamCapability[cameraId]->main_cam_cap == NULL) {
7566 LOGE("out of memory");
7567 rc = NO_MEMORY;
7568 goto failed_op;
7569 }
7570 memcpy(gCamCapability[cameraId]->main_cam_cap, gCamCapability[cameraId],
7571 sizeof(cam_capability_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007572 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007573failed_op:
Thierry Strudel3d639192016-09-09 11:52:26 -07007574 cameraHandle->ops->close_camera(cameraHandle->camera_handle);
7575 cameraHandle = NULL;
7576open_failed:
7577 return rc;
7578}
7579
7580/*==========================================================================
7581 * FUNCTION : get3Aversion
7582 *
7583 * DESCRIPTION: get the Q3A S/W version
7584 *
7585 * PARAMETERS :
7586 * @sw_version: Reference of Q3A structure which will hold version info upon
7587 * return
7588 *
7589 * RETURN : None
7590 *
7591 *==========================================================================*/
7592void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
7593{
7594 if(gCamCapability[mCameraId])
7595 sw_version = gCamCapability[mCameraId]->q3a_version;
7596 else
7597 LOGE("Capability structure NULL!");
7598}
7599
7600
7601/*===========================================================================
7602 * FUNCTION : initParameters
7603 *
7604 * DESCRIPTION: initialize camera parameters
7605 *
7606 * PARAMETERS :
7607 *
7608 * RETURN : int32_t type of status
7609 * NO_ERROR -- success
7610 * none-zero failure code
7611 *==========================================================================*/
7612int QCamera3HardwareInterface::initParameters()
7613{
7614 int rc = 0;
7615
7616 //Allocate Set Param Buffer
7617 mParamHeap = new QCamera3HeapMemory(1);
7618 rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
7619 if(rc != OK) {
7620 rc = NO_MEMORY;
7621 LOGE("Failed to allocate SETPARM Heap memory");
7622 delete mParamHeap;
7623 mParamHeap = NULL;
7624 return rc;
7625 }
7626
7627 //Map memory for parameters buffer
7628 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
7629 CAM_MAPPING_BUF_TYPE_PARM_BUF,
7630 mParamHeap->getFd(0),
7631 sizeof(metadata_buffer_t),
7632 (metadata_buffer_t *) DATA_PTR(mParamHeap,0));
7633 if(rc < 0) {
7634 LOGE("failed to map SETPARM buffer");
7635 rc = FAILED_TRANSACTION;
7636 mParamHeap->deallocate();
7637 delete mParamHeap;
7638 mParamHeap = NULL;
7639 return rc;
7640 }
7641
7642 mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
7643
7644 mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
7645 return rc;
7646}
7647
7648/*===========================================================================
7649 * FUNCTION : deinitParameters
7650 *
7651 * DESCRIPTION: de-initialize camera parameters
7652 *
7653 * PARAMETERS :
7654 *
7655 * RETURN : NONE
7656 *==========================================================================*/
void QCamera3HardwareInterface::deinitParameters()
{
    // Teardown mirrors initParameters() in reverse: unmap the parameter
    // buffer from the backend before releasing the heap that backs it.
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_PARM_BUF);

    mParamHeap->deallocate();
    delete mParamHeap;
    mParamHeap = NULL;

    // mParameters aliased mParamHeap's storage; just clear the pointer.
    mParameters = NULL;

    // mPrevParameters was malloc'd in initParameters().
    free(mPrevParameters);
    mPrevParameters = NULL;
}
7671
7672/*===========================================================================
7673 * FUNCTION : calcMaxJpegSize
7674 *
7675 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
7676 *
7677 * PARAMETERS :
7678 *
7679 * RETURN : max_jpeg_size
7680 *==========================================================================*/
7681size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
7682{
7683 size_t max_jpeg_size = 0;
7684 size_t temp_width, temp_height;
7685 size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
7686 MAX_SIZES_CNT);
7687 for (size_t i = 0; i < count; i++) {
7688 temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
7689 temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
7690 if (temp_width * temp_height > max_jpeg_size ) {
7691 max_jpeg_size = temp_width * temp_height;
7692 }
7693 }
7694 max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
7695 return max_jpeg_size;
7696}
7697
7698/*===========================================================================
7699 * FUNCTION : getMaxRawSize
7700 *
7701 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
7702 *
7703 * PARAMETERS :
7704 *
7705 * RETURN : Largest supported Raw Dimension
7706 *==========================================================================*/
7707cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
7708{
7709 int max_width = 0;
7710 cam_dimension_t maxRawSize;
7711
7712 memset(&maxRawSize, 0, sizeof(cam_dimension_t));
7713 for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
7714 if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
7715 max_width = gCamCapability[camera_id]->raw_dim[i].width;
7716 maxRawSize = gCamCapability[camera_id]->raw_dim[i];
7717 }
7718 }
7719 return maxRawSize;
7720}
7721
7722
7723/*===========================================================================
7724 * FUNCTION : calcMaxJpegDim
7725 *
7726 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
7727 *
7728 * PARAMETERS :
7729 *
7730 * RETURN : max_jpeg_dim
7731 *==========================================================================*/
7732cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
7733{
7734 cam_dimension_t max_jpeg_dim;
7735 cam_dimension_t curr_jpeg_dim;
7736 max_jpeg_dim.width = 0;
7737 max_jpeg_dim.height = 0;
7738 curr_jpeg_dim.width = 0;
7739 curr_jpeg_dim.height = 0;
7740 for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
7741 curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
7742 curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
7743 if (curr_jpeg_dim.width * curr_jpeg_dim.height >
7744 max_jpeg_dim.width * max_jpeg_dim.height ) {
7745 max_jpeg_dim.width = curr_jpeg_dim.width;
7746 max_jpeg_dim.height = curr_jpeg_dim.height;
7747 }
7748 }
7749 return max_jpeg_dim;
7750}
7751
7752/*===========================================================================
7753 * FUNCTION : addStreamConfig
7754 *
7755 * DESCRIPTION: adds the stream configuration to the array
7756 *
7757 * PARAMETERS :
7758 * @available_stream_configs : pointer to stream configuration array
7759 * @scalar_format : scalar format
7760 * @dim : configuration dimension
7761 * @config_type : input or output configuration type
7762 *
7763 * RETURN : NONE
7764 *==========================================================================*/
7765void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
7766 int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
7767{
7768 available_stream_configs.add(scalar_format);
7769 available_stream_configs.add(dim.width);
7770 available_stream_configs.add(dim.height);
7771 available_stream_configs.add(config_type);
7772}
7773
7774/*===========================================================================
7775 * FUNCTION : suppportBurstCapture
7776 *
7777 * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
7778 *
7779 * PARAMETERS :
7780 * @cameraId : camera Id
7781 *
7782 * RETURN : true if camera supports BURST_CAPTURE
7783 * false otherwise
7784 *==========================================================================*/
7785bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
7786{
7787 const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
7788 const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
7789 const int32_t highResWidth = 3264;
7790 const int32_t highResHeight = 2448;
7791
7792 if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
7793 // Maximum resolution images cannot be captured at >= 10fps
7794 // -> not supporting BURST_CAPTURE
7795 return false;
7796 }
7797
7798 if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
7799 // Maximum resolution images can be captured at >= 20fps
7800 // --> supporting BURST_CAPTURE
7801 return true;
7802 }
7803
7804 // Find the smallest highRes resolution, or largest resolution if there is none
7805 size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
7806 MAX_SIZES_CNT);
7807 size_t highRes = 0;
7808 while ((highRes + 1 < totalCnt) &&
7809 (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
7810 gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
7811 highResWidth * highResHeight)) {
7812 highRes++;
7813 }
7814 if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
7815 return true;
7816 } else {
7817 return false;
7818 }
7819}
7820
7821/*===========================================================================
7822 * FUNCTION : initStaticMetadata
7823 *
7824 * DESCRIPTION: initialize the static metadata
7825 *
7826 * PARAMETERS :
7827 * @cameraId : camera Id
7828 *
7829 * RETURN : int32_t type of status
7830 * 0 -- success
7831 * non-zero failure code
7832 *==========================================================================*/
7833int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
7834{
7835 int rc = 0;
7836 CameraMetadata staticInfo;
7837 size_t count = 0;
7838 bool limitedDevice = false;
7839 char prop[PROPERTY_VALUE_MAX];
7840 bool supportBurst = false;
7841
7842 supportBurst = supportBurstCapture(cameraId);
7843
7844 /* If sensor is YUV sensor (no raw support) or if per-frame control is not
7845 * guaranteed or if min fps of max resolution is less than 20 fps, its
7846 * advertised as limited device*/
7847 limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
7848 (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
7849 (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
7850 !supportBurst;
7851
7852 uint8_t supportedHwLvl = limitedDevice ?
7853 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007854#ifndef USE_HAL_3_3
7855 // LEVEL_3 - This device will support level 3.
7856 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
7857#else
Thierry Strudel3d639192016-09-09 11:52:26 -07007858 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007859#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007860
7861 staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
7862 &supportedHwLvl, 1);
7863
7864 bool facingBack = false;
7865 if ((gCamCapability[cameraId]->position == CAM_POSITION_BACK) ||
7866 (gCamCapability[cameraId]->position == CAM_POSITION_BACK_AUX)) {
7867 facingBack = true;
7868 }
7869 /*HAL 3 only*/
7870 staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
7871 &gCamCapability[cameraId]->min_focus_distance, 1);
7872
7873 staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
7874 &gCamCapability[cameraId]->hyper_focal_distance, 1);
7875
7876 /*should be using focal lengths but sensor doesn't provide that info now*/
7877 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
7878 &gCamCapability[cameraId]->focal_length,
7879 1);
7880
7881 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
7882 gCamCapability[cameraId]->apertures,
7883 MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
7884
7885 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
7886 gCamCapability[cameraId]->filter_densities,
7887 MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
7888
7889
7890 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
7891 (uint8_t *)gCamCapability[cameraId]->optical_stab_modes,
7892 MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count));
7893
7894 int32_t lens_shading_map_size[] = {
7895 MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
7896 MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
7897 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
7898 lens_shading_map_size,
7899 sizeof(lens_shading_map_size)/sizeof(int32_t));
7900
7901 staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
7902 gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
7903
7904 staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
7905 gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
7906
7907 staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
7908 &gCamCapability[cameraId]->max_frame_duration, 1);
7909
7910 camera_metadata_rational baseGainFactor = {
7911 gCamCapability[cameraId]->base_gain_factor.numerator,
7912 gCamCapability[cameraId]->base_gain_factor.denominator};
7913 staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
7914 &baseGainFactor, 1);
7915
7916 staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
7917 (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
7918
7919 int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
7920 gCamCapability[cameraId]->pixel_array_size.height};
7921 staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
7922 pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
7923
7924 int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
7925 gCamCapability[cameraId]->active_array_size.top,
7926 gCamCapability[cameraId]->active_array_size.width,
7927 gCamCapability[cameraId]->active_array_size.height};
7928 staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
7929 active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
7930
7931 staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
7932 &gCamCapability[cameraId]->white_level, 1);
7933
7934 staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
7935 gCamCapability[cameraId]->black_level_pattern, BLACK_LEVEL_PATTERN_CNT);
7936
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007937#ifndef USE_HAL_3_3
7938 bool hasBlackRegions = false;
7939 if (gCamCapability[cameraId]->optical_black_region_count > MAX_OPTICAL_BLACK_REGIONS) {
7940 LOGW("black_region_count: %d is bounded to %d",
7941 gCamCapability[cameraId]->optical_black_region_count, MAX_OPTICAL_BLACK_REGIONS);
7942 gCamCapability[cameraId]->optical_black_region_count = MAX_OPTICAL_BLACK_REGIONS;
7943 }
7944 if (gCamCapability[cameraId]->optical_black_region_count != 0) {
7945 int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
7946 for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i++) {
7947 opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
7948 }
7949 staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
7950 opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
7951 hasBlackRegions = true;
7952 }
7953#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007954 staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
7955 &gCamCapability[cameraId]->flash_charge_duration, 1);
7956
7957 staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
7958 &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
7959
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007960 // SOF timestamp is based on monotonic_boottime. So advertize REALTIME timesource
7961 // REALTIME defined in HAL3 API is same as linux's CLOCK_BOOTTIME
7962 // Ref: kernel/...../msm_isp_util.c: msm_isp_get_timestamp: get_monotonic_boottime
7963 uint8_t timestampSource = ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME;
Thierry Strudel3d639192016-09-09 11:52:26 -07007964 staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
7965 &timestampSource, 1);
7966
7967 staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
7968 &gCamCapability[cameraId]->histogram_size, 1);
7969
7970 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
7971 &gCamCapability[cameraId]->max_histogram_count, 1);
7972
7973 int32_t sharpness_map_size[] = {
7974 gCamCapability[cameraId]->sharpness_map_size.width,
7975 gCamCapability[cameraId]->sharpness_map_size.height};
7976
7977 staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
7978 sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
7979
7980 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
7981 &gCamCapability[cameraId]->max_sharpness_map_value, 1);
7982
7983 int32_t scalar_formats[] = {
7984 ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
7985 ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
7986 ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
7987 ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
7988 HAL_PIXEL_FORMAT_RAW10,
7989 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
7990 size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(int32_t);
7991 staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
7992 scalar_formats,
7993 scalar_formats_count);
7994
7995 int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
7996 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
7997 makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
7998 count, MAX_SIZES_CNT, available_processed_sizes);
7999 staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
8000 available_processed_sizes, count * 2);
8001
8002 int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
8003 count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
8004 makeTable(gCamCapability[cameraId]->raw_dim,
8005 count, MAX_SIZES_CNT, available_raw_sizes);
8006 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
8007 available_raw_sizes, count * 2);
8008
8009 int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
8010 count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
8011 makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
8012 count, MAX_SIZES_CNT, available_fps_ranges);
8013 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
8014 available_fps_ranges, count * 2);
8015
8016 camera_metadata_rational exposureCompensationStep = {
8017 gCamCapability[cameraId]->exp_compensation_step.numerator,
8018 gCamCapability[cameraId]->exp_compensation_step.denominator};
8019 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
8020 &exposureCompensationStep, 1);
8021
8022 Vector<uint8_t> availableVstabModes;
8023 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
8024 char eis_prop[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008025 bool eisSupported = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07008026 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008027 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07008028 uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008029 count = IS_TYPE_MAX;
8030 count = MIN(gCamCapability[cameraId]->supported_is_types_cnt, count);
8031 for (size_t i = 0; i < count; i++) {
8032 if ((gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
8033 (gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
8034 eisSupported = true;
8035 break;
8036 }
8037 }
8038 if (facingBack && eis_prop_set && eisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07008039 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
8040 }
8041 staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
8042 availableVstabModes.array(), availableVstabModes.size());
8043
8044 /*HAL 1 and HAL 3 common*/
8045 uint32_t zoomSteps = gCamCapability[cameraId]->zoom_ratio_tbl_cnt;
8046 uint32_t maxZoomStep = gCamCapability[cameraId]->zoom_ratio_tbl[zoomSteps - 1];
8047 uint32_t minZoomStep = 100; //as per HAL1/API1 spec
8048 float maxZoom = maxZoomStep/minZoomStep;
8049 staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
8050 &maxZoom, 1);
8051
8052 uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
8053 staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
8054
8055 int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
8056 if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
8057 max3aRegions[2] = 0; /* AF not supported */
8058 staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
8059 max3aRegions, 3);
8060
8061 /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
8062 memset(prop, 0, sizeof(prop));
8063 property_get("persist.camera.facedetect", prop, "1");
8064 uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
8065 LOGD("Support face detection mode: %d",
8066 supportedFaceDetectMode);
8067
8068 int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
Thierry Strudel04e026f2016-10-10 11:27:36 -07008069 /* support mode should be OFF if max number of face is 0 */
8070 if (maxFaces <= 0) {
8071 supportedFaceDetectMode = 0;
8072 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008073 Vector<uint8_t> availableFaceDetectModes;
8074 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
8075 if (supportedFaceDetectMode == 1) {
8076 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
8077 } else if (supportedFaceDetectMode == 2) {
8078 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
8079 } else if (supportedFaceDetectMode == 3) {
8080 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
8081 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
8082 } else {
8083 maxFaces = 0;
8084 }
8085 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
8086 availableFaceDetectModes.array(),
8087 availableFaceDetectModes.size());
8088 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
8089 (int32_t *)&maxFaces, 1);
8090
8091 int32_t exposureCompensationRange[] = {
8092 gCamCapability[cameraId]->exposure_compensation_min,
8093 gCamCapability[cameraId]->exposure_compensation_max};
8094 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
8095 exposureCompensationRange,
8096 sizeof(exposureCompensationRange)/sizeof(int32_t));
8097
8098 uint8_t lensFacing = (facingBack) ?
8099 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
8100 staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
8101
8102 staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
8103 available_thumbnail_sizes,
8104 sizeof(available_thumbnail_sizes)/sizeof(int32_t));
8105
8106 /*all sizes will be clubbed into this tag*/
8107 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
8108 /*android.scaler.availableStreamConfigurations*/
8109 Vector<int32_t> available_stream_configs;
8110 cam_dimension_t active_array_dim;
8111 active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
8112 active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
8113 /* Add input/output stream configurations for each scalar formats*/
8114 for (size_t j = 0; j < scalar_formats_count; j++) {
8115 switch (scalar_formats[j]) {
8116 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
8117 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
8118 case HAL_PIXEL_FORMAT_RAW10:
8119 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
8120 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
8121 addStreamConfig(available_stream_configs, scalar_formats[j],
8122 gCamCapability[cameraId]->raw_dim[i],
8123 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
8124 }
8125 break;
8126 case HAL_PIXEL_FORMAT_BLOB:
8127 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
8128 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
8129 addStreamConfig(available_stream_configs, scalar_formats[j],
8130 gCamCapability[cameraId]->picture_sizes_tbl[i],
8131 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
8132 }
8133 break;
8134 case HAL_PIXEL_FORMAT_YCbCr_420_888:
8135 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
8136 default:
8137 cam_dimension_t largest_picture_size;
8138 memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
8139 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
8140 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
8141 addStreamConfig(available_stream_configs, scalar_formats[j],
8142 gCamCapability[cameraId]->picture_sizes_tbl[i],
8143 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
8144 /* Book keep largest */
8145 if (gCamCapability[cameraId]->picture_sizes_tbl[i].width
8146 >= largest_picture_size.width &&
8147 gCamCapability[cameraId]->picture_sizes_tbl[i].height
8148 >= largest_picture_size.height)
8149 largest_picture_size = gCamCapability[cameraId]->picture_sizes_tbl[i];
8150 }
8151 /*For below 2 formats we also support i/p streams for reprocessing advertise those*/
8152 if (scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
8153 scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) {
8154 addStreamConfig(available_stream_configs, scalar_formats[j],
8155 largest_picture_size,
8156 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
8157 }
8158 break;
8159 }
8160 }
8161
8162 staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
8163 available_stream_configs.array(), available_stream_configs.size());
8164 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
8165 staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
8166
8167 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
8168 staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
8169
8170 /* android.scaler.availableMinFrameDurations */
8171 Vector<int64_t> available_min_durations;
8172 for (size_t j = 0; j < scalar_formats_count; j++) {
8173 switch (scalar_formats[j]) {
8174 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
8175 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
8176 case HAL_PIXEL_FORMAT_RAW10:
8177 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
8178 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
8179 available_min_durations.add(scalar_formats[j]);
8180 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
8181 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
8182 available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
8183 }
8184 break;
8185 default:
8186 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
8187 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
8188 available_min_durations.add(scalar_formats[j]);
8189 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
8190 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
8191 available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
8192 }
8193 break;
8194 }
8195 }
8196 staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
8197 available_min_durations.array(), available_min_durations.size());
8198
8199 Vector<int32_t> available_hfr_configs;
8200 for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
8201 int32_t fps = 0;
8202 switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
8203 case CAM_HFR_MODE_60FPS:
8204 fps = 60;
8205 break;
8206 case CAM_HFR_MODE_90FPS:
8207 fps = 90;
8208 break;
8209 case CAM_HFR_MODE_120FPS:
8210 fps = 120;
8211 break;
8212 case CAM_HFR_MODE_150FPS:
8213 fps = 150;
8214 break;
8215 case CAM_HFR_MODE_180FPS:
8216 fps = 180;
8217 break;
8218 case CAM_HFR_MODE_210FPS:
8219 fps = 210;
8220 break;
8221 case CAM_HFR_MODE_240FPS:
8222 fps = 240;
8223 break;
8224 case CAM_HFR_MODE_480FPS:
8225 fps = 480;
8226 break;
8227 case CAM_HFR_MODE_OFF:
8228 case CAM_HFR_MODE_MAX:
8229 default:
8230 break;
8231 }
8232
8233 /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
8234 if (fps >= MIN_FPS_FOR_BATCH_MODE) {
8235 /* For each HFR frame rate, need to advertise one variable fps range
8236 * and one fixed fps range per dimension. Eg: for 120 FPS, advertise [30, 120]
8237 * and [120, 120]. While camcorder preview alone is running [30, 120] is
8238 * set by the app. When video recording is started, [120, 120] is
8239 * set. This way sensor configuration does not change when recording
8240 * is started */
8241
8242 /* (width, height, fps_min, fps_max, batch_size_max) */
8243 for (size_t j = 0; j < gCamCapability[cameraId]->hfr_tbl[i].dim_cnt &&
8244 j < MAX_SIZES_CNT; j++) {
8245 available_hfr_configs.add(
8246 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
8247 available_hfr_configs.add(
8248 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
8249 available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
8250 available_hfr_configs.add(fps);
8251 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
8252
8253 /* (width, height, fps_min, fps_max, batch_size_max) */
8254 available_hfr_configs.add(
8255 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
8256 available_hfr_configs.add(
8257 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
8258 available_hfr_configs.add(fps);
8259 available_hfr_configs.add(fps);
8260 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
8261 }
8262 }
8263 }
8264 //Advertise HFR capability only if the property is set
8265 memset(prop, 0, sizeof(prop));
8266 property_get("persist.camera.hal3hfr.enable", prop, "1");
8267 uint8_t hfrEnable = (uint8_t)atoi(prop);
8268
8269 if(hfrEnable && available_hfr_configs.array()) {
8270 staticInfo.update(
8271 ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
8272 available_hfr_configs.array(), available_hfr_configs.size());
8273 }
8274
8275 int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
8276 staticInfo.update(ANDROID_JPEG_MAX_SIZE,
8277 &max_jpeg_size, 1);
8278
8279 uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
8280 size_t size = 0;
8281 count = CAM_EFFECT_MODE_MAX;
8282 count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
8283 for (size_t i = 0; i < count; i++) {
8284 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
8285 gCamCapability[cameraId]->supported_effects[i]);
8286 if (NAME_NOT_FOUND != val) {
8287 avail_effects[size] = (uint8_t)val;
8288 size++;
8289 }
8290 }
8291 staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
8292 avail_effects,
8293 size);
8294
8295 uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
8296 uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
8297 size_t supported_scene_modes_cnt = 0;
8298 count = CAM_SCENE_MODE_MAX;
8299 count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
8300 for (size_t i = 0; i < count; i++) {
8301 if (gCamCapability[cameraId]->supported_scene_modes[i] !=
8302 CAM_SCENE_MODE_OFF) {
8303 int val = lookupFwkName(SCENE_MODES_MAP,
8304 METADATA_MAP_SIZE(SCENE_MODES_MAP),
8305 gCamCapability[cameraId]->supported_scene_modes[i]);
8306 if (NAME_NOT_FOUND != val) {
8307 avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
8308 supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
8309 supported_scene_modes_cnt++;
8310 }
8311 }
8312 }
8313 staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
8314 avail_scene_modes,
8315 supported_scene_modes_cnt);
8316
8317 uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
8318 makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
8319 supported_scene_modes_cnt,
8320 CAM_SCENE_MODE_MAX,
8321 scene_mode_overrides,
8322 supported_indexes,
8323 cameraId);
8324
8325 if (supported_scene_modes_cnt == 0) {
8326 supported_scene_modes_cnt = 1;
8327 avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
8328 }
8329
8330 staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
8331 scene_mode_overrides, supported_scene_modes_cnt * 3);
8332
8333 uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
8334 ANDROID_CONTROL_MODE_AUTO,
8335 ANDROID_CONTROL_MODE_USE_SCENE_MODE};
8336 staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
8337 available_control_modes,
8338 3);
8339
8340 uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
8341 size = 0;
8342 count = CAM_ANTIBANDING_MODE_MAX;
8343 count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
8344 for (size_t i = 0; i < count; i++) {
8345 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
8346 gCamCapability[cameraId]->supported_antibandings[i]);
8347 if (NAME_NOT_FOUND != val) {
8348 avail_antibanding_modes[size] = (uint8_t)val;
8349 size++;
8350 }
8351
8352 }
8353 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
8354 avail_antibanding_modes,
8355 size);
8356
8357 uint8_t avail_abberation_modes[] = {
8358 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
8359 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
8360 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
8361 count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
8362 count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
8363 if (0 == count) {
8364 // If no aberration correction modes are available for a device, this advertise OFF mode
8365 size = 1;
8366 } else {
8367 // If count is not zero then atleast one among the FAST or HIGH quality is supported
8368 // So, advertize all 3 modes if atleast any one mode is supported as per the
8369 // new M requirement
8370 size = 3;
8371 }
8372 staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
8373 avail_abberation_modes,
8374 size);
8375
8376 uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
8377 size = 0;
8378 count = CAM_FOCUS_MODE_MAX;
8379 count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
8380 for (size_t i = 0; i < count; i++) {
8381 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
8382 gCamCapability[cameraId]->supported_focus_modes[i]);
8383 if (NAME_NOT_FOUND != val) {
8384 avail_af_modes[size] = (uint8_t)val;
8385 size++;
8386 }
8387 }
8388 staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
8389 avail_af_modes,
8390 size);
8391
8392 uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
8393 size = 0;
8394 count = CAM_WB_MODE_MAX;
8395 count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
8396 for (size_t i = 0; i < count; i++) {
8397 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
8398 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
8399 gCamCapability[cameraId]->supported_white_balances[i]);
8400 if (NAME_NOT_FOUND != val) {
8401 avail_awb_modes[size] = (uint8_t)val;
8402 size++;
8403 }
8404 }
8405 staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
8406 avail_awb_modes,
8407 size);
8408
8409 uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
8410 count = CAM_FLASH_FIRING_LEVEL_MAX;
8411 count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
8412 count);
8413 for (size_t i = 0; i < count; i++) {
8414 available_flash_levels[i] =
8415 gCamCapability[cameraId]->supported_firing_levels[i];
8416 }
8417 staticInfo.update(ANDROID_FLASH_FIRING_POWER,
8418 available_flash_levels, count);
8419
8420 uint8_t flashAvailable;
8421 if (gCamCapability[cameraId]->flash_available)
8422 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
8423 else
8424 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
8425 staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
8426 &flashAvailable, 1);
8427
8428 Vector<uint8_t> avail_ae_modes;
8429 count = CAM_AE_MODE_MAX;
8430 count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
8431 for (size_t i = 0; i < count; i++) {
8432 avail_ae_modes.add(gCamCapability[cameraId]->supported_ae_modes[i]);
8433 }
8434 if (flashAvailable) {
8435 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
8436 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
8437 }
8438 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
8439 avail_ae_modes.array(),
8440 avail_ae_modes.size());
8441
8442 int32_t sensitivity_range[2];
8443 sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
8444 sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
8445 staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
8446 sensitivity_range,
8447 sizeof(sensitivity_range) / sizeof(int32_t));
8448
8449 staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
8450 &gCamCapability[cameraId]->max_analog_sensitivity,
8451 1);
8452
8453 int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
8454 staticInfo.update(ANDROID_SENSOR_ORIENTATION,
8455 &sensor_orientation,
8456 1);
8457
8458 int32_t max_output_streams[] = {
8459 MAX_STALLING_STREAMS,
8460 MAX_PROCESSED_STREAMS,
8461 MAX_RAW_STREAMS};
8462 staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
8463 max_output_streams,
8464 sizeof(max_output_streams)/sizeof(max_output_streams[0]));
8465
8466 uint8_t avail_leds = 0;
8467 staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
8468 &avail_leds, 0);
8469
8470 uint8_t focus_dist_calibrated;
8471 int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
8472 gCamCapability[cameraId]->focus_dist_calibrated);
8473 if (NAME_NOT_FOUND != val) {
8474 focus_dist_calibrated = (uint8_t)val;
8475 staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
8476 &focus_dist_calibrated, 1);
8477 }
8478
8479 int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
8480 size = 0;
8481 count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
8482 MAX_TEST_PATTERN_CNT);
8483 for (size_t i = 0; i < count; i++) {
8484 int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
8485 gCamCapability[cameraId]->supported_test_pattern_modes[i]);
8486 if (NAME_NOT_FOUND != testpatternMode) {
8487 avail_testpattern_modes[size] = testpatternMode;
8488 size++;
8489 }
8490 }
8491 staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
8492 avail_testpattern_modes,
8493 size);
8494
8495 uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
8496 staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
8497 &max_pipeline_depth,
8498 1);
8499
8500 int32_t partial_result_count = PARTIAL_RESULT_COUNT;
8501 staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
8502 &partial_result_count,
8503 1);
8504
8505 int32_t max_stall_duration = MAX_REPROCESS_STALL;
8506 staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
8507
8508 Vector<uint8_t> available_capabilities;
8509 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
8510 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
8511 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
8512 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
8513 if (supportBurst) {
8514 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
8515 }
8516 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
8517 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
8518 if (hfrEnable && available_hfr_configs.array()) {
8519 available_capabilities.add(
8520 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
8521 }
8522
8523 if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
8524 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
8525 }
8526 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
8527 available_capabilities.array(),
8528 available_capabilities.size());
8529
8530 //aeLockAvailable to be set to true if capabilities has MANUAL_SENSOR or BURST_CAPTURE
8531 //Assumption is that all bayer cameras support MANUAL_SENSOR.
8532 uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
8533 ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
8534
8535 staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
8536 &aeLockAvailable, 1);
8537
8538 //awbLockAvailable to be set to true if capabilities has MANUAL_POST_PROCESSING or
8539 //BURST_CAPTURE. Assumption is that all bayer cameras support MANUAL_POST_PROCESSING.
8540 uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
8541 ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
8542
8543 staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
8544 &awbLockAvailable, 1);
8545
8546 int32_t max_input_streams = 1;
8547 staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
8548 &max_input_streams,
8549 1);
8550
8551 /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
8552 int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
8553 HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
8554 HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
8555 HAL_PIXEL_FORMAT_YCbCr_420_888};
8556 staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
8557 io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));
8558
8559 int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
8560 staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
8561 &max_latency,
8562 1);
8563
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008564#ifndef USE_HAL_3_3
8565 int32_t isp_sensitivity_range[2];
8566 isp_sensitivity_range[0] =
8567 gCamCapability[cameraId]->isp_sensitivity_range.min_sensitivity;
8568 isp_sensitivity_range[1] =
8569 gCamCapability[cameraId]->isp_sensitivity_range.max_sensitivity;
8570 staticInfo.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
8571 isp_sensitivity_range,
8572 sizeof(isp_sensitivity_range) / sizeof(isp_sensitivity_range[0]));
8573#endif
8574
Thierry Strudel3d639192016-09-09 11:52:26 -07008575 uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
8576 ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
8577 staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
8578 available_hot_pixel_modes,
8579 sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));
8580
8581 uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
8582 ANDROID_SHADING_MODE_FAST,
8583 ANDROID_SHADING_MODE_HIGH_QUALITY};
8584 staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
8585 available_shading_modes,
8586 3);
8587
8588 uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
8589 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
8590 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
8591 available_lens_shading_map_modes,
8592 2);
8593
8594 uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
8595 ANDROID_EDGE_MODE_FAST,
8596 ANDROID_EDGE_MODE_HIGH_QUALITY,
8597 ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
8598 staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
8599 available_edge_modes,
8600 sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));
8601
8602 uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
8603 ANDROID_NOISE_REDUCTION_MODE_FAST,
8604 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
8605 ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
8606 ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
8607 staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
8608 available_noise_red_modes,
8609 sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));
8610
8611 uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
8612 ANDROID_TONEMAP_MODE_FAST,
8613 ANDROID_TONEMAP_MODE_HIGH_QUALITY};
8614 staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
8615 available_tonemap_modes,
8616 sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));
8617
8618 uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
8619 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
8620 available_hot_pixel_map_modes,
8621 sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
8622
8623 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
8624 gCamCapability[cameraId]->reference_illuminant1);
8625 if (NAME_NOT_FOUND != val) {
8626 uint8_t fwkReferenceIlluminant = (uint8_t)val;
8627 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
8628 }
8629
8630 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
8631 gCamCapability[cameraId]->reference_illuminant2);
8632 if (NAME_NOT_FOUND != val) {
8633 uint8_t fwkReferenceIlluminant = (uint8_t)val;
8634 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
8635 }
8636
8637 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
8638 (void *)gCamCapability[cameraId]->forward_matrix1,
8639 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
8640
8641 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
8642 (void *)gCamCapability[cameraId]->forward_matrix2,
8643 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
8644
8645 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
8646 (void *)gCamCapability[cameraId]->color_transform1,
8647 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
8648
8649 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
8650 (void *)gCamCapability[cameraId]->color_transform2,
8651 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
8652
8653 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
8654 (void *)gCamCapability[cameraId]->calibration_transform1,
8655 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
8656
8657 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
8658 (void *)gCamCapability[cameraId]->calibration_transform2,
8659 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
8660
8661 int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
8662 ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
8663 ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
8664 ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
8665 ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
8666 ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
8667 ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
8668 ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
8669 ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
8670 ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
8671 ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
8672 ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
8673 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
8674 ANDROID_JPEG_GPS_COORDINATES,
8675 ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
8676 ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
8677 ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
8678 ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
8679 ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
8680 ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
8681 ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
8682 ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
8683 ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
8684 ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008685#ifndef USE_HAL_3_3
8686 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
8687#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07008688 ANDROID_STATISTICS_FACE_DETECT_MODE,
8689 ANDROID_STATISTICS_HISTOGRAM_MODE, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
8690 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
8691 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Samuel Ha68ba5172016-12-15 18:41:12 -08008692 ANDROID_BLACK_LEVEL_LOCK,
8693 /* DevCamDebug metadata request_keys_basic */
8694 DEVCAMDEBUG_META_ENABLE,
8695 /* DevCamDebug metadata end */
8696 };
Thierry Strudel3d639192016-09-09 11:52:26 -07008697
8698 size_t request_keys_cnt =
8699 sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
8700 Vector<int32_t> available_request_keys;
8701 available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
8702 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
8703 available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
8704 }
8705
8706 staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
8707 available_request_keys.array(), available_request_keys.size());
8708
8709 int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
8710 ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
8711 ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
8712 ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
8713 ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
8714 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
8715 ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
8716 ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
8717 ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
8718 ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
8719 ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
8720 ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
8721 ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
8722 ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
8723 ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
8724 ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
8725 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
8726 ANDROID_STATISTICS_FACE_DETECT_MODE, ANDROID_STATISTICS_HISTOGRAM_MODE,
8727 ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
8728 ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
8729 ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008730 ANDROID_STATISTICS_FACE_SCORES,
8731#ifndef USE_HAL_3_3
8732 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
8733#endif
Shuzhen Wange763e802016-03-31 10:24:29 -07008734 NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE,
Samuel Ha68ba5172016-12-15 18:41:12 -08008735 // DevCamDebug metadata result_keys_basic
8736 DEVCAMDEBUG_META_ENABLE,
8737 // DevCamDebug metadata result_keys AF
8738 DEVCAMDEBUG_AF_LENS_POSITION,
8739 DEVCAMDEBUG_AF_TOF_CONFIDENCE,
8740 DEVCAMDEBUG_AF_TOF_DISTANCE,
8741 DEVCAMDEBUG_AF_LUMA,
8742 DEVCAMDEBUG_AF_HAF_STATE,
8743 DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
8744 DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
8745 DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
8746 DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
8747 DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
8748 DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
8749 DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
8750 DEVCAMDEBUG_AF_MONITOR_REFOCUS,
8751 DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
8752 DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
8753 DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
8754 DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
8755 DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
8756 DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
8757 DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
8758 DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
8759 DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
8760 DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
8761 DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
8762 DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
8763 DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
8764 DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
8765 // DevCamDebug metadata result_keys AEC
8766 DEVCAMDEBUG_AEC_TARGET_LUMA,
8767 DEVCAMDEBUG_AEC_COMP_LUMA,
8768 DEVCAMDEBUG_AEC_AVG_LUMA,
8769 DEVCAMDEBUG_AEC_CUR_LUMA,
8770 DEVCAMDEBUG_AEC_LINECOUNT,
8771 DEVCAMDEBUG_AEC_REAL_GAIN,
8772 DEVCAMDEBUG_AEC_EXP_INDEX,
8773 DEVCAMDEBUG_AEC_LUX_IDX,
8774 // DevCamDebug metadata result_keys AWB
8775 DEVCAMDEBUG_AWB_R_GAIN,
8776 DEVCAMDEBUG_AWB_G_GAIN,
8777 DEVCAMDEBUG_AWB_B_GAIN,
8778 DEVCAMDEBUG_AWB_CCT,
8779 DEVCAMDEBUG_AWB_DECISION,
8780 /* DevCamDebug metadata end */
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008781 };
8782
Thierry Strudel3d639192016-09-09 11:52:26 -07008783 size_t result_keys_cnt =
8784 sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
8785
8786 Vector<int32_t> available_result_keys;
8787 available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
8788 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
8789 available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
8790 }
8791 if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
8792 available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
8793 available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
8794 }
8795 if (supportedFaceDetectMode == 1) {
8796 available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
8797 available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
8798 } else if ((supportedFaceDetectMode == 2) ||
8799 (supportedFaceDetectMode == 3)) {
8800 available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
8801 available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
8802 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008803#ifndef USE_HAL_3_3
8804 if (hasBlackRegions) {
8805 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
8806 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL);
8807 }
8808#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07008809 staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
8810 available_result_keys.array(), available_result_keys.size());
8811
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008812 int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
Thierry Strudel3d639192016-09-09 11:52:26 -07008813 ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
8814 ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
8815 ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
8816 ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
8817 ANDROID_SCALER_CROPPING_TYPE,
8818 ANDROID_SYNC_MAX_LATENCY,
8819 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
8820 ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
8821 ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
8822 ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
8823 ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
8824 ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
8825 ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
8826 ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
8827 ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
8828 ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
8829 ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
8830 ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
8831 ANDROID_LENS_FACING,
8832 ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
8833 ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
8834 ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
8835 ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
8836 ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
8837 ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
8838 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
8839 /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
8840 ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
8841 ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
8842 ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
8843 ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
8844 ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
8845 ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
8846 ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
8847 ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
8848 ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
8849 ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
8850 ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
8851 ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
8852 ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
8853 ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
8854 ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
8855 ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
8856 ANDROID_EDGE_AVAILABLE_EDGE_MODES,
8857 ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
8858 ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
8859 ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
8860 ANDROID_TONEMAP_MAX_CURVE_POINTS,
8861 ANDROID_CONTROL_AVAILABLE_MODES,
8862 ANDROID_CONTROL_AE_LOCK_AVAILABLE,
8863 ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
8864 ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
8865 ANDROID_SHADING_AVAILABLE_MODES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008866 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
8867#ifndef USE_HAL_3_3
8868 ANDROID_SENSOR_OPAQUE_RAW_SIZE,
8869 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
8870#endif
8871 };
8872
8873 Vector<int32_t> available_characteristics_keys;
8874 available_characteristics_keys.appendArray(characteristics_keys_basic,
8875 sizeof(characteristics_keys_basic)/sizeof(int32_t));
8876#ifndef USE_HAL_3_3
8877 if (hasBlackRegions) {
8878 available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
8879 }
8880#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07008881 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008882 available_characteristics_keys.array(),
8883 available_characteristics_keys.size());
Thierry Strudel3d639192016-09-09 11:52:26 -07008884
8885 /*available stall durations depend on the hw + sw and will be different for different devices */
8886 /*have to add for raw after implementation*/
8887 int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
8888 size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
8889
8890 Vector<int64_t> available_stall_durations;
8891 for (uint32_t j = 0; j < stall_formats_count; j++) {
8892 if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
8893 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
8894 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
8895 available_stall_durations.add(stall_formats[j]);
8896 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
8897 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
8898 available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
8899 }
8900 } else {
8901 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
8902 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
8903 available_stall_durations.add(stall_formats[j]);
8904 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
8905 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
8906 available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
8907 }
8908 }
8909 }
8910 staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
8911 available_stall_durations.array(),
8912 available_stall_durations.size());
8913
8914 //QCAMERA3_OPAQUE_RAW
8915 uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
8916 cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
8917 switch (gCamCapability[cameraId]->opaque_raw_fmt) {
8918 case LEGACY_RAW:
8919 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
8920 fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
8921 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
8922 fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
8923 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
8924 fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
8925 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
8926 break;
8927 case MIPI_RAW:
8928 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
8929 fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
8930 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
8931 fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
8932 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
8933 fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
8934 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
8935 break;
8936 default:
8937 LOGE("unknown opaque_raw_format %d",
8938 gCamCapability[cameraId]->opaque_raw_fmt);
8939 break;
8940 }
8941 staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
8942
8943 Vector<int32_t> strides;
8944 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
8945 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
8946 cam_stream_buf_plane_info_t buf_planes;
8947 strides.add(gCamCapability[cameraId]->raw_dim[i].width);
8948 strides.add(gCamCapability[cameraId]->raw_dim[i].height);
8949 mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
8950 &gCamCapability[cameraId]->padding_info, &buf_planes);
8951 strides.add(buf_planes.plane_info.mp[0].stride);
8952 }
8953 staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
8954 strides.size());
8955
Thierry Strudel04e026f2016-10-10 11:27:36 -07008956 //Video HDR default
8957 if ((gCamCapability[cameraId]->qcom_supported_feature_mask) &
8958 (CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR |
8959 CAM_QCOM_FEATURE_ZIGZAG_VIDEO_HDR | CAM_QCOM_FEATURE_SENSOR_HDR)) {
8960 int32_t vhdr_mode[] = {
8961 QCAMERA3_VIDEO_HDR_MODE_OFF,
8962 QCAMERA3_VIDEO_HDR_MODE_ON};
8963
8964 size_t vhdr_mode_count = sizeof(vhdr_mode) / sizeof(int32_t);
8965 staticInfo.update(QCAMERA3_AVAILABLE_VIDEO_HDR_MODES,
8966 vhdr_mode, vhdr_mode_count);
8967 }
8968
Thierry Strudel3d639192016-09-09 11:52:26 -07008969 staticInfo.update(QCAMERA3_DUALCAM_CALIB_META_DATA_BLOB,
8970 (const uint8_t*)&gCamCapability[cameraId]->related_cam_calibration,
8971 sizeof(gCamCapability[cameraId]->related_cam_calibration));
8972
8973 uint8_t isMonoOnly =
8974 (gCamCapability[cameraId]->color_arrangement == CAM_FILTER_ARRANGEMENT_Y);
8975 staticInfo.update(QCAMERA3_SENSOR_IS_MONO_ONLY,
8976 &isMonoOnly, 1);
8977
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008978#ifndef USE_HAL_3_3
8979 Vector<int32_t> opaque_size;
8980 for (size_t j = 0; j < scalar_formats_count; j++) {
8981 if (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE) {
8982 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
8983 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
8984 cam_stream_buf_plane_info_t buf_planes;
8985
8986 rc = mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
8987 &gCamCapability[cameraId]->padding_info, &buf_planes);
8988
8989 if (rc == 0) {
8990 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].width);
8991 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].height);
8992 opaque_size.add(buf_planes.plane_info.frame_len);
8993 }else {
8994 LOGE("raw frame calculation failed!");
8995 }
8996 }
8997 }
8998 }
8999
9000 if ((opaque_size.size() > 0) &&
9001 (opaque_size.size() % PER_CONFIGURATION_SIZE_3 == 0))
9002 staticInfo.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, opaque_size.array(), opaque_size.size());
9003 else
9004 LOGW("Warning: ANDROID_SENSOR_OPAQUE_RAW_SIZE is using rough estimation(2 bytes/pixel)");
9005#endif
9006
Thierry Strudel04e026f2016-10-10 11:27:36 -07009007 if (gCamCapability[cameraId]->supported_ir_mode_cnt > 0) {
9008 int32_t avail_ir_modes[CAM_IR_MODE_MAX];
9009 size = 0;
9010 count = CAM_IR_MODE_MAX;
9011 count = MIN(gCamCapability[cameraId]->supported_ir_mode_cnt, count);
9012 for (size_t i = 0; i < count; i++) {
9013 int val = lookupFwkName(IR_MODES_MAP, METADATA_MAP_SIZE(IR_MODES_MAP),
9014 gCamCapability[cameraId]->supported_ir_modes[i]);
9015 if (NAME_NOT_FOUND != val) {
9016 avail_ir_modes[size] = (int32_t)val;
9017 size++;
9018 }
9019 }
9020 staticInfo.update(QCAMERA3_IR_AVAILABLE_MODES,
9021 avail_ir_modes, size);
9022 }
9023
Thierry Strudel295a0ca2016-11-03 18:38:47 -07009024 if (gCamCapability[cameraId]->supported_instant_aec_modes_cnt > 0) {
9025 int32_t available_instant_aec_modes[CAM_AEC_CONVERGENCE_MAX];
9026 size = 0;
9027 count = CAM_AEC_CONVERGENCE_MAX;
9028 count = MIN(gCamCapability[cameraId]->supported_instant_aec_modes_cnt, count);
9029 for (size_t i = 0; i < count; i++) {
9030 int val = lookupFwkName(INSTANT_AEC_MODES_MAP, METADATA_MAP_SIZE(INSTANT_AEC_MODES_MAP),
9031 gCamCapability[cameraId]->supported_instant_aec_modes[i]);
9032 if (NAME_NOT_FOUND != val) {
9033 available_instant_aec_modes[size] = (int32_t)val;
9034 size++;
9035 }
9036 }
9037 staticInfo.update(QCAMERA3_INSTANT_AEC_AVAILABLE_MODES,
9038 available_instant_aec_modes, size);
9039 }
9040
Thierry Strudel3d639192016-09-09 11:52:26 -07009041 gStaticMetadata[cameraId] = staticInfo.release();
9042 return rc;
9043}
9044
/*===========================================================================
 * FUNCTION   : makeTable
 *
 * DESCRIPTION: make a flattened table of (width, height) pairs from a
 *              dimension table
 *
 * PARAMETERS :
 *   @dimTable  : input array of dimensions
 *   @size      : number of valid entries in dimTable
 *   @max_size  : maximum number of entries to emit
 *   @sizeTable : output array, receives width/height pairs
 *
 *==========================================================================*/
9054void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
9055 size_t max_size, int32_t *sizeTable)
9056{
9057 size_t j = 0;
9058 if (size > max_size) {
9059 size = max_size;
9060 }
9061 for (size_t i = 0; i < size; i++) {
9062 sizeTable[j] = dimTable[i].width;
9063 sizeTable[j+1] = dimTable[i].height;
9064 j+=2;
9065 }
9066}
9067
9068/*===========================================================================
9069 * FUNCTION : makeFPSTable
9070 *
9071 * DESCRIPTION: make a table of fps ranges
9072 *
9073 * PARAMETERS :
9074 *
9075 *==========================================================================*/
9076void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
9077 size_t max_size, int32_t *fpsRangesTable)
9078{
9079 size_t j = 0;
9080 if (size > max_size) {
9081 size = max_size;
9082 }
9083 for (size_t i = 0; i < size; i++) {
9084 fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
9085 fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
9086 j+=2;
9087 }
9088}
9089
/*===========================================================================
 * FUNCTION   : makeOverridesList
 *
 * DESCRIPTION: make a list of scene mode overrides (AE/AWB/AF triples)
 *              limited to the scene modes supported by the framework
 *
 * PARAMETERS :
 *   @overridesTable    : per-scene-mode override settings from the backend
 *   @size              : number of framework-supported scene modes
 *   @max_size          : maximum number of scene modes to emit
 *   @overridesList     : output array of (AE, AWB, AF) override triples
 *   @supported_indexes : maps framework scene-mode order to overridesTable rows
 *   @camera_id         : camera whose capabilities are consulted
 *
 *==========================================================================*/
void QCamera3HardwareInterface::makeOverridesList(
        cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
        uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
{
    /*daemon will give a list of overrides for all scene modes.
      However we should send the fwk only the overrides for the scene modes
      supported by the framework*/
    // Each framework scene mode produces a triple in overridesList:
    //   [j]   = AE override, [j+1] = AWB override, [j+2] = AF override.
    size_t j = 0;
    if (size > max_size) {
        // Clamp so we never write past the caller's output buffer.
        size = max_size;
    }
    size_t focus_count = CAM_FOCUS_MODE_MAX;
    focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
            focus_count);
    for (size_t i = 0; i < size; i++) {
        bool supt = false;
        // Translate the framework's i-th scene mode to its backend table row.
        size_t index = supported_indexes[i];

        // AE override: auto-flash when the camera has a flash unit, plain ON
        // otherwise. The backend's AE value is intentionally not consulted.
        overridesList[j] = gCamCapability[camera_id]->flash_available ?
                ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;

        // AWB override: map the backend AWB mode to the framework enum.
        // NOTE(review): if the lookup fails, overridesList[j+1] is left
        // unwritten — presumably the caller zero-initializes the buffer;
        // verify at the call site.
        int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
                METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
                overridesTable[index].awb_mode);
        if (NAME_NOT_FOUND != val) {
            overridesList[j+1] = (uint8_t)val;
        }

        // AF override: only advertise the backend's AF mode if this camera
        // actually supports it; otherwise fall back to AF OFF.
        uint8_t focus_override = overridesTable[index].af_mode;
        for (size_t k = 0; k < focus_count; k++) {
            if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
                supt = true;
                break;
            }
        }
        if (supt) {
            val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
                    focus_override);
            if (NAME_NOT_FOUND != val) {
                // Same caveat as AWB: slot untouched on lookup failure.
                overridesList[j+2] = (uint8_t)val;
            }
        } else {
            overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
        }
        j+=3;
    }
}
9143
9144/*===========================================================================
9145 * FUNCTION : filterJpegSizes
9146 *
9147 * DESCRIPTION: Returns the supported jpeg sizes based on the max dimension that
9148 * could be downscaled to
9149 *
9150 * PARAMETERS :
9151 *
9152 * RETURN : length of jpegSizes array
9153 *==========================================================================*/
9154
9155size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
9156 size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
9157 uint8_t downscale_factor)
9158{
9159 if (0 == downscale_factor) {
9160 downscale_factor = 1;
9161 }
9162
9163 int32_t min_width = active_array_size.width / downscale_factor;
9164 int32_t min_height = active_array_size.height / downscale_factor;
9165 size_t jpegSizesCnt = 0;
9166 if (processedSizesCnt > maxCount) {
9167 processedSizesCnt = maxCount;
9168 }
9169 for (size_t i = 0; i < processedSizesCnt; i+=2) {
9170 if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
9171 jpegSizes[jpegSizesCnt] = processedSizes[i];
9172 jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
9173 jpegSizesCnt += 2;
9174 }
9175 }
9176 return jpegSizesCnt;
9177}
9178
9179/*===========================================================================
9180 * FUNCTION : computeNoiseModelEntryS
9181 *
9182 * DESCRIPTION: function to map a given sensitivity to the S noise
9183 * model parameters in the DNG noise model.
9184 *
9185 * PARAMETERS : sens : the sensor sensitivity
9186 *
9187 ** RETURN : S (sensor amplification) noise
9188 *
9189 *==========================================================================*/
9190double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
9191 double s = gCamCapability[mCameraId]->gradient_S * sens +
9192 gCamCapability[mCameraId]->offset_S;
9193 return ((s < 0.0) ? 0.0 : s);
9194}
9195
9196/*===========================================================================
9197 * FUNCTION : computeNoiseModelEntryO
9198 *
9199 * DESCRIPTION: function to map a given sensitivity to the O noise
9200 * model parameters in the DNG noise model.
9201 *
9202 * PARAMETERS : sens : the sensor sensitivity
9203 *
9204 ** RETURN : O (sensor readout) noise
9205 *
9206 *==========================================================================*/
9207double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
9208 int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
9209 double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
9210 1.0 : (1.0 * sens / max_analog_sens);
9211 double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
9212 gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
9213 return ((o < 0.0) ? 0.0 : o);
9214}
9215
9216/*===========================================================================
9217 * FUNCTION : getSensorSensitivity
9218 *
9219 * DESCRIPTION: convert iso_mode to an integer value
9220 *
9221 * PARAMETERS : iso_mode : the iso_mode supported by sensor
9222 *
9223 ** RETURN : sensitivity supported by sensor
9224 *
9225 *==========================================================================*/
9226int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
9227{
9228 int32_t sensitivity;
9229
9230 switch (iso_mode) {
9231 case CAM_ISO_MODE_100:
9232 sensitivity = 100;
9233 break;
9234 case CAM_ISO_MODE_200:
9235 sensitivity = 200;
9236 break;
9237 case CAM_ISO_MODE_400:
9238 sensitivity = 400;
9239 break;
9240 case CAM_ISO_MODE_800:
9241 sensitivity = 800;
9242 break;
9243 case CAM_ISO_MODE_1600:
9244 sensitivity = 1600;
9245 break;
9246 default:
9247 sensitivity = -1;
9248 break;
9249 }
9250 return sensitivity;
9251}
9252
9253/*===========================================================================
9254 * FUNCTION : getCamInfo
9255 *
9256 * DESCRIPTION: query camera capabilities
9257 *
9258 * PARAMETERS :
9259 * @cameraId : camera Id
9260 * @info : camera info struct to be filled in with camera capabilities
9261 *
9262 * RETURN : int type of status
9263 * NO_ERROR -- success
9264 * none-zero failure code
9265 *==========================================================================*/
int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
        struct camera_info *info)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_GET_CAM_INFO);
    int rc = 0;

    // gCamLock guards the lazily-built, process-wide capability and
    // static-metadata caches shared by all camera instances.
    pthread_mutex_lock(&gCamLock);
    if (NULL == gCamCapability[cameraId]) {
        // First query for this camera: fetch capabilities from the backend.
        rc = initCapabilities(cameraId);
        if (rc < 0) {
            pthread_mutex_unlock(&gCamLock);
            return rc;
        }
    }

    if (NULL == gStaticMetadata[cameraId]) {
        // Build the static characteristics blob once per camera.
        rc = initStaticMetadata(cameraId);
        if (rc < 0) {
            pthread_mutex_unlock(&gCamLock);
            return rc;
        }
    }

    // Both main and aux sensors on the same side report the same facing
    // to the framework.
    switch(gCamCapability[cameraId]->position) {
    case CAM_POSITION_BACK:
    case CAM_POSITION_BACK_AUX:
        info->facing = CAMERA_FACING_BACK;
        break;

    case CAM_POSITION_FRONT:
    case CAM_POSITION_FRONT_AUX:
        info->facing = CAMERA_FACING_FRONT;
        break;

    default:
        // Unknown position: report the error but still fill in the rest of
        // the info struct below; rc carries the failure back to the caller.
        LOGE("Unknown position type %d for camera id:%d",
                gCamCapability[cameraId]->position, cameraId);
        rc = -1;
        break;
    }


    info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
#ifndef USE_HAL_3_3
    info->device_version = CAMERA_DEVICE_API_VERSION_3_4;
#else
    info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
#endif
    info->static_camera_characteristics = gStaticMetadata[cameraId];

    //For now assume both cameras can operate independently.
    info->conflicting_devices = NULL;
    info->conflicting_devices_length = 0;

    //resource cost is 100 * MIN(1.0, m/M),
    //where m is throughput requirement with maximum stream configuration
    //and M is CPP maximum throughput.
    // Find the highest max_fps the sensor advertises across all fps ranges.
    float max_fps = 0.0;
    for (uint32_t i = 0;
            i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
        if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
            max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
    }
    // Worst-case pixel throughput (streams * active array * fps) relative
    // to the CPP's maximum bandwidth, capped at 100%.
    float ratio = 1.0 * MAX_PROCESSED_STREAMS *
            gCamCapability[cameraId]->active_array_size.width *
            gCamCapability[cameraId]->active_array_size.height * max_fps /
            gCamCapability[cameraId]->max_pixel_bandwidth;
    info->resource_cost = 100 * MIN(1.0, ratio);
    LOGI("camera %d resource cost is %d", cameraId,
            info->resource_cost);

    pthread_mutex_unlock(&gCamLock);
    return rc;
}
9340
9341/*===========================================================================
9342 * FUNCTION : translateCapabilityToMetadata
9343 *
9344 * DESCRIPTION: translate the capability into camera_metadata_t
9345 *
9346 * PARAMETERS : type of the request
9347 *
9348 *
9349 * RETURN : success: camera_metadata_t*
9350 * failure: NULL
9351 *
9352 *==========================================================================*/
9353camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
9354{
9355 if (mDefaultMetadata[type] != NULL) {
9356 return mDefaultMetadata[type];
9357 }
9358 //first time we are handling this request
9359 //fill up the metadata structure using the wrapper class
9360 CameraMetadata settings;
9361 //translate from cam_capability_t to camera_metadata_tag_t
9362 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
9363 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
9364 int32_t defaultRequestID = 0;
9365 settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
9366
9367 /* OIS disable */
9368 char ois_prop[PROPERTY_VALUE_MAX];
9369 memset(ois_prop, 0, sizeof(ois_prop));
9370 property_get("persist.camera.ois.disable", ois_prop, "0");
9371 uint8_t ois_disable = (uint8_t)atoi(ois_prop);
9372
9373 /* Force video to use OIS */
9374 char videoOisProp[PROPERTY_VALUE_MAX];
9375 memset(videoOisProp, 0, sizeof(videoOisProp));
9376 property_get("persist.camera.ois.video", videoOisProp, "1");
9377 uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
Shuzhen Wang19463d72016-03-08 11:09:52 -08009378
9379 // Hybrid AE enable/disable
9380 char hybrid_ae_prop[PROPERTY_VALUE_MAX];
9381 memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
9382 property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
9383 const uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);
9384
Thierry Strudel3d639192016-09-09 11:52:26 -07009385 uint8_t controlIntent = 0;
9386 uint8_t focusMode;
9387 uint8_t vsMode;
9388 uint8_t optStabMode;
9389 uint8_t cacMode;
9390 uint8_t edge_mode;
9391 uint8_t noise_red_mode;
9392 uint8_t tonemap_mode;
9393 bool highQualityModeEntryAvailable = FALSE;
9394 bool fastModeEntryAvailable = FALSE;
9395 vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
9396 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
9397 switch (type) {
9398 case CAMERA3_TEMPLATE_PREVIEW:
9399 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
9400 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
9401 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
9402 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
9403 edge_mode = ANDROID_EDGE_MODE_FAST;
9404 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
9405 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
9406 break;
9407 case CAMERA3_TEMPLATE_STILL_CAPTURE:
9408 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
9409 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
9410 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
9411 edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
9412 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
9413 tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
9414 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
9415 // Order of priority for default CAC is HIGH Quality -> FAST -> OFF
9416 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
9417 if (gCamCapability[mCameraId]->aberration_modes[i] ==
9418 CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
9419 highQualityModeEntryAvailable = TRUE;
9420 } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
9421 CAM_COLOR_CORRECTION_ABERRATION_FAST) {
9422 fastModeEntryAvailable = TRUE;
9423 }
9424 }
9425 if (highQualityModeEntryAvailable) {
9426 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
9427 } else if (fastModeEntryAvailable) {
9428 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
9429 }
9430 break;
9431 case CAMERA3_TEMPLATE_VIDEO_RECORD:
9432 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
9433 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
9434 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -07009435 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
9436 edge_mode = ANDROID_EDGE_MODE_FAST;
9437 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
9438 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
9439 if (forceVideoOis)
9440 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
9441 break;
9442 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
9443 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
9444 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
9445 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -07009446 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
9447 edge_mode = ANDROID_EDGE_MODE_FAST;
9448 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
9449 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
9450 if (forceVideoOis)
9451 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
9452 break;
9453 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
9454 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
9455 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
9456 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
9457 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
9458 edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
9459 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
9460 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
9461 break;
9462 case CAMERA3_TEMPLATE_MANUAL:
9463 edge_mode = ANDROID_EDGE_MODE_FAST;
9464 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
9465 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
9466 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
9467 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
9468 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
9469 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
9470 break;
9471 default:
9472 edge_mode = ANDROID_EDGE_MODE_FAST;
9473 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
9474 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
9475 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
9476 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
9477 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
9478 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
9479 break;
9480 }
Thierry Strudel04e026f2016-10-10 11:27:36 -07009481 // Set CAC to OFF if underlying device doesn't support
9482 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
9483 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
9484 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009485 settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
9486 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
9487 settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
9488 if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
9489 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
9490 }
9491 settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
9492
9493 if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
9494 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
9495 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
9496 else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
9497 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
9498 || ois_disable)
9499 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
9500 settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
9501
9502 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
9503 &gCamCapability[mCameraId]->exposure_compensation_default, 1);
9504
9505 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
9506 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
9507
9508 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
9509 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
9510
9511 static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
9512 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
9513
9514 static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
9515 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
9516
9517 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
9518 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
9519
9520 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
9521 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
9522
9523 static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
9524 settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
9525
9526 /*flash*/
9527 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
9528 settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
9529
9530 static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
9531 settings.update(ANDROID_FLASH_FIRING_POWER,
9532 &flashFiringLevel, 1);
9533
9534 /* lens */
9535 float default_aperture = gCamCapability[mCameraId]->apertures[0];
9536 settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
9537
9538 if (gCamCapability[mCameraId]->filter_densities_count) {
9539 float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
9540 settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
9541 gCamCapability[mCameraId]->filter_densities_count);
9542 }
9543
9544 float default_focal_length = gCamCapability[mCameraId]->focal_length;
9545 settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
9546
9547 if (focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
9548 float default_focus_distance = 0;
9549 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &default_focus_distance, 1);
9550 }
9551
9552 static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
9553 settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
9554
9555 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
9556 settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
9557
9558 static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
9559 settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
9560
9561 /* face detection (default to OFF) */
9562 static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
9563 settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
9564
9565 static const uint8_t histogramMode = ANDROID_STATISTICS_HISTOGRAM_MODE_OFF;
9566 settings.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &histogramMode, 1);
9567
9568 static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
9569 settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
9570
9571 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
9572 settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
9573
9574 static const uint8_t lensShadingMode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
9575 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &lensShadingMode, 1);
9576
9577 static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
9578 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
9579
9580 /* Exposure time(Update the Min Exposure Time)*/
9581 int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
9582 settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
9583
9584 /* frame duration */
9585 static const int64_t default_frame_duration = NSEC_PER_33MSEC;
9586 settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
9587
9588 /* sensitivity */
9589 static const int32_t default_sensitivity = 100;
9590 settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009591#ifndef USE_HAL_3_3
9592 static const int32_t default_isp_sensitivity =
9593 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
9594 settings.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &default_isp_sensitivity, 1);
9595#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009596
9597 /*edge mode*/
9598 settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
9599
9600 /*noise reduction mode*/
9601 settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
9602
9603 /*color correction mode*/
9604 static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
9605 settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
9606
9607 /*transform matrix mode*/
9608 settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
9609
9610 int32_t scaler_crop_region[4];
9611 scaler_crop_region[0] = 0;
9612 scaler_crop_region[1] = 0;
9613 scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
9614 scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
9615 settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
9616
9617 static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
9618 settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
9619
9620 /*focus distance*/
9621 float focus_distance = 0.0;
9622 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
9623
9624 /*target fps range: use maximum range for picture, and maximum fixed range for video*/
Thierry Strudele80ad7c2016-12-06 10:16:27 -08009625 /* Restrict template max_fps to 30 */
Thierry Strudel3d639192016-09-09 11:52:26 -07009626 float max_range = 0.0;
9627 float max_fixed_fps = 0.0;
9628 int32_t fps_range[2] = {0, 0};
9629 for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
9630 i++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08009631 if (gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps >
9632 TEMPLATE_MAX_PREVIEW_FPS) {
9633 continue;
9634 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009635 float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
9636 gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
9637 if (type == CAMERA3_TEMPLATE_PREVIEW ||
9638 type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
9639 type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
9640 if (range > max_range) {
9641 fps_range[0] =
9642 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
9643 fps_range[1] =
9644 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
9645 max_range = range;
9646 }
9647 } else {
9648 if (range < 0.01 && max_fixed_fps <
9649 gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
9650 fps_range[0] =
9651 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
9652 fps_range[1] =
9653 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
9654 max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
9655 }
9656 }
9657 }
9658 settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
9659
9660 /*precapture trigger*/
9661 uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
9662 settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
9663
9664 /*af trigger*/
9665 uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
9666 settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
9667
9668 /* ae & af regions */
9669 int32_t active_region[] = {
9670 gCamCapability[mCameraId]->active_array_size.left,
9671 gCamCapability[mCameraId]->active_array_size.top,
9672 gCamCapability[mCameraId]->active_array_size.left +
9673 gCamCapability[mCameraId]->active_array_size.width,
9674 gCamCapability[mCameraId]->active_array_size.top +
9675 gCamCapability[mCameraId]->active_array_size.height,
9676 0};
9677 settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
9678 sizeof(active_region) / sizeof(active_region[0]));
9679 settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
9680 sizeof(active_region) / sizeof(active_region[0]));
9681
9682 /* black level lock */
9683 uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
9684 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
9685
9686 /* lens shading map mode */
9687 uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
9688 if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
9689 shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
9690 }
9691 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
9692
9693 //special defaults for manual template
9694 if (type == CAMERA3_TEMPLATE_MANUAL) {
9695 static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
9696 settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
9697
9698 static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
9699 settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
9700
9701 static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
9702 settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
9703
9704 static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
9705 settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
9706
9707 static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
9708 settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
9709
9710 static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
9711 settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
9712 }
9713
9714
9715 /* TNR
9716 * We'll use this location to determine which modes TNR will be set.
9717 * We will enable TNR to be on if either of the Preview/Video stream requires TNR
9718 * This is not to be confused with linking on a per stream basis that decision
9719 * is still on per-session basis and will be handled as part of config stream
9720 */
9721 uint8_t tnr_enable = 0;
9722
9723 if (m_bTnrPreview || m_bTnrVideo) {
9724
9725 switch (type) {
9726 case CAMERA3_TEMPLATE_VIDEO_RECORD:
9727 tnr_enable = 1;
9728 break;
9729
9730 default:
9731 tnr_enable = 0;
9732 break;
9733 }
9734
9735 int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
9736 settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
9737 settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
9738
9739 LOGD("TNR:%d with process plate %d for template:%d",
9740 tnr_enable, tnr_process_type, type);
9741 }
9742
9743 //Update Link tags to default
9744 int32_t sync_type = CAM_TYPE_STANDALONE;
9745 settings.update(QCAMERA3_DUALCAM_LINK_ENABLE, &sync_type, 1);
9746
9747 int32_t is_main = 0; //this doesn't matter as app should overwrite
9748 settings.update(QCAMERA3_DUALCAM_LINK_IS_MAIN, &is_main, 1);
9749
9750 settings.update(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID, &is_main, 1);
9751
9752 /* CDS default */
9753 char prop[PROPERTY_VALUE_MAX];
9754 memset(prop, 0, sizeof(prop));
9755 property_get("persist.camera.CDS", prop, "Auto");
9756 cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
9757 cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
9758 if (CAM_CDS_MODE_MAX == cds_mode) {
9759 cds_mode = CAM_CDS_MODE_AUTO;
9760 }
9761
9762 /* Disabling CDS in templates which have TNR enabled*/
9763 if (tnr_enable)
9764 cds_mode = CAM_CDS_MODE_OFF;
9765
9766 int32_t mode = cds_mode;
9767 settings.update(QCAMERA3_CDS_MODE, &mode, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -07009768
9769 int32_t hdr_mode = (int32_t)QCAMERA3_VIDEO_HDR_MODE_OFF;
9770 settings.update(QCAMERA3_VIDEO_HDR_MODE, &hdr_mode, 1);
9771
9772 /* IR Mode Default Off */
9773 int32_t ir_mode = (int32_t)QCAMERA3_IR_MODE_OFF;
9774 settings.update(QCAMERA3_IR_MODE, &ir_mode, 1);
9775
Thierry Strudel269c81a2016-10-12 12:13:59 -07009776 /* Manual Convergence AEC Speed is disabled by default*/
9777 float default_aec_speed = 0;
9778 settings.update(QCAMERA3_AEC_CONVERGENCE_SPEED, &default_aec_speed, 1);
9779
9780 /* Manual Convergence AWB Speed is disabled by default*/
9781 float default_awb_speed = 0;
9782 settings.update(QCAMERA3_AWB_CONVERGENCE_SPEED, &default_awb_speed, 1);
9783
Thierry Strudel295a0ca2016-11-03 18:38:47 -07009784 // Set instant AEC to normal convergence by default
9785 int32_t instant_aec_mode = (int32_t)QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE;
9786 settings.update(QCAMERA3_INSTANT_AEC_MODE, &instant_aec_mode, 1);
9787
Shuzhen Wang19463d72016-03-08 11:09:52 -08009788 /* hybrid ae */
9789 settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);
9790
Thierry Strudel3d639192016-09-09 11:52:26 -07009791 mDefaultMetadata[type] = settings.release();
9792
9793 return mDefaultMetadata[type];
9794}
9795
9796/*===========================================================================
9797 * FUNCTION : setFrameParameters
9798 *
9799 * DESCRIPTION: set parameters per frame as requested in the metadata from
9800 * framework
9801 *
9802 * PARAMETERS :
9803 * @request : request that needs to be serviced
Thierry Strudelc2ee3302016-11-17 12:33:12 -08009804 * @streamsArray : Stream ID of all the requested streams
Thierry Strudel3d639192016-09-09 11:52:26 -07009805 * @blob_request: Whether this request is a blob request or not
9806 *
9807 * RETURN : success: NO_ERROR
9808 * failure:
9809 *==========================================================================*/
9810int QCamera3HardwareInterface::setFrameParameters(
9811 camera3_capture_request_t *request,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08009812 cam_stream_ID_t streamsArray,
Thierry Strudel3d639192016-09-09 11:52:26 -07009813 int blob_request,
9814 uint32_t snapshotStreamId)
9815{
9816 /*translate from camera_metadata_t type to parm_type_t*/
9817 int rc = 0;
9818 int32_t hal_version = CAM_HAL_V3;
9819
9820 clear_metadata_buffer(mParameters);
9821 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
9822 LOGE("Failed to set hal version in the parameters");
9823 return BAD_VALUE;
9824 }
9825
9826 /*we need to update the frame number in the parameters*/
9827 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
9828 request->frame_number)) {
9829 LOGE("Failed to set the frame number in the parameters");
9830 return BAD_VALUE;
9831 }
9832
9833 /* Update stream id of all the requested buffers */
Thierry Strudelc2ee3302016-11-17 12:33:12 -08009834 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07009835 LOGE("Failed to set stream type mask in the parameters");
9836 return BAD_VALUE;
9837 }
9838
9839 if (mUpdateDebugLevel) {
9840 uint32_t dummyDebugLevel = 0;
9841 /* The value of dummyDebugLevel is irrelavent. On
9842 * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, read debug property */
9843 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
9844 dummyDebugLevel)) {
9845 LOGE("Failed to set UPDATE_DEBUG_LEVEL");
9846 return BAD_VALUE;
9847 }
9848 mUpdateDebugLevel = false;
9849 }
9850
9851 if(request->settings != NULL){
9852 rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
9853 if (blob_request)
9854 memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
9855 }
9856
9857 return rc;
9858}
9859
9860/*===========================================================================
9861 * FUNCTION : setReprocParameters
9862 *
 * DESCRIPTION: Translate the framework's metadata to a HAL metadata structure, and
9864 * return it.
9865 *
9866 * PARAMETERS :
9867 * @request : request that needs to be serviced
9868 *
9869 * RETURN : success: NO_ERROR
9870 * failure:
9871 *==========================================================================*/
int32_t QCamera3HardwareInterface::setReprocParameters(
        camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
        uint32_t snapshotStreamId)
{
    /*translate from camera_metadata_t type to parm_type_t*/
    int rc = 0;

    // Reprocess requires framework settings: the EXIF/crop info below is
    // only available from there.
    if (NULL == request->settings){
        LOGE("Reprocess settings cannot be NULL");
        return BAD_VALUE;
    }

    if (NULL == reprocParam) {
        LOGE("Invalid reprocessing metadata buffer");
        return BAD_VALUE;
    }
    // Start from an empty batch; everything needed is re-populated below.
    clear_metadata_buffer(reprocParam);

    /*we need to update the frame number in the parameters*/
    if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
            request->frame_number)) {
        LOGE("Failed to set the frame number in the parameters");
        return BAD_VALUE;
    }

    // Common framework->HAL metadata translation, shared with regular requests.
    rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
    if (rc < 0) {
        LOGE("Failed to translate reproc request");
        return rc;
    }

    CameraMetadata frame_settings;
    frame_settings = request->settings;
    // Crop + ROI-map info is carried in vendor tags copied from the matching
    // output stream's result metadata.
    if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
            frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
        int32_t *crop_count =
                frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
        int32_t *crop_data =
                frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
        int32_t *roi_map =
                frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
        if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
            cam_crop_data_t crop_meta;
            memset(&crop_meta, 0, sizeof(cam_crop_data_t));
            // Only the first crop entry is forwarded; a single input stream
            // is assumed for reprocess here.
            crop_meta.num_of_streams = 1;
            crop_meta.crop_info[0].crop.left   = crop_data[0];
            crop_meta.crop_info[0].crop.top    = crop_data[1];
            crop_meta.crop_info[0].crop.width  = crop_data[2];
            crop_meta.crop_info[0].crop.height = crop_data[3];

            crop_meta.crop_info[0].roi_map.left =
                    roi_map[0];
            crop_meta.crop_info[0].roi_map.top =
                    roi_map[1];
            crop_meta.crop_info[0].roi_map.width =
                    roi_map[2];
            crop_meta.crop_info[0].roi_map.height =
                    roi_map[3];

            if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
                rc = BAD_VALUE;
            }
            LOGD("Found reprocess crop data for stream %p %dx%d, %dx%d",
                    request->input_buffer->stream,
                    crop_meta.crop_info[0].crop.left,
                    crop_meta.crop_info[0].crop.top,
                    crop_meta.crop_info[0].crop.width,
                    crop_meta.crop_info[0].crop.height);
            LOGD("Found reprocess roi map data for stream %p %dx%d, %dx%d",
                    request->input_buffer->stream,
                    crop_meta.crop_info[0].roi_map.left,
                    crop_meta.crop_info[0].roi_map.top,
                    crop_meta.crop_info[0].roi_map.width,
                    crop_meta.crop_info[0].roi_map.height);
        } else {
            LOGE("Invalid reprocess crop count %d!", *crop_count);
        }
    } else {
        LOGE("No crop data from matching output stream");
    }

    /* These settings are not needed for regular requests so handle them specially for
       reprocess requests; information needed for EXIF tags */
    if (frame_settings.exists(ANDROID_FLASH_MODE)) {
        int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
                (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
        if (NAME_NOT_FOUND != val) {
            uint32_t flashMode = (uint32_t)val;
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
                rc = BAD_VALUE;
            }
        } else {
            LOGE("Could not map fwk flash mode %d to correct hal flash mode",
                    frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
        }
    } else {
        // Missing flash mode is tolerated (logged at a lower severity).
        LOGH("No flash mode in reprocess settings");
    }

    if (frame_settings.exists(ANDROID_FLASH_STATE)) {
        int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
            rc = BAD_VALUE;
        }
    } else {
        LOGH("No flash state in reprocess settings");
    }

    // Reprocess-flags vendor tag is forwarded verbatim when present.
    if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS)) {
        uint8_t *reprocessFlags =
            frame_settings.find(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS).data.u8;
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_REPROCESS_FLAGS,
                *reprocessFlags)) {
            rc = BAD_VALUE;
        }
    }

    // Add metadata which reprocess needs
    // The data blob carries snapshot-time crop/rotation info saved by the HAL
    // in a private vendor tag; unpack each field into its own batch entry.
    // NOTE(review): return values of these ADD_SET_PARAM_ENTRY_TO_BATCH calls
    // are intentionally ignored here, unlike the checks above.
    if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB)) {
        cam_reprocess_info_t *repro_info =
                (cam_reprocess_info_t *)frame_settings.find
                (QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB).data.u8;
        ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_SENSOR,
                repro_info->sensor_crop_info);
        ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CAMIF,
                repro_info->camif_crop_info);
        ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_ISP,
                repro_info->isp_crop_info);
        ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CPP,
                repro_info->cpp_crop_info);
        ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_FOCAL_LENGTH_RATIO,
                repro_info->af_focal_length_ratio);
        ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_FLIP,
                repro_info->pipeline_flip);
        ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_ROI,
                repro_info->af_roi);
        ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_IMG_DYN_FEAT,
                repro_info->dyn_mask);
        /* If there is ANDROID_JPEG_ORIENTATION in frame setting,
           CAM_INTF_PARM_ROTATION metadata then has been added in
           translateToHalMetadata. HAL need to keep this new rotation
           metadata. Otherwise, the old rotation info saved in the vendor tag
           would be used */
        IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
                CAM_INTF_PARM_ROTATION, reprocParam) {
            LOGD("CAM_INTF_PARM_ROTATION metadata is added in translateToHalMetadata");
        } else {
            ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_ROTATION,
                    repro_info->rotation_info);
        }
    }

    /* Add additional JPEG cropping information. App add QCAMERA3_JPEG_ENCODE_CROP_RECT
       to ask for cropping and use ROI for downscale/upscale during HW JPEG encoding.
       roi.width and roi.height would be the final JPEG size.
       For now, HAL only checks this for reprocess request */
    if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ENABLE) &&
            frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_RECT)) {
        uint8_t *enable =
            frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ENABLE).data.u8;
        if (*enable == TRUE) {
            int32_t *crop_data =
                    frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_RECT).data.i32;
            cam_stream_crop_info_t crop_meta;
            memset(&crop_meta, 0, sizeof(cam_stream_crop_info_t));
            crop_meta.stream_id = 0;
            crop_meta.crop.left   = crop_data[0];
            crop_meta.crop.top    = crop_data[1];
            crop_meta.crop.width  = crop_data[2];
            crop_meta.crop.height = crop_data[3];
            // The JPEG crop roi should match cpp output size
            IF_META_AVAILABLE(cam_stream_crop_info_t, cpp_crop,
                    CAM_INTF_META_SNAP_CROP_INFO_CPP, reprocParam) {
                crop_meta.roi_map.left = 0;
                crop_meta.roi_map.top = 0;
                crop_meta.roi_map.width = cpp_crop->crop.width;
                crop_meta.roi_map.height = cpp_crop->crop.height;
            }
            ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_ENCODE_CROP,
                    crop_meta);
            LOGH("Add JPEG encode crop left %d, top %d, width %d, height %d, mCameraId %d",
                    crop_meta.crop.left, crop_meta.crop.top,
                    crop_meta.crop.width, crop_meta.crop.height, mCameraId);
            LOGH("Add JPEG encode crop ROI left %d, top %d, width %d, height %d, mCameraId %d",
                    crop_meta.roi_map.left, crop_meta.roi_map.top,
                    crop_meta.roi_map.width, crop_meta.roi_map.height, mCameraId);

            // Add JPEG scale information
            cam_dimension_t scale_dim;
            memset(&scale_dim, 0, sizeof(cam_dimension_t));
            if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ROI)) {
                int32_t *roi =
                    frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ROI).data.i32;
                // roi[2]/roi[3] carry the final JPEG width/height.
                scale_dim.width = roi[2];
                scale_dim.height = roi[3];
                ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_SCALE_DIMENSION,
                        scale_dim);
                LOGH("Add JPEG encode scale width %d, height %d, mCameraId %d",
                        scale_dim.width, scale_dim.height, mCameraId);
            }
        }
    }

    return rc;
}
10077
10078/*===========================================================================
10079 * FUNCTION : saveRequestSettings
10080 *
10081 * DESCRIPTION: Add any settings that might have changed to the request settings
10082 * and save the settings to be applied on the frame
10083 *
10084 * PARAMETERS :
10085 * @jpegMetadata : the extracted and/or modified jpeg metadata
10086 * @request : request with initial settings
10087 *
10088 * RETURN :
10089 * camera_metadata_t* : pointer to the saved request settings
10090 *==========================================================================*/
10091camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
10092 const CameraMetadata &jpegMetadata,
10093 camera3_capture_request_t *request)
10094{
10095 camera_metadata_t *resultMetadata;
10096 CameraMetadata camMetadata;
10097 camMetadata = request->settings;
10098
10099 if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
10100 int32_t thumbnail_size[2];
10101 thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
10102 thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
10103 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
10104 jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
10105 }
10106
10107 if (request->input_buffer != NULL) {
10108 uint8_t reprocessFlags = 1;
10109 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
10110 (uint8_t*)&reprocessFlags,
10111 sizeof(reprocessFlags));
10112 }
10113
10114 resultMetadata = camMetadata.release();
10115 return resultMetadata;
10116}
10117
10118/*===========================================================================
10119 * FUNCTION : setHalFpsRange
10120 *
10121 * DESCRIPTION: set FPS range parameter
10122 *
10123 *
10124 * PARAMETERS :
10125 * @settings : Metadata from framework
10126 * @hal_metadata: Metadata buffer
10127 *
10128 *
10129 * RETURN : success: NO_ERROR
10130 * failure:
10131 *==========================================================================*/
int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
        metadata_buffer_t *hal_metadata)
{
    int32_t rc = NO_ERROR;
    cam_fps_range_t fps_range;
    // NOTE(review): caller is expected to have checked that
    // ANDROID_CONTROL_AE_TARGET_FPS_RANGE exists before calling; find() on a
    // missing tag would yield an empty entry here — confirm all call sites.
    fps_range.min_fps = (float)
            settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
    fps_range.max_fps = (float)
            settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
    // By default the video fps range tracks the AE target range; it is
    // overridden below for constrained high-speed (HFR) sessions.
    fps_range.video_min_fps = fps_range.min_fps;
    fps_range.video_max_fps = fps_range.max_fps;

    LOGD("aeTargetFpsRange fps: [%f %f]",
            fps_range.min_fps, fps_range.max_fps);
    /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
     * follows:
     * ---------------------------------------------------------------|
     *      Video stream is absent in configure_streams               |
     *    (Camcorder preview before the first video record            |
     * ---------------------------------------------------------------|
     * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
     *                   |             |             | vid_min/max_fps|
     * ---------------------------------------------------------------|
     *        NO         |  [ 30, 240] |     240     |  [240, 240]    |
     *                   |-------------|-------------|----------------|
     *                   |  [240, 240] |     240     |  [240, 240]    |
     * ---------------------------------------------------------------|
     *     Video stream is present in configure_streams               |
     * ---------------------------------------------------------------|
     * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
     *                   |             |             | vid_min/max_fps|
     * ---------------------------------------------------------------|
     *        NO         |  [ 30, 240] |     240     |  [240, 240]    |
     * (camcorder prev   |-------------|-------------|----------------|
     *  after video rec  |  [240, 240] |     240     |  [240, 240]    |
     *  is stopped)      |             |             |                |
     * ---------------------------------------------------------------|
     *       YES         |  [ 30, 240] |     240     |  [240, 240]    |
     *                   |-------------|-------------|----------------|
     *                   |  [240, 240] |     240     |  [240, 240]    |
     * ---------------------------------------------------------------|
     * When Video stream is absent in configure_streams,
     * preview fps = sensor_fps / batchsize
     * Eg: for 240fps at batchSize 4, preview = 60fps
     *     for 120fps at batchSize 4, preview = 30fps
     *
     * When video stream is present in configure_streams, preview fps is as per
     * the ratio of preview buffers to video buffers requested in process
     * capture request
     */
    // Recomputed below for HFR; 0 means batching is disabled for this request.
    mBatchSize = 0;
    if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
        // In constrained high-speed mode the sensor runs at the max fps, so
        // pin both min values to video_max_fps (see table above).
        fps_range.min_fps = fps_range.video_max_fps;
        fps_range.video_min_fps = fps_range.video_max_fps;
        // Map the requested max fps to a discrete HFR mode enum.
        int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
                fps_range.max_fps);
        if (NAME_NOT_FOUND != val) {
            cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
                return BAD_VALUE;
            }

            if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
                /* If batchmode is currently in progress and the fps changes,
                 * set the flag to restart the sensor */
                if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
                        (mHFRVideoFps != fps_range.max_fps)) {
                    mNeedSensorRestart = true;
                }
                mHFRVideoFps = fps_range.max_fps;
                // Batch enough frames that preview runs at PREVIEW_FPS_FOR_HFR,
                // clamped to the hardware's maximum batch size.
                mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
                if (mBatchSize > MAX_HFR_BATCH_SIZE) {
                    mBatchSize = MAX_HFR_BATCH_SIZE;
                }
            }
            LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);

        }
    } else {
        /* HFR mode is session param in backend/ISP. This should be reset when
         * in non-HFR mode */
        cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
            return BAD_VALUE;
        }
    }
    // FPS range is batched after the HFR mode entry above.
    if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
        return BAD_VALUE;
    }
    LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
            fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
    return rc;
}
10225
10226/*===========================================================================
10227 * FUNCTION : translateToHalMetadata
10228 *
10229 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
10230 *
10231 *
10232 * PARAMETERS :
10233 * @request : request sent from framework
10234 *
10235 *
10236 * RETURN : success: NO_ERROR
10237 * failure:
10238 *==========================================================================*/
10239int QCamera3HardwareInterface::translateToHalMetadata
10240 (const camera3_capture_request_t *request,
10241 metadata_buffer_t *hal_metadata,
10242 uint32_t snapshotStreamId)
10243{
10244 int rc = 0;
10245 CameraMetadata frame_settings;
10246 frame_settings = request->settings;
10247
10248 /* Do not change the order of the following list unless you know what you are
10249 * doing.
10250 * The order is laid out in such a way that parameters in the front of the table
10251 * may be used to override the parameters later in the table. Examples are:
10252 * 1. META_MODE should precede AEC/AWB/AF MODE
10253 * 2. AEC MODE should preced EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
10254 * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
10255 * 4. Any mode should precede it's corresponding settings
10256 */
10257 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
10258 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
10259 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
10260 rc = BAD_VALUE;
10261 }
10262 rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
10263 if (rc != NO_ERROR) {
10264 LOGE("extractSceneMode failed");
10265 }
10266 }
10267
10268 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
10269 uint8_t fwk_aeMode =
10270 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
10271 uint8_t aeMode;
10272 int32_t redeye;
10273
10274 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
10275 aeMode = CAM_AE_MODE_OFF;
10276 } else {
10277 aeMode = CAM_AE_MODE_ON;
10278 }
10279 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
10280 redeye = 1;
10281 } else {
10282 redeye = 0;
10283 }
10284
10285 int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
10286 fwk_aeMode);
10287 if (NAME_NOT_FOUND != val) {
10288 int32_t flashMode = (int32_t)val;
10289 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
10290 }
10291
10292 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
10293 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
10294 rc = BAD_VALUE;
10295 }
10296 }
10297
10298 if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
10299 uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
10300 int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
10301 fwk_whiteLevel);
10302 if (NAME_NOT_FOUND != val) {
10303 uint8_t whiteLevel = (uint8_t)val;
10304 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
10305 rc = BAD_VALUE;
10306 }
10307 }
10308 }
10309
10310 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
10311 uint8_t fwk_cacMode =
10312 frame_settings.find(
10313 ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
10314 int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
10315 fwk_cacMode);
10316 if (NAME_NOT_FOUND != val) {
10317 cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
10318 bool entryAvailable = FALSE;
10319 // Check whether Frameworks set CAC mode is supported in device or not
10320 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
10321 if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
10322 entryAvailable = TRUE;
10323 break;
10324 }
10325 }
10326 LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
10327 // If entry not found then set the device supported mode instead of frameworks mode i.e,
10328 // Only HW ISP CAC + NO SW CAC : Advertise all 3 with High doing same as fast by ISP
10329 // NO HW ISP CAC + Only SW CAC : Advertise all 3 with Fast doing the same as OFF
10330 if (entryAvailable == FALSE) {
10331 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
10332 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
10333 } else {
10334 if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
10335 // High is not supported and so set the FAST as spec say's underlying
10336 // device implementation can be the same for both modes.
10337 cacMode = CAM_COLOR_CORRECTION_ABERRATION_FAST;
10338 } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
10339 // Fast is not supported and so we cannot set HIGH or FAST but choose OFF
10340 // in order to avoid the fps drop due to high quality
10341 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
10342 } else {
10343 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
10344 }
10345 }
10346 }
10347 LOGD("Final cacMode is %d", cacMode);
10348 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
10349 rc = BAD_VALUE;
10350 }
10351 } else {
10352 LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
10353 }
10354 }
10355
10356 if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
10357 uint8_t fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
10358 int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
10359 fwk_focusMode);
10360 if (NAME_NOT_FOUND != val) {
10361 uint8_t focusMode = (uint8_t)val;
10362 LOGD("set focus mode %d", focusMode);
10363 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
10364 rc = BAD_VALUE;
10365 }
10366 }
10367 }
10368
10369 if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) {
10370 float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
10371 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
10372 focalDistance)) {
10373 rc = BAD_VALUE;
10374 }
10375 }
10376
10377 if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
10378 uint8_t fwk_antibandingMode =
10379 frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
10380 int val = lookupHalName(ANTIBANDING_MODES_MAP,
10381 METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
10382 if (NAME_NOT_FOUND != val) {
10383 uint32_t hal_antibandingMode = (uint32_t)val;
10384 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
10385 hal_antibandingMode)) {
10386 rc = BAD_VALUE;
10387 }
10388 }
10389 }
10390
10391 if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
10392 int32_t expCompensation = frame_settings.find(
10393 ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
10394 if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
10395 expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
10396 if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
10397 expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
Zhijun He426c4d92016-12-16 14:27:50 -080010398 ALOGV("CAM_DEBUG: Setting compensation:%d", expCompensation);
Thierry Strudel3d639192016-09-09 11:52:26 -070010399 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
10400 expCompensation)) {
10401 rc = BAD_VALUE;
10402 }
10403 }
10404
10405 if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
10406 uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
10407 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
10408 rc = BAD_VALUE;
10409 }
10410 }
10411 if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
10412 rc = setHalFpsRange(frame_settings, hal_metadata);
10413 if (rc != NO_ERROR) {
10414 LOGE("setHalFpsRange failed");
10415 }
10416 }
10417
10418 if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
10419 uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
10420 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
10421 rc = BAD_VALUE;
10422 }
10423 }
10424
10425 if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
10426 uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
10427 int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
10428 fwk_effectMode);
10429 if (NAME_NOT_FOUND != val) {
10430 uint8_t effectMode = (uint8_t)val;
10431 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
10432 rc = BAD_VALUE;
10433 }
10434 }
10435 }
10436
10437 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
10438 uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
10439 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
10440 colorCorrectMode)) {
10441 rc = BAD_VALUE;
10442 }
10443 }
10444
10445 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
10446 cam_color_correct_gains_t colorCorrectGains;
10447 for (size_t i = 0; i < CC_GAIN_MAX; i++) {
10448 colorCorrectGains.gains[i] =
10449 frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
10450 }
10451 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
10452 colorCorrectGains)) {
10453 rc = BAD_VALUE;
10454 }
10455 }
10456
10457 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
10458 cam_color_correct_matrix_t colorCorrectTransform;
10459 cam_rational_type_t transform_elem;
10460 size_t num = 0;
10461 for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
10462 for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
10463 transform_elem.numerator =
10464 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
10465 transform_elem.denominator =
10466 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
10467 colorCorrectTransform.transform_matrix[i][j] = transform_elem;
10468 num++;
10469 }
10470 }
10471 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
10472 colorCorrectTransform)) {
10473 rc = BAD_VALUE;
10474 }
10475 }
10476
10477 cam_trigger_t aecTrigger;
10478 aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
10479 aecTrigger.trigger_id = -1;
10480 if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
10481 frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
10482 aecTrigger.trigger =
10483 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
10484 aecTrigger.trigger_id =
10485 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
10486 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
10487 aecTrigger)) {
10488 rc = BAD_VALUE;
10489 }
10490 LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
10491 aecTrigger.trigger, aecTrigger.trigger_id);
10492 }
10493
10494 /*af_trigger must come with a trigger id*/
10495 if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
10496 frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
10497 cam_trigger_t af_trigger;
10498 af_trigger.trigger =
10499 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
10500 af_trigger.trigger_id =
10501 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
10502 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
10503 rc = BAD_VALUE;
10504 }
10505 LOGD("AfTrigger: %d AfTriggerID: %d",
10506 af_trigger.trigger, af_trigger.trigger_id);
10507 }
10508
10509 if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
10510 int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
10511 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
10512 rc = BAD_VALUE;
10513 }
10514 }
10515 if (frame_settings.exists(ANDROID_EDGE_MODE)) {
10516 cam_edge_application_t edge_application;
10517 edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
10518 if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
10519 edge_application.sharpness = 0;
10520 } else {
10521 edge_application.sharpness = gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
10522 }
10523 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
10524 rc = BAD_VALUE;
10525 }
10526 }
10527
10528 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
10529 int32_t respectFlashMode = 1;
10530 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
10531 uint8_t fwk_aeMode =
10532 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
10533 if (fwk_aeMode > ANDROID_CONTROL_AE_MODE_ON) {
10534 respectFlashMode = 0;
10535 LOGH("AE Mode controls flash, ignore android.flash.mode");
10536 }
10537 }
10538 if (respectFlashMode) {
10539 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
10540 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
10541 LOGH("flash mode after mapping %d", val);
10542 // To check: CAM_INTF_META_FLASH_MODE usage
10543 if (NAME_NOT_FOUND != val) {
10544 uint8_t flashMode = (uint8_t)val;
10545 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
10546 rc = BAD_VALUE;
10547 }
10548 }
10549 }
10550 }
10551
10552 if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
10553 uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
10554 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
10555 rc = BAD_VALUE;
10556 }
10557 }
10558
10559 if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
10560 int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
10561 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
10562 flashFiringTime)) {
10563 rc = BAD_VALUE;
10564 }
10565 }
10566
10567 if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
10568 uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
10569 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
10570 hotPixelMode)) {
10571 rc = BAD_VALUE;
10572 }
10573 }
10574
10575 if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
10576 float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
10577 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
10578 lensAperture)) {
10579 rc = BAD_VALUE;
10580 }
10581 }
10582
10583 if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
10584 float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
10585 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
10586 filterDensity)) {
10587 rc = BAD_VALUE;
10588 }
10589 }
10590
10591 if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
10592 float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
10593 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
10594 focalLength)) {
10595 rc = BAD_VALUE;
10596 }
10597 }
10598
10599 if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
10600 uint8_t optStabMode =
10601 frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
10602 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
10603 optStabMode)) {
10604 rc = BAD_VALUE;
10605 }
10606 }
10607
10608 if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
10609 uint8_t videoStabMode =
10610 frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
10611 LOGD("videoStabMode from APP = %d", videoStabMode);
10612 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_VIDEO_STAB_MODE,
10613 videoStabMode)) {
10614 rc = BAD_VALUE;
10615 }
10616 }
10617
10618
10619 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
10620 uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
10621 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
10622 noiseRedMode)) {
10623 rc = BAD_VALUE;
10624 }
10625 }
10626
10627 if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
10628 float reprocessEffectiveExposureFactor =
10629 frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
10630 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
10631 reprocessEffectiveExposureFactor)) {
10632 rc = BAD_VALUE;
10633 }
10634 }
10635
10636 cam_crop_region_t scalerCropRegion;
10637 bool scalerCropSet = false;
10638 if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
10639 scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
10640 scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
10641 scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
10642 scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
10643
10644 // Map coordinate system from active array to sensor output.
10645 mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
10646 scalerCropRegion.width, scalerCropRegion.height);
10647
10648 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
10649 scalerCropRegion)) {
10650 rc = BAD_VALUE;
10651 }
10652 scalerCropSet = true;
10653 }
10654
10655 if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
10656 int64_t sensorExpTime =
10657 frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
10658 LOGD("setting sensorExpTime %lld", sensorExpTime);
10659 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
10660 sensorExpTime)) {
10661 rc = BAD_VALUE;
10662 }
10663 }
10664
10665 if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
10666 int64_t sensorFrameDuration =
10667 frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
10668 int64_t minFrameDuration = getMinFrameDuration(request);
10669 sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
10670 if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
10671 sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
10672 LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
10673 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
10674 sensorFrameDuration)) {
10675 rc = BAD_VALUE;
10676 }
10677 }
10678
10679 if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
10680 int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
10681 if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
10682 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
10683 if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
10684 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
10685 LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
10686 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
10687 sensorSensitivity)) {
10688 rc = BAD_VALUE;
10689 }
10690 }
10691
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010692#ifndef USE_HAL_3_3
10693 if (frame_settings.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
10694 int32_t ispSensitivity =
10695 frame_settings.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST).data.i32[0];
10696 if (ispSensitivity <
10697 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity) {
10698 ispSensitivity =
10699 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
10700 LOGD("clamp ispSensitivity to %d", ispSensitivity);
10701 }
10702 if (ispSensitivity >
10703 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity) {
10704 ispSensitivity =
10705 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity;
10706 LOGD("clamp ispSensitivity to %d", ispSensitivity);
10707 }
10708 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_ISP_SENSITIVITY,
10709 ispSensitivity)) {
10710 rc = BAD_VALUE;
10711 }
10712 }
10713#endif
10714
Thierry Strudel3d639192016-09-09 11:52:26 -070010715 if (frame_settings.exists(ANDROID_SHADING_MODE)) {
10716 uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
10717 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
10718 rc = BAD_VALUE;
10719 }
10720 }
10721
10722 if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
10723 uint8_t fwk_facedetectMode =
10724 frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
10725
10726 int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
10727 fwk_facedetectMode);
10728
10729 if (NAME_NOT_FOUND != val) {
10730 uint8_t facedetectMode = (uint8_t)val;
10731 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
10732 facedetectMode)) {
10733 rc = BAD_VALUE;
10734 }
10735 }
10736 }
10737
10738 if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) {
10739 uint8_t histogramMode =
10740 frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0];
10741 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
10742 histogramMode)) {
10743 rc = BAD_VALUE;
10744 }
10745 }
10746
10747 if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
10748 uint8_t sharpnessMapMode =
10749 frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
10750 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
10751 sharpnessMapMode)) {
10752 rc = BAD_VALUE;
10753 }
10754 }
10755
10756 if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
10757 uint8_t tonemapMode =
10758 frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
10759 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
10760 rc = BAD_VALUE;
10761 }
10762 }
10763 /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
10764 /*All tonemap channels will have the same number of points*/
10765 if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
10766 frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
10767 frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
10768 cam_rgb_tonemap_curves tonemapCurves;
10769 tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
10770 if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
10771 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
10772 tonemapCurves.tonemap_points_cnt,
10773 CAM_MAX_TONEMAP_CURVE_SIZE);
10774 tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
10775 }
10776
10777 /* ch0 = G*/
10778 size_t point = 0;
10779 cam_tonemap_curve_t tonemapCurveGreen;
10780 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
10781 for (size_t j = 0; j < 2; j++) {
10782 tonemapCurveGreen.tonemap_points[i][j] =
10783 frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
10784 point++;
10785 }
10786 }
10787 tonemapCurves.curves[0] = tonemapCurveGreen;
10788
10789 /* ch 1 = B */
10790 point = 0;
10791 cam_tonemap_curve_t tonemapCurveBlue;
10792 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
10793 for (size_t j = 0; j < 2; j++) {
10794 tonemapCurveBlue.tonemap_points[i][j] =
10795 frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
10796 point++;
10797 }
10798 }
10799 tonemapCurves.curves[1] = tonemapCurveBlue;
10800
10801 /* ch 2 = R */
10802 point = 0;
10803 cam_tonemap_curve_t tonemapCurveRed;
10804 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
10805 for (size_t j = 0; j < 2; j++) {
10806 tonemapCurveRed.tonemap_points[i][j] =
10807 frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
10808 point++;
10809 }
10810 }
10811 tonemapCurves.curves[2] = tonemapCurveRed;
10812
10813 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
10814 tonemapCurves)) {
10815 rc = BAD_VALUE;
10816 }
10817 }
10818
10819 if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
10820 uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
10821 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
10822 captureIntent)) {
10823 rc = BAD_VALUE;
10824 }
10825 }
10826
10827 if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
10828 uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
10829 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
10830 blackLevelLock)) {
10831 rc = BAD_VALUE;
10832 }
10833 }
10834
10835 if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
10836 uint8_t lensShadingMapMode =
10837 frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
10838 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
10839 lensShadingMapMode)) {
10840 rc = BAD_VALUE;
10841 }
10842 }
10843
10844 if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
10845 cam_area_t roi;
10846 bool reset = true;
10847 convertFromRegions(roi, request->settings, ANDROID_CONTROL_AE_REGIONS);
10848
10849 // Map coordinate system from active array to sensor output.
10850 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
10851 roi.rect.height);
10852
10853 if (scalerCropSet) {
10854 reset = resetIfNeededROI(&roi, &scalerCropRegion);
10855 }
10856 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
10857 rc = BAD_VALUE;
10858 }
10859 }
10860
10861 if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
10862 cam_area_t roi;
10863 bool reset = true;
10864 convertFromRegions(roi, request->settings, ANDROID_CONTROL_AF_REGIONS);
10865
10866 // Map coordinate system from active array to sensor output.
10867 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
10868 roi.rect.height);
10869
10870 if (scalerCropSet) {
10871 reset = resetIfNeededROI(&roi, &scalerCropRegion);
10872 }
10873 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
10874 rc = BAD_VALUE;
10875 }
10876 }
10877
10878 // CDS for non-HFR non-video mode
10879 if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
10880 !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
10881 int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
10882 if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
10883 LOGE("Invalid CDS mode %d!", *fwk_cds);
10884 } else {
10885 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
10886 CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
10887 rc = BAD_VALUE;
10888 }
10889 }
10890 }
10891
Thierry Strudel04e026f2016-10-10 11:27:36 -070010892 // Video HDR
10893 if (frame_settings.exists(QCAMERA3_VIDEO_HDR_MODE)) {
10894 cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
10895 frame_settings.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
10896 rc = setVideoHdrMode(mParameters, vhdr);
10897 if (rc != NO_ERROR) {
10898 LOGE("setVideoHDR is failed");
10899 }
10900 }
10901
10902 //IR
10903 if(frame_settings.exists(QCAMERA3_IR_MODE)) {
10904 cam_ir_mode_type_t fwk_ir = (cam_ir_mode_type_t)
10905 frame_settings.find(QCAMERA3_IR_MODE).data.i32[0];
10906 if ((CAM_IR_MODE_MAX <= fwk_ir) || (0 > fwk_ir)) {
10907 LOGE("Invalid IR mode %d!", fwk_ir);
10908 } else {
10909 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
10910 CAM_INTF_META_IR_MODE, fwk_ir)) {
10911 rc = BAD_VALUE;
10912 }
10913 }
10914 }
10915
Thierry Strudel269c81a2016-10-12 12:13:59 -070010916 if (frame_settings.exists(QCAMERA3_AEC_CONVERGENCE_SPEED)) {
10917 float aec_speed;
10918 aec_speed = frame_settings.find(QCAMERA3_AEC_CONVERGENCE_SPEED).data.f[0];
10919 LOGD("AEC Speed :%f", aec_speed);
10920 if ( aec_speed < 0 ) {
10921 LOGE("Invalid AEC mode %f!", aec_speed);
10922 } else {
10923 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_CONVERGENCE_SPEED,
10924 aec_speed)) {
10925 rc = BAD_VALUE;
10926 }
10927 }
10928 }
10929
10930 if (frame_settings.exists(QCAMERA3_AWB_CONVERGENCE_SPEED)) {
10931 float awb_speed;
10932 awb_speed = frame_settings.find(QCAMERA3_AWB_CONVERGENCE_SPEED).data.f[0];
10933 LOGD("AWB Speed :%f", awb_speed);
10934 if ( awb_speed < 0 ) {
10935 LOGE("Invalid AWB mode %f!", awb_speed);
10936 } else {
10937 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AWB_CONVERGENCE_SPEED,
10938 awb_speed)) {
10939 rc = BAD_VALUE;
10940 }
10941 }
10942 }
10943
Thierry Strudel3d639192016-09-09 11:52:26 -070010944 // TNR
10945 if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
10946 frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
10947 uint8_t b_TnrRequested = 0;
10948 cam_denoise_param_t tnr;
10949 tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
10950 tnr.process_plates =
10951 (cam_denoise_process_type_t)frame_settings.find(
10952 QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
10953 b_TnrRequested = tnr.denoise_enable;
10954 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
10955 rc = BAD_VALUE;
10956 }
10957 }
10958
Thierry Strudel295a0ca2016-11-03 18:38:47 -070010959 if (frame_settings.exists(QCAMERA3_EXPOSURE_METERING_MODE)) {
10960 int32_t* exposure_metering_mode =
10961 frame_settings.find(QCAMERA3_EXPOSURE_METERING_MODE).data.i32;
10962 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_ALGO_TYPE,
10963 *exposure_metering_mode)) {
10964 rc = BAD_VALUE;
10965 }
10966 }
10967
Thierry Strudel3d639192016-09-09 11:52:26 -070010968 if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
10969 int32_t fwk_testPatternMode =
10970 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
10971 int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
10972 METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
10973
10974 if (NAME_NOT_FOUND != testPatternMode) {
10975 cam_test_pattern_data_t testPatternData;
10976 memset(&testPatternData, 0, sizeof(testPatternData));
10977 testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
10978 if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
10979 frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
10980 int32_t *fwk_testPatternData =
10981 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
10982 testPatternData.r = fwk_testPatternData[0];
10983 testPatternData.b = fwk_testPatternData[3];
10984 switch (gCamCapability[mCameraId]->color_arrangement) {
10985 case CAM_FILTER_ARRANGEMENT_RGGB:
10986 case CAM_FILTER_ARRANGEMENT_GRBG:
10987 testPatternData.gr = fwk_testPatternData[1];
10988 testPatternData.gb = fwk_testPatternData[2];
10989 break;
10990 case CAM_FILTER_ARRANGEMENT_GBRG:
10991 case CAM_FILTER_ARRANGEMENT_BGGR:
10992 testPatternData.gr = fwk_testPatternData[2];
10993 testPatternData.gb = fwk_testPatternData[1];
10994 break;
10995 default:
10996 LOGE("color arrangement %d is not supported",
10997 gCamCapability[mCameraId]->color_arrangement);
10998 break;
10999 }
11000 }
11001 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
11002 testPatternData)) {
11003 rc = BAD_VALUE;
11004 }
11005 } else {
11006 LOGE("Invalid framework sensor test pattern mode %d",
11007 fwk_testPatternMode);
11008 }
11009 }
11010
11011 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
11012 size_t count = 0;
11013 camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
11014 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
11015 gps_coords.data.d, gps_coords.count, count);
11016 if (gps_coords.count != count) {
11017 rc = BAD_VALUE;
11018 }
11019 }
11020
11021 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
11022 char gps_methods[GPS_PROCESSING_METHOD_SIZE];
11023 size_t count = 0;
11024 const char *gps_methods_src = (const char *)
11025 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
11026 memset(gps_methods, '\0', sizeof(gps_methods));
11027 strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
11028 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
11029 gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
11030 if (GPS_PROCESSING_METHOD_SIZE != count) {
11031 rc = BAD_VALUE;
11032 }
11033 }
11034
11035 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
11036 int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
11037 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
11038 gps_timestamp)) {
11039 rc = BAD_VALUE;
11040 }
11041 }
11042
11043 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
11044 int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
11045 cam_rotation_info_t rotation_info;
11046 if (orientation == 0) {
11047 rotation_info.rotation = ROTATE_0;
11048 } else if (orientation == 90) {
11049 rotation_info.rotation = ROTATE_90;
11050 } else if (orientation == 180) {
11051 rotation_info.rotation = ROTATE_180;
11052 } else if (orientation == 270) {
11053 rotation_info.rotation = ROTATE_270;
11054 }
11055 rotation_info.streamId = snapshotStreamId;
11056 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
11057 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
11058 rc = BAD_VALUE;
11059 }
11060 }
11061
11062 if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
11063 uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
11064 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
11065 rc = BAD_VALUE;
11066 }
11067 }
11068
11069 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
11070 uint32_t thumb_quality = (uint32_t)
11071 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
11072 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
11073 thumb_quality)) {
11074 rc = BAD_VALUE;
11075 }
11076 }
11077
11078 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
11079 cam_dimension_t dim;
11080 dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
11081 dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
11082 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
11083 rc = BAD_VALUE;
11084 }
11085 }
11086
11087 // Internal metadata
11088 if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
11089 size_t count = 0;
11090 camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
11091 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
11092 privatedata.data.i32, privatedata.count, count);
11093 if (privatedata.count != count) {
11094 rc = BAD_VALUE;
11095 }
11096 }
11097
Thierry Strudel295a0ca2016-11-03 18:38:47 -070011098 // ISO/Exposure Priority
11099 if (frame_settings.exists(QCAMERA3_USE_ISO_EXP_PRIORITY) &&
11100 frame_settings.exists(QCAMERA3_SELECT_PRIORITY)) {
11101 cam_priority_mode_t mode =
11102 (cam_priority_mode_t)frame_settings.find(QCAMERA3_SELECT_PRIORITY).data.i32[0];
11103 if((CAM_ISO_PRIORITY == mode) || (CAM_EXP_PRIORITY == mode)) {
11104 cam_intf_parm_manual_3a_t use_iso_exp_pty;
11105 use_iso_exp_pty.previewOnly = FALSE;
11106 uint64_t* ptr = (uint64_t*)frame_settings.find(QCAMERA3_USE_ISO_EXP_PRIORITY).data.i64;
11107 use_iso_exp_pty.value = *ptr;
11108
11109 if(CAM_ISO_PRIORITY == mode) {
11110 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ISO,
11111 use_iso_exp_pty)) {
11112 rc = BAD_VALUE;
11113 }
11114 }
11115 else {
11116 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_TIME,
11117 use_iso_exp_pty)) {
11118 rc = BAD_VALUE;
11119 }
11120 }
11121 }
11122 }
11123
11124 // Saturation
11125 if (frame_settings.exists(QCAMERA3_USE_SATURATION)) {
11126 int32_t* use_saturation =
11127 frame_settings.find(QCAMERA3_USE_SATURATION).data.i32;
11128 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_SATURATION, *use_saturation)) {
11129 rc = BAD_VALUE;
11130 }
11131 }
11132
Thierry Strudel3d639192016-09-09 11:52:26 -070011133 // EV step
11134 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
11135 gCamCapability[mCameraId]->exp_compensation_step)) {
11136 rc = BAD_VALUE;
11137 }
11138
11139 // CDS info
11140 if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
11141 cam_cds_data_t *cdsData = (cam_cds_data_t *)
11142 frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
11143
11144 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11145 CAM_INTF_META_CDS_DATA, *cdsData)) {
11146 rc = BAD_VALUE;
11147 }
11148 }
11149
Shuzhen Wang19463d72016-03-08 11:09:52 -080011150 // Hybrid AE
11151 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
11152 uint8_t *hybrid_ae = (uint8_t *)
11153 frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;
11154
11155 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11156 CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
11157 rc = BAD_VALUE;
11158 }
11159 }
11160
Thierry Strudel3d639192016-09-09 11:52:26 -070011161 return rc;
11162}
11163
11164/*===========================================================================
11165 * FUNCTION : captureResultCb
11166 *
11167 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
11168 *
11169 * PARAMETERS :
11170 * @frame : frame information from mm-camera-interface
11171 * @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
11172 * @userdata: userdata
11173 *
11174 * RETURN : NONE
11175 *==========================================================================*/
11176void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
11177 camera3_stream_buffer_t *buffer,
11178 uint32_t frame_number, bool isInputBuffer, void *userdata)
11179{
11180 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
11181 if (hw == NULL) {
11182 LOGE("Invalid hw %p", hw);
11183 return;
11184 }
11185
11186 hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
11187 return;
11188}
11189
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011190/*===========================================================================
11191 * FUNCTION : setBufferErrorStatus
11192 *
11193 * DESCRIPTION: Callback handler for channels to report any buffer errors
11194 *
11195 * PARAMETERS :
11196 * @ch : Channel on which buffer error is reported from
11197 * @frame_number : frame number on which buffer error is reported on
11198 * @buffer_status : buffer error status
11199 * @userdata: userdata
11200 *
11201 * RETURN : NONE
11202 *==========================================================================*/
11203void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
11204 uint32_t frame_number, camera3_buffer_status_t err, void *userdata)
11205{
11206 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
11207 if (hw == NULL) {
11208 LOGE("Invalid hw %p", hw);
11209 return;
11210 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011211
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011212 hw->setBufferErrorStatus(ch, frame_number, err);
11213 return;
11214}
11215
11216void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
11217 uint32_t frameNumber, camera3_buffer_status_t err)
11218{
11219 LOGD("channel: %p, frame# %d, buf err: %d", ch, frameNumber, err);
11220 pthread_mutex_lock(&mMutex);
11221
11222 for (auto& req : mPendingBuffersMap.mPendingBuffersInRequest) {
11223 if (req.frame_number != frameNumber)
11224 continue;
11225 for (auto& k : req.mPendingBufferList) {
11226 if(k.stream->priv == ch) {
11227 k.bufStatus = CAMERA3_BUFFER_STATUS_ERROR;
11228 }
11229 }
11230 }
11231
11232 pthread_mutex_unlock(&mMutex);
11233 return;
11234}
Thierry Strudel3d639192016-09-09 11:52:26 -070011235/*===========================================================================
11236 * FUNCTION : initialize
11237 *
11238 * DESCRIPTION: Pass framework callback pointers to HAL
11239 *
11240 * PARAMETERS :
11241 *
11242 *
11243 * RETURN : Success : 0
11244 * Failure: -ENODEV
11245 *==========================================================================*/
11246
11247int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
11248 const camera3_callback_ops_t *callback_ops)
11249{
11250 LOGD("E");
11251 QCamera3HardwareInterface *hw =
11252 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
11253 if (!hw) {
11254 LOGE("NULL camera device");
11255 return -ENODEV;
11256 }
11257
11258 int rc = hw->initialize(callback_ops);
11259 LOGD("X");
11260 return rc;
11261}
11262
11263/*===========================================================================
11264 * FUNCTION : configure_streams
11265 *
11266 * DESCRIPTION:
11267 *
11268 * PARAMETERS :
11269 *
11270 *
11271 * RETURN : Success: 0
11272 * Failure: -EINVAL (if stream configuration is invalid)
11273 * -ENODEV (fatal error)
11274 *==========================================================================*/
11275
11276int QCamera3HardwareInterface::configure_streams(
11277 const struct camera3_device *device,
11278 camera3_stream_configuration_t *stream_list)
11279{
11280 LOGD("E");
11281 QCamera3HardwareInterface *hw =
11282 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
11283 if (!hw) {
11284 LOGE("NULL camera device");
11285 return -ENODEV;
11286 }
11287 int rc = hw->configureStreams(stream_list);
11288 LOGD("X");
11289 return rc;
11290}
11291
11292/*===========================================================================
11293 * FUNCTION : construct_default_request_settings
11294 *
11295 * DESCRIPTION: Configure a settings buffer to meet the required use case
11296 *
11297 * PARAMETERS :
11298 *
11299 *
11300 * RETURN : Success: Return valid metadata
11301 * Failure: Return NULL
11302 *==========================================================================*/
11303const camera_metadata_t* QCamera3HardwareInterface::
11304 construct_default_request_settings(const struct camera3_device *device,
11305 int type)
11306{
11307
11308 LOGD("E");
11309 camera_metadata_t* fwk_metadata = NULL;
11310 QCamera3HardwareInterface *hw =
11311 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
11312 if (!hw) {
11313 LOGE("NULL camera device");
11314 return NULL;
11315 }
11316
11317 fwk_metadata = hw->translateCapabilityToMetadata(type);
11318
11319 LOGD("X");
11320 return fwk_metadata;
11321}
11322
11323/*===========================================================================
11324 * FUNCTION : process_capture_request
11325 *
11326 * DESCRIPTION:
11327 *
11328 * PARAMETERS :
11329 *
11330 *
11331 * RETURN :
11332 *==========================================================================*/
11333int QCamera3HardwareInterface::process_capture_request(
11334 const struct camera3_device *device,
11335 camera3_capture_request_t *request)
11336{
11337 LOGD("E");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011338 CAMSCOPE_UPDATE_FLAGS(CAMSCOPE_SECTION_HAL, kpi_camscope_flags);
Thierry Strudel3d639192016-09-09 11:52:26 -070011339 QCamera3HardwareInterface *hw =
11340 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
11341 if (!hw) {
11342 LOGE("NULL camera device");
11343 return -EINVAL;
11344 }
11345
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011346 int rc = hw->orchestrateRequest(request);
Thierry Strudel3d639192016-09-09 11:52:26 -070011347 LOGD("X");
11348 return rc;
11349}
11350
11351/*===========================================================================
11352 * FUNCTION : dump
11353 *
11354 * DESCRIPTION:
11355 *
11356 * PARAMETERS :
11357 *
11358 *
11359 * RETURN :
11360 *==========================================================================*/
11361
11362void QCamera3HardwareInterface::dump(
11363 const struct camera3_device *device, int fd)
11364{
11365 /* Log level property is read when "adb shell dumpsys media.camera" is
11366 called so that the log level can be controlled without restarting
11367 the media server */
11368 getLogLevel();
11369
11370 LOGD("E");
11371 QCamera3HardwareInterface *hw =
11372 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
11373 if (!hw) {
11374 LOGE("NULL camera device");
11375 return;
11376 }
11377
11378 hw->dump(fd);
11379 LOGD("X");
11380 return;
11381}
11382
11383/*===========================================================================
11384 * FUNCTION : flush
11385 *
11386 * DESCRIPTION:
11387 *
11388 * PARAMETERS :
11389 *
11390 *
11391 * RETURN :
11392 *==========================================================================*/
11393
11394int QCamera3HardwareInterface::flush(
11395 const struct camera3_device *device)
11396{
11397 int rc;
11398 LOGD("E");
11399 QCamera3HardwareInterface *hw =
11400 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
11401 if (!hw) {
11402 LOGE("NULL camera device");
11403 return -EINVAL;
11404 }
11405
11406 pthread_mutex_lock(&hw->mMutex);
11407 // Validate current state
11408 switch (hw->mState) {
11409 case STARTED:
11410 /* valid state */
11411 break;
11412
11413 case ERROR:
11414 pthread_mutex_unlock(&hw->mMutex);
11415 hw->handleCameraDeviceError();
11416 return -ENODEV;
11417
11418 default:
11419 LOGI("Flush returned during state %d", hw->mState);
11420 pthread_mutex_unlock(&hw->mMutex);
11421 return 0;
11422 }
11423 pthread_mutex_unlock(&hw->mMutex);
11424
11425 rc = hw->flush(true /* restart channels */ );
11426 LOGD("X");
11427 return rc;
11428}
11429
11430/*===========================================================================
11431 * FUNCTION : close_camera_device
11432 *
11433 * DESCRIPTION:
11434 *
11435 * PARAMETERS :
11436 *
11437 *
11438 * RETURN :
11439 *==========================================================================*/
11440int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
11441{
11442 int ret = NO_ERROR;
11443 QCamera3HardwareInterface *hw =
11444 reinterpret_cast<QCamera3HardwareInterface *>(
11445 reinterpret_cast<camera3_device_t *>(device)->priv);
11446 if (!hw) {
11447 LOGE("NULL camera device");
11448 return BAD_VALUE;
11449 }
11450
11451 LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
11452 delete hw;
11453 LOGI("[KPI Perf]: X");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011454 CAMSCOPE_DESTROY(CAMSCOPE_SECTION_HAL);
Thierry Strudel3d639192016-09-09 11:52:26 -070011455 return ret;
11456}
11457
11458/*===========================================================================
11459 * FUNCTION : getWaveletDenoiseProcessPlate
11460 *
11461 * DESCRIPTION: query wavelet denoise process plate
11462 *
11463 * PARAMETERS : None
11464 *
11465 * RETURN : WNR prcocess plate value
11466 *==========================================================================*/
11467cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
11468{
11469 char prop[PROPERTY_VALUE_MAX];
11470 memset(prop, 0, sizeof(prop));
11471 property_get("persist.denoise.process.plates", prop, "0");
11472 int processPlate = atoi(prop);
11473 switch(processPlate) {
11474 case 0:
11475 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
11476 case 1:
11477 return CAM_WAVELET_DENOISE_CBCR_ONLY;
11478 case 2:
11479 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
11480 case 3:
11481 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
11482 default:
11483 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
11484 }
11485}
11486
11487
11488/*===========================================================================
11489 * FUNCTION : getTemporalDenoiseProcessPlate
11490 *
11491 * DESCRIPTION: query temporal denoise process plate
11492 *
11493 * PARAMETERS : None
11494 *
11495 * RETURN : TNR prcocess plate value
11496 *==========================================================================*/
11497cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
11498{
11499 char prop[PROPERTY_VALUE_MAX];
11500 memset(prop, 0, sizeof(prop));
11501 property_get("persist.tnr.process.plates", prop, "0");
11502 int processPlate = atoi(prop);
11503 switch(processPlate) {
11504 case 0:
11505 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
11506 case 1:
11507 return CAM_WAVELET_DENOISE_CBCR_ONLY;
11508 case 2:
11509 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
11510 case 3:
11511 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
11512 default:
11513 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
11514 }
11515}
11516
11517
11518/*===========================================================================
11519 * FUNCTION : extractSceneMode
11520 *
11521 * DESCRIPTION: Extract scene mode from frameworks set metadata
11522 *
11523 * PARAMETERS :
11524 * @frame_settings: CameraMetadata reference
11525 * @metaMode: ANDROID_CONTORL_MODE
11526 * @hal_metadata: hal metadata structure
11527 *
11528 * RETURN : None
11529 *==========================================================================*/
int32_t QCamera3HardwareInterface::extractSceneMode(
        const CameraMetadata &frame_settings, uint8_t metaMode,
        metadata_buffer_t *hal_metadata)
{
    int32_t rc = NO_ERROR;

    if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
        // Scene mode is only honored when the framework explicitly selected
        // USE_SCENE_MODE; absence of the tag leaves the batch unchanged.
        camera_metadata_ro_entry entry =
                frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
        if (0 == entry.count)
            return rc;

        uint8_t fwk_sceneMode = entry.data.u8[0];

        // Translate the framework enum to the HAL bestshot enum.
        int val = lookupHalName(SCENE_MODES_MAP,
                sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
                fwk_sceneMode);
        if (NAME_NOT_FOUND != val) {
            uint8_t sceneMode = (uint8_t)val;
            LOGD("sceneMode: %d", sceneMode);
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
                    CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
                rc = BAD_VALUE;
            }
        }

        // HDR scene mode additionally enables multi-frame HAL bracketing.
        if (fwk_sceneMode == ANDROID_CONTROL_SCENE_MODE_HDR) {
            cam_hdr_param_t hdr_params;
            hdr_params.hdr_enable = 1;
            hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
            hdr_params.hdr_need_1x = false;
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
                    CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
                rc = BAD_VALUE;
            }
        }
    } else if ((ANDROID_CONTROL_MODE_OFF == metaMode) ||
            (ANDROID_CONTROL_MODE_AUTO == metaMode)) {
        // OFF/AUTO control modes force the scene mode off in the HAL.
        uint8_t sceneMode = CAM_SCENE_MODE_OFF;
        LOGD("sceneMode: %d", sceneMode);
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
                CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
            rc = BAD_VALUE;
        }
    }

    // Forced HDR snapshot (mForceHdrSnapshot) enables bracketing regardless
    // of the control mode handled above.
    if (mForceHdrSnapshot) {
        cam_hdr_param_t hdr_params;
        hdr_params.hdr_enable = 1;
        hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
        hdr_params.hdr_need_1x = false;
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
                CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
            rc = BAD_VALUE;
        }
    }

    return rc;
}
11589
11590/*===========================================================================
Thierry Strudel04e026f2016-10-10 11:27:36 -070011591 * FUNCTION : setVideoHdrMode
11592 *
11593 * DESCRIPTION: Set Video HDR mode from frameworks set metadata
11594 *
11595 * PARAMETERS :
11596 * @hal_metadata: hal metadata structure
11597 * @metaMode: QCAMERA3_VIDEO_HDR_MODE
11598 *
11599 * RETURN : None
11600 *==========================================================================*/
int32_t QCamera3HardwareInterface::setVideoHdrMode(
        metadata_buffer_t *hal_metadata, cam_video_hdr_mode_t vhdr)
{
    int32_t rc = NO_ERROR;
    // Reject out-of-range modes up front.
    if ((CAM_VIDEO_HDR_MODE_MAX <= (vhdr)) || (0 > (vhdr))) {
        LOGE("%s: Invalid Video HDR mode %d!", __func__, vhdr);
        rc = BAD_VALUE;
    } else {
        cam_sensor_hdr_type_t vhdr_type = CAM_SENSOR_HDR_MAX;
        if(vhdr == QCAMERA3_VIDEO_HDR_MODE_OFF) {
            LOGD("Setting HDR mode Off");
            vhdr_type = CAM_SENSOR_HDR_OFF;
        } else {
            // HDR requested: the concrete sensor HDR flavor comes from a
            // debug property (default "3"), but is only honored when the
            // capability mask says the hardware supports that flavor.
            char video_hdr_prop[PROPERTY_VALUE_MAX];
            memset(video_hdr_prop, 0, sizeof(video_hdr_prop));
            property_get("persist.camera.hdr.video", video_hdr_prop, "3");
            uint8_t use_hdr_video = (uint8_t)atoi(video_hdr_prop);
            if ((gCamCapability[mCameraId]->qcom_supported_feature_mask &
                    CAM_QCOM_FEATURE_SENSOR_HDR) &&
                    (use_hdr_video == CAM_SENSOR_HDR_IN_SENSOR)) {
                LOGD("Setting HDR mode In Sensor");
                vhdr_type = CAM_SENSOR_HDR_IN_SENSOR;
            }
            if ((gCamCapability[mCameraId]->qcom_supported_feature_mask &
                    CAM_QCOM_FEATURE_ZIGZAG_VIDEO_HDR) &&
                    (use_hdr_video == CAM_SENSOR_HDR_ZIGZAG)) {
                LOGD("Setting HDR mode Zigzag");
                vhdr_type = CAM_SENSOR_HDR_ZIGZAG;
            }
            if ((gCamCapability[mCameraId]->qcom_supported_feature_mask &
                    CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) &&
                    (use_hdr_video == CAM_SENSOR_HDR_STAGGERED)) {
                LOGD("Setting HDR mode Staggered");
                vhdr_type = CAM_SENSOR_HDR_STAGGERED;
            }
            // No supported flavor matched the requested one.
            if(vhdr_type == CAM_SENSOR_HDR_MAX) {
                LOGD("HDR mode not supported");
                rc = BAD_VALUE;
            }
        }
        // Commit the resolved sensor HDR type only on success.
        if(rc == NO_ERROR) {
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
                    CAM_INTF_PARM_SENSOR_HDR, vhdr_type)) {
                rc = BAD_VALUE;
            }
        }
    }
    return rc;
}
11650
11651/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070011652 * FUNCTION : needRotationReprocess
11653 *
11654 * DESCRIPTION: if rotation needs to be done by reprocess in pp
11655 *
11656 * PARAMETERS : none
11657 *
11658 * RETURN : true: needed
11659 * false: no need
11660 *==========================================================================*/
11661bool QCamera3HardwareInterface::needRotationReprocess()
11662{
11663 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
11664 // current rotation is not zero, and pp has the capability to process rotation
11665 LOGH("need do reprocess for rotation");
11666 return true;
11667 }
11668
11669 return false;
11670}
11671
11672/*===========================================================================
11673 * FUNCTION : needReprocess
11674 *
11675 * DESCRIPTION: if reprocess in needed
11676 *
11677 * PARAMETERS : none
11678 *
11679 * RETURN : true: needed
11680 * false: no need
11681 *==========================================================================*/
11682bool QCamera3HardwareInterface::needReprocess(cam_feature_mask_t postprocess_mask)
11683{
11684 if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
11685 // TODO: add for ZSL HDR later
11686 // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
11687 if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
11688 LOGH("need do reprocess for ZSL WNR or min PP reprocess");
11689 return true;
11690 } else {
11691 LOGH("already post processed frame");
11692 return false;
11693 }
11694 }
11695 return needRotationReprocess();
11696}
11697
11698/*===========================================================================
11699 * FUNCTION : needJpegExifRotation
11700 *
11701 * DESCRIPTION: if rotation from jpeg is needed
11702 *
11703 * PARAMETERS : none
11704 *
11705 * RETURN : true: needed
11706 * false: no need
11707 *==========================================================================*/
11708bool QCamera3HardwareInterface::needJpegExifRotation()
11709{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011710 /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
Thierry Strudel3d639192016-09-09 11:52:26 -070011711 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
11712 LOGD("Need use Jpeg EXIF Rotation");
11713 return true;
11714 }
11715 return false;
11716}
11717
11718/*===========================================================================
11719 * FUNCTION : addOfflineReprocChannel
11720 *
11721 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
11722 * coming from input channel
11723 *
11724 * PARAMETERS :
11725 * @config : reprocess configuration
11726 * @inputChHandle : pointer to the input (source) channel
11727 *
11728 *
11729 * RETURN : Ptr to the newly created channel obj. NULL if failed.
11730 *==========================================================================*/
QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
        const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
{
    int32_t rc = NO_ERROR;
    QCamera3ReprocessChannel *pChannel = NULL;

    // Create the reprocess channel bound to the source (input) channel; the
    // static callbacks route results/errors back into this instance.
    pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
            mChannelHandle, mCameraHandle->ops, captureResultCb, setBufferErrorStatus,
            config.padding, CAM_QCOM_FEATURE_NONE, this, inputChHandle);
    if (NULL == pChannel) {
        LOGE("no mem for reprocess channel");
        return NULL;
    }

    rc = pChannel->initialize(IS_TYPE_NONE);
    if (rc != NO_ERROR) {
        LOGE("init reprocess channel failed, ret = %d", rc);
        // Channel owns no streams yet; safe to delete on init failure.
        delete pChannel;
        return NULL;
    }

    // pp feature config
    cam_pp_feature_config_t pp_config;
    memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));

    // Start from the HAL3 superset and trim/extend per capabilities.
    pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
    if (gCamCapability[mCameraId]->qcom_supported_feature_mask
            & CAM_QCOM_FEATURE_DSDN) {
        //Use CPP CDS incase h/w supports it.
        pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
        pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
    }
    // Drop rotation from the mask when the pp block cannot rotate.
    if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
        pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
    }

    // Propagate per-request HDR bracketing parameters if enabled.
    if (config.hdr_param.hdr_enable) {
        pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
        pp_config.hdr_param = config.hdr_param;
    }

    // mForceHdrSnapshot overrides with a fixed multi-frame HDR setup.
    if (mForceHdrSnapshot) {
        pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
        pp_config.hdr_param.hdr_enable = 1;
        pp_config.hdr_param.hdr_need_1x = 0;
        pp_config.hdr_param.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
    }

    rc = pChannel->addReprocStreamsFromSource(pp_config,
            config,
            IS_TYPE_NONE,
            mMetadataChannel);

    if (rc != NO_ERROR) {
        delete pChannel;
        return NULL;
    }
    return pChannel;
}
11790
11791/*===========================================================================
11792 * FUNCTION : getMobicatMask
11793 *
11794 * DESCRIPTION: returns mobicat mask
11795 *
11796 * PARAMETERS : none
11797 *
11798 * RETURN : mobicat mask
11799 *
11800 *==========================================================================*/
11801uint8_t QCamera3HardwareInterface::getMobicatMask()
11802{
11803 return m_MobicatMask;
11804}
11805
11806/*===========================================================================
11807 * FUNCTION : setMobicat
11808 *
11809 * DESCRIPTION: set Mobicat on/off.
11810 *
11811 * PARAMETERS :
11812 * @params : none
11813 *
11814 * RETURN : int32_t type of status
11815 * NO_ERROR -- success
11816 * none-zero failure code
11817 *==========================================================================*/
11818int32_t QCamera3HardwareInterface::setMobicat()
11819{
11820 char value [PROPERTY_VALUE_MAX];
11821 property_get("persist.camera.mobicat", value, "0");
11822 int32_t ret = NO_ERROR;
11823 uint8_t enableMobi = (uint8_t)atoi(value);
11824
11825 if (enableMobi) {
11826 tune_cmd_t tune_cmd;
11827 tune_cmd.type = SET_RELOAD_CHROMATIX;
11828 tune_cmd.module = MODULE_ALL;
11829 tune_cmd.value = TRUE;
11830 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
11831 CAM_INTF_PARM_SET_VFE_COMMAND,
11832 tune_cmd);
11833
11834 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
11835 CAM_INTF_PARM_SET_PP_COMMAND,
11836 tune_cmd);
11837 }
11838 m_MobicatMask = enableMobi;
11839
11840 return ret;
11841}
11842
11843/*===========================================================================
11844* FUNCTION : getLogLevel
11845*
11846* DESCRIPTION: Reads the log level property into a variable
11847*
11848* PARAMETERS :
11849* None
11850*
11851* RETURN :
11852* None
11853*==========================================================================*/
11854void QCamera3HardwareInterface::getLogLevel()
11855{
11856 char prop[PROPERTY_VALUE_MAX];
11857 uint32_t globalLogLevel = 0;
11858
11859 property_get("persist.camera.hal.debug", prop, "0");
11860 int val = atoi(prop);
11861 if (0 <= val) {
11862 gCamHal3LogLevel = (uint32_t)val;
11863 }
11864
Thierry Strudel9ec39c62016-12-28 11:30:05 -080011865 property_get("persist.camera.kpi.debug", prop, "0");
Thierry Strudel3d639192016-09-09 11:52:26 -070011866 gKpiDebugLevel = atoi(prop);
11867
11868 property_get("persist.camera.global.debug", prop, "0");
11869 val = atoi(prop);
11870 if (0 <= val) {
11871 globalLogLevel = (uint32_t)val;
11872 }
11873
11874 /* Highest log level among hal.logs and global.logs is selected */
11875 if (gCamHal3LogLevel < globalLogLevel)
11876 gCamHal3LogLevel = globalLogLevel;
11877
11878 return;
11879}
11880
11881/*===========================================================================
11882 * FUNCTION : validateStreamRotations
11883 *
11884 * DESCRIPTION: Check if the rotations requested are supported
11885 *
11886 * PARAMETERS :
11887 * @stream_list : streams to be configured
11888 *
11889 * RETURN : NO_ERROR on success
11890 * -EINVAL on failure
11891 *
11892 *==========================================================================*/
11893int QCamera3HardwareInterface::validateStreamRotations(
11894 camera3_stream_configuration_t *streamList)
11895{
11896 int rc = NO_ERROR;
11897
11898 /*
11899 * Loop through all streams requested in configuration
11900 * Check if unsupported rotations have been requested on any of them
11901 */
11902 for (size_t j = 0; j < streamList->num_streams; j++){
11903 camera3_stream_t *newStream = streamList->streams[j];
11904
11905 bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
11906 bool isImplDef = (newStream->format ==
11907 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
11908 bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
11909 isImplDef);
11910
11911 if (isRotated && (!isImplDef || isZsl)) {
11912 LOGE("Error: Unsupported rotation of %d requested for stream"
11913 "type:%d and stream format:%d",
11914 newStream->rotation, newStream->stream_type,
11915 newStream->format);
11916 rc = -EINVAL;
11917 break;
11918 }
11919 }
11920
11921 return rc;
11922}
11923
11924/*===========================================================================
11925* FUNCTION : getFlashInfo
11926*
11927* DESCRIPTION: Retrieve information about whether the device has a flash.
11928*
11929* PARAMETERS :
11930* @cameraId : Camera id to query
11931* @hasFlash : Boolean indicating whether there is a flash device
11932* associated with given camera
11933* @flashNode : If a flash device exists, this will be its device node.
11934*
11935* RETURN :
11936* None
11937*==========================================================================*/
11938void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
11939 bool& hasFlash,
11940 char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
11941{
11942 cam_capability_t* camCapability = gCamCapability[cameraId];
11943 if (NULL == camCapability) {
11944 hasFlash = false;
11945 flashNode[0] = '\0';
11946 } else {
11947 hasFlash = camCapability->flash_available;
11948 strlcpy(flashNode,
11949 (char*)camCapability->flash_dev_name,
11950 QCAMERA_MAX_FILEPATH_LENGTH);
11951 }
11952}
11953
11954/*===========================================================================
11955* FUNCTION : getEepromVersionInfo
11956*
11957* DESCRIPTION: Retrieve version info of the sensor EEPROM data
11958*
11959* PARAMETERS : None
11960*
11961* RETURN : string describing EEPROM version
11962* "\0" if no such info available
11963*==========================================================================*/
11964const char *QCamera3HardwareInterface::getEepromVersionInfo()
11965{
11966 return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
11967}
11968
11969/*===========================================================================
11970* FUNCTION : getLdafCalib
11971*
11972* DESCRIPTION: Retrieve Laser AF calibration data
11973*
11974* PARAMETERS : None
11975*
11976* RETURN : Two uint32_t describing laser AF calibration data
11977* NULL if none is available.
11978*==========================================================================*/
11979const uint32_t *QCamera3HardwareInterface::getLdafCalib()
11980{
11981 if (mLdafCalibExist) {
11982 return &mLdafCalib[0];
11983 } else {
11984 return NULL;
11985 }
11986}
11987
11988/*===========================================================================
11989 * FUNCTION : dynamicUpdateMetaStreamInfo
11990 *
11991 * DESCRIPTION: This function:
11992 * (1) stops all the channels
11993 * (2) returns error on pending requests and buffers
11994 * (3) sends metastream_info in setparams
11995 * (4) starts all channels
11996 * This is useful when sensor has to be restarted to apply any
11997 * settings such as frame rate from a different sensor mode
11998 *
11999 * PARAMETERS : None
12000 *
12001 * RETURN : NO_ERROR on success
12002 * Error codes on failure
12003 *
12004 *==========================================================================*/
12005int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
12006{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012007 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_DYN_UPDATE_META_STRM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070012008 int rc = NO_ERROR;
12009
12010 LOGD("E");
12011
12012 rc = stopAllChannels();
12013 if (rc < 0) {
12014 LOGE("stopAllChannels failed");
12015 return rc;
12016 }
12017
12018 rc = notifyErrorForPendingRequests();
12019 if (rc < 0) {
12020 LOGE("notifyErrorForPendingRequests failed");
12021 return rc;
12022 }
12023
12024 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
12025 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x"
12026 "Format:%d",
12027 mStreamConfigInfo.type[i],
12028 mStreamConfigInfo.stream_sizes[i].width,
12029 mStreamConfigInfo.stream_sizes[i].height,
12030 mStreamConfigInfo.postprocess_mask[i],
12031 mStreamConfigInfo.format[i]);
12032 }
12033
12034 /* Send meta stream info once again so that ISP can start */
12035 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
12036 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
12037 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
12038 mParameters);
12039 if (rc < 0) {
12040 LOGE("set Metastreaminfo failed. Sensor mode does not change");
12041 }
12042
12043 rc = startAllChannels();
12044 if (rc < 0) {
12045 LOGE("startAllChannels failed");
12046 return rc;
12047 }
12048
12049 LOGD("X");
12050 return rc;
12051}
12052
12053/*===========================================================================
12054 * FUNCTION : stopAllChannels
12055 *
12056 * DESCRIPTION: This function stops (equivalent to stream-off) all channels
12057 *
12058 * PARAMETERS : None
12059 *
12060 * RETURN : NO_ERROR on success
12061 * Error codes on failure
12062 *
12063 *==========================================================================*/
12064int32_t QCamera3HardwareInterface::stopAllChannels()
12065{
12066 int32_t rc = NO_ERROR;
12067
12068 LOGD("Stopping all channels");
12069 // Stop the Streams/Channels
12070 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
12071 it != mStreamInfo.end(); it++) {
12072 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
12073 if (channel) {
12074 channel->stop();
12075 }
12076 (*it)->status = INVALID;
12077 }
12078
12079 if (mSupportChannel) {
12080 mSupportChannel->stop();
12081 }
12082 if (mAnalysisChannel) {
12083 mAnalysisChannel->stop();
12084 }
12085 if (mRawDumpChannel) {
12086 mRawDumpChannel->stop();
12087 }
12088 if (mMetadataChannel) {
12089 /* If content of mStreamInfo is not 0, there is metadata stream */
12090 mMetadataChannel->stop();
12091 }
12092
12093 LOGD("All channels stopped");
12094 return rc;
12095}
12096
12097/*===========================================================================
12098 * FUNCTION : startAllChannels
12099 *
12100 * DESCRIPTION: This function starts (equivalent to stream-on) all channels
12101 *
12102 * PARAMETERS : None
12103 *
12104 * RETURN : NO_ERROR on success
12105 * Error codes on failure
12106 *
12107 *==========================================================================*/
12108int32_t QCamera3HardwareInterface::startAllChannels()
12109{
12110 int32_t rc = NO_ERROR;
12111
12112 LOGD("Start all channels ");
12113 // Start the Streams/Channels
12114 if (mMetadataChannel) {
12115 /* If content of mStreamInfo is not 0, there is metadata stream */
12116 rc = mMetadataChannel->start();
12117 if (rc < 0) {
12118 LOGE("META channel start failed");
12119 return rc;
12120 }
12121 }
12122 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
12123 it != mStreamInfo.end(); it++) {
12124 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
12125 if (channel) {
12126 rc = channel->start();
12127 if (rc < 0) {
12128 LOGE("channel start failed");
12129 return rc;
12130 }
12131 }
12132 }
12133 if (mAnalysisChannel) {
12134 mAnalysisChannel->start();
12135 }
12136 if (mSupportChannel) {
12137 rc = mSupportChannel->start();
12138 if (rc < 0) {
12139 LOGE("Support channel start failed");
12140 return rc;
12141 }
12142 }
12143 if (mRawDumpChannel) {
12144 rc = mRawDumpChannel->start();
12145 if (rc < 0) {
12146 LOGE("RAW dump channel start failed");
12147 return rc;
12148 }
12149 }
12150
12151 LOGD("All channels started");
12152 return rc;
12153}
12154
12155/*===========================================================================
12156 * FUNCTION : notifyErrorForPendingRequests
12157 *
12158 * DESCRIPTION: This function sends error for all the pending requests/buffers
12159 *
12160 * PARAMETERS : None
12161 *
12162 * RETURN : Error codes
12163 * NO_ERROR on success
12164 *
12165 *==========================================================================*/
12166int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
12167{
12168 int32_t rc = NO_ERROR;
12169 unsigned int frameNum = 0;
12170 camera3_capture_result_t result;
12171 camera3_stream_buffer_t *pStream_Buf = NULL;
12172
12173 memset(&result, 0, sizeof(camera3_capture_result_t));
12174
12175 if (mPendingRequestsList.size() > 0) {
12176 pendingRequestIterator i = mPendingRequestsList.begin();
12177 frameNum = i->frame_number;
12178 } else {
12179 /* There might still be pending buffers even though there are
12180 no pending requests. Setting the frameNum to MAX so that
12181 all the buffers with smaller frame numbers are returned */
12182 frameNum = UINT_MAX;
12183 }
12184
12185 LOGH("Oldest frame num on mPendingRequestsList = %u",
12186 frameNum);
12187
12188 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
12189 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); ) {
12190
12191 if (req->frame_number < frameNum) {
12192 // Send Error notify to frameworks for each buffer for which
12193 // metadata buffer is already sent
12194 LOGH("Sending ERROR BUFFER for frame %d for %d buffer(s)",
12195 req->frame_number, req->mPendingBufferList.size());
12196
12197 pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
12198 if (NULL == pStream_Buf) {
12199 LOGE("No memory for pending buffers array");
12200 return NO_MEMORY;
12201 }
12202 memset(pStream_Buf, 0,
12203 sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());
12204 result.result = NULL;
12205 result.frame_number = req->frame_number;
12206 result.num_output_buffers = req->mPendingBufferList.size();
12207 result.output_buffers = pStream_Buf;
12208
12209 size_t index = 0;
12210 for (auto info = req->mPendingBufferList.begin();
12211 info != req->mPendingBufferList.end(); ) {
12212
12213 camera3_notify_msg_t notify_msg;
12214 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
12215 notify_msg.type = CAMERA3_MSG_ERROR;
12216 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
12217 notify_msg.message.error.error_stream = info->stream;
12218 notify_msg.message.error.frame_number = req->frame_number;
12219 pStream_Buf[index].acquire_fence = -1;
12220 pStream_Buf[index].release_fence = -1;
12221 pStream_Buf[index].buffer = info->buffer;
12222 pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
12223 pStream_Buf[index].stream = info->stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012224 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070012225 index++;
12226 // Remove buffer from list
12227 info = req->mPendingBufferList.erase(info);
12228 }
12229
12230 // Remove this request from Map
12231 LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
12232 req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
12233 req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);
12234
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012235 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -070012236
12237 delete [] pStream_Buf;
12238 } else {
12239
12240 // Go through the pending requests info and send error request to framework
12241 pendingRequestIterator i = mPendingRequestsList.begin(); //make sure i is at the beginning
12242
12243 LOGH("Sending ERROR REQUEST for frame %d", req->frame_number);
12244
12245 // Send error notify to frameworks
12246 camera3_notify_msg_t notify_msg;
12247 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
12248 notify_msg.type = CAMERA3_MSG_ERROR;
12249 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
12250 notify_msg.message.error.error_stream = NULL;
12251 notify_msg.message.error.frame_number = req->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012252 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070012253
12254 pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
12255 if (NULL == pStream_Buf) {
12256 LOGE("No memory for pending buffers array");
12257 return NO_MEMORY;
12258 }
12259 memset(pStream_Buf, 0, sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());
12260
12261 result.result = NULL;
12262 result.frame_number = req->frame_number;
12263 result.input_buffer = i->input_buffer;
12264 result.num_output_buffers = req->mPendingBufferList.size();
12265 result.output_buffers = pStream_Buf;
12266
12267 size_t index = 0;
12268 for (auto info = req->mPendingBufferList.begin();
12269 info != req->mPendingBufferList.end(); ) {
12270 pStream_Buf[index].acquire_fence = -1;
12271 pStream_Buf[index].release_fence = -1;
12272 pStream_Buf[index].buffer = info->buffer;
12273 pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
12274 pStream_Buf[index].stream = info->stream;
12275 index++;
12276 // Remove buffer from list
12277 info = req->mPendingBufferList.erase(info);
12278 }
12279
12280 // Remove this request from Map
12281 LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
12282 req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
12283 req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);
12284
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012285 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -070012286 delete [] pStream_Buf;
12287 i = erasePendingRequest(i);
12288 }
12289 }
12290
12291 /* Reset pending frame Drop list and requests list */
12292 mPendingFrameDropList.clear();
12293
12294 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
12295 req.mPendingBufferList.clear();
12296 }
12297 mPendingBuffersMap.mPendingBuffersInRequest.clear();
12298 mPendingReprocessResultList.clear();
12299 LOGH("Cleared all the pending buffers ");
12300
12301 return rc;
12302}
12303
12304bool QCamera3HardwareInterface::isOnEncoder(
12305 const cam_dimension_t max_viewfinder_size,
12306 uint32_t width, uint32_t height)
12307{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012308 return ((width > (uint32_t)max_viewfinder_size.width) ||
12309 (height > (uint32_t)max_viewfinder_size.height) ||
12310 (width > (uint32_t)VIDEO_4K_WIDTH) ||
12311 (height > (uint32_t)VIDEO_4K_HEIGHT));
Thierry Strudel3d639192016-09-09 11:52:26 -070012312}
12313
12314/*===========================================================================
12315 * FUNCTION : setBundleInfo
12316 *
12317 * DESCRIPTION: Set bundle info for all streams that are bundle.
12318 *
12319 * PARAMETERS : None
12320 *
12321 * RETURN : NO_ERROR on success
12322 * Error codes on failure
12323 *==========================================================================*/
12324int32_t QCamera3HardwareInterface::setBundleInfo()
12325{
12326 int32_t rc = NO_ERROR;
12327
12328 if (mChannelHandle) {
12329 cam_bundle_config_t bundleInfo;
12330 memset(&bundleInfo, 0, sizeof(bundleInfo));
12331 rc = mCameraHandle->ops->get_bundle_info(
12332 mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
12333 if (rc != NO_ERROR) {
12334 LOGE("get_bundle_info failed");
12335 return rc;
12336 }
12337 if (mAnalysisChannel) {
12338 mAnalysisChannel->setBundleInfo(bundleInfo);
12339 }
12340 if (mSupportChannel) {
12341 mSupportChannel->setBundleInfo(bundleInfo);
12342 }
12343 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
12344 it != mStreamInfo.end(); it++) {
12345 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
12346 channel->setBundleInfo(bundleInfo);
12347 }
12348 if (mRawDumpChannel) {
12349 mRawDumpChannel->setBundleInfo(bundleInfo);
12350 }
12351 }
12352
12353 return rc;
12354}
12355
12356/*===========================================================================
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012357 * FUNCTION : setInstantAEC
12358 *
12359 * DESCRIPTION: Set Instant AEC related params.
12360 *
12361 * PARAMETERS :
12362 * @meta: CameraMetadata reference
12363 *
12364 * RETURN : NO_ERROR on success
12365 * Error codes on failure
12366 *==========================================================================*/
12367int32_t QCamera3HardwareInterface::setInstantAEC(const CameraMetadata &meta)
12368{
12369 int32_t rc = NO_ERROR;
12370 uint8_t val = 0;
12371 char prop[PROPERTY_VALUE_MAX];
12372
12373 // First try to configure instant AEC from framework metadata
12374 if (meta.exists(QCAMERA3_INSTANT_AEC_MODE)) {
12375 val = (uint8_t)meta.find(QCAMERA3_INSTANT_AEC_MODE).data.i32[0];
12376 }
12377
12378 // If framework did not set this value, try to read from set prop.
12379 if (val == 0) {
12380 memset(prop, 0, sizeof(prop));
12381 property_get("persist.camera.instant.aec", prop, "0");
12382 val = (uint8_t)atoi(prop);
12383 }
12384
12385 if ((val >= (uint8_t)CAM_AEC_NORMAL_CONVERGENCE) &&
12386 ( val < (uint8_t)CAM_AEC_CONVERGENCE_MAX)) {
12387 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_INSTANT_AEC, val);
12388 mInstantAEC = val;
12389 mInstantAECSettledFrameNumber = 0;
12390 mInstantAecFrameIdxCount = 0;
12391 LOGH("instantAEC value set %d",val);
12392 if (mInstantAEC) {
12393 memset(prop, 0, sizeof(prop));
12394 property_get("persist.camera.ae.instant.bound", prop, "10");
12395 int32_t aec_frame_skip_cnt = atoi(prop);
12396 if (aec_frame_skip_cnt >= 0) {
12397 mAecSkipDisplayFrameBound = (uint8_t)aec_frame_skip_cnt;
12398 } else {
12399 LOGE("Invalid prop for aec frame bound %d", aec_frame_skip_cnt);
12400 rc = BAD_VALUE;
12401 }
12402 }
12403 } else {
12404 LOGE("Bad instant aec value set %d", val);
12405 rc = BAD_VALUE;
12406 }
12407 return rc;
12408}
12409
12410/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070012411 * FUNCTION : get_num_overall_buffers
12412 *
12413 * DESCRIPTION: Estimate number of pending buffers across all requests.
12414 *
12415 * PARAMETERS : None
12416 *
12417 * RETURN : Number of overall pending buffers
12418 *
12419 *==========================================================================*/
12420uint32_t PendingBuffersMap::get_num_overall_buffers()
12421{
12422 uint32_t sum_buffers = 0;
12423 for (auto &req : mPendingBuffersInRequest) {
12424 sum_buffers += req.mPendingBufferList.size();
12425 }
12426 return sum_buffers;
12427}
12428
12429/*===========================================================================
12430 * FUNCTION : removeBuf
12431 *
12432 * DESCRIPTION: Remove a matching buffer from tracker.
12433 *
12434 * PARAMETERS : @buffer: image buffer for the callback
12435 *
12436 * RETURN : None
12437 *
12438 *==========================================================================*/
12439void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
12440{
12441 bool buffer_found = false;
12442 for (auto req = mPendingBuffersInRequest.begin();
12443 req != mPendingBuffersInRequest.end(); req++) {
12444 for (auto k = req->mPendingBufferList.begin();
12445 k != req->mPendingBufferList.end(); k++ ) {
12446 if (k->buffer == buffer) {
12447 LOGD("Frame %d: Found Frame buffer %p, take it out from mPendingBufferList",
12448 req->frame_number, buffer);
12449 k = req->mPendingBufferList.erase(k);
12450 if (req->mPendingBufferList.empty()) {
12451 // Remove this request from Map
12452 req = mPendingBuffersInRequest.erase(req);
12453 }
12454 buffer_found = true;
12455 break;
12456 }
12457 }
12458 if (buffer_found) {
12459 break;
12460 }
12461 }
12462 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
12463 get_num_overall_buffers());
12464}
12465
12466/*===========================================================================
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012467 * FUNCTION : getBufErrStatus
12468 *
12469 * DESCRIPTION: get buffer error status
12470 *
12471 * PARAMETERS : @buffer: buffer handle
12472 *
12473 * RETURN : Error status
12474 *
12475 *==========================================================================*/
12476int32_t PendingBuffersMap::getBufErrStatus(buffer_handle_t *buffer)
12477{
12478 for (auto& req : mPendingBuffersInRequest) {
12479 for (auto& k : req.mPendingBufferList) {
12480 if (k.buffer == buffer)
12481 return k.bufStatus;
12482 }
12483 }
12484 return CAMERA3_BUFFER_STATUS_OK;
12485}
12486
12487/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070012488 * FUNCTION : setPAAFSupport
12489 *
12490 * DESCRIPTION: Set the preview-assisted auto focus support bit in
12491 * feature mask according to stream type and filter
12492 * arrangement
12493 *
12494 * PARAMETERS : @feature_mask: current feature mask, which may be modified
12495 * @stream_type: stream type
12496 * @filter_arrangement: filter arrangement
12497 *
12498 * RETURN : None
12499 *==========================================================================*/
12500void QCamera3HardwareInterface::setPAAFSupport(
12501 cam_feature_mask_t& feature_mask,
12502 cam_stream_type_t stream_type,
12503 cam_color_filter_arrangement_t filter_arrangement)
12504{
12505 LOGD("feature_mask=0x%llx; stream_type=%d, filter_arrangement=%d",
12506 feature_mask, stream_type, filter_arrangement);
12507
12508 switch (filter_arrangement) {
12509 case CAM_FILTER_ARRANGEMENT_RGGB:
12510 case CAM_FILTER_ARRANGEMENT_GRBG:
12511 case CAM_FILTER_ARRANGEMENT_GBRG:
12512 case CAM_FILTER_ARRANGEMENT_BGGR:
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012513 if ((stream_type == CAM_STREAM_TYPE_PREVIEW) ||
12514 (stream_type == CAM_STREAM_TYPE_ANALYSIS) ||
Thierry Strudel3d639192016-09-09 11:52:26 -070012515 (stream_type == CAM_STREAM_TYPE_VIDEO)) {
12516 feature_mask |= CAM_QCOM_FEATURE_PAAF;
12517 }
12518 break;
12519 case CAM_FILTER_ARRANGEMENT_Y:
12520 if (stream_type == CAM_STREAM_TYPE_ANALYSIS) {
12521 feature_mask |= CAM_QCOM_FEATURE_PAAF;
12522 }
12523 break;
12524 default:
12525 break;
12526 }
12527}
12528
12529/*===========================================================================
12530* FUNCTION : getSensorMountAngle
12531*
12532* DESCRIPTION: Retrieve sensor mount angle
12533*
12534* PARAMETERS : None
12535*
12536* RETURN : sensor mount angle in uint32_t
12537*==========================================================================*/
12538uint32_t QCamera3HardwareInterface::getSensorMountAngle()
12539{
12540 return gCamCapability[mCameraId]->sensor_mount_angle;
12541}
12542
12543/*===========================================================================
12544* FUNCTION : getRelatedCalibrationData
12545*
12546* DESCRIPTION: Retrieve related system calibration data
12547*
12548* PARAMETERS : None
12549*
12550* RETURN : Pointer of related system calibration data
12551*==========================================================================*/
12552const cam_related_system_calibration_data_t *QCamera3HardwareInterface::getRelatedCalibrationData()
12553{
12554 return (const cam_related_system_calibration_data_t *)
12555 &(gCamCapability[mCameraId]->related_cam_calibration);
12556}
12557}; //end namespace qcamera