blob: f821541f9ffa96da6e03388d764f7ad2a5d944bb [file] [log] [blame]
Thierry Strudel3d639192016-09-09 11:52:26 -07001/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
2*
3* Redistribution and use in source and binary forms, with or without
4* modification, are permitted provided that the following conditions are
5* met:
6* * Redistributions of source code must retain the above copyright
7* notice, this list of conditions and the following disclaimer.
8* * Redistributions in binary form must reproduce the above
9* copyright notice, this list of conditions and the following
10* disclaimer in the documentation and/or other materials provided
11* with the distribution.
12* * Neither the name of The Linux Foundation nor the names of its
13* contributors may be used to endorse or promote products derived
14* from this software without specific prior written permission.
15*
16* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27*
28*/
29
30#define LOG_TAG "QCamera3HWI"
31//#define LOG_NDEBUG 0
32
33#define __STDC_LIMIT_MACROS
34
35// To remove
36#include <cutils/properties.h>
37
38// System dependencies
39#include <dlfcn.h>
40#include <fcntl.h>
41#include <stdio.h>
42#include <stdlib.h>
43#include "utils/Timers.h"
44#include "sys/ioctl.h"
45#include <sync/sync.h>
46#include "gralloc_priv.h"
Thierry Strudele80ad7c2016-12-06 10:16:27 -080047#include <map>
Thierry Strudel3d639192016-09-09 11:52:26 -070048
49// Display dependencies
50#include "qdMetaData.h"
51
52// Camera dependencies
53#include "android/QCamera3External.h"
54#include "util/QCameraFlash.h"
55#include "QCamera3HWI.h"
56#include "QCamera3VendorTags.h"
57#include "QCameraTrace.h"
58
59extern "C" {
60#include "mm_camera_dbg.h"
61}
62
63using namespace android;
64
65namespace qcamera {
66
// Accessor for a camera memory object's mapped buffer pointer at INDEX.
#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )

// Pipeline depth advertised while the request queue is still empty.
#define EMPTY_PIPELINE_DELAY 2
// Number of partial metadata results delivered per capture request.
#define PARTIAL_RESULT_COUNT 2
#define FRAME_SKIP_DELAY 0

// Maximum sample values for the supported sensor bit depths.
#define MAX_VALUE_8BIT ((1<<8)-1)
#define MAX_VALUE_10BIT ((1<<10)-1)
#define MAX_VALUE_12BIT ((1<<12)-1)

// UHD (4K) stream dimensions, used to classify 4K video streams.
#define VIDEO_4K_WIDTH 3840
#define VIDEO_4K_HEIGHT 2160

// Largest stream dimensions for which EIS is considered supported.
#define MAX_EIS_WIDTH 1920
#define MAX_EIS_HEIGHT 1080

// Per-configuration stream count limits.
#define MAX_RAW_STREAMS 1
#define MAX_STALLING_STREAMS 1
#define MAX_PROCESSED_STREAMS 3
/* Batch mode is enabled only if FPS set is equal to or greater than this */
#define MIN_FPS_FOR_BATCH_MODE (120)
#define PREVIEW_FPS_FOR_HFR (30)
#define DEFAULT_VIDEO_FPS (30.0)
#define TEMPLATE_MAX_PREVIEW_FPS (30.0)
#define MAX_HFR_BATCH_SIZE (8)
// int32 entries per metering/AF region tuple.
// NOTE(review): assumed layout is (xmin, ymin, xmax, ymax, weight) — confirm.
#define REGIONS_TUPLE_COUNT 5
#define HDR_PLUS_PERF_TIME_OUT (7000) // milliseconds
// Set a threshold for detection of missing buffers //seconds
#define MISSING_REQUEST_BUF_TIMEOUT 3
#define FLUSH_TIMEOUT 3
// Element count of a statically sized mapping table.
#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))

// Post-processing feature mask treated as the HAL3 superset.
#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3 ( CAM_QCOM_FEATURE_DENOISE2D |\
                                            CAM_QCOM_FEATURE_CROP |\
                                            CAM_QCOM_FEATURE_ROTATION |\
                                            CAM_QCOM_FEATURE_SHARPNESS |\
                                            CAM_QCOM_FEATURE_SCALE |\
                                            CAM_QCOM_FEATURE_CAC |\
                                            CAM_QCOM_FEATURE_CDS )
/* Per configuration size for static metadata length*/
#define PER_CONFIGURATION_SIZE_3 (3)

// Sentinel timeout meaning "wait forever".
#define TIMEOUT_NEVER -1

/* Face landmarks indices */
#define LEFT_EYE_X 0
#define LEFT_EYE_Y 1
#define RIGHT_EYE_X 2
#define RIGHT_EYE_Y 3
#define MOUTH_X 4
#define MOUTH_Y 5
#define TOTAL_LANDMARK_INDICES 6
119
// Per-sensor capability tables, filled in during camera enumeration.
cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
// Cached static metadata per camera, built once and reused across opens.
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
extern pthread_mutex_t gCamLock;
// Runtime-adjustable HAL log verbosity (see getLogLevel()).
volatile uint32_t gCamHal3LogLevel = 1;
extern uint8_t gNumCameraSessions;

// Property-string -> HAL CDS (chroma denoise) mode mapping.
const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
    {"On", CAM_CDS_MODE_ON},
    {"Off", CAM_CDS_MODE_OFF},
    {"Auto",CAM_CDS_MODE_AUTO}
};
// Vendor video-HDR metadata enum -> HAL video HDR mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_video_hdr_mode_t,
        cam_video_hdr_mode_t> QCamera3HardwareInterface::VIDEO_HDR_MODES_MAP[] = {
    { QCAMERA3_VIDEO_HDR_MODE_OFF,  CAM_VIDEO_HDR_MODE_OFF },
    { QCAMERA3_VIDEO_HDR_MODE_ON,   CAM_VIDEO_HDR_MODE_ON }
};


// Vendor IR metadata enum -> HAL IR mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_ir_mode_t,
        cam_ir_mode_type_t> QCamera3HardwareInterface::IR_MODES_MAP [] = {
    {QCAMERA3_IR_MODE_OFF,  CAM_IR_MODE_OFF},
    {QCAMERA3_IR_MODE_ON, CAM_IR_MODE_ON},
    {QCAMERA3_IR_MODE_AUTO, CAM_IR_MODE_AUTO}
};
Thierry Strudel3d639192016-09-09 11:52:26 -0700146
// android.control.effectMode -> HAL effect mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_effect_mode_t,
        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF, CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO, CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE, CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE, CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA, CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE, CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA, CAM_EFFECT_MODE_AQUA }
};

// android.control.awbMode -> HAL white balance mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_awb_mode_t,
        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF, CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO, CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT, CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT, CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT, CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT, CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE, CAM_WB_MODE_SHADE }
};
174
// android.control.sceneMode -> HAL scene mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_scene_mode_t,
        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY, CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION, CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT, CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE, CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT, CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE, CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH, CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW, CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET, CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO, CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS , CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS , CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY, CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT, CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE, CAM_SCENE_MODE_BARCODE}
};

// android.control.afMode -> HAL focus mode.
// AF_MODE_OFF appears twice so that both CAM_FOCUS_MODE_OFF and
// CAM_FOCUS_MODE_FIXED translate back to AF_MODE_OFF in HAL->Android lookup.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_af_mode_t,
        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF, CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF, CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO, CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO, CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF, CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO, CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};
207
// android.colorCorrection.aberrationMode -> HAL CAC mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_color_correction_aberration_mode_t,
        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};

// android.control.aeAntibandingMode -> HAL antibanding mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_antibanding_mode_t,
        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF, CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

// android.control.aeMode -> HAL flash mode implied by that AE mode.
// Both AE_MODE_OFF and AE_MODE_ON map to flash off; only the AUTO_FLASH
// variants enable the flash.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                     CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                      CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,           CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,         CAM_FLASH_MODE_ON  },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE,    CAM_FLASH_MODE_AUTO}
};

// android.flash.mode -> HAL flash mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_flash_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF, CAM_FLASH_MODE_OFF },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH, CAM_FLASH_MODE_TORCH }
};
245
// android.statistics.faceDetectMode -> HAL face detect mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_statistics_face_detect_mode_t,
        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF, CAM_FACE_DETECT_MODE_OFF },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL, CAM_FACE_DETECT_MODE_FULL }
};

// android.lens.info.focusDistanceCalibration -> HAL focus calibration level.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
            CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
            CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
            CAM_FOCUS_CALIBRATED }
};

// android.lens.state -> HAL AF lens state.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_state_t,
        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
    { ANDROID_LENS_STATE_STATIONARY,    CAM_AF_LENS_STATE_STATIONARY},
    { ANDROID_LENS_STATE_MOVING,        CAM_AF_LENS_STATE_MOVING}
};
271
// Supported JPEG thumbnail sizes as flat (width, height) pairs; the leading
// (0, 0) entry means "no thumbnail".
const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             240, 144,
                                             256, 144,
                                             240, 160,
                                             256, 154,
                                             240, 240,
                                             320, 240};

// android.sensor.testPatternMode -> HAL test pattern.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_test_pattern_mode_t,
        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF, CAM_TEST_PATTERN_OFF },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR, CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS, CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9, CAM_TEST_PATTERN_PN9 },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1, CAM_TEST_PATTERN_CUSTOM1},
};
291
/* Since there is no mapping for all the options some Android enum are not listed.
 * Also, the order in this list is important because while mapping from HAL to Android it will
 * traverse from lower to higher index which means that for HAL values that are map to different
 * Android values, the traverse logic will select the first one found.
 */
// android.sensor.referenceIlluminant1 -> HAL AWB illuminant.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_reference_illuminant1_t,
        cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT, CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55, CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN, CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO},
};

// Requested fps -> HAL high-frame-rate mode.
const QCamera3HardwareInterface::QCameraMap<
        int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
    { 60, CAM_HFR_MODE_60FPS},
    { 90, CAM_HFR_MODE_90FPS},
    { 120, CAM_HFR_MODE_120FPS},
    { 150, CAM_HFR_MODE_150FPS},
    { 180, CAM_HFR_MODE_180FPS},
    { 210, CAM_HFR_MODE_210FPS},
    { 240, CAM_HFR_MODE_240FPS},
    { 480, CAM_HFR_MODE_480FPS},
};
329
// Vendor instant-AEC mode -> HAL AEC convergence type.
const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_instant_aec_mode_t,
        cam_aec_convergence_type> QCamera3HardwareInterface::INSTANT_AEC_MODES_MAP[] = {
    { QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE, CAM_AEC_NORMAL_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_AGGRESSIVE_CONVERGENCE, CAM_AEC_AGGRESSIVE_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_FAST_CONVERGENCE, CAM_AEC_FAST_CONVERGENCE},
};

// camera3_device_ops vtable handed to the camera framework.
// register_stream_buffers and get_metadata_vendor_tag_ops are left NULL
// (not implemented by this HAL).
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    .initialize                         = QCamera3HardwareInterface::initialize,
    .configure_streams                  = QCamera3HardwareInterface::configure_streams,
    .register_stream_buffers            = NULL,
    .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
    .process_capture_request            = QCamera3HardwareInterface::process_capture_request,
    .get_metadata_vendor_tag_ops        = NULL,
    .dump                               = QCamera3HardwareInterface::dump,
    .flush                              = QCamera3HardwareInterface::flush,
    .reserved                           = {0},
};

// Per-camera backend session ids, filled by get_session_id() during
// openCamera(); initialized to a placeholder value before any session opens.
// NOTE(review): hard-coded length 3 — confirm it matches MM_CAMERA_MAX_NUM_SENSORS.
uint32_t QCamera3HardwareInterface::sessionId[] = {0xDEADBEEF, 0xDEADBEEF, 0xDEADBEEF};
351
/*===========================================================================
 * FUNCTION   : QCamera3HardwareInterface
 *
 * DESCRIPTION: constructor of QCamera3HardwareInterface. Initializes all
 *              members to their idle defaults, publishes the camera3_device_t
 *              vtable, reads debug/tuning system properties, and probes the
 *              GPU library for the surface stride alignment.
 *
 * PARAMETERS :
 *   @cameraId  : camera ID
 *   @callbacks : framework module callbacks, stored for later notification
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
        const camera_module_callbacks_t *callbacks)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mSupportChannel(NULL),
      mAnalysisChannel(NULL),
      mRawDumpChannel(NULL),
      mDummyBatchChannel(NULL),
      mPerfLockMgr(),
      mCommon(),
      mChannelHandle(0),
      mFirstConfiguration(true),
      mFlush(false),
      mFlushPerf(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mPrevParameters(NULL),
      m_bIsVideo(false),
      m_bIs4KVideo(false),
      m_bEisSupportedSize(false),
      m_bEisEnable(false),
      m_MobicatMask(0),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      mMetaFrameCount(0U),
      mUpdateDebugLevel(false),
      mCallbacks(callbacks),
      mCaptureIntent(0),
      mCacMode(0),
      /* DevCamDebug metadata internal m control*/
      mDevCamDebugMetaEnable(0),
      /* DevCamDebug metadata end */
      mBatchSize(0),
      mToBeQueuedVidBufs(0),
      mHFRVideoFps(DEFAULT_VIDEO_FPS),
      mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
      mFirstFrameNumberInBatch(0),
      mNeedSensorRestart(false),
      mPreviewStarted(false),
      mMinInFlightRequests(MIN_INFLIGHT_REQUESTS),
      mMaxInFlightRequests(MAX_INFLIGHT_REQUESTS),
      mInstantAEC(false),
      mResetInstantAEC(false),
      mInstantAECSettledFrameNumber(0),
      mAecSkipDisplayFrameBound(0),
      mInstantAecFrameIdxCount(0),
      mLdafCalibExist(false),
      mLastCustIntentFrmNum(-1),
      mState(CLOSED),
      mIsDeviceLinked(false),
      mIsMainCamera(true),
      mLinkedCameraId(0),
      m_pDualCamCmdHeap(NULL),
      m_pDualCamCmdPtr(NULL)
{
    getLogLevel();
    mCommon.init(gCamCapability[cameraId]);
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
    // Advertise the newest HAL device version this build supports.
#ifndef USE_HAL_3_3
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_4;
#else
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
#endif
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    pthread_cond_init(&mBuffersCond, NULL);

    pthread_cond_init(&mRequestCond, NULL);
    mPendingLiveRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

    // Getting system props of different kinds
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.raw.dump", prop, "0");
    mEnableRawDump = atoi(prop);
    property_get("persist.camera.hal3.force.hdr", prop, "0");
    mForceHdrSnapshot = atoi(prop);

    if (mEnableRawDump)
        LOGD("Raw dump from Camera HAL enabled");

    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
    memset(mLdafCalib, 0, sizeof(mLdafCalib));

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.preview", prop, "0");
    m_bTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    // Software TNR for preview defaults to enabled ("1").
    property_get("persist.camera.swtnr.preview", prop, "1");
    m_bSwTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.video", prop, "0");
    m_bTnrVideo = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.avtimer.debug", prop, "0");
    m_debug_avtimer = (uint8_t)atoi(prop);
    LOGI("AV timer enabled: %d", m_debug_avtimer);

    //Load and read GPU library.
    // Query the Adreno GPU helper for its pixel alignment so buffer strides
    // match what the GPU expects; fall back to CAM_PAD_TO_32 if unavailable.
    lib_surface_utils = NULL;
    LINK_get_surface_pixel_alignment = NULL;
    mSurfaceStridePadding = CAM_PAD_TO_32;
    lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
    if (lib_surface_utils) {
        *(void **)&LINK_get_surface_pixel_alignment =
                dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
        if (LINK_get_surface_pixel_alignment) {
            mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
        }
        dlclose(lib_surface_utils);
    }
}
494
/*===========================================================================
 * FUNCTION   : ~QCamera3HardwareInterface
 *
 * DESCRIPTION: destructor of QCamera3HardwareInterface. Unlinks a dual-cam
 *              bundle if one was set up, stops every stream before deleting
 *              any channel, sends a final unconfigure to the backend, closes
 *              the camera, and releases all pending-request bookkeeping.
 *
 * PARAMETERS : none
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    LOGD("E");

    int32_t rc = 0;

    // Disable power hint and enable the perf lock for close camera
    mPerfLockMgr.releasePerfLock(PERF_LOCK_POWERHINT_ENCODE);
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_CLOSE_CAMERA);

    // unlink of dualcam during close camera
    // NOTE(review): assumes m_pDualCamCmdPtr is non-NULL whenever
    // mIsDeviceLinked is set — confirm that invariant holds.
    if (mIsDeviceLinked) {
        cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
                &m_pDualCamCmdPtr->bundle_info;
        m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
        m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
        pthread_mutex_lock(&gCamLock);

        if (mIsMainCamera == 1) {
            m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            // related session id should be session id of linked session
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        } else {
            m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        }
        pthread_mutex_unlock(&gCamLock);

        rc = mCameraHandle->ops->set_dual_cam_cmd(
                mCameraHandle->camera_handle);
        if (rc < 0) {
            // Unlink failure is non-fatal; continue tearing down.
            LOGE("Dualcam: Unlink failed, but still proceed to close");
        }
    }

    /* We need to stop all streams before deleting any stream */
    if (mRawDumpChannel) {
        mRawDumpChannel->stop();
    }

    // NOTE: 'camera3_stream_t *' objects are already freed at
    //       this stage by the framework
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel) {
            channel->stop();
        }
    }
    if (mSupportChannel)
        mSupportChannel->stop();

    if (mAnalysisChannel) {
        mAnalysisChannel->stop();
    }
    if (mMetadataChannel) {
        mMetadataChannel->stop();
    }
    if (mChannelHandle) {
        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGD("stopping channel %d", mChannelHandle);
    }

    // Second pass: all channels are stopped, now it is safe to delete them.
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel)
            delete channel;
        free (*it);
    }
    if (mSupportChannel) {
        delete mSupportChannel;
        mSupportChannel = NULL;
    }

    if (mAnalysisChannel) {
        delete mAnalysisChannel;
        mAnalysisChannel = NULL;
    }
    if (mRawDumpChannel) {
        delete mRawDumpChannel;
        mRawDumpChannel = NULL;
    }
    if (mDummyBatchChannel) {
        delete mDummyBatchChannel;
        mDummyBatchChannel = NULL;
    }

    // mPictureChannel is owned via mStreamInfo (deleted above); just clear it.
    mPictureChannel = NULL;

    if (mMetadataChannel) {
        delete mMetadataChannel;
        mMetadataChannel = NULL;
    }

    /* Clean up all channels */
    if (mCameraInitialized) {
        if(!mFirstConfiguration){
            //send the last unconfigure
            cam_stream_size_info_t stream_config_info;
            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
            stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
            stream_config_info.buffer_info.max_buffers =
                    m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
            clear_metadata_buffer(mParameters);
            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
                    stream_config_info);
            int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
            if (rc < 0) {
                LOGE("set_parms failed for unconfigure");
            }
        }
        deinitParameters();
    }

    if (mChannelHandle) {
        mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGH("deleting channel %d", mChannelHandle);
        mChannelHandle = 0;
    }

    if (mState != CLOSED)
        closeCamera();

    // Drop all pending buffer/request bookkeeping and free per-request memory.
    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        req.mPendingBufferList.clear();
    }
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    mPendingReprocessResultList.clear();
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end();) {
        i = erasePendingRequest(i);
    }
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    mPerfLockMgr.releasePerfLock(PERF_LOCK_CLOSE_CAMERA);

    pthread_cond_destroy(&mRequestCond);

    pthread_cond_destroy(&mBuffersCond);

    pthread_mutex_destroy(&mMutex);
    LOGD("X");
}
656
657/*===========================================================================
658 * FUNCTION : erasePendingRequest
659 *
660 * DESCRIPTION: function to erase a desired pending request after freeing any
661 * allocated memory
662 *
663 * PARAMETERS :
664 * @i : iterator pointing to pending request to be erased
665 *
666 * RETURN : iterator pointing to the next request
667 *==========================================================================*/
668QCamera3HardwareInterface::pendingRequestIterator
669 QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
670{
671 if (i->input_buffer != NULL) {
672 free(i->input_buffer);
673 i->input_buffer = NULL;
674 }
675 if (i->settings != NULL)
676 free_camera_metadata((camera_metadata_t*)i->settings);
677 return mPendingRequestsList.erase(i);
678}
679
680/*===========================================================================
681 * FUNCTION : camEvtHandle
682 *
683 * DESCRIPTION: Function registered to mm-camera-interface to handle events
684 *
685 * PARAMETERS :
686 * @camera_handle : interface layer camera handle
687 * @evt : ptr to event
688 * @user_data : user data ptr
689 *
690 * RETURN : none
691 *==========================================================================*/
692void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
693 mm_camera_event_t *evt,
694 void *user_data)
695{
696 QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
697 if (obj && evt) {
698 switch(evt->server_event_type) {
699 case CAM_EVENT_TYPE_DAEMON_DIED:
700 pthread_mutex_lock(&obj->mMutex);
701 obj->mState = ERROR;
702 pthread_mutex_unlock(&obj->mMutex);
703 LOGE("Fatal, camera daemon died");
704 break;
705
706 case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
707 LOGD("HAL got request pull from Daemon");
708 pthread_mutex_lock(&obj->mMutex);
709 obj->mWokenUpByDaemon = true;
710 obj->unblockRequestIfNecessary();
711 pthread_mutex_unlock(&obj->mMutex);
712 break;
713
714 default:
715 LOGW("Warning: Unhandled event %d",
716 evt->server_event_type);
717 break;
718 }
719 } else {
720 LOGE("NULL user_data/evt");
721 }
722}
723
724/*===========================================================================
725 * FUNCTION : openCamera
726 *
727 * DESCRIPTION: open camera
728 *
729 * PARAMETERS :
730 * @hw_device : double ptr for camera device struct
731 *
732 * RETURN : int32_t type of status
733 * NO_ERROR -- success
734 * none-zero failure code
735 *==========================================================================*/
736int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
737{
738 int rc = 0;
739 if (mState != CLOSED) {
740 *hw_device = NULL;
741 return PERMISSION_DENIED;
742 }
743
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800744 mPerfLockMgr.acquirePerfLock(PERF_LOCK_OPEN_CAMERA);
Thierry Strudel3d639192016-09-09 11:52:26 -0700745 LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
746 mCameraId);
747
748 rc = openCamera();
749 if (rc == 0) {
750 *hw_device = &mCameraDevice.common;
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800751 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -0700752 *hw_device = NULL;
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800753 }
Thierry Strudel3d639192016-09-09 11:52:26 -0700754
Thierry Strudel3d639192016-09-09 11:52:26 -0700755 LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
756 mCameraId, rc);
757
758 if (rc == NO_ERROR) {
759 mState = OPENED;
760 }
761 return rc;
762}
763
764/*===========================================================================
765 * FUNCTION : openCamera
766 *
767 * DESCRIPTION: open camera
768 *
769 * PARAMETERS : none
770 *
771 * RETURN : int32_t type of status
772 * NO_ERROR -- success
773 * none-zero failure code
774 *==========================================================================*/
775int QCamera3HardwareInterface::openCamera()
776{
777 int rc = 0;
778 char value[PROPERTY_VALUE_MAX];
779
Thierry Strudele80ad7c2016-12-06 10:16:27 -0800780 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_OPENCAMERA);
Thierry Strudel3d639192016-09-09 11:52:26 -0700781 if (mCameraHandle) {
782 LOGE("Failure: Camera already opened");
783 return ALREADY_EXISTS;
784 }
785
786 rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
787 if (rc < 0) {
788 LOGE("Failed to reserve flash for camera id: %d",
789 mCameraId);
790 return UNKNOWN_ERROR;
791 }
792
793 rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
794 if (rc) {
795 LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
796 return rc;
797 }
798
799 if (!mCameraHandle) {
800 LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
801 return -ENODEV;
802 }
803
804 rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
805 camEvtHandle, (void *)this);
806
807 if (rc < 0) {
808 LOGE("Error, failed to register event callback");
809 /* Not closing camera here since it is already handled in destructor */
810 return FAILED_TRANSACTION;
811 }
812
813 mExifParams.debug_params =
814 (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
815 if (mExifParams.debug_params) {
816 memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
817 } else {
818 LOGE("Out of Memory. Allocation failed for 3A debug exif params");
819 return NO_MEMORY;
820 }
821 mFirstConfiguration = true;
822
823 //Notify display HAL that a camera session is active.
824 //But avoid calling the same during bootup because camera service might open/close
825 //cameras at boot time during its initialization and display service will also internally
826 //wait for camera service to initialize first while calling this display API, resulting in a
827 //deadlock situation. Since boot time camera open/close calls are made only to fetch
828 //capabilities, no need of this display bw optimization.
829 //Use "service.bootanim.exit" property to know boot status.
830 property_get("service.bootanim.exit", value, "0");
831 if (atoi(value) == 1) {
832 pthread_mutex_lock(&gCamLock);
833 if (gNumCameraSessions++ == 0) {
834 setCameraLaunchStatus(true);
835 }
836 pthread_mutex_unlock(&gCamLock);
837 }
838
839 //fill the session id needed while linking dual cam
840 pthread_mutex_lock(&gCamLock);
841 rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
842 &sessionId[mCameraId]);
843 pthread_mutex_unlock(&gCamLock);
844
845 if (rc < 0) {
846 LOGE("Error, failed to get sessiion id");
847 return UNKNOWN_ERROR;
848 } else {
849 //Allocate related cam sync buffer
850 //this is needed for the payload that goes along with bundling cmd for related
851 //camera use cases
Thierry Strudel295a0ca2016-11-03 18:38:47 -0700852 m_pDualCamCmdHeap = new QCamera3HeapMemory(1);
853 rc = m_pDualCamCmdHeap->allocate(sizeof(cam_dual_camera_cmd_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -0700854 if(rc != OK) {
855 rc = NO_MEMORY;
856 LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
857 return NO_MEMORY;
858 }
859
860 //Map memory for related cam sync buffer
861 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
Thierry Strudel295a0ca2016-11-03 18:38:47 -0700862 CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF,
863 m_pDualCamCmdHeap->getFd(0),
864 sizeof(cam_dual_camera_cmd_info_t),
865 m_pDualCamCmdHeap->getPtr(0));
Thierry Strudel3d639192016-09-09 11:52:26 -0700866 if(rc < 0) {
867 LOGE("Dualcam: failed to map Related cam sync buffer");
868 rc = FAILED_TRANSACTION;
869 return NO_MEMORY;
870 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -0700871 m_pDualCamCmdPtr =
872 (cam_dual_camera_cmd_info_t*) DATA_PTR(m_pDualCamCmdHeap,0);
Thierry Strudel3d639192016-09-09 11:52:26 -0700873 }
874
875 LOGH("mCameraId=%d",mCameraId);
876
877 return NO_ERROR;
878}
879
880/*===========================================================================
881 * FUNCTION : closeCamera
882 *
883 * DESCRIPTION: close camera
884 *
885 * PARAMETERS : none
886 *
887 * RETURN : int32_t type of status
888 * NO_ERROR -- success
889 * none-zero failure code
890 *==========================================================================*/
891int QCamera3HardwareInterface::closeCamera()
892{
Thierry Strudele80ad7c2016-12-06 10:16:27 -0800893 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CLOSECAMERA);
Thierry Strudel3d639192016-09-09 11:52:26 -0700894 int rc = NO_ERROR;
895 char value[PROPERTY_VALUE_MAX];
896
897 LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
898 mCameraId);
Thierry Strudelcca4d9c2016-10-20 08:25:53 -0700899
900 // unmap memory for related cam sync buffer
901 mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
Thierry Strudele80ad7c2016-12-06 10:16:27 -0800902 CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF);
Thierry Strudel295a0ca2016-11-03 18:38:47 -0700903 if (NULL != m_pDualCamCmdHeap) {
904 m_pDualCamCmdHeap->deallocate();
905 delete m_pDualCamCmdHeap;
906 m_pDualCamCmdHeap = NULL;
907 m_pDualCamCmdPtr = NULL;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -0700908 }
909
Thierry Strudel3d639192016-09-09 11:52:26 -0700910 rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
911 mCameraHandle = NULL;
912
913 //reset session id to some invalid id
914 pthread_mutex_lock(&gCamLock);
915 sessionId[mCameraId] = 0xDEADBEEF;
916 pthread_mutex_unlock(&gCamLock);
917
918 //Notify display HAL that there is no active camera session
919 //but avoid calling the same during bootup. Refer to openCamera
920 //for more details.
921 property_get("service.bootanim.exit", value, "0");
922 if (atoi(value) == 1) {
923 pthread_mutex_lock(&gCamLock);
924 if (--gNumCameraSessions == 0) {
925 setCameraLaunchStatus(false);
926 }
927 pthread_mutex_unlock(&gCamLock);
928 }
929
Thierry Strudel3d639192016-09-09 11:52:26 -0700930 if (mExifParams.debug_params) {
931 free(mExifParams.debug_params);
932 mExifParams.debug_params = NULL;
933 }
934 if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
935 LOGW("Failed to release flash for camera id: %d",
936 mCameraId);
937 }
938 mState = CLOSED;
939 LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
940 mCameraId, rc);
941 return rc;
942}
943
944/*===========================================================================
945 * FUNCTION : initialize
946 *
947 * DESCRIPTION: Initialize frameworks callback functions
948 *
949 * PARAMETERS :
950 * @callback_ops : callback function to frameworks
951 *
952 * RETURN :
953 *
954 *==========================================================================*/
955int QCamera3HardwareInterface::initialize(
956 const struct camera3_callback_ops *callback_ops)
957{
Thierry Strudele80ad7c2016-12-06 10:16:27 -0800958 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_INIT);
Thierry Strudel3d639192016-09-09 11:52:26 -0700959 int rc;
960
961 LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
962 pthread_mutex_lock(&mMutex);
963
964 // Validate current state
965 switch (mState) {
966 case OPENED:
967 /* valid state */
968 break;
969 default:
970 LOGE("Invalid state %d", mState);
971 rc = -ENODEV;
972 goto err1;
973 }
974
975 rc = initParameters();
976 if (rc < 0) {
977 LOGE("initParamters failed %d", rc);
978 goto err1;
979 }
980 mCallbackOps = callback_ops;
981
982 mChannelHandle = mCameraHandle->ops->add_channel(
983 mCameraHandle->camera_handle, NULL, NULL, this);
984 if (mChannelHandle == 0) {
985 LOGE("add_channel failed");
986 rc = -ENOMEM;
987 pthread_mutex_unlock(&mMutex);
988 return rc;
989 }
990
991 pthread_mutex_unlock(&mMutex);
992 mCameraInitialized = true;
993 mState = INITIALIZED;
994 LOGI("X");
995 return 0;
996
997err1:
998 pthread_mutex_unlock(&mMutex);
999 return rc;
1000}
1001
1002/*===========================================================================
1003 * FUNCTION : validateStreamDimensions
1004 *
1005 * DESCRIPTION: Check if the configuration requested are those advertised
1006 *
1007 * PARAMETERS :
1008 * @stream_list : streams to be configured
1009 *
1010 * RETURN :
1011 *
1012 *==========================================================================*/
1013int QCamera3HardwareInterface::validateStreamDimensions(
1014 camera3_stream_configuration_t *streamList)
1015{
1016 int rc = NO_ERROR;
1017 size_t count = 0;
1018
1019 camera3_stream_t *inputStream = NULL;
1020 /*
1021 * Loop through all streams to find input stream if it exists*
1022 */
1023 for (size_t i = 0; i< streamList->num_streams; i++) {
1024 if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
1025 if (inputStream != NULL) {
1026 LOGE("Error, Multiple input streams requested");
1027 return -EINVAL;
1028 }
1029 inputStream = streamList->streams[i];
1030 }
1031 }
1032 /*
1033 * Loop through all streams requested in configuration
1034 * Check if unsupported sizes have been requested on any of them
1035 */
1036 for (size_t j = 0; j < streamList->num_streams; j++) {
1037 bool sizeFound = false;
1038 camera3_stream_t *newStream = streamList->streams[j];
1039
1040 uint32_t rotatedHeight = newStream->height;
1041 uint32_t rotatedWidth = newStream->width;
1042 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
1043 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
1044 rotatedHeight = newStream->width;
1045 rotatedWidth = newStream->height;
1046 }
1047
1048 /*
1049 * Sizes are different for each type of stream format check against
1050 * appropriate table.
1051 */
1052 switch (newStream->format) {
1053 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
1054 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
1055 case HAL_PIXEL_FORMAT_RAW10:
1056 count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
1057 for (size_t i = 0; i < count; i++) {
1058 if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
1059 (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
1060 sizeFound = true;
1061 break;
1062 }
1063 }
1064 break;
1065 case HAL_PIXEL_FORMAT_BLOB:
1066 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
1067 /* Verify set size against generated sizes table */
1068 for (size_t i = 0; i < count; i++) {
1069 if (((int32_t)rotatedWidth ==
1070 gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
1071 ((int32_t)rotatedHeight ==
1072 gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
1073 sizeFound = true;
1074 break;
1075 }
1076 }
1077 break;
1078 case HAL_PIXEL_FORMAT_YCbCr_420_888:
1079 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1080 default:
1081 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
1082 || newStream->stream_type == CAMERA3_STREAM_INPUT
1083 || IS_USAGE_ZSL(newStream->usage)) {
1084 if (((int32_t)rotatedWidth ==
1085 gCamCapability[mCameraId]->active_array_size.width) &&
1086 ((int32_t)rotatedHeight ==
1087 gCamCapability[mCameraId]->active_array_size.height)) {
1088 sizeFound = true;
1089 break;
1090 }
1091 /* We could potentially break here to enforce ZSL stream
1092 * set from frameworks always is full active array size
1093 * but it is not clear from the spc if framework will always
1094 * follow that, also we have logic to override to full array
1095 * size, so keeping the logic lenient at the moment
1096 */
1097 }
1098 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
1099 MAX_SIZES_CNT);
1100 for (size_t i = 0; i < count; i++) {
1101 if (((int32_t)rotatedWidth ==
1102 gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
1103 ((int32_t)rotatedHeight ==
1104 gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
1105 sizeFound = true;
1106 break;
1107 }
1108 }
1109 break;
1110 } /* End of switch(newStream->format) */
1111
1112 /* We error out even if a single stream has unsupported size set */
1113 if (!sizeFound) {
1114 LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
1115 rotatedWidth, rotatedHeight, newStream->format,
1116 gCamCapability[mCameraId]->active_array_size.width,
1117 gCamCapability[mCameraId]->active_array_size.height);
1118 rc = -EINVAL;
1119 break;
1120 }
1121 } /* End of for each stream */
1122 return rc;
1123}
1124
1125/*==============================================================================
1126 * FUNCTION : isSupportChannelNeeded
1127 *
1128 * DESCRIPTION: Simple heuristic func to determine if support channels is needed
1129 *
1130 * PARAMETERS :
1131 * @stream_list : streams to be configured
1132 * @stream_config_info : the config info for streams to be configured
1133 *
1134 * RETURN : Boolen true/false decision
1135 *
1136 *==========================================================================*/
1137bool QCamera3HardwareInterface::isSupportChannelNeeded(
1138 camera3_stream_configuration_t *streamList,
1139 cam_stream_size_info_t stream_config_info)
1140{
1141 uint32_t i;
1142 bool pprocRequested = false;
1143 /* Check for conditions where PProc pipeline does not have any streams*/
1144 for (i = 0; i < stream_config_info.num_streams; i++) {
1145 if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
1146 stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
1147 pprocRequested = true;
1148 break;
1149 }
1150 }
1151
1152 if (pprocRequested == false )
1153 return true;
1154
1155 /* Dummy stream needed if only raw or jpeg streams present */
1156 for (i = 0; i < streamList->num_streams; i++) {
1157 switch(streamList->streams[i]->format) {
1158 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1159 case HAL_PIXEL_FORMAT_RAW10:
1160 case HAL_PIXEL_FORMAT_RAW16:
1161 case HAL_PIXEL_FORMAT_BLOB:
1162 break;
1163 default:
1164 return false;
1165 }
1166 }
1167 return true;
1168}
1169
1170/*==============================================================================
1171 * FUNCTION : getSensorOutputSize
1172 *
1173 * DESCRIPTION: Get sensor output size based on current stream configuratoin
1174 *
1175 * PARAMETERS :
1176 * @sensor_dim : sensor output dimension (output)
1177 *
1178 * RETURN : int32_t type of status
1179 * NO_ERROR -- success
1180 * none-zero failure code
1181 *
1182 *==========================================================================*/
1183int32_t QCamera3HardwareInterface::getSensorOutputSize(cam_dimension_t &sensor_dim)
1184{
1185 int32_t rc = NO_ERROR;
1186
1187 cam_dimension_t max_dim = {0, 0};
1188 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
1189 if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
1190 max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
1191 if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
1192 max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
1193 }
1194
1195 clear_metadata_buffer(mParameters);
1196
1197 rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
1198 max_dim);
1199 if (rc != NO_ERROR) {
1200 LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
1201 return rc;
1202 }
1203
1204 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
1205 if (rc != NO_ERROR) {
1206 LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
1207 return rc;
1208 }
1209
1210 clear_metadata_buffer(mParameters);
1211 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_RAW_DIMENSION);
1212
1213 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1214 mParameters);
1215 if (rc != NO_ERROR) {
1216 LOGE("Failed to get CAM_INTF_PARM_RAW_DIMENSION");
1217 return rc;
1218 }
1219
1220 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_RAW_DIMENSION, sensor_dim);
1221 LOGH("sensor output dimension = %d x %d", sensor_dim.width, sensor_dim.height);
1222
1223 return rc;
1224}
1225
1226/*==============================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07001227 * FUNCTION : addToPPFeatureMask
1228 *
1229 * DESCRIPTION: add additional features to pp feature mask based on
1230 * stream type and usecase
1231 *
1232 * PARAMETERS :
1233 * @stream_format : stream type for feature mask
1234 * @stream_idx : stream idx within postprocess_mask list to change
1235 *
1236 * RETURN : NULL
1237 *
1238 *==========================================================================*/
1239void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
1240 uint32_t stream_idx)
1241{
1242 char feature_mask_value[PROPERTY_VALUE_MAX];
1243 cam_feature_mask_t feature_mask;
1244 int args_converted;
1245 int property_len;
1246
1247 /* Get feature mask from property */
Thierry Strudel269c81a2016-10-12 12:13:59 -07001248#ifdef _LE_CAMERA_
1249 char swtnr_feature_mask_value[PROPERTY_VALUE_MAX];
1250 snprintf(swtnr_feature_mask_value, PROPERTY_VALUE_MAX, "%lld", CAM_QTI_FEATURE_SW_TNR);
1251 property_len = property_get("persist.camera.hal3.feature",
1252 feature_mask_value, swtnr_feature_mask_value);
1253#else
Thierry Strudel3d639192016-09-09 11:52:26 -07001254 property_len = property_get("persist.camera.hal3.feature",
1255 feature_mask_value, "0");
Thierry Strudel269c81a2016-10-12 12:13:59 -07001256#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07001257 if ((property_len > 2) && (feature_mask_value[0] == '0') &&
1258 (feature_mask_value[1] == 'x')) {
1259 args_converted = sscanf(feature_mask_value, "0x%llx", &feature_mask);
1260 } else {
1261 args_converted = sscanf(feature_mask_value, "%lld", &feature_mask);
1262 }
1263 if (1 != args_converted) {
1264 feature_mask = 0;
1265 LOGE("Wrong feature mask %s", feature_mask_value);
1266 return;
1267 }
1268
1269 switch (stream_format) {
1270 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
1271 /* Add LLVD to pp feature mask only if video hint is enabled */
1272 if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
1273 mStreamConfigInfo.postprocess_mask[stream_idx]
1274 |= CAM_QTI_FEATURE_SW_TNR;
1275 LOGH("Added SW TNR to pp feature mask");
1276 } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
1277 mStreamConfigInfo.postprocess_mask[stream_idx]
1278 |= CAM_QCOM_FEATURE_LLVD;
1279 LOGH("Added LLVD SeeMore to pp feature mask");
1280 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001281 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1282 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
1283 mStreamConfigInfo.postprocess_mask[stream_idx] |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
1284 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001285 break;
1286 }
1287 default:
1288 break;
1289 }
1290 LOGD("PP feature mask %llx",
1291 mStreamConfigInfo.postprocess_mask[stream_idx]);
1292}
1293
1294/*==============================================================================
1295 * FUNCTION : updateFpsInPreviewBuffer
1296 *
1297 * DESCRIPTION: update FPS information in preview buffer.
1298 *
1299 * PARAMETERS :
1300 * @metadata : pointer to metadata buffer
1301 * @frame_number: frame_number to look for in pending buffer list
1302 *
1303 * RETURN : None
1304 *
1305 *==========================================================================*/
1306void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
1307 uint32_t frame_number)
1308{
1309 // Mark all pending buffers for this particular request
1310 // with corresponding framerate information
1311 for (List<PendingBuffersInRequest>::iterator req =
1312 mPendingBuffersMap.mPendingBuffersInRequest.begin();
1313 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1314 for(List<PendingBufferInfo>::iterator j =
1315 req->mPendingBufferList.begin();
1316 j != req->mPendingBufferList.end(); j++) {
1317 QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
1318 if ((req->frame_number == frame_number) &&
1319 (channel->getStreamTypeMask() &
1320 (1U << CAM_STREAM_TYPE_PREVIEW))) {
1321 IF_META_AVAILABLE(cam_fps_range_t, float_range,
1322 CAM_INTF_PARM_FPS_RANGE, metadata) {
1323 typeof (MetaData_t::refreshrate) cameraFps = float_range->max_fps;
1324 struct private_handle_t *priv_handle =
1325 (struct private_handle_t *)(*(j->buffer));
1326 setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
1327 }
1328 }
1329 }
1330 }
1331}
1332
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001333/*==============================================================================
1334 * FUNCTION : updateTimeStampInPendingBuffers
1335 *
1336 * DESCRIPTION: update timestamp in display metadata for all pending buffers
1337 * of a frame number
1338 *
1339 * PARAMETERS :
1340 * @frame_number: frame_number. Timestamp will be set on pending buffers of this frame number
1341 * @timestamp : timestamp to be set
1342 *
1343 * RETURN : None
1344 *
1345 *==========================================================================*/
1346void QCamera3HardwareInterface::updateTimeStampInPendingBuffers(
1347 uint32_t frameNumber, nsecs_t timestamp)
1348{
1349 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
1350 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1351 if (req->frame_number != frameNumber)
1352 continue;
1353
1354 for (auto k = req->mPendingBufferList.begin();
1355 k != req->mPendingBufferList.end(); k++ ) {
1356 struct private_handle_t *priv_handle =
1357 (struct private_handle_t *) (*(k->buffer));
1358 setMetaData(priv_handle, SET_VT_TIMESTAMP, &timestamp);
1359 }
1360 }
1361 return;
1362}
1363
Thierry Strudel3d639192016-09-09 11:52:26 -07001364/*===========================================================================
1365 * FUNCTION : configureStreams
1366 *
1367 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1368 * and output streams.
1369 *
1370 * PARAMETERS :
1371 * @stream_list : streams to be configured
1372 *
1373 * RETURN :
1374 *
1375 *==========================================================================*/
1376int QCamera3HardwareInterface::configureStreams(
1377 camera3_stream_configuration_t *streamList)
1378{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001379 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS);
Thierry Strudel3d639192016-09-09 11:52:26 -07001380 int rc = 0;
1381
1382 // Acquire perfLock before configure streams
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001383 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001384 rc = configureStreamsPerfLocked(streamList);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001385 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001386
1387 return rc;
1388}
1389
1390/*===========================================================================
1391 * FUNCTION : configureStreamsPerfLocked
1392 *
1393 * DESCRIPTION: configureStreams while perfLock is held.
1394 *
1395 * PARAMETERS :
1396 * @stream_list : streams to be configured
1397 *
1398 * RETURN : int32_t type of status
1399 * NO_ERROR -- success
1400 * none-zero failure code
1401 *==========================================================================*/
1402int QCamera3HardwareInterface::configureStreamsPerfLocked(
1403 camera3_stream_configuration_t *streamList)
1404{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001405 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS_PERF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07001406 int rc = 0;
1407
1408 // Sanity check stream_list
1409 if (streamList == NULL) {
1410 LOGE("NULL stream configuration");
1411 return BAD_VALUE;
1412 }
1413 if (streamList->streams == NULL) {
1414 LOGE("NULL stream list");
1415 return BAD_VALUE;
1416 }
1417
1418 if (streamList->num_streams < 1) {
1419 LOGE("Bad number of streams requested: %d",
1420 streamList->num_streams);
1421 return BAD_VALUE;
1422 }
1423
1424 if (streamList->num_streams >= MAX_NUM_STREAMS) {
1425 LOGE("Maximum number of streams %d exceeded: %d",
1426 MAX_NUM_STREAMS, streamList->num_streams);
1427 return BAD_VALUE;
1428 }
1429
1430 mOpMode = streamList->operation_mode;
1431 LOGD("mOpMode: %d", mOpMode);
1432
1433 /* first invalidate all the steams in the mStreamList
1434 * if they appear again, they will be validated */
1435 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1436 it != mStreamInfo.end(); it++) {
1437 QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1438 if (channel) {
1439 channel->stop();
1440 }
1441 (*it)->status = INVALID;
1442 }
1443
1444 if (mRawDumpChannel) {
1445 mRawDumpChannel->stop();
1446 delete mRawDumpChannel;
1447 mRawDumpChannel = NULL;
1448 }
1449
1450 if (mSupportChannel)
1451 mSupportChannel->stop();
1452
1453 if (mAnalysisChannel) {
1454 mAnalysisChannel->stop();
1455 }
1456 if (mMetadataChannel) {
1457 /* If content of mStreamInfo is not 0, there is metadata stream */
1458 mMetadataChannel->stop();
1459 }
1460 if (mChannelHandle) {
1461 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
1462 mChannelHandle);
1463 LOGD("stopping channel %d", mChannelHandle);
1464 }
1465
1466 pthread_mutex_lock(&mMutex);
1467
1468 // Check state
1469 switch (mState) {
1470 case INITIALIZED:
1471 case CONFIGURED:
1472 case STARTED:
1473 /* valid state */
1474 break;
1475 default:
1476 LOGE("Invalid state %d", mState);
1477 pthread_mutex_unlock(&mMutex);
1478 return -ENODEV;
1479 }
1480
1481 /* Check whether we have video stream */
1482 m_bIs4KVideo = false;
1483 m_bIsVideo = false;
1484 m_bEisSupportedSize = false;
1485 m_bTnrEnabled = false;
1486 bool isZsl = false;
1487 uint32_t videoWidth = 0U;
1488 uint32_t videoHeight = 0U;
1489 size_t rawStreamCnt = 0;
1490 size_t stallStreamCnt = 0;
1491 size_t processedStreamCnt = 0;
1492 // Number of streams on ISP encoder path
1493 size_t numStreamsOnEncoder = 0;
1494 size_t numYuv888OnEncoder = 0;
1495 bool bYuv888OverrideJpeg = false;
1496 cam_dimension_t largeYuv888Size = {0, 0};
1497 cam_dimension_t maxViewfinderSize = {0, 0};
1498 bool bJpegExceeds4K = false;
1499 bool bJpegOnEncoder = false;
1500 bool bUseCommonFeatureMask = false;
1501 cam_feature_mask_t commonFeatureMask = 0;
1502 bool bSmallJpegSize = false;
1503 uint32_t width_ratio;
1504 uint32_t height_ratio;
1505 maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1506 camera3_stream_t *inputStream = NULL;
1507 bool isJpeg = false;
1508 cam_dimension_t jpegSize = {0, 0};
Thierry Strudel9ec39c62016-12-28 11:30:05 -08001509 cam_dimension_t previewSize = {0, 0};
Thierry Strudel3d639192016-09-09 11:52:26 -07001510
1511 cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
1512
1513 /*EIS configuration*/
Thierry Strudel3d639192016-09-09 11:52:26 -07001514 bool oisSupported = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001515 uint8_t eis_prop_set;
1516 uint32_t maxEisWidth = 0;
1517 uint32_t maxEisHeight = 0;
1518
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001519 // Initialize all instant AEC related variables
1520 mInstantAEC = false;
1521 mResetInstantAEC = false;
1522 mInstantAECSettledFrameNumber = 0;
1523 mAecSkipDisplayFrameBound = 0;
1524 mInstantAecFrameIdxCount = 0;
1525
Thierry Strudel3d639192016-09-09 11:52:26 -07001526 memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1527
1528 size_t count = IS_TYPE_MAX;
1529 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1530 for (size_t i = 0; i < count; i++) {
1531 if ((gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001532 (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
1533 m_bEisSupported = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07001534 break;
1535 }
1536 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001537 count = CAM_OPT_STAB_MAX;
1538 count = MIN(gCamCapability[mCameraId]->optical_stab_modes_count, count);
1539 for (size_t i = 0; i < count; i++) {
1540 if (gCamCapability[mCameraId]->optical_stab_modes[i] == CAM_OPT_STAB_ON) {
1541 oisSupported = true;
1542 break;
1543 }
1544 }
1545
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001546 if (m_bEisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001547 maxEisWidth = MAX_EIS_WIDTH;
1548 maxEisHeight = MAX_EIS_HEIGHT;
1549 }
1550
1551 /* EIS setprop control */
1552 char eis_prop[PROPERTY_VALUE_MAX];
1553 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001554 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07001555 eis_prop_set = (uint8_t)atoi(eis_prop);
1556
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001557 m_bEisEnable = eis_prop_set && (!oisSupported && m_bEisSupported) &&
Thierry Strudel3d639192016-09-09 11:52:26 -07001558 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
1559
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001560 LOGD("m_bEisEnable: %d, eis_prop_set: %d, m_bEisSupported: %d, oisSupported:%d ",
1561 m_bEisEnable, eis_prop_set, m_bEisSupported, oisSupported);
1562
Thierry Strudel3d639192016-09-09 11:52:26 -07001563 /* stream configurations */
1564 for (size_t i = 0; i < streamList->num_streams; i++) {
1565 camera3_stream_t *newStream = streamList->streams[i];
1566 LOGI("stream[%d] type = %d, format = %d, width = %d, "
1567 "height = %d, rotation = %d, usage = 0x%x",
1568 i, newStream->stream_type, newStream->format,
1569 newStream->width, newStream->height, newStream->rotation,
1570 newStream->usage);
1571 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1572 newStream->stream_type == CAMERA3_STREAM_INPUT){
1573 isZsl = true;
1574 }
1575 if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1576 inputStream = newStream;
1577 }
1578
1579 if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
1580 isJpeg = true;
1581 jpegSize.width = newStream->width;
1582 jpegSize.height = newStream->height;
1583 if (newStream->width > VIDEO_4K_WIDTH ||
1584 newStream->height > VIDEO_4K_HEIGHT)
1585 bJpegExceeds4K = true;
1586 }
1587
1588 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1589 (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1590 m_bIsVideo = true;
1591 videoWidth = newStream->width;
1592 videoHeight = newStream->height;
1593 if ((VIDEO_4K_WIDTH <= newStream->width) &&
1594 (VIDEO_4K_HEIGHT <= newStream->height)) {
1595 m_bIs4KVideo = true;
1596 }
1597 m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
1598 (newStream->height <= maxEisHeight);
1599 }
1600 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1601 newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
1602 switch (newStream->format) {
1603 case HAL_PIXEL_FORMAT_BLOB:
1604 stallStreamCnt++;
1605 if (isOnEncoder(maxViewfinderSize, newStream->width,
1606 newStream->height)) {
1607 numStreamsOnEncoder++;
1608 bJpegOnEncoder = true;
1609 }
1610 width_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.width,
1611 newStream->width);
1612 height_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.height,
1613 newStream->height);;
1614 FATAL_IF(gCamCapability[mCameraId]->max_downscale_factor == 0,
1615 "FATAL: max_downscale_factor cannot be zero and so assert");
1616 if ( (width_ratio > gCamCapability[mCameraId]->max_downscale_factor) ||
1617 (height_ratio > gCamCapability[mCameraId]->max_downscale_factor)) {
1618 LOGH("Setting small jpeg size flag to true");
1619 bSmallJpegSize = true;
1620 }
1621 break;
1622 case HAL_PIXEL_FORMAT_RAW10:
1623 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1624 case HAL_PIXEL_FORMAT_RAW16:
1625 rawStreamCnt++;
1626 break;
1627 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1628 processedStreamCnt++;
1629 if (isOnEncoder(maxViewfinderSize, newStream->width,
1630 newStream->height)) {
1631 if (newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL &&
1632 !IS_USAGE_ZSL(newStream->usage)) {
1633 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1634 }
1635 numStreamsOnEncoder++;
1636 }
1637 break;
1638 case HAL_PIXEL_FORMAT_YCbCr_420_888:
1639 processedStreamCnt++;
1640 if (isOnEncoder(maxViewfinderSize, newStream->width,
1641 newStream->height)) {
1642 // If Yuv888 size is not greater than 4K, set feature mask
1643 // to SUPERSET so that it support concurrent request on
1644 // YUV and JPEG.
1645 if (newStream->width <= VIDEO_4K_WIDTH &&
1646 newStream->height <= VIDEO_4K_HEIGHT) {
1647 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1648 }
1649 numStreamsOnEncoder++;
1650 numYuv888OnEncoder++;
1651 largeYuv888Size.width = newStream->width;
1652 largeYuv888Size.height = newStream->height;
1653 }
1654 break;
1655 default:
1656 processedStreamCnt++;
1657 if (isOnEncoder(maxViewfinderSize, newStream->width,
1658 newStream->height)) {
1659 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1660 numStreamsOnEncoder++;
1661 }
1662 break;
1663 }
1664
1665 }
1666 }
1667
1668 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
1669 gCamCapability[mCameraId]->position == CAM_POSITION_FRONT_AUX ||
1670 !m_bIsVideo) {
1671 m_bEisEnable = false;
1672 }
1673
1674 /* Logic to enable/disable TNR based on specific config size/etc.*/
1675 if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
1676 ((videoWidth == 1920 && videoHeight == 1080) ||
1677 (videoWidth == 1280 && videoHeight == 720)) &&
1678 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
1679 m_bTnrEnabled = true;
1680
1681 /* Check if num_streams is sane */
1682 if (stallStreamCnt > MAX_STALLING_STREAMS ||
1683 rawStreamCnt > MAX_RAW_STREAMS ||
1684 processedStreamCnt > MAX_PROCESSED_STREAMS) {
1685 LOGE("Invalid stream configu: stall: %d, raw: %d, processed %d",
1686 stallStreamCnt, rawStreamCnt, processedStreamCnt);
1687 pthread_mutex_unlock(&mMutex);
1688 return -EINVAL;
1689 }
1690 /* Check whether we have zsl stream or 4k video case */
Thierry Strudel9ec39c62016-12-28 11:30:05 -08001691 if (isZsl && m_bIs4KVideo) {
1692 LOGE("Currently invalid configuration ZSL & 4K Video!");
Thierry Strudel3d639192016-09-09 11:52:26 -07001693 pthread_mutex_unlock(&mMutex);
1694 return -EINVAL;
1695 }
1696 /* Check if stream sizes are sane */
1697 if (numStreamsOnEncoder > 2) {
1698 LOGE("Number of streams on ISP encoder path exceeds limits of 2");
1699 pthread_mutex_unlock(&mMutex);
1700 return -EINVAL;
1701 } else if (1 < numStreamsOnEncoder){
1702 bUseCommonFeatureMask = true;
1703 LOGH("Multiple streams above max viewfinder size, common mask needed");
1704 }
1705
1706 /* Check if BLOB size is greater than 4k in 4k recording case */
1707 if (m_bIs4KVideo && bJpegExceeds4K) {
1708 LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
1709 pthread_mutex_unlock(&mMutex);
1710 return -EINVAL;
1711 }
1712
1713 // When JPEG and preview streams share VFE output, CPP will not apply CAC2
1714 // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
1715 // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
1716 // is not true. Otherwise testMandatoryOutputCombinations will fail with following
1717 // configurations:
1718 // {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
1719 // {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
1720 // (These two configurations will not have CAC2 enabled even in HQ modes.)
1721 if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
1722 ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
1723 __func__);
1724 pthread_mutex_unlock(&mMutex);
1725 return -EINVAL;
1726 }
1727
1728 // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
1729 // the YUV stream's size is greater or equal to the JPEG size, set common
1730 // postprocess mask to NONE, so that we can take advantage of postproc bypass.
1731 if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
1732 jpegSize.width, jpegSize.height) &&
1733 largeYuv888Size.width > jpegSize.width &&
1734 largeYuv888Size.height > jpegSize.height) {
1735 bYuv888OverrideJpeg = true;
1736 } else if (!isJpeg && numStreamsOnEncoder > 1) {
1737 commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1738 }
1739
1740 LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %llx",
1741 maxViewfinderSize.width, maxViewfinderSize.height, isZsl, bUseCommonFeatureMask,
1742 commonFeatureMask);
1743 LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
1744 numStreamsOnEncoder, processedStreamCnt, stallStreamCnt, bSmallJpegSize);
1745
1746 rc = validateStreamDimensions(streamList);
1747 if (rc == NO_ERROR) {
1748 rc = validateStreamRotations(streamList);
1749 }
1750 if (rc != NO_ERROR) {
1751 LOGE("Invalid stream configuration requested!");
1752 pthread_mutex_unlock(&mMutex);
1753 return rc;
1754 }
1755
1756 camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
1757 for (size_t i = 0; i < streamList->num_streams; i++) {
1758 camera3_stream_t *newStream = streamList->streams[i];
1759 LOGH("newStream type = %d, stream format = %d "
1760 "stream size : %d x %d, stream rotation = %d",
1761 newStream->stream_type, newStream->format,
1762 newStream->width, newStream->height, newStream->rotation);
1763 //if the stream is in the mStreamList validate it
1764 bool stream_exists = false;
1765 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
1766 it != mStreamInfo.end(); it++) {
1767 if ((*it)->stream == newStream) {
1768 QCamera3ProcessingChannel *channel =
1769 (QCamera3ProcessingChannel*)(*it)->stream->priv;
1770 stream_exists = true;
1771 if (channel)
1772 delete channel;
1773 (*it)->status = VALID;
1774 (*it)->stream->priv = NULL;
1775 (*it)->channel = NULL;
1776 }
1777 }
1778 if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
1779 //new stream
1780 stream_info_t* stream_info;
1781 stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
1782 if (!stream_info) {
1783 LOGE("Could not allocate stream info");
1784 rc = -ENOMEM;
1785 pthread_mutex_unlock(&mMutex);
1786 return rc;
1787 }
1788 stream_info->stream = newStream;
1789 stream_info->status = VALID;
1790 stream_info->channel = NULL;
1791 mStreamInfo.push_back(stream_info);
1792 }
1793 /* Covers Opaque ZSL and API1 F/W ZSL */
1794 if (IS_USAGE_ZSL(newStream->usage)
1795 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
1796 if (zslStream != NULL) {
1797 LOGE("Multiple input/reprocess streams requested!");
1798 pthread_mutex_unlock(&mMutex);
1799 return BAD_VALUE;
1800 }
1801 zslStream = newStream;
1802 }
1803 /* Covers YUV reprocess */
1804 if (inputStream != NULL) {
1805 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
1806 && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
1807 && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
1808 && inputStream->width == newStream->width
1809 && inputStream->height == newStream->height) {
1810 if (zslStream != NULL) {
1811 /* This scenario indicates that multiple YUV streams with the same
1812 * size as the input stream have been requested. Since the zsl stream
1813 * handle is solely used for the purpose of overriding the size of
1814 * streams which share h/w streams, we will just make a guess here as
1815 * to which of the streams is a ZSL stream; this will be refactored
1816 * once we make generic logic for streams sharing encoder output.
1817 */
1818 LOGH("Warning, Multiple ip/reprocess streams requested!");
1819 }
1820 zslStream = newStream;
1821 }
1822 }
1823 }
1824
1825 /* If a zsl stream is set, we know that we have configured at least one input or
1826 bidirectional stream */
1827 if (NULL != zslStream) {
1828 mInputStreamInfo.dim.width = (int32_t)zslStream->width;
1829 mInputStreamInfo.dim.height = (int32_t)zslStream->height;
1830 mInputStreamInfo.format = zslStream->format;
1831 mInputStreamInfo.usage = zslStream->usage;
1832 LOGD("Input stream configured! %d x %d, format %d, usage %d",
1833 mInputStreamInfo.dim.width,
1834 mInputStreamInfo.dim.height,
1835 mInputStreamInfo.format, mInputStreamInfo.usage);
1836 }
1837
1838 cleanAndSortStreamInfo();
1839 if (mMetadataChannel) {
1840 delete mMetadataChannel;
1841 mMetadataChannel = NULL;
1842 }
1843 if (mSupportChannel) {
1844 delete mSupportChannel;
1845 mSupportChannel = NULL;
1846 }
1847
1848 if (mAnalysisChannel) {
1849 delete mAnalysisChannel;
1850 mAnalysisChannel = NULL;
1851 }
1852
1853 if (mDummyBatchChannel) {
1854 delete mDummyBatchChannel;
1855 mDummyBatchChannel = NULL;
1856 }
1857
1858 //Create metadata channel and initialize it
1859 cam_feature_mask_t metadataFeatureMask = CAM_QCOM_FEATURE_NONE;
1860 setPAAFSupport(metadataFeatureMask, CAM_STREAM_TYPE_METADATA,
1861 gCamCapability[mCameraId]->color_arrangement);
1862 mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
1863 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001864 setBufferErrorStatus, &padding_info, metadataFeatureMask, this);
Thierry Strudel3d639192016-09-09 11:52:26 -07001865 if (mMetadataChannel == NULL) {
1866 LOGE("failed to allocate metadata channel");
1867 rc = -ENOMEM;
1868 pthread_mutex_unlock(&mMutex);
1869 return rc;
1870 }
1871 rc = mMetadataChannel->initialize(IS_TYPE_NONE);
1872 if (rc < 0) {
1873 LOGE("metadata channel initialization failed");
1874 delete mMetadataChannel;
1875 mMetadataChannel = NULL;
1876 pthread_mutex_unlock(&mMutex);
1877 return rc;
1878 }
1879
Thierry Strudel3d639192016-09-09 11:52:26 -07001880 bool isRawStreamRequested = false;
1881 memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
1882 /* Allocate channel objects for the requested streams */
1883 for (size_t i = 0; i < streamList->num_streams; i++) {
1884 camera3_stream_t *newStream = streamList->streams[i];
1885 uint32_t stream_usage = newStream->usage;
1886 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
1887 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
1888 struct camera_info *p_info = NULL;
1889 pthread_mutex_lock(&gCamLock);
1890 p_info = get_cam_info(mCameraId, &mStreamConfigInfo.sync_type);
1891 pthread_mutex_unlock(&gCamLock);
1892 if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
1893 || IS_USAGE_ZSL(newStream->usage)) &&
1894 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
1895 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
1896 if (bUseCommonFeatureMask) {
1897 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1898 commonFeatureMask;
1899 } else {
1900 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1901 CAM_QCOM_FEATURE_NONE;
1902 }
1903
1904 } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
1905 LOGH("Input stream configured, reprocess config");
1906 } else {
1907 //for non zsl streams find out the format
1908 switch (newStream->format) {
1909 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
1910 {
1911 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1912 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1913 /* add additional features to pp feature mask */
1914 addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
1915 mStreamConfigInfo.num_streams);
1916
1917 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
1918 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
1919 CAM_STREAM_TYPE_VIDEO;
1920 if (m_bTnrEnabled && m_bTnrVideo) {
1921 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
1922 CAM_QCOM_FEATURE_CPP_TNR;
1923 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
1924 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
1925 ~CAM_QCOM_FEATURE_CDS;
1926 }
1927 } else {
1928 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
1929 CAM_STREAM_TYPE_PREVIEW;
1930 if (m_bTnrEnabled && m_bTnrPreview) {
1931 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
1932 CAM_QCOM_FEATURE_CPP_TNR;
1933 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
1934 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
1935 ~CAM_QCOM_FEATURE_CDS;
1936 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001937 if(!m_bSwTnrPreview) {
1938 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
1939 ~CAM_QTI_FEATURE_SW_TNR;
1940 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001941 padding_info.width_padding = mSurfaceStridePadding;
1942 padding_info.height_padding = CAM_PAD_TO_2;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08001943 previewSize.width = (int32_t)newStream->width;
1944 previewSize.height = (int32_t)newStream->height;
Thierry Strudel3d639192016-09-09 11:52:26 -07001945 }
1946 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
1947 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
1948 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
1949 newStream->height;
1950 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
1951 newStream->width;
1952 }
1953 }
1954 break;
1955 case HAL_PIXEL_FORMAT_YCbCr_420_888:
1956 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
1957 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
1958 if (bUseCommonFeatureMask)
1959 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1960 commonFeatureMask;
1961 else
1962 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1963 CAM_QCOM_FEATURE_NONE;
1964 } else {
1965 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1966 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1967 }
1968 break;
1969 case HAL_PIXEL_FORMAT_BLOB:
1970 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
1971 // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
1972 if ((m_bIs4KVideo && !isZsl) || (bSmallJpegSize && !isZsl)) {
1973 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1974 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1975 } else {
1976 if (bUseCommonFeatureMask &&
1977 isOnEncoder(maxViewfinderSize, newStream->width,
1978 newStream->height)) {
1979 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
1980 } else {
1981 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
1982 }
1983 }
1984 if (isZsl) {
1985 if (zslStream) {
1986 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
1987 (int32_t)zslStream->width;
1988 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
1989 (int32_t)zslStream->height;
1990 } else {
1991 LOGE("Error, No ZSL stream identified");
1992 pthread_mutex_unlock(&mMutex);
1993 return -EINVAL;
1994 }
1995 } else if (m_bIs4KVideo) {
1996 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)videoWidth;
1997 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)videoHeight;
1998 } else if (bYuv888OverrideJpeg) {
1999 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2000 (int32_t)largeYuv888Size.width;
2001 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2002 (int32_t)largeYuv888Size.height;
2003 }
2004 break;
2005 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2006 case HAL_PIXEL_FORMAT_RAW16:
2007 case HAL_PIXEL_FORMAT_RAW10:
2008 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2009 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2010 isRawStreamRequested = true;
2011 break;
2012 default:
2013 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
2014 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2015 break;
2016 }
2017 }
2018
2019 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2020 (cam_stream_type_t) mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2021 gCamCapability[mCameraId]->color_arrangement);
2022
2023 if (newStream->priv == NULL) {
2024 //New stream, construct channel
2025 switch (newStream->stream_type) {
2026 case CAMERA3_STREAM_INPUT:
2027 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
2028 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
2029 break;
2030 case CAMERA3_STREAM_BIDIRECTIONAL:
2031 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
2032 GRALLOC_USAGE_HW_CAMERA_WRITE;
2033 break;
2034 case CAMERA3_STREAM_OUTPUT:
2035 /* For video encoding stream, set read/write rarely
2036 * flag so that they may be set to un-cached */
2037 if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
2038 newStream->usage |=
2039 (GRALLOC_USAGE_SW_READ_RARELY |
2040 GRALLOC_USAGE_SW_WRITE_RARELY |
2041 GRALLOC_USAGE_HW_CAMERA_WRITE);
2042 else if (IS_USAGE_ZSL(newStream->usage))
2043 {
2044 LOGD("ZSL usage flag skipping");
2045 }
2046 else if (newStream == zslStream
2047 || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
2048 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
2049 } else
2050 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
2051 break;
2052 default:
2053 LOGE("Invalid stream_type %d", newStream->stream_type);
2054 break;
2055 }
2056
2057 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
2058 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
2059 QCamera3ProcessingChannel *channel = NULL;
2060 switch (newStream->format) {
2061 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2062 if ((newStream->usage &
2063 private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
2064 (streamList->operation_mode ==
2065 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2066 ) {
2067 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2068 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002069 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002070 this,
2071 newStream,
2072 (cam_stream_type_t)
2073 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2074 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2075 mMetadataChannel,
2076 0); //heap buffers are not required for HFR video channel
2077 if (channel == NULL) {
2078 LOGE("allocation of channel failed");
2079 pthread_mutex_unlock(&mMutex);
2080 return -ENOMEM;
2081 }
2082 //channel->getNumBuffers() will return 0 here so use
2083 //MAX_INFLIGHT_HFR_REQUESTS
2084 newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
2085 newStream->priv = channel;
2086 LOGI("num video buffers in HFR mode: %d",
2087 MAX_INFLIGHT_HFR_REQUESTS);
2088 } else {
2089 /* Copy stream contents in HFR preview only case to create
2090 * dummy batch channel so that sensor streaming is in
2091 * HFR mode */
2092 if (!m_bIsVideo && (streamList->operation_mode ==
2093 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
2094 mDummyBatchStream = *newStream;
2095 }
2096 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2097 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002098 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002099 this,
2100 newStream,
2101 (cam_stream_type_t)
2102 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2103 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2104 mMetadataChannel,
2105 MAX_INFLIGHT_REQUESTS);
2106 if (channel == NULL) {
2107 LOGE("allocation of channel failed");
2108 pthread_mutex_unlock(&mMutex);
2109 return -ENOMEM;
2110 }
2111 newStream->max_buffers = channel->getNumBuffers();
2112 newStream->priv = channel;
2113 }
2114 break;
2115 case HAL_PIXEL_FORMAT_YCbCr_420_888: {
2116 channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
2117 mChannelHandle,
2118 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002119 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002120 this,
2121 newStream,
2122 (cam_stream_type_t)
2123 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2124 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2125 mMetadataChannel);
2126 if (channel == NULL) {
2127 LOGE("allocation of YUV channel failed");
2128 pthread_mutex_unlock(&mMutex);
2129 return -ENOMEM;
2130 }
2131 newStream->max_buffers = channel->getNumBuffers();
2132 newStream->priv = channel;
2133 break;
2134 }
2135 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2136 case HAL_PIXEL_FORMAT_RAW16:
2137 case HAL_PIXEL_FORMAT_RAW10:
2138 mRawChannel = new QCamera3RawChannel(
2139 mCameraHandle->camera_handle, mChannelHandle,
2140 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002141 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002142 this, newStream,
2143 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2144 mMetadataChannel,
2145 (newStream->format == HAL_PIXEL_FORMAT_RAW16));
2146 if (mRawChannel == NULL) {
2147 LOGE("allocation of raw channel failed");
2148 pthread_mutex_unlock(&mMutex);
2149 return -ENOMEM;
2150 }
2151 newStream->max_buffers = mRawChannel->getNumBuffers();
2152 newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
2153 break;
2154 case HAL_PIXEL_FORMAT_BLOB:
2155 // Max live snapshot inflight buffer is 1. This is to mitigate
2156 // frame drop issues for video snapshot. The more buffers being
2157 // allocated, the more frame drops there are.
2158 mPictureChannel = new QCamera3PicChannel(
2159 mCameraHandle->camera_handle, mChannelHandle,
2160 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002161 setBufferErrorStatus, &padding_info, this, newStream,
Thierry Strudel3d639192016-09-09 11:52:26 -07002162 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2163 m_bIs4KVideo, isZsl, mMetadataChannel,
2164 (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
2165 if (mPictureChannel == NULL) {
2166 LOGE("allocation of channel failed");
2167 pthread_mutex_unlock(&mMutex);
2168 return -ENOMEM;
2169 }
2170 newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
2171 newStream->max_buffers = mPictureChannel->getNumBuffers();
2172 mPictureChannel->overrideYuvSize(
2173 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
2174 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
2175 break;
2176
2177 default:
2178 LOGE("not a supported format 0x%x", newStream->format);
2179 break;
2180 }
2181 } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
2182 newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
2183 } else {
2184 LOGE("Error, Unknown stream type");
2185 pthread_mutex_unlock(&mMutex);
2186 return -EINVAL;
2187 }
2188
2189 QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
2190 if (channel != NULL && channel->isUBWCEnabled()) {
2191 cam_format_t fmt = channel->getStreamDefaultFormat(
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07002192 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2193 newStream->width, newStream->height);
Thierry Strudel3d639192016-09-09 11:52:26 -07002194 if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
2195 newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
2196 }
2197 }
2198
2199 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2200 it != mStreamInfo.end(); it++) {
2201 if ((*it)->stream == newStream) {
2202 (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
2203 break;
2204 }
2205 }
2206 } else {
2207 // Channel already exists for this stream
2208 // Do nothing for now
2209 }
2210 padding_info = gCamCapability[mCameraId]->padding_info;
2211
2212 /* Do not add entries for input stream in metastream info
2213 * since there is no real stream associated with it
2214 */
2215 if (newStream->stream_type != CAMERA3_STREAM_INPUT)
2216 mStreamConfigInfo.num_streams++;
2217 }
2218
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002219 // Create analysis stream all the time, even when h/w support is not available
2220 {
2221 cam_feature_mask_t analysisFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2222 setPAAFSupport(analysisFeatureMask, CAM_STREAM_TYPE_ANALYSIS,
2223 gCamCapability[mCameraId]->color_arrangement);
2224 cam_analysis_info_t analysisInfo;
2225 int32_t ret = NO_ERROR;
2226 ret = mCommon.getAnalysisInfo(
2227 FALSE,
2228 analysisFeatureMask,
2229 &analysisInfo);
2230 if (ret == NO_ERROR) {
2231 cam_dimension_t analysisDim;
2232 analysisDim = mCommon.getMatchingDimension(previewSize,
2233 analysisInfo.analysis_recommended_res);
2234
2235 mAnalysisChannel = new QCamera3SupportChannel(
2236 mCameraHandle->camera_handle,
2237 mChannelHandle,
2238 mCameraHandle->ops,
2239 &analysisInfo.analysis_padding_info,
2240 analysisFeatureMask,
2241 CAM_STREAM_TYPE_ANALYSIS,
2242 &analysisDim,
2243 (analysisInfo.analysis_format
2244 == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
2245 : CAM_FORMAT_YUV_420_NV21),
2246 analysisInfo.hw_analysis_supported,
2247 gCamCapability[mCameraId]->color_arrangement,
2248 this,
2249 0); // force buffer count to 0
2250 } else {
2251 LOGW("getAnalysisInfo failed, ret = %d", ret);
2252 }
2253 if (!mAnalysisChannel) {
2254 LOGW("Analysis channel cannot be created");
2255 }
2256 }
2257
Thierry Strudel3d639192016-09-09 11:52:26 -07002258 //RAW DUMP channel
2259 if (mEnableRawDump && isRawStreamRequested == false){
2260 cam_dimension_t rawDumpSize;
2261 rawDumpSize = getMaxRawSize(mCameraId);
2262 cam_feature_mask_t rawDumpFeatureMask = CAM_QCOM_FEATURE_NONE;
2263 setPAAFSupport(rawDumpFeatureMask,
2264 CAM_STREAM_TYPE_RAW,
2265 gCamCapability[mCameraId]->color_arrangement);
2266 mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
2267 mChannelHandle,
2268 mCameraHandle->ops,
2269 rawDumpSize,
2270 &padding_info,
2271 this, rawDumpFeatureMask);
2272 if (!mRawDumpChannel) {
2273 LOGE("Raw Dump channel cannot be created");
2274 pthread_mutex_unlock(&mMutex);
2275 return -ENOMEM;
2276 }
2277 }
2278
2279
2280 if (mAnalysisChannel) {
2281 cam_analysis_info_t analysisInfo;
2282 memset(&analysisInfo, 0, sizeof(cam_analysis_info_t));
2283 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2284 CAM_STREAM_TYPE_ANALYSIS;
2285 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2286 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2287 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2288 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2289 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002290 rc = mCommon.getAnalysisInfo(FALSE,
Thierry Strudel3d639192016-09-09 11:52:26 -07002291 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2292 &analysisInfo);
2293 if (rc != NO_ERROR) {
2294 LOGE("getAnalysisInfo failed, ret = %d", rc);
2295 pthread_mutex_unlock(&mMutex);
2296 return rc;
2297 }
2298 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002299 mCommon.getMatchingDimension(previewSize,
2300 analysisInfo.analysis_recommended_res);
Thierry Strudel3d639192016-09-09 11:52:26 -07002301 mStreamConfigInfo.num_streams++;
2302 }
2303
2304 if (isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
2305 cam_analysis_info_t supportInfo;
2306 memset(&supportInfo, 0, sizeof(cam_analysis_info_t));
2307 cam_feature_mask_t callbackFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2308 setPAAFSupport(callbackFeatureMask,
2309 CAM_STREAM_TYPE_CALLBACK,
2310 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002311 int32_t ret = NO_ERROR;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002312 ret = mCommon.getAnalysisInfo(FALSE, callbackFeatureMask, &supportInfo);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002313 if (ret != NO_ERROR) {
2314 /* Ignore the error for Mono camera
2315 * because the PAAF bit mask is only set
2316 * for CAM_STREAM_TYPE_ANALYSIS stream type
2317 */
2318 if (gCamCapability[mCameraId]->color_arrangement != CAM_FILTER_ARRANGEMENT_Y) {
2319 LOGW("getAnalysisInfo failed, ret = %d", ret);
2320 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002321 }
2322 mSupportChannel = new QCamera3SupportChannel(
2323 mCameraHandle->camera_handle,
2324 mChannelHandle,
2325 mCameraHandle->ops,
2326 &gCamCapability[mCameraId]->padding_info,
2327 callbackFeatureMask,
2328 CAM_STREAM_TYPE_CALLBACK,
2329 &QCamera3SupportChannel::kDim,
2330 CAM_FORMAT_YUV_420_NV21,
2331 supportInfo.hw_analysis_supported,
2332 gCamCapability[mCameraId]->color_arrangement,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002333 this, 0);
Thierry Strudel3d639192016-09-09 11:52:26 -07002334 if (!mSupportChannel) {
2335 LOGE("dummy channel cannot be created");
2336 pthread_mutex_unlock(&mMutex);
2337 return -ENOMEM;
2338 }
2339 }
2340
2341 if (mSupportChannel) {
2342 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2343 QCamera3SupportChannel::kDim;
2344 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2345 CAM_STREAM_TYPE_CALLBACK;
2346 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2347 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2348 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2349 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2350 gCamCapability[mCameraId]->color_arrangement);
2351 mStreamConfigInfo.num_streams++;
2352 }
2353
2354 if (mRawDumpChannel) {
2355 cam_dimension_t rawSize;
2356 rawSize = getMaxRawSize(mCameraId);
2357 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2358 rawSize;
2359 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2360 CAM_STREAM_TYPE_RAW;
2361 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2362 CAM_QCOM_FEATURE_NONE;
2363 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2364 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2365 gCamCapability[mCameraId]->color_arrangement);
2366 mStreamConfigInfo.num_streams++;
2367 }
2368 /* In HFR mode, if video stream is not added, create a dummy channel so that
2369 * ISP can create a batch mode even for preview only case. This channel is
2370 * never 'start'ed (no stream-on), it is only 'initialized' */
2371 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2372 !m_bIsVideo) {
2373 cam_feature_mask_t dummyFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2374 setPAAFSupport(dummyFeatureMask,
2375 CAM_STREAM_TYPE_VIDEO,
2376 gCamCapability[mCameraId]->color_arrangement);
2377 mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2378 mChannelHandle,
2379 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002380 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002381 this,
2382 &mDummyBatchStream,
2383 CAM_STREAM_TYPE_VIDEO,
2384 dummyFeatureMask,
2385 mMetadataChannel);
2386 if (NULL == mDummyBatchChannel) {
2387 LOGE("creation of mDummyBatchChannel failed."
2388 "Preview will use non-hfr sensor mode ");
2389 }
2390 }
2391 if (mDummyBatchChannel) {
2392 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2393 mDummyBatchStream.width;
2394 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2395 mDummyBatchStream.height;
2396 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2397 CAM_STREAM_TYPE_VIDEO;
2398 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2399 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2400 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2401 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2402 gCamCapability[mCameraId]->color_arrangement);
2403 mStreamConfigInfo.num_streams++;
2404 }
2405
2406 mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
2407 mStreamConfigInfo.buffer_info.max_buffers =
2408 m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
2409
2410 /* Initialize mPendingRequestInfo and mPendingBuffersMap */
2411 for (pendingRequestIterator i = mPendingRequestsList.begin();
2412 i != mPendingRequestsList.end();) {
2413 i = erasePendingRequest(i);
2414 }
2415 mPendingFrameDropList.clear();
2416 // Initialize/Reset the pending buffers list
2417 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
2418 req.mPendingBufferList.clear();
2419 }
2420 mPendingBuffersMap.mPendingBuffersInRequest.clear();
2421
2422 mPendingReprocessResultList.clear();
2423
2424 mCurJpegMeta.clear();
2425 //Get min frame duration for this streams configuration
2426 deriveMinFrameDuration();
2427
2428 // Update state
2429 mState = CONFIGURED;
2430
2431 pthread_mutex_unlock(&mMutex);
2432
2433 return rc;
2434}
2435
2436/*===========================================================================
2437 * FUNCTION : validateCaptureRequest
2438 *
2439 * DESCRIPTION: validate a capture request from camera service
2440 *
2441 * PARAMETERS :
2442 * @request : request from framework to process
2443 *
2444 * RETURN :
2445 *
2446 *==========================================================================*/
2447int QCamera3HardwareInterface::validateCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002448 camera3_capture_request_t *request,
2449 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07002450{
2451 ssize_t idx = 0;
2452 const camera3_stream_buffer_t *b;
2453 CameraMetadata meta;
2454
2455 /* Sanity check the request */
2456 if (request == NULL) {
2457 LOGE("NULL capture request");
2458 return BAD_VALUE;
2459 }
2460
2461 if ((request->settings == NULL) && (mState == CONFIGURED)) {
2462 /*settings cannot be null for the first request*/
2463 return BAD_VALUE;
2464 }
2465
2466 uint32_t frameNumber = request->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002467 if ((request->num_output_buffers < 1 || request->output_buffers == NULL)
2468 && (internallyRequestedStreams.size() == 0)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002469 LOGE("Request %d: No output buffers provided!",
2470 __FUNCTION__, frameNumber);
2471 return BAD_VALUE;
2472 }
2473 if (request->num_output_buffers >= MAX_NUM_STREAMS) {
2474 LOGE("Number of buffers %d equals or is greater than maximum number of streams!",
2475 request->num_output_buffers, MAX_NUM_STREAMS);
2476 return BAD_VALUE;
2477 }
2478 if (request->input_buffer != NULL) {
2479 b = request->input_buffer;
2480 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2481 LOGE("Request %d: Buffer %ld: Status not OK!",
2482 frameNumber, (long)idx);
2483 return BAD_VALUE;
2484 }
2485 if (b->release_fence != -1) {
2486 LOGE("Request %d: Buffer %ld: Has a release fence!",
2487 frameNumber, (long)idx);
2488 return BAD_VALUE;
2489 }
2490 if (b->buffer == NULL) {
2491 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
2492 frameNumber, (long)idx);
2493 return BAD_VALUE;
2494 }
2495 }
2496
2497 // Validate all buffers
2498 b = request->output_buffers;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002499 while (idx < (ssize_t)request->num_output_buffers) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002500 QCamera3ProcessingChannel *channel =
2501 static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
2502 if (channel == NULL) {
2503 LOGE("Request %d: Buffer %ld: Unconfigured stream!",
2504 frameNumber, (long)idx);
2505 return BAD_VALUE;
2506 }
2507 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2508 LOGE("Request %d: Buffer %ld: Status not OK!",
2509 frameNumber, (long)idx);
2510 return BAD_VALUE;
2511 }
2512 if (b->release_fence != -1) {
2513 LOGE("Request %d: Buffer %ld: Has a release fence!",
2514 frameNumber, (long)idx);
2515 return BAD_VALUE;
2516 }
2517 if (b->buffer == NULL) {
2518 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
2519 frameNumber, (long)idx);
2520 return BAD_VALUE;
2521 }
2522 if (*(b->buffer) == NULL) {
2523 LOGE("Request %d: Buffer %ld: NULL private handle!",
2524 frameNumber, (long)idx);
2525 return BAD_VALUE;
2526 }
2527 idx++;
2528 b = request->output_buffers + idx;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002529 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002530 return NO_ERROR;
2531}
2532
2533/*===========================================================================
2534 * FUNCTION : deriveMinFrameDuration
2535 *
2536 * DESCRIPTION: derive mininum processed, jpeg, and raw frame durations based
2537 * on currently configured streams.
2538 *
2539 * PARAMETERS : NONE
2540 *
2541 * RETURN : NONE
2542 *
2543 *==========================================================================*/
2544void QCamera3HardwareInterface::deriveMinFrameDuration()
2545{
2546 int32_t maxJpegDim, maxProcessedDim, maxRawDim;
2547
2548 maxJpegDim = 0;
2549 maxProcessedDim = 0;
2550 maxRawDim = 0;
2551
2552 // Figure out maximum jpeg, processed, and raw dimensions
2553 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
2554 it != mStreamInfo.end(); it++) {
2555
2556 // Input stream doesn't have valid stream_type
2557 if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
2558 continue;
2559
2560 int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
2561 if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
2562 if (dimension > maxJpegDim)
2563 maxJpegDim = dimension;
2564 } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
2565 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
2566 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
2567 if (dimension > maxRawDim)
2568 maxRawDim = dimension;
2569 } else {
2570 if (dimension > maxProcessedDim)
2571 maxProcessedDim = dimension;
2572 }
2573 }
2574
2575 size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
2576 MAX_SIZES_CNT);
2577
2578 //Assume all jpeg dimensions are in processed dimensions.
2579 if (maxJpegDim > maxProcessedDim)
2580 maxProcessedDim = maxJpegDim;
2581 //Find the smallest raw dimension that is greater or equal to jpeg dimension
2582 if (maxProcessedDim > maxRawDim) {
2583 maxRawDim = INT32_MAX;
2584
2585 for (size_t i = 0; i < count; i++) {
2586 int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
2587 gCamCapability[mCameraId]->raw_dim[i].height;
2588 if (dimension >= maxProcessedDim && dimension < maxRawDim)
2589 maxRawDim = dimension;
2590 }
2591 }
2592
2593 //Find minimum durations for processed, jpeg, and raw
2594 for (size_t i = 0; i < count; i++) {
2595 if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
2596 gCamCapability[mCameraId]->raw_dim[i].height) {
2597 mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
2598 break;
2599 }
2600 }
2601 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
2602 for (size_t i = 0; i < count; i++) {
2603 if (maxProcessedDim ==
2604 gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
2605 gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
2606 mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
2607 mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
2608 break;
2609 }
2610 }
2611}
2612
2613/*===========================================================================
2614 * FUNCTION : getMinFrameDuration
2615 *
2616 * DESCRIPTION: get minimum frame draution based on the current maximum frame durations
2617 * and current request configuration.
2618 *
2619 * PARAMETERS : @request: requset sent by the frameworks
2620 *
2621 * RETURN : min farme duration for a particular request
2622 *
2623 *==========================================================================*/
2624int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
2625{
2626 bool hasJpegStream = false;
2627 bool hasRawStream = false;
2628 for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
2629 const camera3_stream_t *stream = request->output_buffers[i].stream;
2630 if (stream->format == HAL_PIXEL_FORMAT_BLOB)
2631 hasJpegStream = true;
2632 else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
2633 stream->format == HAL_PIXEL_FORMAT_RAW10 ||
2634 stream->format == HAL_PIXEL_FORMAT_RAW16)
2635 hasRawStream = true;
2636 }
2637
2638 if (!hasJpegStream)
2639 return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
2640 else
2641 return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
2642}
2643
2644/*===========================================================================
2645 * FUNCTION : handleBuffersDuringFlushLock
2646 *
2647 * DESCRIPTION: Account for buffers returned from back-end during flush
2648 * This function is executed while mMutex is held by the caller.
2649 *
2650 * PARAMETERS :
2651 * @buffer: image buffer for the callback
2652 *
2653 * RETURN :
2654 *==========================================================================*/
2655void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
2656{
2657 bool buffer_found = false;
2658 for (List<PendingBuffersInRequest>::iterator req =
2659 mPendingBuffersMap.mPendingBuffersInRequest.begin();
2660 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
2661 for (List<PendingBufferInfo>::iterator i =
2662 req->mPendingBufferList.begin();
2663 i != req->mPendingBufferList.end(); i++) {
2664 if (i->buffer == buffer->buffer) {
2665 mPendingBuffersMap.numPendingBufsAtFlush--;
2666 LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
2667 buffer->buffer, req->frame_number,
2668 mPendingBuffersMap.numPendingBufsAtFlush);
2669 buffer_found = true;
2670 break;
2671 }
2672 }
2673 if (buffer_found) {
2674 break;
2675 }
2676 }
2677 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
2678 //signal the flush()
2679 LOGD("All buffers returned to HAL. Continue flush");
2680 pthread_cond_signal(&mBuffersCond);
2681 }
2682}
2683
2684
/*===========================================================================
 * FUNCTION   : handlePendingReprocResults
 *
 * DESCRIPTION: check and notify on any pending reprocess results
 *
 * PARAMETERS :
 *   @frame_number   : Pending request frame number
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              none-zero failure code
 *==========================================================================*/
int32_t QCamera3HardwareInterface::handlePendingReprocResults(uint32_t frame_number)
{
    // Look for a reprocess result that was queued earlier for this frame.
    for (List<PendingReprocessResult>::iterator j = mPendingReprocessResultList.begin();
            j != mPendingReprocessResultList.end(); j++) {
        if (j->frame_number == frame_number) {
            // Deliver the notification that was stashed with the result.
            orchestrateNotify(&j->notify_msg);

            LOGD("Delayed reprocess notify %d",
                    frame_number);

            // Find the matching pending request so its settings and input
            // buffer can be attached to the capture result.
            for (pendingRequestIterator k = mPendingRequestsList.begin();
                    k != mPendingRequestsList.end(); k++) {

                if (k->frame_number == j->frame_number) {
                    LOGD("Found reprocess frame number %d in pending reprocess List "
                            "Take it out!!",
                            k->frame_number);

                    // Assemble the capture result from the stored buffer (j)
                    // and the pending request entry (k).
                    camera3_capture_result result;
                    memset(&result, 0, sizeof(camera3_capture_result));
                    result.frame_number = frame_number;
                    result.num_output_buffers = 1;
                    result.output_buffers = &j->buffer;
                    result.input_buffer = k->input_buffer;
                    result.result = k->settings;
                    // Reported as the final partial result for this frame.
                    result.partial_result = PARTIAL_RESULT_COUNT;
                    orchestrateResult(&result);

                    // erasePendingRequest invalidates k; leave the loop
                    // immediately after removing the entry.
                    erasePendingRequest(k);
                    break;
                }
            }
            // erase() invalidates j, so break out of the outer loop too.
            mPendingReprocessResultList.erase(j);
            break;
        }
    }
    return NO_ERROR;
}
2735
/*===========================================================================
 * FUNCTION   : handleBatchMetadata
 *
 * DESCRIPTION: Handles metadata buffer callback in batch (HFR) mode. A single
 *              batch metadata carries the frame number/timestamp of the LAST
 *              frame in the batch; this function interpolates the per-frame
 *              numbers and timestamps and invokes handleMetadataWithLock once
 *              per frame in the batch.
 *
 * PARAMETERS : @metadata_buf: metadata buffer
 *              @free_and_bufdone_meta_buf: Buf done on the meta buf and free
 *                 the meta buf in this method
 *
 * RETURN     :
 *
 *==========================================================================*/
void QCamera3HardwareInterface::handleBatchMetadata(
        mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BATCH_METADATA);

    if (NULL == metadata_buf) {
        LOGE("metadata_buf is NULL");
        return;
    }
    /* In batch mode, the metadata will contain the frame number and timestamp
     * of the last frame in the batch. Eg: a batch containing buffers from
     * request 5,6,7 and 8 will have frame number and timestamp corresponding
     * to 8.
     * multiple process_capture_requests => 1 set_param => 1 handleBatchMetata
     * => multiple process_capture_results */
    metadata_buffer_t *metadata =
            (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
    int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
    uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
    uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
    uint32_t frame_number = 0, urgent_frame_number = 0;
    int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
    bool invalid_metadata = false;
    size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
    size_t loopCount = 1;

    // Extract the batch bookkeeping fields from the metadata buffer. Any
    // missing pointer marks the whole buffer as invalid (but it is still
    // pushed through handleMetadataWithLock below).
    int32_t *p_frame_number_valid =
            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
    uint32_t *p_frame_number =
            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
    int64_t *p_capture_time =
            POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
    int32_t *p_urgent_frame_number_valid =
            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
    uint32_t *p_urgent_frame_number =
            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);

    if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
            (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
            (NULL == p_urgent_frame_number)) {
        LOGE("Invalid metadata");
        invalid_metadata = true;
    } else {
        frame_number_valid = *p_frame_number_valid;
        last_frame_number = *p_frame_number;
        last_frame_capture_time = *p_capture_time;
        urgent_frame_number_valid = *p_urgent_frame_number_valid;
        last_urgent_frame_number = *p_urgent_frame_number;
    }

    /* In batchmode, when no video buffers are requested, set_parms are sent
     * for every capture_request. The difference between consecutive urgent
     * frame numbers and frame numbers should be used to interpolate the
     * corresponding frame numbers and time stamps */
    // mPendingBatchMap (keyed by last frame number of a batch) is consulted
    // under mMutex to recover the FIRST frame number of each batch.
    pthread_mutex_lock(&mMutex);
    if (urgent_frame_number_valid) {
        ssize_t idx = mPendingBatchMap.indexOfKey(last_urgent_frame_number);
        if(idx < 0) {
            // Unknown batch key: the HAL's batch bookkeeping is broken.
            LOGE("Invalid urgent frame number received: %d. Irrecoverable error",
                last_urgent_frame_number);
            mState = ERROR;
            pthread_mutex_unlock(&mMutex);
            return;
        }
        first_urgent_frame_number = mPendingBatchMap.valueAt(idx);
        urgentFrameNumDiff = last_urgent_frame_number + 1 -
                first_urgent_frame_number;

        LOGD("urgent_frm: valid: %d frm_num: %d - %d",
                 urgent_frame_number_valid,
                first_urgent_frame_number, last_urgent_frame_number);
    }

    if (frame_number_valid) {
        ssize_t idx = mPendingBatchMap.indexOfKey(last_frame_number);
        if(idx < 0) {
            LOGE("Invalid frame number received: %d. Irrecoverable error",
                last_frame_number);
            mState = ERROR;
            pthread_mutex_unlock(&mMutex);
            return;
        }
        first_frame_number = mPendingBatchMap.valueAt(idx);
        frameNumDiff = last_frame_number + 1 -
                first_frame_number;
        // The batch is fully accounted for once its (non-urgent) frame
        // number arrives; drop the map entry.
        mPendingBatchMap.removeItem(last_frame_number);

        LOGD("frm: valid: %d frm_num: %d - %d",
                 frame_number_valid,
                first_frame_number, last_frame_number);

    }
    pthread_mutex_unlock(&mMutex);

    if (urgent_frame_number_valid || frame_number_valid) {
        // One iteration per frame covered by either range; a diff beyond
        // MAX_HFR_BATCH_SIZE is logged as an anomaly but still processed.
        loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
        if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
            LOGE("urgentFrameNumDiff: %d urgentFrameNum: %d",
                     urgentFrameNumDiff, last_urgent_frame_number);
        if (frameNumDiff > MAX_HFR_BATCH_SIZE)
            LOGE("frameNumDiff: %d frameNum: %d",
                     frameNumDiff, last_frame_number);
    }

    for (size_t i = 0; i < loopCount; i++) {
        /* handleMetadataWithLock is called even for invalid_metadata for
         * pipeline depth calculation */
        if (!invalid_metadata) {
            /* Infer frame number. Batch metadata contains frame number of the
             * last frame */
            if (urgent_frame_number_valid) {
                if (i < urgentFrameNumDiff) {
                    // Rewrite the shared metadata in place so this iteration
                    // reports the interpolated urgent frame number.
                    urgent_frame_number =
                            first_urgent_frame_number + i;
                    LOGD("inferred urgent frame_number: %d",
                             urgent_frame_number);
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                            CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
                } else {
                    /* This is to handle when urgentFrameNumDiff < frameNumDiff */
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                            CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
                }
            }

            /* Infer frame number. Batch metadata contains frame number of the
             * last frame */
            if (frame_number_valid) {
                if (i < frameNumDiff) {
                    frame_number = first_frame_number + i;
                    LOGD("inferred frame_number: %d", frame_number);
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                            CAM_INTF_META_FRAME_NUMBER, frame_number);
                } else {
                    /* This is to handle when urgentFrameNumDiff > frameNumDiff */
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                             CAM_INTF_META_FRAME_NUMBER_VALID, 0);
                }
            }

            if (last_frame_capture_time) {
                //Infer timestamp: space the batch's frames evenly at the HFR
                //video frame interval, ending at the reported capture time.
                first_frame_capture_time = last_frame_capture_time -
                        (((loopCount - 1) * NSEC_PER_SEC) / (double) mHFRVideoFps);
                capture_time =
                        first_frame_capture_time + (i * NSEC_PER_SEC / (double) mHFRVideoFps);
                ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                        CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
                LOGD("batch capture_time: %lld, capture_time: %lld",
                         last_frame_capture_time, capture_time);
            }
        }
        // Buf-done/free is deferred to the end of the batch, hence the
        // hard-coded 'false' here.
        pthread_mutex_lock(&mMutex);
        handleMetadataWithLock(metadata_buf,
                false /* free_and_bufdone_meta_buf */,
                (i == 0) /* first metadata in the batch metadata */);
        pthread_mutex_unlock(&mMutex);
    }

    /* BufDone metadata buffer */
    if (free_and_bufdone_meta_buf) {
        mMetadataChannel->bufDone(metadata_buf);
        free(metadata_buf);
    }
}
2912
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002913void QCamera3HardwareInterface::notifyError(uint32_t frameNumber,
2914 camera3_error_msg_code_t errorCode)
2915{
2916 camera3_notify_msg_t notify_msg;
2917 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
2918 notify_msg.type = CAMERA3_MSG_ERROR;
2919 notify_msg.message.error.error_code = errorCode;
2920 notify_msg.message.error.error_stream = NULL;
2921 notify_msg.message.error.frame_number = frameNumber;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002922 orchestrateNotify(&notify_msg);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002923
2924 return;
2925}
Thierry Strudel3d639192016-09-09 11:52:26 -07002926/*===========================================================================
2927 * FUNCTION : handleMetadataWithLock
2928 *
2929 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
2930 *
2931 * PARAMETERS : @metadata_buf: metadata buffer
2932 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
2933 * the meta buf in this method
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002934 * @firstMetadataInBatch: Boolean to indicate whether this is the
2935 * first metadata in a batch. Valid only for batch mode
Thierry Strudel3d639192016-09-09 11:52:26 -07002936 *
2937 * RETURN :
2938 *
2939 *==========================================================================*/
2940void QCamera3HardwareInterface::handleMetadataWithLock(
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002941 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
2942 bool firstMetadataInBatch)
Thierry Strudel3d639192016-09-09 11:52:26 -07002943{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002944 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_METADATA_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07002945 if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
2946 //during flush do not send metadata from this thread
2947 LOGD("not sending metadata during flush or when mState is error");
2948 if (free_and_bufdone_meta_buf) {
2949 mMetadataChannel->bufDone(metadata_buf);
2950 free(metadata_buf);
2951 }
2952 return;
2953 }
2954
2955 //not in flush
2956 metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
2957 int32_t frame_number_valid, urgent_frame_number_valid;
2958 uint32_t frame_number, urgent_frame_number;
2959 int64_t capture_time;
2960 nsecs_t currentSysTime;
2961
2962 int32_t *p_frame_number_valid =
2963 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
2964 uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
2965 int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
2966 int32_t *p_urgent_frame_number_valid =
2967 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
2968 uint32_t *p_urgent_frame_number =
2969 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
2970 IF_META_AVAILABLE(cam_stream_ID_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
2971 metadata) {
2972 LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
2973 *p_frame_number_valid, *p_frame_number);
2974 }
2975
2976 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
2977 (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
2978 LOGE("Invalid metadata");
2979 if (free_and_bufdone_meta_buf) {
2980 mMetadataChannel->bufDone(metadata_buf);
2981 free(metadata_buf);
2982 }
2983 goto done_metadata;
2984 }
2985 frame_number_valid = *p_frame_number_valid;
2986 frame_number = *p_frame_number;
2987 capture_time = *p_capture_time;
2988 urgent_frame_number_valid = *p_urgent_frame_number_valid;
2989 urgent_frame_number = *p_urgent_frame_number;
2990 currentSysTime = systemTime(CLOCK_MONOTONIC);
2991
2992 // Detect if buffers from any requests are overdue
2993 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
2994 if ( (currentSysTime - req.timestamp) >
2995 s2ns(MISSING_REQUEST_BUF_TIMEOUT) ) {
2996 for (auto &missed : req.mPendingBufferList) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002997 assert(missed.stream->priv);
2998 if (missed.stream->priv) {
2999 QCamera3Channel *ch = (QCamera3Channel *)(missed.stream->priv);
3000 assert(ch->mStreams[0]);
3001 if (ch->mStreams[0]) {
3002 LOGE("Cancel missing frame = %d, buffer = %p,"
3003 "stream type = %d, stream format = %d",
3004 req.frame_number, missed.buffer,
3005 ch->mStreams[0]->getMyType(), missed.stream->format);
3006 ch->timeoutFrame(req.frame_number);
3007 }
3008 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003009 }
3010 }
3011 }
3012 //Partial result on process_capture_result for timestamp
3013 if (urgent_frame_number_valid) {
3014 LOGD("valid urgent frame_number = %u, capture_time = %lld",
3015 urgent_frame_number, capture_time);
3016
3017 //Recieved an urgent Frame Number, handle it
3018 //using partial results
3019 for (pendingRequestIterator i =
3020 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3021 LOGD("Iterator Frame = %d urgent frame = %d",
3022 i->frame_number, urgent_frame_number);
3023
3024 if ((!i->input_buffer) && (i->frame_number < urgent_frame_number) &&
3025 (i->partial_result_cnt == 0)) {
3026 LOGE("Error: HAL missed urgent metadata for frame number %d",
3027 i->frame_number);
3028 }
3029
3030 if (i->frame_number == urgent_frame_number &&
3031 i->bUrgentReceived == 0) {
3032
3033 camera3_capture_result_t result;
3034 memset(&result, 0, sizeof(camera3_capture_result_t));
3035
3036 i->partial_result_cnt++;
3037 i->bUrgentReceived = 1;
3038 // Extract 3A metadata
3039 result.result =
3040 translateCbUrgentMetadataToResultMetadata(metadata);
3041 // Populate metadata result
3042 result.frame_number = urgent_frame_number;
3043 result.num_output_buffers = 0;
3044 result.output_buffers = NULL;
3045 result.partial_result = i->partial_result_cnt;
3046
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003047 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07003048 LOGD("urgent frame_number = %u, capture_time = %lld",
3049 result.frame_number, capture_time);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003050 if (mResetInstantAEC && mInstantAECSettledFrameNumber == 0) {
3051 // Instant AEC settled for this frame.
3052 LOGH("instant AEC settled for frame number %d", urgent_frame_number);
3053 mInstantAECSettledFrameNumber = urgent_frame_number;
3054 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003055 free_camera_metadata((camera_metadata_t *)result.result);
3056 break;
3057 }
3058 }
3059 }
3060
3061 if (!frame_number_valid) {
3062 LOGD("Not a valid normal frame number, used as SOF only");
3063 if (free_and_bufdone_meta_buf) {
3064 mMetadataChannel->bufDone(metadata_buf);
3065 free(metadata_buf);
3066 }
3067 goto done_metadata;
3068 }
3069 LOGH("valid frame_number = %u, capture_time = %lld",
3070 frame_number, capture_time);
3071
3072 for (pendingRequestIterator i = mPendingRequestsList.begin();
3073 i != mPendingRequestsList.end() && i->frame_number <= frame_number;) {
3074 // Flush out all entries with less or equal frame numbers.
3075
3076 camera3_capture_result_t result;
3077 memset(&result, 0, sizeof(camera3_capture_result_t));
3078
3079 LOGD("frame_number in the list is %u", i->frame_number);
3080 i->partial_result_cnt++;
3081 result.partial_result = i->partial_result_cnt;
3082
3083 // Check whether any stream buffer corresponding to this is dropped or not
3084 // If dropped, then send the ERROR_BUFFER for the corresponding stream
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003085 // OR check if instant AEC is enabled, then need to drop frames untill AEC is settled.
3086 if (p_cam_frame_drop ||
3087 (mInstantAEC || i->frame_number < mInstantAECSettledFrameNumber)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003088 /* Clear notify_msg structure */
3089 camera3_notify_msg_t notify_msg;
3090 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3091 for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
3092 j != i->buffers.end(); j++) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003093 bool dropFrame = false;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003094 QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel *)j->stream->priv;
3095 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003096 if (p_cam_frame_drop) {
3097 for (uint32_t k = 0; k < p_cam_frame_drop->num_streams; k++) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003098 if (streamID == p_cam_frame_drop->stream_request[k].streamID) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003099 // Got the stream ID for drop frame.
3100 dropFrame = true;
3101 break;
3102 }
3103 }
3104 } else {
3105 // This is instant AEC case.
3106 // For instant AEC drop the stream untill AEC is settled.
3107 dropFrame = true;
3108 }
3109 if (dropFrame) {
3110 // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
3111 if (p_cam_frame_drop) {
3112 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003113 LOGE("Start of reporting error frame#=%u, streamID=%u",
3114 i->frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003115 } else {
3116 // For instant AEC, inform frame drop and frame number
3117 LOGH("Start of reporting error frame#=%u for instant AEC, streamID=%u, "
3118 "AEC settled frame number = %u",
3119 i->frame_number, streamID, mInstantAECSettledFrameNumber);
3120 }
3121 notify_msg.type = CAMERA3_MSG_ERROR;
3122 notify_msg.message.error.frame_number = i->frame_number;
3123 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
3124 notify_msg.message.error.error_stream = j->stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003125 orchestrateNotify(&notify_msg);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003126 if (p_cam_frame_drop) {
3127 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003128 LOGE("End of reporting error frame#=%u, streamID=%u",
3129 i->frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003130 } else {
3131 // For instant AEC, inform frame drop and frame number
3132 LOGH("End of reporting error frame#=%u for instant AEC, streamID=%u, "
3133 "AEC settled frame number = %u",
3134 i->frame_number, streamID, mInstantAECSettledFrameNumber);
3135 }
3136 PendingFrameDropInfo PendingFrameDrop;
3137 PendingFrameDrop.frame_number=i->frame_number;
3138 PendingFrameDrop.stream_ID = streamID;
3139 // Add the Frame drop info to mPendingFrameDropList
3140 mPendingFrameDropList.push_back(PendingFrameDrop);
3141 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003142 }
3143 }
3144
3145 // Send empty metadata with already filled buffers for dropped metadata
3146 // and send valid metadata with already filled buffers for current metadata
3147 /* we could hit this case when we either
3148 * 1. have a pending reprocess request or
3149 * 2. miss a metadata buffer callback */
3150 if (i->frame_number < frame_number) {
3151 if (i->input_buffer) {
3152 /* this will be handled in handleInputBufferWithLock */
3153 i++;
3154 continue;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003155 } else {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003156
3157 mPendingLiveRequest--;
3158
3159 CameraMetadata dummyMetadata;
3160 dummyMetadata.update(ANDROID_REQUEST_ID, &(i->request_id), 1);
3161 result.result = dummyMetadata.release();
3162
3163 notifyError(i->frame_number, CAMERA3_MSG_ERROR_RESULT);
Thierry Strudel3d639192016-09-09 11:52:26 -07003164 }
3165 } else {
3166 mPendingLiveRequest--;
3167 /* Clear notify_msg structure */
3168 camera3_notify_msg_t notify_msg;
3169 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3170
3171 // Send shutter notify to frameworks
3172 notify_msg.type = CAMERA3_MSG_SHUTTER;
3173 notify_msg.message.shutter.frame_number = i->frame_number;
3174 notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003175 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07003176
3177 i->timestamp = capture_time;
3178
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003179 /* Set the timestamp in display metadata so that clients aware of
3180 private_handle such as VT can use this un-modified timestamps.
3181 Camera framework is unaware of this timestamp and cannot change this */
3182 updateTimeStampInPendingBuffers(i->frame_number, i->timestamp);
3183
Thierry Strudel3d639192016-09-09 11:52:26 -07003184 // Find channel requiring metadata, meaning internal offline postprocess
3185 // is needed.
3186 //TODO: for now, we don't support two streams requiring metadata at the same time.
3187 // (because we are not making copies, and metadata buffer is not reference counted.
3188 bool internalPproc = false;
3189 for (pendingBufferIterator iter = i->buffers.begin();
3190 iter != i->buffers.end(); iter++) {
3191 if (iter->need_metadata) {
3192 internalPproc = true;
3193 QCamera3ProcessingChannel *channel =
3194 (QCamera3ProcessingChannel *)iter->stream->priv;
3195 channel->queueReprocMetadata(metadata_buf);
3196 break;
3197 }
3198 }
3199
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003200 for (auto itr = i->internalRequestList.begin();
3201 itr != i->internalRequestList.end(); itr++) {
3202 if (itr->need_metadata) {
3203 internalPproc = true;
3204 QCamera3ProcessingChannel *channel =
3205 (QCamera3ProcessingChannel *)itr->stream->priv;
3206 channel->queueReprocMetadata(metadata_buf);
3207 break;
3208 }
3209 }
3210
3211
Thierry Strudel3d639192016-09-09 11:52:26 -07003212 result.result = translateFromHalMetadata(metadata,
3213 i->timestamp, i->request_id, i->jpegMetadata, i->pipeline_depth,
Samuel Ha68ba5172016-12-15 18:41:12 -08003214 i->capture_intent,
3215 /* DevCamDebug metadata translateFromHalMetadata function call*/
3216 i->DevCamDebug_meta_enable,
3217 /* DevCamDebug metadata end */
3218 internalPproc, i->fwkCacMode,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003219 firstMetadataInBatch);
Thierry Strudel3d639192016-09-09 11:52:26 -07003220
3221 saveExifParams(metadata);
3222
3223 if (i->blob_request) {
3224 {
3225 //Dump tuning metadata if enabled and available
3226 char prop[PROPERTY_VALUE_MAX];
3227 memset(prop, 0, sizeof(prop));
3228 property_get("persist.camera.dumpmetadata", prop, "0");
3229 int32_t enabled = atoi(prop);
3230 if (enabled && metadata->is_tuning_params_valid) {
3231 dumpMetadataToFile(metadata->tuning_params,
3232 mMetaFrameCount,
3233 enabled,
3234 "Snapshot",
3235 frame_number);
3236 }
3237 }
3238 }
3239
3240 if (!internalPproc) {
3241 LOGD("couldn't find need_metadata for this metadata");
3242 // Return metadata buffer
3243 if (free_and_bufdone_meta_buf) {
3244 mMetadataChannel->bufDone(metadata_buf);
3245 free(metadata_buf);
3246 }
3247 }
3248 }
3249 if (!result.result) {
3250 LOGE("metadata is NULL");
3251 }
3252 result.frame_number = i->frame_number;
3253 result.input_buffer = i->input_buffer;
3254 result.num_output_buffers = 0;
3255 result.output_buffers = NULL;
3256 for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
3257 j != i->buffers.end(); j++) {
3258 if (j->buffer) {
3259 result.num_output_buffers++;
3260 }
3261 }
3262
3263 updateFpsInPreviewBuffer(metadata, i->frame_number);
3264
3265 if (result.num_output_buffers > 0) {
3266 camera3_stream_buffer_t *result_buffers =
3267 new camera3_stream_buffer_t[result.num_output_buffers];
3268 if (result_buffers != NULL) {
3269 size_t result_buffers_idx = 0;
3270 for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
3271 j != i->buffers.end(); j++) {
3272 if (j->buffer) {
3273 for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
3274 m != mPendingFrameDropList.end(); m++) {
3275 QCamera3Channel *channel = (QCamera3Channel *)j->buffer->stream->priv;
3276 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
3277 if((m->stream_ID == streamID) && (m->frame_number==frame_number)) {
3278 j->buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
3279 LOGE("Stream STATUS_ERROR frame_number=%u, streamID=%u",
3280 frame_number, streamID);
3281 m = mPendingFrameDropList.erase(m);
3282 break;
3283 }
3284 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003285 j->buffer->status |= mPendingBuffersMap.getBufErrStatus(j->buffer->buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -07003286 mPendingBuffersMap.removeBuf(j->buffer->buffer);
3287 result_buffers[result_buffers_idx++] = *(j->buffer);
3288 free(j->buffer);
3289 j->buffer = NULL;
3290 }
3291 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003292
Thierry Strudel3d639192016-09-09 11:52:26 -07003293 result.output_buffers = result_buffers;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003294 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07003295 LOGD("meta frame_number = %u, capture_time = %lld",
3296 result.frame_number, i->timestamp);
3297 free_camera_metadata((camera_metadata_t *)result.result);
3298 delete[] result_buffers;
3299 }else {
3300 LOGE("Fatal error: out of memory");
3301 }
3302 } else {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003303 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07003304 LOGD("meta frame_number = %u, capture_time = %lld",
3305 result.frame_number, i->timestamp);
3306 free_camera_metadata((camera_metadata_t *)result.result);
3307 }
3308
3309 i = erasePendingRequest(i);
3310
3311 if (!mPendingReprocessResultList.empty()) {
3312 handlePendingReprocResults(frame_number + 1);
3313 }
3314 }
3315
3316done_metadata:
3317 for (pendingRequestIterator i = mPendingRequestsList.begin();
3318 i != mPendingRequestsList.end() ;i++) {
3319 i->pipeline_depth++;
3320 }
3321 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
3322 unblockRequestIfNecessary();
3323}
3324
3325/*===========================================================================
3326 * FUNCTION : hdrPlusPerfLock
3327 *
3328 * DESCRIPTION: perf lock for HDR+ using custom intent
3329 *
3330 * PARAMETERS : @metadata_buf: Metadata super_buf pointer
3331 *
3332 * RETURN : None
3333 *
3334 *==========================================================================*/
3335void QCamera3HardwareInterface::hdrPlusPerfLock(
3336 mm_camera_super_buf_t *metadata_buf)
3337{
3338 if (NULL == metadata_buf) {
3339 LOGE("metadata_buf is NULL");
3340 return;
3341 }
3342 metadata_buffer_t *metadata =
3343 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3344 int32_t *p_frame_number_valid =
3345 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3346 uint32_t *p_frame_number =
3347 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3348
3349 if (p_frame_number_valid == NULL || p_frame_number == NULL) {
3350 LOGE("%s: Invalid metadata", __func__);
3351 return;
3352 }
3353
3354 //acquire perf lock for 5 sec after the last HDR frame is captured
3355 if ((p_frame_number_valid != NULL) && *p_frame_number_valid) {
3356 if ((p_frame_number != NULL) &&
3357 (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003358 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT, HDR_PLUS_PERF_TIME_OUT);
Thierry Strudel3d639192016-09-09 11:52:26 -07003359 }
3360 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003361}
3362
3363/*===========================================================================
3364 * FUNCTION : handleInputBufferWithLock
3365 *
3366 * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
3367 *
3368 * PARAMETERS : @frame_number: frame number of the input buffer
3369 *
3370 * RETURN :
3371 *
3372 *==========================================================================*/
3373void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
3374{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003375 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_IN_BUF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003376 pendingRequestIterator i = mPendingRequestsList.begin();
3377 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
3378 i++;
3379 }
3380 if (i != mPendingRequestsList.end() && i->input_buffer) {
3381 //found the right request
3382 if (!i->shutter_notified) {
3383 CameraMetadata settings;
3384 camera3_notify_msg_t notify_msg;
3385 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3386 nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
3387 if(i->settings) {
3388 settings = i->settings;
3389 if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
3390 capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
3391 } else {
3392 LOGE("No timestamp in input settings! Using current one.");
3393 }
3394 } else {
3395 LOGE("Input settings missing!");
3396 }
3397
3398 notify_msg.type = CAMERA3_MSG_SHUTTER;
3399 notify_msg.message.shutter.frame_number = frame_number;
3400 notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003401 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07003402 i->shutter_notified = true;
3403 LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
3404 i->frame_number, notify_msg.message.shutter.timestamp);
3405 }
3406
3407 if (i->input_buffer->release_fence != -1) {
3408 int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
3409 close(i->input_buffer->release_fence);
3410 if (rc != OK) {
3411 LOGE("input buffer sync wait failed %d", rc);
3412 }
3413 }
3414
3415 camera3_capture_result result;
3416 memset(&result, 0, sizeof(camera3_capture_result));
3417 result.frame_number = frame_number;
3418 result.result = i->settings;
3419 result.input_buffer = i->input_buffer;
3420 result.partial_result = PARTIAL_RESULT_COUNT;
3421
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003422 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07003423 LOGD("Input request metadata and input buffer frame_number = %u",
3424 i->frame_number);
3425 i = erasePendingRequest(i);
3426 } else {
3427 LOGE("Could not find input request for frame number %d", frame_number);
3428 }
3429}
3430
3431/*===========================================================================
3432 * FUNCTION : handleBufferWithLock
3433 *
3434 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
3435 *
3436 * PARAMETERS : @buffer: image buffer for the callback
3437 * @frame_number: frame number of the image buffer
3438 *
3439 * RETURN :
3440 *
3441 *==========================================================================*/
3442void QCamera3HardwareInterface::handleBufferWithLock(
3443 camera3_stream_buffer_t *buffer, uint32_t frame_number)
3444{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003445 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BUF_LKD);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003446
3447 if (buffer->stream->format == HAL_PIXEL_FORMAT_BLOB) {
3448 mPerfLockMgr.releasePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
3449 }
3450
Thierry Strudel3d639192016-09-09 11:52:26 -07003451 /* Nothing to be done during error state */
3452 if ((ERROR == mState) || (DEINIT == mState)) {
3453 return;
3454 }
3455 if (mFlushPerf) {
3456 handleBuffersDuringFlushLock(buffer);
3457 return;
3458 }
3459 //not in flush
3460 // If the frame number doesn't exist in the pending request list,
3461 // directly send the buffer to the frameworks, and update pending buffers map
3462 // Otherwise, book-keep the buffer.
3463 pendingRequestIterator i = mPendingRequestsList.begin();
3464 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
3465 i++;
3466 }
3467 if (i == mPendingRequestsList.end()) {
3468 // Verify all pending requests frame_numbers are greater
3469 for (pendingRequestIterator j = mPendingRequestsList.begin();
3470 j != mPendingRequestsList.end(); j++) {
3471 if ((j->frame_number < frame_number) && !(j->input_buffer)) {
3472 LOGW("Error: pending live frame number %d is smaller than %d",
3473 j->frame_number, frame_number);
3474 }
3475 }
3476 camera3_capture_result_t result;
3477 memset(&result, 0, sizeof(camera3_capture_result_t));
3478 result.result = NULL;
3479 result.frame_number = frame_number;
3480 result.num_output_buffers = 1;
3481 result.partial_result = 0;
3482 for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
3483 m != mPendingFrameDropList.end(); m++) {
3484 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
3485 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
3486 if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
3487 buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
3488 LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
3489 frame_number, streamID);
3490 m = mPendingFrameDropList.erase(m);
3491 break;
3492 }
3493 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003494 buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -07003495 result.output_buffers = buffer;
3496 LOGH("result frame_number = %d, buffer = %p",
3497 frame_number, buffer->buffer);
3498
3499 mPendingBuffersMap.removeBuf(buffer->buffer);
3500
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003501 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07003502 } else {
3503 if (i->input_buffer) {
3504 CameraMetadata settings;
3505 camera3_notify_msg_t notify_msg;
3506 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3507 nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
3508 if(i->settings) {
3509 settings = i->settings;
3510 if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
3511 capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
3512 } else {
3513 LOGW("No timestamp in input settings! Using current one.");
3514 }
3515 } else {
3516 LOGE("Input settings missing!");
3517 }
3518
3519 notify_msg.type = CAMERA3_MSG_SHUTTER;
3520 notify_msg.message.shutter.frame_number = frame_number;
3521 notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
3522
3523 if (i->input_buffer->release_fence != -1) {
3524 int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
3525 close(i->input_buffer->release_fence);
3526 if (rc != OK) {
3527 LOGE("input buffer sync wait failed %d", rc);
3528 }
3529 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003530 buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -07003531 mPendingBuffersMap.removeBuf(buffer->buffer);
3532
Thierry Strudel04e026f2016-10-10 11:27:36 -07003533 camera3_capture_result result;
3534 memset(&result, 0, sizeof(camera3_capture_result));
3535 result.frame_number = frame_number;
3536 result.result = i->settings;
3537 result.input_buffer = i->input_buffer;
3538 result.num_output_buffers = 1;
3539 result.output_buffers = buffer;
3540 result.partial_result = PARTIAL_RESULT_COUNT;
Thierry Strudel3d639192016-09-09 11:52:26 -07003541
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003542 orchestrateNotify(&notify_msg);
3543 orchestrateResult(&result);
Thierry Strudel04e026f2016-10-10 11:27:36 -07003544 LOGD("Notify reprocess now %d!", frame_number);
3545 i = erasePendingRequest(i);
Thierry Strudel3d639192016-09-09 11:52:26 -07003546 } else {
3547 for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
3548 j != i->buffers.end(); j++) {
3549 if (j->stream == buffer->stream) {
3550 if (j->buffer != NULL) {
3551 LOGE("Error: buffer is already set");
3552 } else {
3553 j->buffer = (camera3_stream_buffer_t *)malloc(
3554 sizeof(camera3_stream_buffer_t));
3555 *(j->buffer) = *buffer;
3556 LOGH("cache buffer %p at result frame_number %u",
3557 buffer->buffer, frame_number);
3558 }
3559 }
3560 }
3561 }
3562 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003563
3564 if (mPreviewStarted == false) {
3565 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
3566 if ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask()) {
3567 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
3568 mPerfLockMgr.releasePerfLock(PERF_LOCK_OPEN_CAMERA);
3569 mPreviewStarted = true;
3570
3571 // Set power hint for preview
3572 mPerfLockMgr.acquirePerfLock(PERF_LOCK_POWERHINT_ENCODE, 0);
3573 }
3574 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003575}
3576
3577/*===========================================================================
3578 * FUNCTION : unblockRequestIfNecessary
3579 *
3580 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
3581 * that mMutex is held when this function is called.
3582 *
3583 * PARAMETERS :
3584 *
3585 * RETURN :
3586 *
3587 *==========================================================================*/
3588void QCamera3HardwareInterface::unblockRequestIfNecessary()
3589{
3590 // Unblock process_capture_request
3591 pthread_cond_signal(&mRequestCond);
3592}
3593
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003594/*===========================================================================
3595 * FUNCTION : isHdrSnapshotRequest
3596 *
3597 * DESCRIPTION: Function to determine if the request is for a HDR snapshot
3598 *
3599 * PARAMETERS : camera3 request structure
3600 *
3601 * RETURN : boolean decision variable
3602 *
3603 *==========================================================================*/
3604bool QCamera3HardwareInterface::isHdrSnapshotRequest(camera3_capture_request *request)
3605{
3606 if (request == NULL) {
3607 LOGE("Invalid request handle");
3608 assert(0);
3609 return false;
3610 }
3611
3612 if (!mForceHdrSnapshot) {
3613 CameraMetadata frame_settings;
3614 frame_settings = request->settings;
3615
3616 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
3617 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
3618 if (metaMode != ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
3619 return false;
3620 }
3621 } else {
3622 return false;
3623 }
3624
3625 if (frame_settings.exists(ANDROID_CONTROL_SCENE_MODE)) {
3626 uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
3627 if (fwk_sceneMode != ANDROID_CONTROL_SCENE_MODE_HDR) {
3628 return false;
3629 }
3630 } else {
3631 return false;
3632 }
3633 }
3634
3635 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
3636 if (request->output_buffers[i].stream->format
3637 == HAL_PIXEL_FORMAT_BLOB) {
3638 return true;
3639 }
3640 }
3641
3642 return false;
3643}
3644/*===========================================================================
3645 * FUNCTION : orchestrateRequest
3646 *
3647 * DESCRIPTION: Orchestrates a capture request from camera service
3648 *
3649 * PARAMETERS :
3650 * @request : request from framework to process
3651 *
3652 * RETURN : Error status codes
3653 *
3654 *==========================================================================*/
3655int32_t QCamera3HardwareInterface::orchestrateRequest(
3656 camera3_capture_request_t *request)
3657{
3658
3659 uint32_t originalFrameNumber = request->frame_number;
3660 uint32_t originalOutputCount = request->num_output_buffers;
3661 const camera_metadata_t *original_settings = request->settings;
3662 List<InternalRequest> internallyRequestedStreams;
3663 List<InternalRequest> emptyInternalList;
3664
3665 if (isHdrSnapshotRequest(request) && request->input_buffer == NULL) {
3666 LOGD("Framework requested:%d buffers in HDR snapshot", request->num_output_buffers);
3667 uint32_t internalFrameNumber;
3668 CameraMetadata modified_meta;
3669
3670
3671 /* Add Blob channel to list of internally requested streams */
3672 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
3673 if (request->output_buffers[i].stream->format
3674 == HAL_PIXEL_FORMAT_BLOB) {
3675 InternalRequest streamRequested;
3676 streamRequested.meteringOnly = 1;
3677 streamRequested.need_metadata = 0;
3678 streamRequested.stream = request->output_buffers[i].stream;
3679 internallyRequestedStreams.push_back(streamRequested);
3680 }
3681 }
3682 request->num_output_buffers = 0;
3683 auto itr = internallyRequestedStreams.begin();
3684
3685 /* Modify setting to set compensation */
3686 modified_meta = request->settings;
3687 int32_t expCompensation = GB_HDR_HALF_STEP_EV;
3688 uint8_t aeLock = 1;
3689 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
3690 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
3691 camera_metadata_t *modified_settings = modified_meta.release();
3692 request->settings = modified_settings;
3693
3694 /* Capture Settling & -2x frame */
3695 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
3696 request->frame_number = internalFrameNumber;
3697 processCaptureRequest(request, internallyRequestedStreams);
3698
3699 request->num_output_buffers = originalOutputCount;
3700 _orchestrationDb.allocStoreInternalFrameNumber(originalFrameNumber, internalFrameNumber);
3701 request->frame_number = internalFrameNumber;
3702 processCaptureRequest(request, emptyInternalList);
3703 request->num_output_buffers = 0;
3704
3705 modified_meta = modified_settings;
3706 expCompensation = 0;
3707 aeLock = 1;
3708 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
3709 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
3710 modified_settings = modified_meta.release();
3711 request->settings = modified_settings;
3712
3713 /* Capture Settling & 0X frame */
3714
3715 itr = internallyRequestedStreams.begin();
3716 if (itr == internallyRequestedStreams.end()) {
3717 LOGE("Error Internally Requested Stream list is empty");
3718 assert(0);
3719 } else {
3720 itr->need_metadata = 0;
3721 itr->meteringOnly = 1;
3722 }
3723
3724 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
3725 request->frame_number = internalFrameNumber;
3726 processCaptureRequest(request, internallyRequestedStreams);
3727
3728 itr = internallyRequestedStreams.begin();
3729 if (itr == internallyRequestedStreams.end()) {
3730 ALOGE("Error Internally Requested Stream list is empty");
3731 assert(0);
3732 } else {
3733 itr->need_metadata = 1;
3734 itr->meteringOnly = 0;
3735 }
3736
3737 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
3738 request->frame_number = internalFrameNumber;
3739 processCaptureRequest(request, internallyRequestedStreams);
3740
3741 /* Capture 2X frame*/
3742 modified_meta = modified_settings;
3743 expCompensation = GB_HDR_2X_STEP_EV;
3744 aeLock = 1;
3745 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
3746 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
3747 modified_settings = modified_meta.release();
3748 request->settings = modified_settings;
3749
3750 itr = internallyRequestedStreams.begin();
3751 if (itr == internallyRequestedStreams.end()) {
3752 ALOGE("Error Internally Requested Stream list is empty");
3753 assert(0);
3754 } else {
3755 itr->need_metadata = 0;
3756 itr->meteringOnly = 1;
3757 }
3758 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
3759 request->frame_number = internalFrameNumber;
3760 processCaptureRequest(request, internallyRequestedStreams);
3761
3762 itr = internallyRequestedStreams.begin();
3763 if (itr == internallyRequestedStreams.end()) {
3764 ALOGE("Error Internally Requested Stream list is empty");
3765 assert(0);
3766 } else {
3767 itr->need_metadata = 1;
3768 itr->meteringOnly = 0;
3769 }
3770
3771 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
3772 request->frame_number = internalFrameNumber;
3773 processCaptureRequest(request, internallyRequestedStreams);
3774
3775
3776 /* Capture 2X on original streaming config*/
3777 internallyRequestedStreams.clear();
3778
3779 /* Restore original settings pointer */
3780 request->settings = original_settings;
3781 } else {
3782 uint32_t internalFrameNumber;
3783 _orchestrationDb.allocStoreInternalFrameNumber(request->frame_number, internalFrameNumber);
3784 request->frame_number = internalFrameNumber;
3785 return processCaptureRequest(request, internallyRequestedStreams);
3786 }
3787
3788 return NO_ERROR;
3789}
3790
3791/*===========================================================================
3792 * FUNCTION : orchestrateResult
3793 *
3794 * DESCRIPTION: Orchestrates a capture result to camera service
3795 *
3796 * PARAMETERS :
3797 * @request : request from framework to process
3798 *
3799 * RETURN :
3800 *
3801 *==========================================================================*/
3802void QCamera3HardwareInterface::orchestrateResult(
3803 camera3_capture_result_t *result)
3804{
3805 uint32_t frameworkFrameNumber;
3806 int32_t rc = _orchestrationDb.getFrameworkFrameNumber(result->frame_number,
3807 frameworkFrameNumber);
3808 if (rc != NO_ERROR) {
3809 LOGE("Cannot find translated frameworkFrameNumber");
3810 assert(0);
3811 } else {
3812 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
3813 LOGD("CAM_DEBUG Internal Request drop the result");
3814 } else {
3815 result->frame_number = frameworkFrameNumber;
3816 mCallbackOps->process_capture_result(mCallbackOps, result);
3817 }
3818 }
3819}
3820
3821/*===========================================================================
3822 * FUNCTION : orchestrateNotify
3823 *
3824 * DESCRIPTION: Orchestrates a notify to camera service
3825 *
3826 * PARAMETERS :
3827 * @request : request from framework to process
3828 *
3829 * RETURN :
3830 *
3831 *==========================================================================*/
3832void QCamera3HardwareInterface::orchestrateNotify(camera3_notify_msg_t *notify_msg)
3833{
3834 uint32_t frameworkFrameNumber;
3835 uint32_t internalFrameNumber = notify_msg->message.shutter.frame_number;
3836 int32_t rc = _orchestrationDb.getFrameworkFrameNumber(internalFrameNumber,
3837 frameworkFrameNumber);
3838 if (rc != NO_ERROR) {
3839 LOGE("Cannot find translated frameworkFrameNumber");
3840 assert(0);
3841 } else {
3842 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
3843 LOGE("CAM_DEBUG Internal Request drop the notifyCb");
3844 } else {
3845 notify_msg->message.shutter.frame_number = frameworkFrameNumber;
3846 mCallbackOps->notify(mCallbackOps, notify_msg);
3847 }
3848 }
3849}
3850
3851/*===========================================================================
3852 * FUNCTION : FrameNumberRegistry
3853 *
3854 * DESCRIPTION: Constructor
3855 *
3856 * PARAMETERS :
3857 *
3858 * RETURN :
3859 *
3860 *==========================================================================*/
3861FrameNumberRegistry::FrameNumberRegistry()
3862{
3863 _nextFreeInternalNumber = INTERNAL_FRAME_STARTING_NUMBER;
3864}
3865
3866/*===========================================================================
3867 * FUNCTION : ~FrameNumberRegistry
3868 *
3869 * DESCRIPTION: Destructor
3870 *
3871 * PARAMETERS :
3872 *
3873 * RETURN :
3874 *
3875 *==========================================================================*/
3876FrameNumberRegistry::~FrameNumberRegistry()
3877{
3878}
3879
3880/*===========================================================================
3881 * FUNCTION : PurgeOldEntriesLocked
3882 *
3883 * DESCRIPTION: Maintainance function to trigger LRU cleanup mechanism
3884 *
3885 * PARAMETERS :
3886 *
3887 * RETURN : NONE
3888 *
3889 *==========================================================================*/
3890void FrameNumberRegistry::purgeOldEntriesLocked()
3891{
3892 while (_register.begin() != _register.end()) {
3893 auto itr = _register.begin();
3894 if (itr->first < (_nextFreeInternalNumber - FRAME_REGISTER_LRU_SIZE)) {
3895 _register.erase(itr);
3896 } else {
3897 return;
3898 }
3899 }
3900}
3901
3902/*===========================================================================
3903 * FUNCTION : allocStoreInternalFrameNumber
3904 *
3905 * DESCRIPTION: Method to note down a framework request and associate a new
3906 * internal request number against it
3907 *
3908 * PARAMETERS :
3909 * @fFrameNumber: Identifier given by framework
3910 * @internalFN : Output parameter which will have the newly generated internal
3911 * entry
3912 *
3913 * RETURN : Error code
3914 *
3915 *==========================================================================*/
3916int32_t FrameNumberRegistry::allocStoreInternalFrameNumber(uint32_t frameworkFrameNumber,
3917 uint32_t &internalFrameNumber)
3918{
3919 Mutex::Autolock lock(mRegistryLock);
3920 internalFrameNumber = _nextFreeInternalNumber++;
3921 LOGD("Storing ff#:%d, with internal:%d", frameworkFrameNumber, internalFrameNumber);
3922 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, frameworkFrameNumber));
3923 purgeOldEntriesLocked();
3924 return NO_ERROR;
3925}
3926
3927/*===========================================================================
3928 * FUNCTION : generateStoreInternalFrameNumber
3929 *
3930 * DESCRIPTION: Method to associate a new internal request number independent
3931 * of any associate with framework requests
3932 *
3933 * PARAMETERS :
3934 * @internalFrame#: Output parameter which will have the newly generated internal
3935 *
3936 *
3937 * RETURN : Error code
3938 *
3939 *==========================================================================*/
3940int32_t FrameNumberRegistry::generateStoreInternalFrameNumber(uint32_t &internalFrameNumber)
3941{
3942 Mutex::Autolock lock(mRegistryLock);
3943 internalFrameNumber = _nextFreeInternalNumber++;
3944 LOGD("Generated internal framenumber:%d", internalFrameNumber);
3945 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, EMPTY_FRAMEWORK_FRAME_NUMBER));
3946 purgeOldEntriesLocked();
3947 return NO_ERROR;
3948}
3949
3950/*===========================================================================
3951 * FUNCTION : getFrameworkFrameNumber
3952 *
3953 * DESCRIPTION: Method to query the framework framenumber given an internal #
3954 *
3955 * PARAMETERS :
3956 * @internalFrame#: Internal reference
3957 * @frameworkframenumber: Output parameter holding framework frame entry
3958 *
3959 * RETURN : Error code
3960 *
3961 *==========================================================================*/
3962int32_t FrameNumberRegistry::getFrameworkFrameNumber(uint32_t internalFrameNumber,
3963 uint32_t &frameworkFrameNumber)
3964{
3965 Mutex::Autolock lock(mRegistryLock);
3966 auto itr = _register.find(internalFrameNumber);
3967 if (itr == _register.end()) {
3968 LOGE("CAM_DEBUG: Cannot find internal#: %d", internalFrameNumber);
3969 return -ENOENT;
3970 }
3971
3972 frameworkFrameNumber = itr->second;
3973 purgeOldEntriesLocked();
3974 return NO_ERROR;
3975}
Thierry Strudel3d639192016-09-09 11:52:26 -07003976
3977/*===========================================================================
3978 * FUNCTION : processCaptureRequest
3979 *
3980 * DESCRIPTION: process a capture request from camera service
3981 *
3982 * PARAMETERS :
3983 * @request : request from framework to process
3984 *
3985 * RETURN :
3986 *
3987 *==========================================================================*/
3988int QCamera3HardwareInterface::processCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003989 camera3_capture_request_t *request,
3990 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07003991{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003992 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PROC_CAP_REQ);
Thierry Strudel3d639192016-09-09 11:52:26 -07003993 int rc = NO_ERROR;
3994 int32_t request_id;
3995 CameraMetadata meta;
Thierry Strudel3d639192016-09-09 11:52:26 -07003996 bool isVidBufRequested = false;
3997 camera3_stream_buffer_t *pInputBuffer = NULL;
3998
3999 pthread_mutex_lock(&mMutex);
4000
4001 // Validate current state
4002 switch (mState) {
4003 case CONFIGURED:
4004 case STARTED:
4005 /* valid state */
4006 break;
4007
4008 case ERROR:
4009 pthread_mutex_unlock(&mMutex);
4010 handleCameraDeviceError();
4011 return -ENODEV;
4012
4013 default:
4014 LOGE("Invalid state %d", mState);
4015 pthread_mutex_unlock(&mMutex);
4016 return -ENODEV;
4017 }
4018
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004019 rc = validateCaptureRequest(request, internallyRequestedStreams);
Thierry Strudel3d639192016-09-09 11:52:26 -07004020 if (rc != NO_ERROR) {
4021 LOGE("incoming request is not valid");
4022 pthread_mutex_unlock(&mMutex);
4023 return rc;
4024 }
4025
4026 meta = request->settings;
4027
4028 // For first capture request, send capture intent, and
4029 // stream on all streams
4030 if (mState == CONFIGURED) {
4031 // send an unconfigure to the backend so that the isp
4032 // resources are deallocated
4033 if (!mFirstConfiguration) {
4034 cam_stream_size_info_t stream_config_info;
4035 int32_t hal_version = CAM_HAL_V3;
4036 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
4037 stream_config_info.buffer_info.min_buffers =
4038 MIN_INFLIGHT_REQUESTS;
4039 stream_config_info.buffer_info.max_buffers =
4040 m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
4041 clear_metadata_buffer(mParameters);
4042 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4043 CAM_INTF_PARM_HAL_VERSION, hal_version);
4044 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4045 CAM_INTF_META_STREAM_INFO, stream_config_info);
4046 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4047 mParameters);
4048 if (rc < 0) {
4049 LOGE("set_parms for unconfigure failed");
4050 pthread_mutex_unlock(&mMutex);
4051 return rc;
4052 }
4053 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004054 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07004055 /* get eis information for stream configuration */
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004056 cam_is_type_t isTypeVideo, isTypePreview, is_type=IS_TYPE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07004057 char is_type_value[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004058 property_get("persist.camera.is_type", is_type_value, "4");
4059 isTypeVideo = static_cast<cam_is_type_t>(atoi(is_type_value));
4060 // Make default value for preview IS_TYPE as IS_TYPE_EIS_2_0
4061 property_get("persist.camera.is_type_preview", is_type_value, "4");
4062 isTypePreview = static_cast<cam_is_type_t>(atoi(is_type_value));
4063 LOGD("isTypeVideo: %d isTypePreview: %d", isTypeVideo, isTypePreview);
Thierry Strudel3d639192016-09-09 11:52:26 -07004064
4065 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4066 int32_t hal_version = CAM_HAL_V3;
4067 uint8_t captureIntent =
4068 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4069 mCaptureIntent = captureIntent;
4070 clear_metadata_buffer(mParameters);
4071 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
4072 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
4073 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004074 if (mFirstConfiguration) {
4075 // configure instant AEC
4076 // Instant AEC is a session based parameter and it is needed only
4077 // once per complete session after open camera.
4078 // i.e. This is set only once for the first capture request, after open camera.
4079 setInstantAEC(meta);
4080 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004081 uint8_t fwkVideoStabMode=0;
4082 if (meta.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
4083 fwkVideoStabMode = meta.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
4084 }
4085
4086 // If EIS setprop is enabled & if first capture setting has EIS enabled then only
4087 // turn it on for video/preview
4088 bool setEis = m_bEisEnable && fwkVideoStabMode && m_bEisSupportedSize &&
4089 (isTypeVideo >= IS_TYPE_EIS_2_0);
Thierry Strudel3d639192016-09-09 11:52:26 -07004090 int32_t vsMode;
4091 vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
4092 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
4093 rc = BAD_VALUE;
4094 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004095 LOGD("setEis %d", setEis);
4096 bool eis3Supported = false;
4097 size_t count = IS_TYPE_MAX;
4098 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
4099 for (size_t i = 0; i < count; i++) {
4100 if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0) {
4101 eis3Supported = true;
4102 break;
4103 }
4104 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004105
4106 //IS type will be 0 unless EIS is supported. If EIS is supported
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004107 //it could either be 4 or 5 depending on the stream and video size
Thierry Strudel3d639192016-09-09 11:52:26 -07004108 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4109 if (setEis) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004110 if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_PREVIEW) {
4111 is_type = isTypePreview;
4112 } else if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_VIDEO ) {
4113 if ( (isTypeVideo == IS_TYPE_EIS_3_0) && (eis3Supported == FALSE) ) {
4114 LOGW(" EIS_3.0 is not supported and so setting EIS_2.0");
Thierry Strudel3d639192016-09-09 11:52:26 -07004115 is_type = IS_TYPE_EIS_2_0;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004116 } else {
4117 is_type = isTypeVideo;
Thierry Strudel3d639192016-09-09 11:52:26 -07004118 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004119 } else {
4120 is_type = IS_TYPE_NONE;
4121 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004122 mStreamConfigInfo.is_type[i] = is_type;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004123 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004124 mStreamConfigInfo.is_type[i] = IS_TYPE_NONE;
4125 }
4126 }
4127
4128 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4129 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
4130
4131 int32_t tintless_value = 1;
4132 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4133 CAM_INTF_PARM_TINTLESS, tintless_value);
4134 //Disable CDS for HFR mode or if DIS/EIS is on.
4135 //CDS is a session parameter in the backend/ISP, so need to be set/reset
4136 //after every configure_stream
4137 if ((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
4138 (m_bIsVideo)) {
4139 int32_t cds = CAM_CDS_MODE_OFF;
4140 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4141 CAM_INTF_PARM_CDS_MODE, cds))
4142 LOGE("Failed to disable CDS for HFR mode");
4143
4144 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004145
4146 if (m_debug_avtimer || meta.exists(QCAMERA3_USE_AV_TIMER)) {
4147 uint8_t* use_av_timer = NULL;
4148
4149 if (m_debug_avtimer){
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004150 LOGI(" Enabling AV timer through setprop");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004151 use_av_timer = &m_debug_avtimer;
4152 }
4153 else{
4154 use_av_timer =
4155 meta.find(QCAMERA3_USE_AV_TIMER).data.u8;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004156 if (use_av_timer) {
4157 LOGI("Enabling AV timer through Metadata: use_av_timer: %d", *use_av_timer);
4158 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004159 }
4160
4161 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
4162 rc = BAD_VALUE;
4163 }
4164 }
4165
Thierry Strudel3d639192016-09-09 11:52:26 -07004166 setMobicat();
4167
4168 /* Set fps and hfr mode while sending meta stream info so that sensor
4169 * can configure appropriate streaming mode */
4170 mHFRVideoFps = DEFAULT_VIDEO_FPS;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004171 mMinInFlightRequests = MIN_INFLIGHT_REQUESTS;
4172 mMaxInFlightRequests = MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004173 if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
4174 rc = setHalFpsRange(meta, mParameters);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004175 if (rc == NO_ERROR) {
4176 int32_t max_fps =
4177 (int32_t) meta.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
Zhijun He21b864a2016-06-24 13:41:19 -07004178 if (max_fps == 60 || mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004179 mMinInFlightRequests = MIN_INFLIGHT_60FPS_REQUESTS;
4180 }
4181 /* For HFR, more buffers are dequeued upfront to improve the performance */
4182 if (mBatchSize) {
4183 mMinInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
4184 mMaxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
4185 }
4186 }
4187 else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004188 LOGE("setHalFpsRange failed");
4189 }
4190 }
4191 if (meta.exists(ANDROID_CONTROL_MODE)) {
4192 uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
4193 rc = extractSceneMode(meta, metaMode, mParameters);
4194 if (rc != NO_ERROR) {
4195 LOGE("extractSceneMode failed");
4196 }
4197 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004198 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07004199
Thierry Strudel04e026f2016-10-10 11:27:36 -07004200 if (meta.exists(QCAMERA3_VIDEO_HDR_MODE)) {
4201 cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
4202 meta.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
4203 rc = setVideoHdrMode(mParameters, vhdr);
4204 if (rc != NO_ERROR) {
4205 LOGE("setVideoHDR is failed");
4206 }
4207 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004208
Thierry Strudel3d639192016-09-09 11:52:26 -07004209 //TODO: validate the arguments, HSV scenemode should have only the
4210 //advertised fps ranges
4211
4212 /*set the capture intent, hal version, tintless, stream info,
4213 *and disenable parameters to the backend*/
4214 LOGD("set_parms META_STREAM_INFO " );
4215 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4216 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x "
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004217 "Format:%d is_type: %d",
Thierry Strudel3d639192016-09-09 11:52:26 -07004218 mStreamConfigInfo.type[i],
4219 mStreamConfigInfo.stream_sizes[i].width,
4220 mStreamConfigInfo.stream_sizes[i].height,
4221 mStreamConfigInfo.postprocess_mask[i],
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004222 mStreamConfigInfo.format[i],
4223 mStreamConfigInfo.is_type[i]);
Thierry Strudel3d639192016-09-09 11:52:26 -07004224 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004225
Thierry Strudel3d639192016-09-09 11:52:26 -07004226 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4227 mParameters);
4228 if (rc < 0) {
4229 LOGE("set_parms failed for hal version, stream info");
4230 }
4231
4232 cam_dimension_t sensor_dim;
4233 memset(&sensor_dim, 0, sizeof(sensor_dim));
4234 rc = getSensorOutputSize(sensor_dim);
4235 if (rc != NO_ERROR) {
4236 LOGE("Failed to get sensor output size");
4237 pthread_mutex_unlock(&mMutex);
4238 goto error_exit;
4239 }
4240
4241 mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
4242 gCamCapability[mCameraId]->active_array_size.height,
4243 sensor_dim.width, sensor_dim.height);
4244
4245 /* Set batchmode before initializing channel. Since registerBuffer
4246 * internally initializes some of the channels, better set batchmode
4247 * even before first register buffer */
4248 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4249 it != mStreamInfo.end(); it++) {
4250 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
4251 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
4252 && mBatchSize) {
4253 rc = channel->setBatchSize(mBatchSize);
4254 //Disable per frame map unmap for HFR/batchmode case
4255 rc |= channel->setPerFrameMapUnmap(false);
4256 if (NO_ERROR != rc) {
4257 LOGE("Channel init failed %d", rc);
4258 pthread_mutex_unlock(&mMutex);
4259 goto error_exit;
4260 }
4261 }
4262 }
4263
4264 //First initialize all streams
4265 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4266 it != mStreamInfo.end(); it++) {
4267 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
4268 if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
4269 ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004270 setEis) {
4271 for (size_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4272 if ( (1U << mStreamConfigInfo.type[i]) == channel->getStreamTypeMask() ) {
4273 is_type = mStreamConfigInfo.is_type[i];
4274 break;
4275 }
4276 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004277 rc = channel->initialize(is_type);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004278 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004279 rc = channel->initialize(IS_TYPE_NONE);
4280 }
4281 if (NO_ERROR != rc) {
4282 LOGE("Channel initialization failed %d", rc);
4283 pthread_mutex_unlock(&mMutex);
4284 goto error_exit;
4285 }
4286 }
4287
4288 if (mRawDumpChannel) {
4289 rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
4290 if (rc != NO_ERROR) {
4291 LOGE("Error: Raw Dump Channel init failed");
4292 pthread_mutex_unlock(&mMutex);
4293 goto error_exit;
4294 }
4295 }
4296 if (mSupportChannel) {
4297 rc = mSupportChannel->initialize(IS_TYPE_NONE);
4298 if (rc < 0) {
4299 LOGE("Support channel initialization failed");
4300 pthread_mutex_unlock(&mMutex);
4301 goto error_exit;
4302 }
4303 }
4304 if (mAnalysisChannel) {
4305 rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
4306 if (rc < 0) {
4307 LOGE("Analysis channel initialization failed");
4308 pthread_mutex_unlock(&mMutex);
4309 goto error_exit;
4310 }
4311 }
4312 if (mDummyBatchChannel) {
4313 rc = mDummyBatchChannel->setBatchSize(mBatchSize);
4314 if (rc < 0) {
4315 LOGE("mDummyBatchChannel setBatchSize failed");
4316 pthread_mutex_unlock(&mMutex);
4317 goto error_exit;
4318 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004319 rc = mDummyBatchChannel->initialize(IS_TYPE_NONE);
Thierry Strudel3d639192016-09-09 11:52:26 -07004320 if (rc < 0) {
4321 LOGE("mDummyBatchChannel initialization failed");
4322 pthread_mutex_unlock(&mMutex);
4323 goto error_exit;
4324 }
4325 }
4326
4327 // Set bundle info
4328 rc = setBundleInfo();
4329 if (rc < 0) {
4330 LOGE("setBundleInfo failed %d", rc);
4331 pthread_mutex_unlock(&mMutex);
4332 goto error_exit;
4333 }
4334
4335 //update settings from app here
4336 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
4337 mIsDeviceLinked = meta.find(QCAMERA3_DUALCAM_LINK_ENABLE).data.u8[0];
4338 LOGH("Dualcam: setting On=%d id =%d", mIsDeviceLinked, mCameraId);
4339 }
4340 if (meta.exists(QCAMERA3_DUALCAM_LINK_IS_MAIN)) {
4341 mIsMainCamera = meta.find(QCAMERA3_DUALCAM_LINK_IS_MAIN).data.u8[0];
4342 LOGH("Dualcam: Is this main camera = %d id =%d", mIsMainCamera, mCameraId);
4343 }
4344 if (meta.exists(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID)) {
4345 mLinkedCameraId = meta.find(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID).data.u8[0];
4346 LOGH("Dualcam: Linked camera Id %d id =%d", mLinkedCameraId, mCameraId);
4347
4348 if ( (mLinkedCameraId >= MM_CAMERA_MAX_NUM_SENSORS) &&
4349 (mLinkedCameraId != mCameraId) ) {
4350 LOGE("Dualcam: mLinkedCameraId %d is invalid, current cam id = %d",
4351 mLinkedCameraId, mCameraId);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004352 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07004353 goto error_exit;
4354 }
4355 }
4356
4357 // add bundle related cameras
4358 LOGH("%s: Dualcam: id =%d, mIsDeviceLinked=%d", __func__,mCameraId, mIsDeviceLinked);
4359 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004360 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
4361 &m_pDualCamCmdPtr->bundle_info;
4362 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07004363 if (mIsDeviceLinked)
4364 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_ON;
4365 else
4366 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
4367
4368 pthread_mutex_lock(&gCamLock);
4369
4370 if (sessionId[mLinkedCameraId] == 0xDEADBEEF) {
4371 LOGE("Dualcam: Invalid Session Id ");
4372 pthread_mutex_unlock(&gCamLock);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004373 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07004374 goto error_exit;
4375 }
4376
4377 if (mIsMainCamera == 1) {
4378 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
4379 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07004380 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004381 m_pRelCamSyncBuf->cam_role = CAM_ROLE_BAYER;
Thierry Strudel3d639192016-09-09 11:52:26 -07004382 // related session id should be session id of linked session
4383 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
4384 } else {
4385 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
4386 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07004387 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004388 m_pRelCamSyncBuf->cam_role = CAM_ROLE_MONO;
Thierry Strudel3d639192016-09-09 11:52:26 -07004389 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
4390 }
4391 pthread_mutex_unlock(&gCamLock);
4392
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004393 rc = mCameraHandle->ops->set_dual_cam_cmd(
4394 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07004395 if (rc < 0) {
4396 LOGE("Dualcam: link failed");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004397 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07004398 goto error_exit;
4399 }
4400 }
4401
4402 //Then start them.
4403 LOGH("Start META Channel");
4404 rc = mMetadataChannel->start();
4405 if (rc < 0) {
4406 LOGE("META channel start failed");
4407 pthread_mutex_unlock(&mMutex);
4408 goto error_exit;
4409 }
4410
4411 if (mAnalysisChannel) {
4412 rc = mAnalysisChannel->start();
4413 if (rc < 0) {
4414 LOGE("Analysis channel start failed");
4415 mMetadataChannel->stop();
4416 pthread_mutex_unlock(&mMutex);
4417 goto error_exit;
4418 }
4419 }
4420
4421 if (mSupportChannel) {
4422 rc = mSupportChannel->start();
4423 if (rc < 0) {
4424 LOGE("Support channel start failed");
4425 mMetadataChannel->stop();
4426 /* Although support and analysis are mutually exclusive today
4427 adding it in anycase for future proofing */
4428 if (mAnalysisChannel) {
4429 mAnalysisChannel->stop();
4430 }
4431 pthread_mutex_unlock(&mMutex);
4432 goto error_exit;
4433 }
4434 }
4435 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4436 it != mStreamInfo.end(); it++) {
4437 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
4438 LOGH("Start Processing Channel mask=%d",
4439 channel->getStreamTypeMask());
4440 rc = channel->start();
4441 if (rc < 0) {
4442 LOGE("channel start failed");
4443 pthread_mutex_unlock(&mMutex);
4444 goto error_exit;
4445 }
4446 }
4447
4448 if (mRawDumpChannel) {
4449 LOGD("Starting raw dump stream");
4450 rc = mRawDumpChannel->start();
4451 if (rc != NO_ERROR) {
4452 LOGE("Error Starting Raw Dump Channel");
4453 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4454 it != mStreamInfo.end(); it++) {
4455 QCamera3Channel *channel =
4456 (QCamera3Channel *)(*it)->stream->priv;
4457 LOGH("Stopping Processing Channel mask=%d",
4458 channel->getStreamTypeMask());
4459 channel->stop();
4460 }
4461 if (mSupportChannel)
4462 mSupportChannel->stop();
4463 if (mAnalysisChannel) {
4464 mAnalysisChannel->stop();
4465 }
4466 mMetadataChannel->stop();
4467 pthread_mutex_unlock(&mMutex);
4468 goto error_exit;
4469 }
4470 }
4471
4472 if (mChannelHandle) {
4473
4474 rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
4475 mChannelHandle);
4476 if (rc != NO_ERROR) {
4477 LOGE("start_channel failed %d", rc);
4478 pthread_mutex_unlock(&mMutex);
4479 goto error_exit;
4480 }
4481 }
4482
4483 goto no_error;
4484error_exit:
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004485 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07004486 return rc;
4487no_error:
Thierry Strudel3d639192016-09-09 11:52:26 -07004488 mWokenUpByDaemon = false;
4489 mPendingLiveRequest = 0;
4490 mFirstConfiguration = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07004491 }
4492
4493 uint32_t frameNumber = request->frame_number;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004494 cam_stream_ID_t streamsArray;
Thierry Strudel3d639192016-09-09 11:52:26 -07004495
4496 if (mFlushPerf) {
4497 //we cannot accept any requests during flush
4498 LOGE("process_capture_request cannot proceed during flush");
4499 pthread_mutex_unlock(&mMutex);
4500 return NO_ERROR; //should return an error
4501 }
4502
4503 if (meta.exists(ANDROID_REQUEST_ID)) {
4504 request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
4505 mCurrentRequestId = request_id;
4506 LOGD("Received request with id: %d", request_id);
4507 } else if (mState == CONFIGURED || mCurrentRequestId == -1){
4508 LOGE("Unable to find request id field, \
4509 & no previous id available");
4510 pthread_mutex_unlock(&mMutex);
4511 return NAME_NOT_FOUND;
4512 } else {
4513 LOGD("Re-using old request id");
4514 request_id = mCurrentRequestId;
4515 }
4516
4517 LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
4518 request->num_output_buffers,
4519 request->input_buffer,
4520 frameNumber);
4521 // Acquire all request buffers first
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004522 streamsArray.num_streams = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07004523 int blob_request = 0;
4524 uint32_t snapshotStreamId = 0;
4525 for (size_t i = 0; i < request->num_output_buffers; i++) {
4526 const camera3_stream_buffer_t& output = request->output_buffers[i];
4527 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
4528
4529 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004530 //FIXME??:Call function to store local copy of jpeg data for encode params.
Thierry Strudel3d639192016-09-09 11:52:26 -07004531 blob_request = 1;
4532 snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
4533 }
4534
4535 if (output.acquire_fence != -1) {
4536 rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
4537 close(output.acquire_fence);
4538 if (rc != OK) {
4539 LOGE("sync wait failed %d", rc);
4540 pthread_mutex_unlock(&mMutex);
4541 return rc;
4542 }
4543 }
4544
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004545 streamsArray.stream_request[streamsArray.num_streams++].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07004546 channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel3d639192016-09-09 11:52:26 -07004547
4548 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
4549 isVidBufRequested = true;
4550 }
4551 }
4552
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004553 //FIXME: Add checks to ensure to dups in validateCaptureRequest
4554 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
4555 itr++) {
4556 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
4557 streamsArray.stream_request[streamsArray.num_streams++].streamID =
4558 channel->getStreamID(channel->getStreamTypeMask());
4559
4560 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
4561 isVidBufRequested = true;
4562 }
4563 }
4564
Thierry Strudel3d639192016-09-09 11:52:26 -07004565 if (blob_request) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004566 KPI_ATRACE_CAMSCOPE_INT("SNAPSHOT", CAMSCOPE_HAL3_SNAPSHOT, 1);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004567 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
Thierry Strudel3d639192016-09-09 11:52:26 -07004568 }
4569 if (blob_request && mRawDumpChannel) {
4570 LOGD("Trigger Raw based on blob request if Raw dump is enabled");
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004571 streamsArray.stream_request[streamsArray.num_streams].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07004572 mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004573 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
Thierry Strudel3d639192016-09-09 11:52:26 -07004574 }
4575
4576 if(request->input_buffer == NULL) {
4577 /* Parse the settings:
4578 * - For every request in NORMAL MODE
4579 * - For every request in HFR mode during preview only case
4580 * - For first request of every batch in HFR mode during video
4581 * recording. In batchmode the same settings except frame number is
4582 * repeated in each request of the batch.
4583 */
4584 if (!mBatchSize ||
4585 (mBatchSize && !isVidBufRequested) ||
4586 (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004587 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
Thierry Strudel3d639192016-09-09 11:52:26 -07004588 if (rc < 0) {
4589 LOGE("fail to set frame parameters");
4590 pthread_mutex_unlock(&mMutex);
4591 return rc;
4592 }
4593 }
4594 /* For batchMode HFR, setFrameParameters is not called for every
4595 * request. But only frame number of the latest request is parsed.
4596 * Keep track of first and last frame numbers in a batch so that
4597 * metadata for the frame numbers of batch can be duplicated in
4598 * handleBatchMetadta */
4599 if (mBatchSize) {
4600 if (!mToBeQueuedVidBufs) {
4601 //start of the batch
4602 mFirstFrameNumberInBatch = request->frame_number;
4603 }
4604 if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4605 CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
4606 LOGE("Failed to set the frame number in the parameters");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004607 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07004608 return BAD_VALUE;
4609 }
4610 }
4611 if (mNeedSensorRestart) {
4612 /* Unlock the mutex as restartSensor waits on the channels to be
4613 * stopped, which in turn calls stream callback functions -
4614 * handleBufferWithLock and handleMetadataWithLock */
4615 pthread_mutex_unlock(&mMutex);
4616 rc = dynamicUpdateMetaStreamInfo();
4617 if (rc != NO_ERROR) {
4618 LOGE("Restarting the sensor failed");
4619 return BAD_VALUE;
4620 }
4621 mNeedSensorRestart = false;
4622 pthread_mutex_lock(&mMutex);
4623 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004624 if(mResetInstantAEC) {
4625 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4626 CAM_INTF_PARM_INSTANT_AEC, (uint8_t)CAM_AEC_NORMAL_CONVERGENCE);
4627 mResetInstantAEC = false;
4628 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004629 } else {
4630
4631 if (request->input_buffer->acquire_fence != -1) {
4632 rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
4633 close(request->input_buffer->acquire_fence);
4634 if (rc != OK) {
4635 LOGE("input buffer sync wait failed %d", rc);
4636 pthread_mutex_unlock(&mMutex);
4637 return rc;
4638 }
4639 }
4640 }
4641
4642 if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
4643 mLastCustIntentFrmNum = frameNumber;
4644 }
4645 /* Update pending request list and pending buffers map */
4646 PendingRequestInfo pendingRequest;
4647 pendingRequestIterator latestRequest;
4648 pendingRequest.frame_number = frameNumber;
4649 pendingRequest.num_buffers = request->num_output_buffers;
4650 pendingRequest.request_id = request_id;
4651 pendingRequest.blob_request = blob_request;
4652 pendingRequest.timestamp = 0;
4653 pendingRequest.bUrgentReceived = 0;
4654 if (request->input_buffer) {
4655 pendingRequest.input_buffer =
4656 (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
4657 *(pendingRequest.input_buffer) = *(request->input_buffer);
4658 pInputBuffer = pendingRequest.input_buffer;
4659 } else {
4660 pendingRequest.input_buffer = NULL;
4661 pInputBuffer = NULL;
4662 }
4663
4664 pendingRequest.pipeline_depth = 0;
4665 pendingRequest.partial_result_cnt = 0;
4666 extractJpegMetadata(mCurJpegMeta, request);
4667 pendingRequest.jpegMetadata = mCurJpegMeta;
4668 pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
4669 pendingRequest.shutter_notified = false;
4670
4671 //extract capture intent
4672 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4673 mCaptureIntent =
4674 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4675 }
4676 pendingRequest.capture_intent = mCaptureIntent;
Samuel Ha68ba5172016-12-15 18:41:12 -08004677 /* DevCamDebug metadata processCaptureRequest */
4678 if (meta.exists(DEVCAMDEBUG_META_ENABLE)) {
4679 mDevCamDebugMetaEnable =
4680 meta.find(DEVCAMDEBUG_META_ENABLE).data.u8[0];
4681 }
4682 pendingRequest.DevCamDebug_meta_enable = mDevCamDebugMetaEnable;
4683 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07004684
4685 //extract CAC info
4686 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
4687 mCacMode =
4688 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
4689 }
4690 pendingRequest.fwkCacMode = mCacMode;
4691
4692 PendingBuffersInRequest bufsForCurRequest;
4693 bufsForCurRequest.frame_number = frameNumber;
4694 // Mark current timestamp for the new request
4695 bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
4696
4697 for (size_t i = 0; i < request->num_output_buffers; i++) {
4698 RequestedBufferInfo requestedBuf;
4699 memset(&requestedBuf, 0, sizeof(requestedBuf));
4700 requestedBuf.stream = request->output_buffers[i].stream;
4701 requestedBuf.buffer = NULL;
4702 pendingRequest.buffers.push_back(requestedBuf);
4703
4704 // Add to buffer handle the pending buffers list
4705 PendingBufferInfo bufferInfo;
4706 bufferInfo.buffer = request->output_buffers[i].buffer;
4707 bufferInfo.stream = request->output_buffers[i].stream;
4708 bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
4709 QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
4710 LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
4711 frameNumber, bufferInfo.buffer,
4712 channel->getStreamTypeMask(), bufferInfo.stream->format);
4713 }
4714 // Add this request packet into mPendingBuffersMap
4715 mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
4716 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
4717 mPendingBuffersMap.get_num_overall_buffers());
4718
4719 latestRequest = mPendingRequestsList.insert(
4720 mPendingRequestsList.end(), pendingRequest);
4721 if(mFlush) {
4722 LOGI("mFlush is true");
4723 pthread_mutex_unlock(&mMutex);
4724 return NO_ERROR;
4725 }
4726
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004727 int indexUsed;
Thierry Strudel3d639192016-09-09 11:52:26 -07004728 // Notify metadata channel we receive a request
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004729 mMetadataChannel->request(NULL, frameNumber, indexUsed);
Thierry Strudel3d639192016-09-09 11:52:26 -07004730
4731 if(request->input_buffer != NULL){
4732 LOGD("Input request, frame_number %d", frameNumber);
4733 rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
4734 if (NO_ERROR != rc) {
4735 LOGE("fail to set reproc parameters");
4736 pthread_mutex_unlock(&mMutex);
4737 return rc;
4738 }
4739 }
4740
4741 // Call request on other streams
4742 uint32_t streams_need_metadata = 0;
4743 pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
4744 for (size_t i = 0; i < request->num_output_buffers; i++) {
4745 const camera3_stream_buffer_t& output = request->output_buffers[i];
4746 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
4747
4748 if (channel == NULL) {
4749 LOGW("invalid channel pointer for stream");
4750 continue;
4751 }
4752
4753 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
4754 LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
4755 output.buffer, request->input_buffer, frameNumber);
4756 if(request->input_buffer != NULL){
4757 rc = channel->request(output.buffer, frameNumber,
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004758 pInputBuffer, &mReprocMeta, indexUsed, false, false);
Thierry Strudel3d639192016-09-09 11:52:26 -07004759 if (rc < 0) {
4760 LOGE("Fail to request on picture channel");
4761 pthread_mutex_unlock(&mMutex);
4762 return rc;
4763 }
4764 } else {
4765 LOGD("snapshot request with buffer %p, frame_number %d",
4766 output.buffer, frameNumber);
4767 if (!request->settings) {
4768 rc = channel->request(output.buffer, frameNumber,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004769 NULL, mPrevParameters, indexUsed);
Thierry Strudel3d639192016-09-09 11:52:26 -07004770 } else {
4771 rc = channel->request(output.buffer, frameNumber,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004772 NULL, mParameters, indexUsed);
Thierry Strudel3d639192016-09-09 11:52:26 -07004773 }
4774 if (rc < 0) {
4775 LOGE("Fail to request on picture channel");
4776 pthread_mutex_unlock(&mMutex);
4777 return rc;
4778 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004779
4780 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
4781 uint32_t j = 0;
4782 for (j = 0; j < streamsArray.num_streams; j++) {
4783 if (streamsArray.stream_request[j].streamID == streamId) {
4784 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
4785 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
4786 else
4787 streamsArray.stream_request[j].buf_index = indexUsed;
4788 break;
4789 }
4790 }
4791 if (j == streamsArray.num_streams) {
4792 LOGE("Did not find matching stream to update index");
4793 assert(0);
4794 }
4795
Thierry Strudel3d639192016-09-09 11:52:26 -07004796 pendingBufferIter->need_metadata = true;
4797 streams_need_metadata++;
4798 }
4799 } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
4800 bool needMetadata = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07004801 QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
4802 rc = yuvChannel->request(output.buffer, frameNumber,
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004803 pInputBuffer, (pInputBuffer ? &mReprocMeta : mParameters),
4804 needMetadata, indexUsed, false, false);
Thierry Strudel3d639192016-09-09 11:52:26 -07004805 if (rc < 0) {
4806 LOGE("Fail to request on YUV channel");
4807 pthread_mutex_unlock(&mMutex);
4808 return rc;
4809 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004810
4811 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
4812 uint32_t j = 0;
4813 for (j = 0; j < streamsArray.num_streams; j++) {
4814 if (streamsArray.stream_request[j].streamID == streamId) {
4815 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
4816 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
4817 else
4818 streamsArray.stream_request[j].buf_index = indexUsed;
4819 break;
4820 }
4821 }
4822 if (j == streamsArray.num_streams) {
4823 LOGE("Did not find matching stream to update index");
4824 assert(0);
4825 }
4826
Thierry Strudel3d639192016-09-09 11:52:26 -07004827 pendingBufferIter->need_metadata = needMetadata;
4828 if (needMetadata)
4829 streams_need_metadata += 1;
4830 LOGD("calling YUV channel request, need_metadata is %d",
4831 needMetadata);
4832 } else {
4833 LOGD("request with buffer %p, frame_number %d",
4834 output.buffer, frameNumber);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004835
4836 rc = channel->request(output.buffer, frameNumber, indexUsed);
4837
4838 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
4839 uint32_t j = 0;
4840 for (j = 0; j < streamsArray.num_streams; j++) {
4841 if (streamsArray.stream_request[j].streamID == streamId) {
4842 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
4843 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
4844 else
4845 streamsArray.stream_request[j].buf_index = indexUsed;
4846 break;
4847 }
4848 }
4849 if (j == streamsArray.num_streams) {
4850 LOGE("Did not find matching stream to update index");
4851 assert(0);
4852 }
4853
Thierry Strudel3d639192016-09-09 11:52:26 -07004854 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
4855 && mBatchSize) {
4856 mToBeQueuedVidBufs++;
4857 if (mToBeQueuedVidBufs == mBatchSize) {
4858 channel->queueBatchBuf();
4859 }
4860 }
4861 if (rc < 0) {
4862 LOGE("request failed");
4863 pthread_mutex_unlock(&mMutex);
4864 return rc;
4865 }
4866 }
4867 pendingBufferIter++;
4868 }
4869
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004870 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
4871 itr++) {
4872 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
4873
4874 if (channel == NULL) {
4875 LOGE("invalid channel pointer for stream");
4876 assert(0);
4877 return BAD_VALUE;
4878 }
4879
4880 InternalRequest requestedStream;
4881 requestedStream = (*itr);
4882
4883
4884 if ((*itr).stream->format == HAL_PIXEL_FORMAT_BLOB) {
4885 LOGD("snapshot request internally input buffer %p, frame_number %d",
4886 request->input_buffer, frameNumber);
4887 if(request->input_buffer != NULL){
4888 rc = channel->request(NULL, frameNumber,
4889 pInputBuffer, &mReprocMeta, indexUsed, true, requestedStream.meteringOnly);
4890 if (rc < 0) {
4891 LOGE("Fail to request on picture channel");
4892 pthread_mutex_unlock(&mMutex);
4893 return rc;
4894 }
4895 } else {
4896 LOGD("snapshot request with frame_number %d", frameNumber);
4897 if (!request->settings) {
4898 rc = channel->request(NULL, frameNumber,
4899 NULL, mPrevParameters, indexUsed, true, requestedStream.meteringOnly);
4900 } else {
4901 rc = channel->request(NULL, frameNumber,
4902 NULL, mParameters, indexUsed, true, requestedStream.meteringOnly);
4903 }
4904 if (rc < 0) {
4905 LOGE("Fail to request on picture channel");
4906 pthread_mutex_unlock(&mMutex);
4907 return rc;
4908 }
4909
4910 if ((*itr).meteringOnly != 1) {
4911 requestedStream.need_metadata = 1;
4912 streams_need_metadata++;
4913 }
4914 }
4915
4916 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
4917 uint32_t j = 0;
4918 for (j = 0; j < streamsArray.num_streams; j++) {
4919 if (streamsArray.stream_request[j].streamID == streamId) {
4920 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
4921 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
4922 else
4923 streamsArray.stream_request[j].buf_index = indexUsed;
4924 break;
4925 }
4926 }
4927 if (j == streamsArray.num_streams) {
4928 LOGE("Did not find matching stream to update index");
4929 assert(0);
4930 }
4931
4932 } else {
4933 LOGE("Internal requests not supported on this stream type");
4934 assert(0);
4935 return INVALID_OPERATION;
4936 }
4937 latestRequest->internalRequestList.push_back(requestedStream);
4938 }
4939
Thierry Strudel3d639192016-09-09 11:52:26 -07004940 //If 2 streams have need_metadata set to true, fail the request, unless
4941 //we copy/reference count the metadata buffer
4942 if (streams_need_metadata > 1) {
4943 LOGE("not supporting request in which two streams requires"
4944 " 2 HAL metadata for reprocessing");
4945 pthread_mutex_unlock(&mMutex);
4946 return -EINVAL;
4947 }
4948
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004949 if (request->input_buffer == NULL) {
Thierry Strudel3d639192016-09-09 11:52:26 -07004950 /* Set the parameters to backend:
4951 * - For every request in NORMAL MODE
4952 * - For every request in HFR mode during preview only case
4953 * - Once every batch in HFR mode during video recording
4954 */
4955 if (!mBatchSize ||
4956 (mBatchSize && !isVidBufRequested) ||
4957 (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
4958 LOGD("set_parms batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
4959 mBatchSize, isVidBufRequested,
4960 mToBeQueuedVidBufs);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004961
4962 if(mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize)) {
4963 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
4964 uint32_t m = 0;
4965 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
4966 if (streamsArray.stream_request[k].streamID ==
4967 mBatchedStreamsArray.stream_request[m].streamID)
4968 break;
4969 }
4970 if (m == mBatchedStreamsArray.num_streams) {
4971 mBatchedStreamsArray.stream_request\
4972 [mBatchedStreamsArray.num_streams].streamID =
4973 streamsArray.stream_request[k].streamID;
4974 mBatchedStreamsArray.stream_request\
4975 [mBatchedStreamsArray.num_streams].buf_index =
4976 streamsArray.stream_request[k].buf_index;
4977 mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
4978 }
4979 }
4980 streamsArray = mBatchedStreamsArray;
4981 }
4982 /* Update stream id of all the requested buffers */
4983 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
4984 LOGE("Failed to set stream type mask in the parameters");
4985 return BAD_VALUE;
4986 }
4987
Thierry Strudel3d639192016-09-09 11:52:26 -07004988 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4989 mParameters);
4990 if (rc < 0) {
4991 LOGE("set_parms failed");
4992 }
4993 /* reset to zero coz, the batch is queued */
4994 mToBeQueuedVidBufs = 0;
4995 mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004996 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
4997 } else if (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs != mBatchSize)) {
4998 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
4999 uint32_t m = 0;
5000 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5001 if (streamsArray.stream_request[k].streamID ==
5002 mBatchedStreamsArray.stream_request[m].streamID)
5003 break;
5004 }
5005 if (m == mBatchedStreamsArray.num_streams) {
5006 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].streamID =
5007 streamsArray.stream_request[k].streamID;
5008 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].buf_index =
5009 streamsArray.stream_request[k].buf_index;
5010 mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
5011 }
5012 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005013 }
5014 mPendingLiveRequest++;
5015 }
5016
5017 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
5018
5019 mState = STARTED;
5020 // Added a timed condition wait
5021 struct timespec ts;
5022 uint8_t isValidTimeout = 1;
5023 rc = clock_gettime(CLOCK_REALTIME, &ts);
5024 if (rc < 0) {
5025 isValidTimeout = 0;
5026 LOGE("Error reading the real time clock!!");
5027 }
5028 else {
5029 // Make timeout as 5 sec for request to be honored
5030 ts.tv_sec += 5;
5031 }
5032 //Block on conditional variable
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005033 while ((mPendingLiveRequest >= mMinInFlightRequests) && !pInputBuffer &&
Thierry Strudel3d639192016-09-09 11:52:26 -07005034 (mState != ERROR) && (mState != DEINIT)) {
5035 if (!isValidTimeout) {
5036 LOGD("Blocking on conditional wait");
5037 pthread_cond_wait(&mRequestCond, &mMutex);
5038 }
5039 else {
5040 LOGD("Blocking on timed conditional wait");
5041 rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
5042 if (rc == ETIMEDOUT) {
5043 rc = -ENODEV;
5044 LOGE("Unblocked on timeout!!!!");
5045 break;
5046 }
5047 }
5048 LOGD("Unblocked");
5049 if (mWokenUpByDaemon) {
5050 mWokenUpByDaemon = false;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005051 if (mPendingLiveRequest < mMaxInFlightRequests)
Thierry Strudel3d639192016-09-09 11:52:26 -07005052 break;
5053 }
5054 }
5055 pthread_mutex_unlock(&mMutex);
5056
5057 return rc;
5058}
5059
5060/*===========================================================================
5061 * FUNCTION : dump
5062 *
5063 * DESCRIPTION:
5064 *
5065 * PARAMETERS :
5066 *
5067 *
5068 * RETURN :
5069 *==========================================================================*/
5070void QCamera3HardwareInterface::dump(int fd)
5071{
5072 pthread_mutex_lock(&mMutex);
5073 dprintf(fd, "\n Camera HAL3 information Begin \n");
5074
5075 dprintf(fd, "\nNumber of pending requests: %zu \n",
5076 mPendingRequestsList.size());
5077 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
5078 dprintf(fd, " Frame | Number of Buffers | Req Id: | Blob Req | Input buffer present\n");
5079 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
5080 for(pendingRequestIterator i = mPendingRequestsList.begin();
5081 i != mPendingRequestsList.end(); i++) {
5082 dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
5083 i->frame_number, i->num_buffers, i->request_id, i->blob_request,
5084 i->input_buffer);
5085 }
5086 dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
5087 mPendingBuffersMap.get_num_overall_buffers());
5088 dprintf(fd, "-------+------------------\n");
5089 dprintf(fd, " Frame | Stream type mask \n");
5090 dprintf(fd, "-------+------------------\n");
5091 for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
5092 for(auto &j : req.mPendingBufferList) {
5093 QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
5094 dprintf(fd, " %5d | %11d \n",
5095 req.frame_number, channel->getStreamTypeMask());
5096 }
5097 }
5098 dprintf(fd, "-------+------------------\n");
5099
5100 dprintf(fd, "\nPending frame drop list: %zu\n",
5101 mPendingFrameDropList.size());
5102 dprintf(fd, "-------+-----------\n");
5103 dprintf(fd, " Frame | Stream ID \n");
5104 dprintf(fd, "-------+-----------\n");
5105 for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
5106 i != mPendingFrameDropList.end(); i++) {
5107 dprintf(fd, " %5d | %9d \n",
5108 i->frame_number, i->stream_ID);
5109 }
5110 dprintf(fd, "-------+-----------\n");
5111
5112 dprintf(fd, "\n Camera HAL3 information End \n");
5113
5114 /* use dumpsys media.camera as trigger to send update debug level event */
5115 mUpdateDebugLevel = true;
5116 pthread_mutex_unlock(&mMutex);
5117 return;
5118}
5119
5120/*===========================================================================
5121 * FUNCTION : flush
5122 *
5123 * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
5124 * conditionally restarts channels
5125 *
5126 * PARAMETERS :
5127 * @ restartChannels: re-start all channels
5128 *
5129 *
5130 * RETURN :
5131 * 0 on success
5132 * Error code on failure
5133 *==========================================================================*/
5134int QCamera3HardwareInterface::flush(bool restartChannels)
5135{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08005136 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07005137 int32_t rc = NO_ERROR;
5138
5139 LOGD("Unblocking Process Capture Request");
5140 pthread_mutex_lock(&mMutex);
5141 mFlush = true;
5142 pthread_mutex_unlock(&mMutex);
5143
5144 rc = stopAllChannels();
5145 // unlink of dualcam
5146 if (mIsDeviceLinked) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005147 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
5148 &m_pDualCamCmdPtr->bundle_info;
5149 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005150 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
5151 pthread_mutex_lock(&gCamLock);
5152
5153 if (mIsMainCamera == 1) {
5154 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
5155 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005156 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07005157 // related session id should be session id of linked session
5158 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5159 } else {
5160 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
5161 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005162 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07005163 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5164 }
5165 pthread_mutex_unlock(&gCamLock);
5166
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005167 rc = mCameraHandle->ops->set_dual_cam_cmd(
5168 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07005169 if (rc < 0) {
5170 LOGE("Dualcam: Unlink failed, but still proceed to close");
5171 }
5172 }
5173
5174 if (rc < 0) {
5175 LOGE("stopAllChannels failed");
5176 return rc;
5177 }
5178 if (mChannelHandle) {
5179 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
5180 mChannelHandle);
5181 }
5182
5183 // Reset bundle info
5184 rc = setBundleInfo();
5185 if (rc < 0) {
5186 LOGE("setBundleInfo failed %d", rc);
5187 return rc;
5188 }
5189
5190 // Mutex Lock
5191 pthread_mutex_lock(&mMutex);
5192
5193 // Unblock process_capture_request
5194 mPendingLiveRequest = 0;
5195 pthread_cond_signal(&mRequestCond);
5196
5197 rc = notifyErrorForPendingRequests();
5198 if (rc < 0) {
5199 LOGE("notifyErrorForPendingRequests failed");
5200 pthread_mutex_unlock(&mMutex);
5201 return rc;
5202 }
5203
5204 mFlush = false;
5205
5206 // Start the Streams/Channels
5207 if (restartChannels) {
5208 rc = startAllChannels();
5209 if (rc < 0) {
5210 LOGE("startAllChannels failed");
5211 pthread_mutex_unlock(&mMutex);
5212 return rc;
5213 }
5214 }
5215
5216 if (mChannelHandle) {
5217 mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
5218 mChannelHandle);
5219 if (rc < 0) {
5220 LOGE("start_channel failed");
5221 pthread_mutex_unlock(&mMutex);
5222 return rc;
5223 }
5224 }
5225
5226 pthread_mutex_unlock(&mMutex);
5227
5228 return 0;
5229}
5230
/*===========================================================================
 * FUNCTION   : flushPerf
 *
 * DESCRIPTION: This is the performance optimization version of flush that does
 *              not use stream off, rather flushes the system: it sends a flush
 *              command to the backend, then waits (bounded by FLUSH_TIMEOUT
 *              seconds when the clock is readable) for all HAL-owned buffers
 *              to come back before returning errored results to the framework.
 *
 * PARAMETERS : None
 *
 * RETURN     : 0 : success
 *              -EINVAL: input is malformed (device is not valid)
 *              -ENODEV: if the device has encountered a serious error
 *==========================================================================*/
int QCamera3HardwareInterface::flushPerf()
{
    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
    int32_t rc = 0;
    struct timespec timeout;
    bool timed_wait = false;

    // Mark the flush in progress and snapshot how many buffers the HAL still
    // owns; all of them must return before the flush can complete.
    pthread_mutex_lock(&mMutex);
    mFlushPerf = true;
    mPendingBuffersMap.numPendingBufsAtFlush =
            mPendingBuffersMap.get_num_overall_buffers();
    LOGD("Calling flush. Wait for %d buffers to return",
            mPendingBuffersMap.numPendingBufsAtFlush);

    /* send the flush event to the backend */
    rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
    if (rc < 0) {
        LOGE("Error in flush: IOCTL failure");
        mFlushPerf = false;
        pthread_mutex_unlock(&mMutex);
        return -ENODEV;
    }

    // Fast path: nothing outstanding, no need to wait.
    if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
        LOGD("No pending buffers in HAL, return flush");
        mFlushPerf = false;
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    /* wait on a signal that buffers were received */
    // If the realtime clock cannot be read, fall back to an unbounded wait.
    rc = clock_gettime(CLOCK_REALTIME, &timeout);
    if (rc < 0) {
        LOGE("Error reading the real time clock, cannot use timed wait");
    } else {
        timeout.tv_sec += FLUSH_TIMEOUT;
        timed_wait = true;
    }

    //Block on conditional variable
    // numPendingBufsAtFlush is decremented elsewhere (buffer-return path) and
    // mBuffersCond is signalled; pthread_cond_(timed)wait returns an
    // errno-style code directly (not -1/errno), hence the rc != 0 checks.
    while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
        LOGD("Waiting on mBuffersCond");
        if (!timed_wait) {
            rc = pthread_cond_wait(&mBuffersCond, &mMutex);
            if (rc != 0) {
                LOGE("pthread_cond_wait failed due to rc = %s",
                        strerror(rc));
                break;
            }
        } else {
            rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
            if (rc != 0) {
                LOGE("pthread_cond_timedwait failed due to rc = %s",
                        strerror(rc));
                break;
            }
        }
    }
    // Any wait failure (including ETIMEDOUT) is treated as a device error.
    if (rc != 0) {
        mFlushPerf = false;
        pthread_mutex_unlock(&mMutex);
        return -ENODEV;
    }

    LOGD("Received buffers, now safe to return them");

    //make sure the channels handle flush
    //currently only required for the picture channel to release snapshot resources
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3Channel *channel = (*it)->channel;
        if (channel) {
            rc = channel->flush();
            if (rc) {
                LOGE("Flushing the channels failed with error %d", rc);
                // even though the channel flush failed we need to continue and
                // return the buffers we have to the framework, however the return
                // value will be an error
                rc = -ENODEV;
            }
        }
    }

    /* notify the frameworks and send errored results */
    rc = notifyErrorForPendingRequests();
    if (rc < 0) {
        LOGE("notifyErrorForPendingRequests failed");
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    //unblock process_capture_request
    mPendingLiveRequest = 0;
    unblockRequestIfNecessary();

    mFlushPerf = false;
    pthread_mutex_unlock(&mMutex);
    LOGD ("Flush Operation complete. rc = %d", rc);
    return rc;
}
5344
/*===========================================================================
 * FUNCTION   : handleCameraDeviceError
 *
 * DESCRIPTION: This function calls internal flush and notifies the error to
 *              framework and updates the state variable. No-op when the
 *              device is not currently in the ERROR state.
 *
 * PARAMETERS : None
 *
 * RETURN     : NO_ERROR on Success
 *              Error code on failure
 *==========================================================================*/
int32_t QCamera3HardwareInterface::handleCameraDeviceError()
{
    int32_t rc = NO_ERROR;

    {
        // mFlushLock is held across the state check, flush and transition to
        // DEINIT — presumably to serialize with other flush callers; confirm.
        Mutex::Autolock lock(mFlushLock);
        pthread_mutex_lock(&mMutex);
        if (mState != ERROR) {
            //if mState != ERROR, nothing to be done
            pthread_mutex_unlock(&mMutex);
            return NO_ERROR;
        }
        // Release mMutex before calling flush(), which acquires it itself.
        pthread_mutex_unlock(&mMutex);

        rc = flush(false /* restart channels */);
        if (NO_ERROR != rc) {
            // Flush failure is logged but recovery continues: the device is
            // moved to DEINIT regardless.
            LOGE("internal flush to handle mState = ERROR failed");
        }

        pthread_mutex_lock(&mMutex);
        mState = DEINIT;
        pthread_mutex_unlock(&mMutex);
    }

    // Notify the framework of an unrecoverable device error (no stream,
    // frame_number 0) after the device has been moved to DEINIT.
    camera3_notify_msg_t notify_msg;
    memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
    notify_msg.type = CAMERA3_MSG_ERROR;
    notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
    notify_msg.message.error.error_stream = NULL;
    notify_msg.message.error.frame_number = 0;
    orchestrateNotify(&notify_msg);

    return rc;
}
5390
5391/*===========================================================================
5392 * FUNCTION : captureResultCb
5393 *
5394 * DESCRIPTION: Callback handler for all capture result
5395 * (streams, as well as metadata)
5396 *
5397 * PARAMETERS :
5398 * @metadata : metadata information
5399 * @buffer : actual gralloc buffer to be returned to frameworks.
5400 * NULL if metadata.
5401 *
5402 * RETURN : NONE
5403 *==========================================================================*/
5404void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
5405 camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
5406{
5407 if (metadata_buf) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005408 pthread_mutex_lock(&mMutex);
5409 uint8_t batchSize = mBatchSize;
5410 pthread_mutex_unlock(&mMutex);
5411 if (batchSize) {
Thierry Strudel3d639192016-09-09 11:52:26 -07005412 handleBatchMetadata(metadata_buf,
5413 true /* free_and_bufdone_meta_buf */);
5414 } else { /* mBatchSize = 0 */
5415 hdrPlusPerfLock(metadata_buf);
5416 pthread_mutex_lock(&mMutex);
5417 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005418 true /* free_and_bufdone_meta_buf */,
5419 false /* first frame of batch metadata */ );
Thierry Strudel3d639192016-09-09 11:52:26 -07005420 pthread_mutex_unlock(&mMutex);
5421 }
5422 } else if (isInputBuffer) {
5423 pthread_mutex_lock(&mMutex);
5424 handleInputBufferWithLock(frame_number);
5425 pthread_mutex_unlock(&mMutex);
5426 } else {
5427 pthread_mutex_lock(&mMutex);
5428 handleBufferWithLock(buffer, frame_number);
5429 pthread_mutex_unlock(&mMutex);
5430 }
5431 return;
5432}
5433
5434/*===========================================================================
5435 * FUNCTION : getReprocessibleOutputStreamId
5436 *
5437 * DESCRIPTION: Get source output stream id for the input reprocess stream
5438 * based on size and format, which would be the largest
5439 * output stream if an input stream exists.
5440 *
5441 * PARAMETERS :
5442 * @id : return the stream id if found
5443 *
5444 * RETURN : int32_t type of status
5445 * NO_ERROR -- success
5446 * none-zero failure code
5447 *==========================================================================*/
5448int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
5449{
5450 /* check if any output or bidirectional stream with the same size and format
5451 and return that stream */
5452 if ((mInputStreamInfo.dim.width > 0) &&
5453 (mInputStreamInfo.dim.height > 0)) {
5454 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5455 it != mStreamInfo.end(); it++) {
5456
5457 camera3_stream_t *stream = (*it)->stream;
5458 if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
5459 (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
5460 (stream->format == mInputStreamInfo.format)) {
5461 // Usage flag for an input stream and the source output stream
5462 // may be different.
5463 LOGD("Found reprocessible output stream! %p", *it);
5464 LOGD("input stream usage 0x%x, current stream usage 0x%x",
5465 stream->usage, mInputStreamInfo.usage);
5466
5467 QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
5468 if (channel != NULL && channel->mStreams[0]) {
5469 id = channel->mStreams[0]->getMyServerID();
5470 return NO_ERROR;
5471 }
5472 }
5473 }
5474 } else {
5475 LOGD("No input stream, so no reprocessible output stream");
5476 }
5477 return NAME_NOT_FOUND;
5478}
5479
5480/*===========================================================================
5481 * FUNCTION : lookupFwkName
5482 *
5483 * DESCRIPTION: In case the enum is not same in fwk and backend
5484 * make sure the parameter is correctly propogated
5485 *
5486 * PARAMETERS :
5487 * @arr : map between the two enums
5488 * @len : len of the map
5489 * @hal_name : name of the hal_parm to map
5490 *
5491 * RETURN : int type of status
5492 * fwk_name -- success
5493 * none-zero failure code
5494 *==========================================================================*/
5495template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
5496 size_t len, halType hal_name)
5497{
5498
5499 for (size_t i = 0; i < len; i++) {
5500 if (arr[i].hal_name == hal_name) {
5501 return arr[i].fwk_name;
5502 }
5503 }
5504
5505 /* Not able to find matching framework type is not necessarily
5506 * an error case. This happens when mm-camera supports more attributes
5507 * than the frameworks do */
5508 LOGH("Cannot find matching framework type");
5509 return NAME_NOT_FOUND;
5510}
5511
5512/*===========================================================================
5513 * FUNCTION : lookupHalName
5514 *
5515 * DESCRIPTION: In case the enum is not same in fwk and backend
5516 * make sure the parameter is correctly propogated
5517 *
5518 * PARAMETERS :
5519 * @arr : map between the two enums
5520 * @len : len of the map
5521 * @fwk_name : name of the hal_parm to map
5522 *
5523 * RETURN : int32_t type of status
5524 * hal_name -- success
5525 * none-zero failure code
5526 *==========================================================================*/
5527template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
5528 size_t len, fwkType fwk_name)
5529{
5530 for (size_t i = 0; i < len; i++) {
5531 if (arr[i].fwk_name == fwk_name) {
5532 return arr[i].hal_name;
5533 }
5534 }
5535
5536 LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
5537 return NAME_NOT_FOUND;
5538}
5539
5540/*===========================================================================
5541 * FUNCTION : lookupProp
5542 *
5543 * DESCRIPTION: lookup a value by its name
5544 *
5545 * PARAMETERS :
5546 * @arr : map between the two enums
5547 * @len : size of the map
5548 * @name : name to be looked up
5549 *
5550 * RETURN : Value if found
5551 * CAM_CDS_MODE_MAX if not found
5552 *==========================================================================*/
5553template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
5554 size_t len, const char *name)
5555{
5556 if (name) {
5557 for (size_t i = 0; i < len; i++) {
5558 if (!strcmp(arr[i].desc, name)) {
5559 return arr[i].val;
5560 }
5561 }
5562 }
5563 return CAM_CDS_MODE_MAX;
5564}
5565
5566/*===========================================================================
5567 *
5568 * DESCRIPTION:
5569 *
5570 * PARAMETERS :
5571 * @metadata : metadata information from callback
5572 * @timestamp: metadata buffer timestamp
5573 * @request_id: request id
5574 * @jpegMetadata: additional jpeg metadata
Samuel Ha68ba5172016-12-15 18:41:12 -08005575 * @DevCamDebug_meta_enable: enable DevCamDebug meta
5576 * // DevCamDebug metadata end
Thierry Strudel3d639192016-09-09 11:52:26 -07005577 * @pprocDone: whether internal offline postprocsesing is done
5578 *
5579 * RETURN : camera_metadata_t*
5580 * metadata in a format specified by fwk
5581 *==========================================================================*/
5582camera_metadata_t*
5583QCamera3HardwareInterface::translateFromHalMetadata(
5584 metadata_buffer_t *metadata,
5585 nsecs_t timestamp,
5586 int32_t request_id,
5587 const CameraMetadata& jpegMetadata,
5588 uint8_t pipeline_depth,
5589 uint8_t capture_intent,
Samuel Ha68ba5172016-12-15 18:41:12 -08005590 /* DevCamDebug metadata translateFromHalMetadata argument */
5591 uint8_t DevCamDebug_meta_enable,
5592 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07005593 bool pprocDone,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005594 uint8_t fwk_cacMode,
5595 bool firstMetadataInBatch)
Thierry Strudel3d639192016-09-09 11:52:26 -07005596{
5597 CameraMetadata camMetadata;
5598 camera_metadata_t *resultMetadata;
5599
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005600 if (mBatchSize && !firstMetadataInBatch) {
5601 /* In batch mode, use cached metadata from the first metadata
5602 in the batch */
5603 camMetadata.clear();
5604 camMetadata = mCachedMetadata;
5605 }
5606
Thierry Strudel3d639192016-09-09 11:52:26 -07005607 if (jpegMetadata.entryCount())
5608 camMetadata.append(jpegMetadata);
5609
5610 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
5611 camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
5612 camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pipeline_depth, 1);
5613 camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &capture_intent, 1);
Samuel Ha68ba5172016-12-15 18:41:12 -08005614 if (mBatchSize == 0) {
5615 // DevCamDebug metadata translateFromHalMetadata. Only update this one for non-HFR mode
5616 camMetadata.update(DEVCAMDEBUG_META_ENABLE, &DevCamDebug_meta_enable, 1);
5617 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005618
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005619 if (mBatchSize && !firstMetadataInBatch) {
5620 /* In batch mode, use cached metadata instead of parsing metadata buffer again */
5621 resultMetadata = camMetadata.release();
5622 return resultMetadata;
5623 }
5624
Samuel Ha68ba5172016-12-15 18:41:12 -08005625 // atrace_begin(ATRACE_TAG_ALWAYS, "DevCamDebugInfo");
5626 // Only update DevCameraDebug metadta conditionally: non-HFR mode and it is enabled.
5627 if (mBatchSize == 0 && DevCamDebug_meta_enable != 0) {
5628 // DevCamDebug metadata translateFromHalMetadata AF
5629 IF_META_AVAILABLE(int32_t, DevCamDebug_af_lens_position,
5630 CAM_INTF_META_DEV_CAM_AF_LENS_POSITION, metadata) {
5631 int32_t fwk_DevCamDebug_af_lens_position = *DevCamDebug_af_lens_position;
5632 camMetadata.update(DEVCAMDEBUG_AF_LENS_POSITION, &fwk_DevCamDebug_af_lens_position, 1);
5633 }
5634 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_confidence,
5635 CAM_INTF_META_DEV_CAM_AF_TOF_CONFIDENCE, metadata) {
5636 int32_t fwk_DevCamDebug_af_tof_confidence = *DevCamDebug_af_tof_confidence;
5637 camMetadata.update(DEVCAMDEBUG_AF_TOF_CONFIDENCE, &fwk_DevCamDebug_af_tof_confidence, 1);
5638 }
5639 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_distance,
5640 CAM_INTF_META_DEV_CAM_AF_TOF_DISTANCE, metadata) {
5641 int32_t fwk_DevCamDebug_af_tof_distance = *DevCamDebug_af_tof_distance;
5642 camMetadata.update(DEVCAMDEBUG_AF_TOF_DISTANCE, &fwk_DevCamDebug_af_tof_distance, 1);
5643 }
5644 IF_META_AVAILABLE(int32_t, DevCamDebug_af_luma,
5645 CAM_INTF_META_DEV_CAM_AF_LUMA, metadata) {
5646 int32_t fwk_DevCamDebug_af_luma = *DevCamDebug_af_luma;
5647 camMetadata.update(DEVCAMDEBUG_AF_LUMA, &fwk_DevCamDebug_af_luma, 1);
5648 }
5649 IF_META_AVAILABLE(int32_t, DevCamDebug_af_haf_state,
5650 CAM_INTF_META_DEV_CAM_AF_HAF_STATE, metadata) {
5651 int32_t fwk_DevCamDebug_af_haf_state = *DevCamDebug_af_haf_state;
5652 camMetadata.update(DEVCAMDEBUG_AF_HAF_STATE, &fwk_DevCamDebug_af_haf_state, 1);
5653 }
5654 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_target_pos,
5655 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_TARGET_POS, metadata) {
5656 int32_t fwk_DevCamDebug_af_monitor_pdaf_target_pos =
5657 *DevCamDebug_af_monitor_pdaf_target_pos;
5658 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
5659 &fwk_DevCamDebug_af_monitor_pdaf_target_pos, 1);
5660 }
5661 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_confidence,
5662 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_CONFIDENCE, metadata) {
5663 int32_t fwk_DevCamDebug_af_monitor_pdaf_confidence =
5664 *DevCamDebug_af_monitor_pdaf_confidence;
5665 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
5666 &fwk_DevCamDebug_af_monitor_pdaf_confidence, 1);
5667 }
5668 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_refocus,
5669 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_REFOCUS, metadata) {
5670 int32_t fwk_DevCamDebug_af_monitor_pdaf_refocus = *DevCamDebug_af_monitor_pdaf_refocus;
5671 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
5672 &fwk_DevCamDebug_af_monitor_pdaf_refocus, 1);
5673 }
5674 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_target_pos,
5675 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_TARGET_POS, metadata) {
5676 int32_t fwk_DevCamDebug_af_monitor_tof_target_pos =
5677 *DevCamDebug_af_monitor_tof_target_pos;
5678 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
5679 &fwk_DevCamDebug_af_monitor_tof_target_pos, 1);
5680 }
5681 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_confidence,
5682 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_CONFIDENCE, metadata) {
5683 int32_t fwk_DevCamDebug_af_monitor_tof_confidence =
5684 *DevCamDebug_af_monitor_tof_confidence;
5685 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
5686 &fwk_DevCamDebug_af_monitor_tof_confidence, 1);
5687 }
5688 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_refocus,
5689 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_REFOCUS, metadata) {
5690 int32_t fwk_DevCamDebug_af_monitor_tof_refocus = *DevCamDebug_af_monitor_tof_refocus;
5691 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
5692 &fwk_DevCamDebug_af_monitor_tof_refocus, 1);
5693 }
5694 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_type_select,
5695 CAM_INTF_META_DEV_CAM_AF_MONITOR_TYPE_SELECT, metadata) {
5696 int32_t fwk_DevCamDebug_af_monitor_type_select = *DevCamDebug_af_monitor_type_select;
5697 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
5698 &fwk_DevCamDebug_af_monitor_type_select, 1);
5699 }
5700 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_refocus,
5701 CAM_INTF_META_DEV_CAM_AF_MONITOR_REFOCUS, metadata) {
5702 int32_t fwk_DevCamDebug_af_monitor_refocus = *DevCamDebug_af_monitor_refocus;
5703 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_REFOCUS,
5704 &fwk_DevCamDebug_af_monitor_refocus, 1);
5705 }
5706 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_target_pos,
5707 CAM_INTF_META_DEV_CAM_AF_MONITOR_TARGET_POS, metadata) {
5708 int32_t fwk_DevCamDebug_af_monitor_target_pos = *DevCamDebug_af_monitor_target_pos;
5709 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
5710 &fwk_DevCamDebug_af_monitor_target_pos, 1);
5711 }
5712 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_target_pos,
5713 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_TARGET_POS, metadata) {
5714 int32_t fwk_DevCamDebug_af_search_pdaf_target_pos =
5715 *DevCamDebug_af_search_pdaf_target_pos;
5716 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
5717 &fwk_DevCamDebug_af_search_pdaf_target_pos, 1);
5718 }
5719 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_next_pos,
5720 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEXT_POS, metadata) {
5721 int32_t fwk_DevCamDebug_af_search_pdaf_next_pos = *DevCamDebug_af_search_pdaf_next_pos;
5722 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
5723 &fwk_DevCamDebug_af_search_pdaf_next_pos, 1);
5724 }
5725 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_near_pos,
5726 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEAR_POS, metadata) {
5727 int32_t fwk_DevCamDebug_af_search_pdaf_near_pos = *DevCamDebug_af_search_pdaf_near_pos;
5728 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
5729 &fwk_DevCamDebug_af_search_pdaf_near_pos, 1);
5730 }
5731 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_far_pos,
5732 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_FAR_POS, metadata) {
5733 int32_t fwk_DevCamDebug_af_search_pdaf_far_pos = *DevCamDebug_af_search_pdaf_far_pos;
5734 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
5735 &fwk_DevCamDebug_af_search_pdaf_far_pos, 1);
5736 }
5737 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_confidence,
5738 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_CONFIDENCE, metadata) {
5739 int32_t fwk_DevCamDebug_af_search_pdaf_confidence = *DevCamDebug_af_search_pdaf_confidence;
5740 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
5741 &fwk_DevCamDebug_af_search_pdaf_confidence, 1);
5742 }
5743 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_target_pos,
5744 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_TARGET_POS, metadata) {
5745 int32_t fwk_DevCamDebug_af_search_tof_target_pos =
5746 *DevCamDebug_af_search_tof_target_pos;
5747 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
5748 &fwk_DevCamDebug_af_search_tof_target_pos, 1);
5749 }
5750 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_next_pos,
5751 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEXT_POS, metadata) {
5752 int32_t fwk_DevCamDebug_af_search_tof_next_pos = *DevCamDebug_af_search_tof_next_pos;
5753 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
5754 &fwk_DevCamDebug_af_search_tof_next_pos, 1);
5755 }
5756 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_near_pos,
5757 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEAR_POS, metadata) {
5758 int32_t fwk_DevCamDebug_af_search_tof_near_pos = *DevCamDebug_af_search_tof_near_pos;
5759 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
5760 &fwk_DevCamDebug_af_search_tof_near_pos, 1);
5761 }
5762 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_far_pos,
5763 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_FAR_POS, metadata) {
5764 int32_t fwk_DevCamDebug_af_search_tof_far_pos = *DevCamDebug_af_search_tof_far_pos;
5765 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
5766 &fwk_DevCamDebug_af_search_tof_far_pos, 1);
5767 }
5768 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_confidence,
5769 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_CONFIDENCE, metadata) {
5770 int32_t fwk_DevCamDebug_af_search_tof_confidence = *DevCamDebug_af_search_tof_confidence;
5771 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
5772 &fwk_DevCamDebug_af_search_tof_confidence, 1);
5773 }
5774 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_type_select,
5775 CAM_INTF_META_DEV_CAM_AF_SEARCH_TYPE_SELECT, metadata) {
5776 int32_t fwk_DevCamDebug_af_search_type_select = *DevCamDebug_af_search_type_select;
5777 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
5778 &fwk_DevCamDebug_af_search_type_select, 1);
5779 }
5780 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_next_pos,
5781 CAM_INTF_META_DEV_CAM_AF_SEARCH_NEXT_POS, metadata) {
5782 int32_t fwk_DevCamDebug_af_search_next_pos = *DevCamDebug_af_search_next_pos;
5783 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
5784 &fwk_DevCamDebug_af_search_next_pos, 1);
5785 }
5786 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_target_pos,
5787 CAM_INTF_META_DEV_CAM_AF_SEARCH_TARGET_POS, metadata) {
5788 int32_t fwk_DevCamDebug_af_search_target_pos = *DevCamDebug_af_search_target_pos;
5789 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
5790 &fwk_DevCamDebug_af_search_target_pos, 1);
5791 }
5792 // DevCamDebug metadata translateFromHalMetadata AEC
5793 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_target_luma,
5794 CAM_INTF_META_DEV_CAM_AEC_TARGET_LUMA, metadata) {
5795 int32_t fwk_DevCamDebug_aec_target_luma = *DevCamDebug_aec_target_luma;
5796 camMetadata.update(DEVCAMDEBUG_AEC_TARGET_LUMA, &fwk_DevCamDebug_aec_target_luma, 1);
5797 }
5798 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_comp_luma,
5799 CAM_INTF_META_DEV_CAM_AEC_COMP_LUMA, metadata) {
5800 int32_t fwk_DevCamDebug_aec_comp_luma = *DevCamDebug_aec_comp_luma;
5801 camMetadata.update(DEVCAMDEBUG_AEC_COMP_LUMA, &fwk_DevCamDebug_aec_comp_luma, 1);
5802 }
5803 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_avg_luma,
5804 CAM_INTF_META_DEV_CAM_AEC_AVG_LUMA, metadata) {
5805 int32_t fwk_DevCamDebug_aec_avg_luma = *DevCamDebug_aec_avg_luma;
5806 camMetadata.update(DEVCAMDEBUG_AEC_AVG_LUMA, &fwk_DevCamDebug_aec_avg_luma, 1);
5807 }
5808 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_cur_luma,
5809 CAM_INTF_META_DEV_CAM_AEC_CUR_LUMA, metadata) {
5810 int32_t fwk_DevCamDebug_aec_cur_luma = *DevCamDebug_aec_cur_luma;
5811 camMetadata.update(DEVCAMDEBUG_AEC_CUR_LUMA, &fwk_DevCamDebug_aec_cur_luma, 1);
5812 }
5813 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_linecount,
5814 CAM_INTF_META_DEV_CAM_AEC_LINECOUNT, metadata) {
5815 int32_t fwk_DevCamDebug_aec_linecount = *DevCamDebug_aec_linecount;
5816 camMetadata.update(DEVCAMDEBUG_AEC_LINECOUNT, &fwk_DevCamDebug_aec_linecount, 1);
5817 }
5818 IF_META_AVAILABLE(float, DevCamDebug_aec_real_gain,
5819 CAM_INTF_META_DEV_CAM_AEC_REAL_GAIN, metadata) {
5820 float fwk_DevCamDebug_aec_real_gain = *DevCamDebug_aec_real_gain;
5821 camMetadata.update(DEVCAMDEBUG_AEC_REAL_GAIN, &fwk_DevCamDebug_aec_real_gain, 1);
5822 }
5823 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_exp_index,
5824 CAM_INTF_META_DEV_CAM_AEC_EXP_INDEX, metadata) {
5825 int32_t fwk_DevCamDebug_aec_exp_index = *DevCamDebug_aec_exp_index;
5826 camMetadata.update(DEVCAMDEBUG_AEC_EXP_INDEX, &fwk_DevCamDebug_aec_exp_index, 1);
5827 }
5828 IF_META_AVAILABLE(float, DevCamDebug_aec_lux_idx,
5829 CAM_INTF_META_DEV_CAM_AEC_LUX_IDX, metadata) {
5830 float fwk_DevCamDebug_aec_lux_idx = *DevCamDebug_aec_lux_idx;
5831 camMetadata.update(DEVCAMDEBUG_AEC_LUX_IDX, &fwk_DevCamDebug_aec_lux_idx, 1);
5832 }
5833 // DevCamDebug metadata translateFromHalMetadata AWB
5834 IF_META_AVAILABLE(float, DevCamDebug_awb_r_gain,
5835 CAM_INTF_META_DEV_CAM_AWB_R_GAIN, metadata) {
5836 float fwk_DevCamDebug_awb_r_gain = *DevCamDebug_awb_r_gain;
5837 camMetadata.update(DEVCAMDEBUG_AWB_R_GAIN, &fwk_DevCamDebug_awb_r_gain, 1);
5838 }
5839 IF_META_AVAILABLE(float, DevCamDebug_awb_g_gain,
5840 CAM_INTF_META_DEV_CAM_AWB_G_GAIN, metadata) {
5841 float fwk_DevCamDebug_awb_g_gain = *DevCamDebug_awb_g_gain;
5842 camMetadata.update(DEVCAMDEBUG_AWB_G_GAIN, &fwk_DevCamDebug_awb_g_gain, 1);
5843 }
5844 IF_META_AVAILABLE(float, DevCamDebug_awb_b_gain,
5845 CAM_INTF_META_DEV_CAM_AWB_B_GAIN, metadata) {
5846 float fwk_DevCamDebug_awb_b_gain = *DevCamDebug_awb_b_gain;
5847 camMetadata.update(DEVCAMDEBUG_AWB_B_GAIN, &fwk_DevCamDebug_awb_b_gain, 1);
5848 }
5849 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_cct,
5850 CAM_INTF_META_DEV_CAM_AWB_CCT, metadata) {
5851 int32_t fwk_DevCamDebug_awb_cct = *DevCamDebug_awb_cct;
5852 camMetadata.update(DEVCAMDEBUG_AWB_CCT, &fwk_DevCamDebug_awb_cct, 1);
5853 }
5854 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_decision,
5855 CAM_INTF_META_DEV_CAM_AWB_DECISION, metadata) {
5856 int32_t fwk_DevCamDebug_awb_decision = *DevCamDebug_awb_decision;
5857 camMetadata.update(DEVCAMDEBUG_AWB_DECISION, &fwk_DevCamDebug_awb_decision, 1);
5858 }
5859 }
5860 // atrace_end(ATRACE_TAG_ALWAYS);
5861
Thierry Strudel3d639192016-09-09 11:52:26 -07005862 IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
5863 int64_t fwk_frame_number = *frame_number;
5864 camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
5865 }
5866
5867 IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
5868 int32_t fps_range[2];
5869 fps_range[0] = (int32_t)float_range->min_fps;
5870 fps_range[1] = (int32_t)float_range->max_fps;
5871 camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
5872 fps_range, 2);
5873 LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
5874 fps_range[0], fps_range[1]);
5875 }
5876
5877 IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
5878 camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
5879 }
5880
5881 IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
5882 int val = (uint8_t)lookupFwkName(SCENE_MODES_MAP,
5883 METADATA_MAP_SIZE(SCENE_MODES_MAP),
5884 *sceneMode);
5885 if (NAME_NOT_FOUND != val) {
5886 uint8_t fwkSceneMode = (uint8_t)val;
5887 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
5888 LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
5889 fwkSceneMode);
5890 }
5891 }
5892
5893 IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
5894 uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
5895 camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
5896 }
5897
5898 IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
5899 uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
5900 camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
5901 }
5902
5903 IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
5904 uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
5905 camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
5906 }
5907
5908 IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
5909 CAM_INTF_META_EDGE_MODE, metadata) {
5910 camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
5911 }
5912
5913 IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
5914 uint8_t fwk_flashPower = (uint8_t) *flashPower;
5915 camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
5916 }
5917
5918 IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
5919 camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
5920 }
5921
5922 IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
5923 if (0 <= *flashState) {
5924 uint8_t fwk_flashState = (uint8_t) *flashState;
5925 if (!gCamCapability[mCameraId]->flash_available) {
5926 fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
5927 }
5928 camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
5929 }
5930 }
5931
5932 IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
5933 int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
5934 if (NAME_NOT_FOUND != val) {
5935 uint8_t fwk_flashMode = (uint8_t)val;
5936 camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
5937 }
5938 }
5939
5940 IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
5941 uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
5942 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
5943 }
5944
5945 IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
5946 camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
5947 }
5948
5949 IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
5950 camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
5951 }
5952
5953 IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
5954 camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
5955 }
5956
5957 IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
5958 uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
5959 camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
5960 }
5961
5962 IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
5963 uint8_t fwk_videoStab = (uint8_t) *videoStab;
5964 LOGD("fwk_videoStab = %d", fwk_videoStab);
5965 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
5966 } else {
5967 // Regardless of whether video stabilization is supported, CTS expects the EIS result to be non-NULL
5968 // and so hardcoding the Video Stab result to OFF mode.
5969 uint8_t fwkVideoStabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
5970 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005971 LOGD("EIS result default to OFF mode");
Thierry Strudel3d639192016-09-09 11:52:26 -07005972 }
5973
5974 IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
5975 uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
5976 camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
5977 }
5978
5979 IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
5980 camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
5981 }
5982
5983 IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelSourcePattern,
5984 CAM_INTF_META_BLACK_LEVEL_SOURCE_PATTERN, metadata) {
5985
5986 LOGD("dynamicblackLevel = %f %f %f %f",
5987 blackLevelSourcePattern->cam_black_level[0],
5988 blackLevelSourcePattern->cam_black_level[1],
5989 blackLevelSourcePattern->cam_black_level[2],
5990 blackLevelSourcePattern->cam_black_level[3]);
5991 }
5992
5993 IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
5994 CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
5995 float fwk_blackLevelInd[4];
5996
5997 fwk_blackLevelInd[0] = blackLevelAppliedPattern->cam_black_level[0];
5998 fwk_blackLevelInd[1] = blackLevelAppliedPattern->cam_black_level[1];
5999 fwk_blackLevelInd[2] = blackLevelAppliedPattern->cam_black_level[2];
6000 fwk_blackLevelInd[3] = blackLevelAppliedPattern->cam_black_level[3];
6001
6002 LOGD("applied dynamicblackLevel = %f %f %f %f",
6003 blackLevelAppliedPattern->cam_black_level[0],
6004 blackLevelAppliedPattern->cam_black_level[1],
6005 blackLevelAppliedPattern->cam_black_level[2],
6006 blackLevelAppliedPattern->cam_black_level[3]);
6007 camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd, 4);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006008
6009#ifndef USE_HAL_3_3
6010 // Update the ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
Zhijun Heb753c672016-06-15 14:50:48 -07006011 // Need to convert the internal 12-bit depth to the sensor's 10-bit raw
6012 // depth space.
6013 fwk_blackLevelInd[0] /= 4.0;
6014 fwk_blackLevelInd[1] /= 4.0;
6015 fwk_blackLevelInd[2] /= 4.0;
6016 fwk_blackLevelInd[3] /= 4.0;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006017 camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd, 4);
6018#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07006019 }
6020
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006021#ifndef USE_HAL_3_3
6022 // Fixed whitelevel is used by ISP/Sensor
6023 camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
6024 &gCamCapability[mCameraId]->white_level, 1);
6025#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07006026
6027 IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
6028 CAM_INTF_META_SCALER_CROP_REGION, metadata) {
6029 int32_t scalerCropRegion[4];
6030 scalerCropRegion[0] = hScalerCropRegion->left;
6031 scalerCropRegion[1] = hScalerCropRegion->top;
6032 scalerCropRegion[2] = hScalerCropRegion->width;
6033 scalerCropRegion[3] = hScalerCropRegion->height;
6034
6035 // Adjust crop region from sensor output coordinate system to active
6036 // array coordinate system.
6037 mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
6038 scalerCropRegion[2], scalerCropRegion[3]);
6039
6040 camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
6041 }
6042
6043 IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
6044 LOGD("sensorExpTime = %lld", *sensorExpTime);
6045 camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
6046 }
6047
6048 IF_META_AVAILABLE(int64_t, sensorFameDuration,
6049 CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
6050 LOGD("sensorFameDuration = %lld", *sensorFameDuration);
6051 camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
6052 }
6053
6054 IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
6055 CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
6056 LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
6057 camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
6058 sensorRollingShutterSkew, 1);
6059 }
6060
6061 IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
6062 LOGD("sensorSensitivity = %d", *sensorSensitivity);
6063 camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
6064
6065 //calculate the noise profile based on sensitivity
6066 double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
6067 double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
6068 double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
6069 for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
6070 noise_profile[i] = noise_profile_S;
6071 noise_profile[i+1] = noise_profile_O;
6072 }
6073 LOGD("noise model entry (S, O) is (%f, %f)",
6074 noise_profile_S, noise_profile_O);
6075 camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
6076 (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
6077 }
6078
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006079#ifndef USE_HAL_3_3
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07006080 int32_t fwk_ispSensitivity = 100;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006081 IF_META_AVAILABLE(int32_t, ispSensitivity, CAM_INTF_META_ISP_SENSITIVITY, metadata) {
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07006082 fwk_ispSensitivity = (int32_t) *ispSensitivity;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006083 }
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07006084 IF_META_AVAILABLE(float, postStatsSensitivity, CAM_INTF_META_ISP_POST_STATS_SENSITIVITY, metadata) {
6085 fwk_ispSensitivity = (int32_t) (*postStatsSensitivity * fwk_ispSensitivity);
6086 }
6087 camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &fwk_ispSensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006088#endif
6089
Thierry Strudel3d639192016-09-09 11:52:26 -07006090 IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
6091 uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
6092 camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
6093 }
6094
6095 IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
6096 int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
6097 *faceDetectMode);
6098 if (NAME_NOT_FOUND != val) {
6099 uint8_t fwk_faceDetectMode = (uint8_t)val;
6100 camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
6101
6102 if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
6103 IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
6104 CAM_INTF_META_FACE_DETECTION, metadata) {
6105 uint8_t numFaces = MIN(
6106 faceDetectionInfo->num_faces_detected, MAX_ROI);
6107 int32_t faceIds[MAX_ROI];
6108 uint8_t faceScores[MAX_ROI];
6109 int32_t faceRectangles[MAX_ROI * 4];
6110 int32_t faceLandmarks[MAX_ROI * 6];
6111 size_t j = 0, k = 0;
6112
6113 for (size_t i = 0; i < numFaces; i++) {
6114 faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
6115 // Map the face boundary rectangle from the sensor output coordinate
6116 // system to the active array coordinate system.
6117 cam_rect_t& rect = faceDetectionInfo->faces[i].face_boundary;
6118 mCropRegionMapper.toActiveArray(rect.left, rect.top,
6119 rect.width, rect.height);
6120
6121 convertToRegions(faceDetectionInfo->faces[i].face_boundary,
6122 faceRectangles+j, -1);
6123
6124 j+= 4;
6125 }
6126 if (numFaces <= 0) {
6127 memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
6128 memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
6129 memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
6130 memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
6131 }
6132
6133 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
6134 numFaces);
6135 camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
6136 faceRectangles, numFaces * 4U);
6137 if (fwk_faceDetectMode ==
6138 ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
6139 IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
6140 CAM_INTF_META_FACE_LANDMARK, metadata) {
6141
6142 for (size_t i = 0; i < numFaces; i++) {
6143 // Map the co-ordinate sensor output coordinate system to active
6144 // array coordinate system.
6145 mCropRegionMapper.toActiveArray(
6146 landmarks->face_landmarks[i].left_eye_center.x,
6147 landmarks->face_landmarks[i].left_eye_center.y);
6148 mCropRegionMapper.toActiveArray(
6149 landmarks->face_landmarks[i].right_eye_center.x,
6150 landmarks->face_landmarks[i].right_eye_center.y);
6151 mCropRegionMapper.toActiveArray(
6152 landmarks->face_landmarks[i].mouth_center.x,
6153 landmarks->face_landmarks[i].mouth_center.y);
6154
6155 convertLandmarks(landmarks->face_landmarks[i], faceLandmarks+k);
Thierry Strudel04e026f2016-10-10 11:27:36 -07006156 k+= TOTAL_LANDMARK_INDICES;
6157 }
6158 } else {
6159 for (size_t i = 0; i < numFaces; i++) {
6160 setInvalidLandmarks(faceLandmarks+k);
6161 k+= TOTAL_LANDMARK_INDICES;
Thierry Strudel3d639192016-09-09 11:52:26 -07006162 }
6163 }
6164
6165 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
6166 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
6167 faceLandmarks, numFaces * 6U);
6168 }
6169 }
6170 }
6171 }
6172 }
6173
6174 IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
6175 uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
6176 camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &fwk_histogramMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006177
6178 if (fwk_histogramMode == ANDROID_STATISTICS_HISTOGRAM_MODE_ON) {
6179 IF_META_AVAILABLE(cam_hist_stats_t, stats_data, CAM_INTF_META_HISTOGRAM, metadata) {
6180 // process histogram statistics info
6181 uint32_t hist_buf[3][CAM_HISTOGRAM_STATS_SIZE];
6182 uint32_t hist_size = sizeof(cam_histogram_data_t::hist_buf);
6183 cam_histogram_data_t rHistData, gHistData, bHistData;
6184 memset(&rHistData, 0, sizeof(rHistData));
6185 memset(&gHistData, 0, sizeof(gHistData));
6186 memset(&bHistData, 0, sizeof(bHistData));
6187
6188 switch (stats_data->type) {
6189 case CAM_HISTOGRAM_TYPE_BAYER:
6190 switch (stats_data->bayer_stats.data_type) {
6191 case CAM_STATS_CHANNEL_GR:
6192 rHistData = gHistData = bHistData = stats_data->bayer_stats.gr_stats;
6193 break;
6194 case CAM_STATS_CHANNEL_GB:
6195 rHistData = gHistData = bHistData = stats_data->bayer_stats.gb_stats;
6196 break;
6197 case CAM_STATS_CHANNEL_B:
6198 rHistData = gHistData = bHistData = stats_data->bayer_stats.b_stats;
6199 break;
6200 case CAM_STATS_CHANNEL_ALL:
6201 rHistData = stats_data->bayer_stats.r_stats;
6202 //Framework expects only 3 channels. So, for now,
6203 //use gb stats for G channel.
6204 gHistData = stats_data->bayer_stats.gb_stats;
6205 bHistData = stats_data->bayer_stats.b_stats;
6206 break;
6207 case CAM_STATS_CHANNEL_Y:
6208 case CAM_STATS_CHANNEL_R:
6209 default:
6210 rHistData = gHistData = bHistData = stats_data->bayer_stats.r_stats;
6211 break;
6212 }
6213 break;
6214 case CAM_HISTOGRAM_TYPE_YUV:
6215 rHistData = gHistData = bHistData = stats_data->yuv_stats;
6216 break;
6217 }
6218
6219 memcpy(hist_buf, rHistData.hist_buf, hist_size);
6220 memcpy(hist_buf[1], gHistData.hist_buf, hist_size);
6221 memcpy(hist_buf[2], bHistData.hist_buf, hist_size);
6222
6223 camMetadata.update(ANDROID_STATISTICS_HISTOGRAM, (int32_t*)hist_buf, hist_size*3);
6224 }
6225 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006226 }
6227
6228 IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
6229 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
6230 uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
6231 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
6232 }
6233
6234 IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
6235 CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
6236 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
6237 CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
6238 }
6239
6240 IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
6241 CAM_INTF_META_LENS_SHADING_MAP, metadata) {
6242 size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
6243 CAM_MAX_SHADING_MAP_HEIGHT);
6244 size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
6245 CAM_MAX_SHADING_MAP_WIDTH);
6246 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
6247 lensShadingMap->lens_shading, 4U * map_width * map_height);
6248 }
6249
6250 IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
6251 uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
6252 camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
6253 }
6254
6255 IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
6256 //Populate CAM_INTF_META_TONEMAP_CURVES
6257 /* ch0 = G, ch 1 = B, ch 2 = R*/
6258 if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
6259 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
6260 tonemap->tonemap_points_cnt,
6261 CAM_MAX_TONEMAP_CURVE_SIZE);
6262 tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
6263 }
6264
6265 camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
6266 &tonemap->curves[0].tonemap_points[0][0],
6267 tonemap->tonemap_points_cnt * 2);
6268
6269 camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
6270 &tonemap->curves[1].tonemap_points[0][0],
6271 tonemap->tonemap_points_cnt * 2);
6272
6273 camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
6274 &tonemap->curves[2].tonemap_points[0][0],
6275 tonemap->tonemap_points_cnt * 2);
6276 }
6277
6278 IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
6279 CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
6280 camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
6281 CC_GAIN_MAX);
6282 }
6283
6284 IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
6285 CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
6286 camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
6287 (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
6288 CC_MATRIX_COLS * CC_MATRIX_ROWS);
6289 }
6290
6291 IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
6292 CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
6293 if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
6294 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
6295 toneCurve->tonemap_points_cnt,
6296 CAM_MAX_TONEMAP_CURVE_SIZE);
6297 toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
6298 }
6299 camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
6300 (float*)toneCurve->curve.tonemap_points,
6301 toneCurve->tonemap_points_cnt * 2);
6302 }
6303
6304 IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
6305 CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
6306 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
6307 predColorCorrectionGains->gains, 4);
6308 }
6309
6310 IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
6311 CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
6312 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
6313 (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
6314 CC_MATRIX_ROWS * CC_MATRIX_COLS);
6315 }
6316
6317 IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
6318 camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
6319 }
6320
6321 IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
6322 uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
6323 camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
6324 }
6325
6326 IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
6327 uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
6328 camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
6329 }
6330
6331 IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
6332 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
6333 *effectMode);
6334 if (NAME_NOT_FOUND != val) {
6335 uint8_t fwk_effectMode = (uint8_t)val;
6336 camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
6337 }
6338 }
6339
6340 IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
6341 CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
6342 int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
6343 METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
6344 if (NAME_NOT_FOUND != fwk_testPatternMode) {
6345 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
6346 }
6347 int32_t fwk_testPatternData[4];
6348 fwk_testPatternData[0] = testPatternData->r;
6349 fwk_testPatternData[3] = testPatternData->b;
6350 switch (gCamCapability[mCameraId]->color_arrangement) {
6351 case CAM_FILTER_ARRANGEMENT_RGGB:
6352 case CAM_FILTER_ARRANGEMENT_GRBG:
6353 fwk_testPatternData[1] = testPatternData->gr;
6354 fwk_testPatternData[2] = testPatternData->gb;
6355 break;
6356 case CAM_FILTER_ARRANGEMENT_GBRG:
6357 case CAM_FILTER_ARRANGEMENT_BGGR:
6358 fwk_testPatternData[2] = testPatternData->gr;
6359 fwk_testPatternData[1] = testPatternData->gb;
6360 break;
6361 default:
6362 LOGE("color arrangement %d is not supported",
6363 gCamCapability[mCameraId]->color_arrangement);
6364 break;
6365 }
6366 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
6367 }
6368
6369 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
6370 camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
6371 }
6372
6373 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
6374 String8 str((const char *)gps_methods);
6375 camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
6376 }
6377
6378 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
6379 camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
6380 }
6381
6382 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
6383 camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
6384 }
6385
6386 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
6387 uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
6388 camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
6389 }
6390
6391 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
6392 uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
6393 camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
6394 }
6395
6396 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
6397 int32_t fwk_thumb_size[2];
6398 fwk_thumb_size[0] = thumb_size->width;
6399 fwk_thumb_size[1] = thumb_size->height;
6400 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
6401 }
6402
6403 IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
6404 camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
6405 privateData,
6406 MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
6407 }
6408
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006409 IF_META_AVAILABLE(int32_t, meteringMode, CAM_INTF_PARM_AEC_ALGO_TYPE, metadata) {
6410 camMetadata.update(QCAMERA3_EXPOSURE_METERING_MODE,
6411 meteringMode, 1);
6412 }
6413
Thierry Strudel3d639192016-09-09 11:52:26 -07006414 if (metadata->is_tuning_params_valid) {
6415 uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
6416 uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
6417 metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
6418
6419
6420 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
6421 sizeof(uint32_t));
6422 data += sizeof(uint32_t);
6423
6424 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
6425 sizeof(uint32_t));
6426 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
6427 data += sizeof(uint32_t);
6428
6429 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
6430 sizeof(uint32_t));
6431 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
6432 data += sizeof(uint32_t);
6433
6434 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
6435 sizeof(uint32_t));
6436 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
6437 data += sizeof(uint32_t);
6438
6439 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
6440 sizeof(uint32_t));
6441 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
6442 data += sizeof(uint32_t);
6443
6444 metadata->tuning_params.tuning_mod3_data_size = 0;
6445 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
6446 sizeof(uint32_t));
6447 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
6448 data += sizeof(uint32_t);
6449
6450 size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
6451 TUNING_SENSOR_DATA_MAX);
6452 memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
6453 count);
6454 data += count;
6455
6456 count = MIN(metadata->tuning_params.tuning_vfe_data_size,
6457 TUNING_VFE_DATA_MAX);
6458 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
6459 count);
6460 data += count;
6461
6462 count = MIN(metadata->tuning_params.tuning_cpp_data_size,
6463 TUNING_CPP_DATA_MAX);
6464 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
6465 count);
6466 data += count;
6467
6468 count = MIN(metadata->tuning_params.tuning_cac_data_size,
6469 TUNING_CAC_DATA_MAX);
6470 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
6471 count);
6472 data += count;
6473
6474 camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
6475 (int32_t *)(void *)tuning_meta_data_blob,
6476 (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
6477 }
6478
6479 IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
6480 CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
6481 camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
6482 (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
6483 NEUTRAL_COL_POINTS);
6484 }
6485
6486 IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
6487 uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
6488 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
6489 }
6490
6491 IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
6492 int32_t aeRegions[REGIONS_TUPLE_COUNT];
6493 // Adjust crop region from sensor output coordinate system to active
6494 // array coordinate system.
6495 mCropRegionMapper.toActiveArray(hAeRegions->rect.left, hAeRegions->rect.top,
6496 hAeRegions->rect.width, hAeRegions->rect.height);
6497
6498 convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
6499 camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
6500 REGIONS_TUPLE_COUNT);
6501 LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
6502 aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
6503 hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width,
6504 hAeRegions->rect.height);
6505 }
6506
6507 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
6508 uint8_t fwk_afState = (uint8_t) *afState;
6509 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
6510 LOGD("urgent Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
6511 }
6512
6513 IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
6514 camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
6515 }
6516
6517 IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
6518 camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
6519 }
6520
6521 IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
6522 uint8_t fwk_lensState = *lensState;
6523 camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
6524 }
6525
6526 IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
6527 /*af regions*/
6528 int32_t afRegions[REGIONS_TUPLE_COUNT];
6529 // Adjust crop region from sensor output coordinate system to active
6530 // array coordinate system.
6531 mCropRegionMapper.toActiveArray(hAfRegions->rect.left, hAfRegions->rect.top,
6532 hAfRegions->rect.width, hAfRegions->rect.height);
6533
6534 convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
6535 camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
6536 REGIONS_TUPLE_COUNT);
6537 LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
6538 afRegions[0], afRegions[1], afRegions[2], afRegions[3],
6539 hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width,
6540 hAfRegions->rect.height);
6541 }
6542
6543 IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
6544 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
6545 *hal_ab_mode);
6546 if (NAME_NOT_FOUND != val) {
6547 uint8_t fwk_ab_mode = (uint8_t)val;
6548 camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
6549 }
6550 }
6551
6552 IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
6553 int val = lookupFwkName(SCENE_MODES_MAP,
6554 METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
6555 if (NAME_NOT_FOUND != val) {
6556 uint8_t fwkBestshotMode = (uint8_t)val;
6557 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
6558 LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
6559 } else {
6560 LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
6561 }
6562 }
6563
6564 IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
6565 uint8_t fwk_mode = (uint8_t) *mode;
6566 camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
6567 }
6568
6569 /* Constant metadata values to be update*/
6570 uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
6571 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
6572
6573 uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
6574 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
6575
6576 int32_t hotPixelMap[2];
6577 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
6578
6579 // CDS
6580 IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
6581 camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
6582 }
6583
Thierry Strudel04e026f2016-10-10 11:27:36 -07006584 IF_META_AVAILABLE(cam_sensor_hdr_type_t, vhdr, CAM_INTF_PARM_SENSOR_HDR, metadata) {
6585 int32_t fwk_hdr;
6586 if(*vhdr == CAM_SENSOR_HDR_OFF) {
6587 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_OFF;
6588 } else {
6589 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_ON;
6590 }
6591 camMetadata.update(QCAMERA3_VIDEO_HDR_MODE, &fwk_hdr, 1);
6592 }
6593
6594 IF_META_AVAILABLE(cam_ir_mode_type_t, ir, CAM_INTF_META_IR_MODE, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07006595 int32_t fwk_ir = (int32_t) *ir;
6596 camMetadata.update(QCAMERA3_IR_MODE, &fwk_ir, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -07006597 }
6598
Thierry Strudel269c81a2016-10-12 12:13:59 -07006599 // AEC SPEED
6600 IF_META_AVAILABLE(float, aec, CAM_INTF_META_AEC_CONVERGENCE_SPEED, metadata) {
6601 camMetadata.update(QCAMERA3_AEC_CONVERGENCE_SPEED, aec, 1);
6602 }
6603
6604 // AWB SPEED
6605 IF_META_AVAILABLE(float, awb, CAM_INTF_META_AWB_CONVERGENCE_SPEED, metadata) {
6606 camMetadata.update(QCAMERA3_AWB_CONVERGENCE_SPEED, awb, 1);
6607 }
6608
Thierry Strudel3d639192016-09-09 11:52:26 -07006609 // TNR
6610 IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
6611 uint8_t tnr_enable = tnr->denoise_enable;
6612 int32_t tnr_process_type = (int32_t)tnr->process_plates;
6613
6614 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
6615 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
6616 }
6617
6618 // Reprocess crop data
6619 IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
6620 uint8_t cnt = crop_data->num_of_streams;
6621 if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
6622 // mm-qcamera-daemon only posts crop_data for streams
6623 // not linked to pproc. So no valid crop metadata is not
6624 // necessarily an error case.
6625 LOGD("No valid crop metadata entries");
6626 } else {
6627 uint32_t reproc_stream_id;
6628 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
6629 LOGD("No reprocessible stream found, ignore crop data");
6630 } else {
6631 int rc = NO_ERROR;
6632 Vector<int32_t> roi_map;
6633 int32_t *crop = new int32_t[cnt*4];
6634 if (NULL == crop) {
6635 rc = NO_MEMORY;
6636 }
6637 if (NO_ERROR == rc) {
6638 int32_t streams_found = 0;
6639 for (size_t i = 0; i < cnt; i++) {
6640 if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
6641 if (pprocDone) {
6642 // HAL already does internal reprocessing,
6643 // either via reprocessing before JPEG encoding,
6644 // or offline postprocessing for pproc bypass case.
6645 crop[0] = 0;
6646 crop[1] = 0;
6647 crop[2] = mInputStreamInfo.dim.width;
6648 crop[3] = mInputStreamInfo.dim.height;
6649 } else {
6650 crop[0] = crop_data->crop_info[i].crop.left;
6651 crop[1] = crop_data->crop_info[i].crop.top;
6652 crop[2] = crop_data->crop_info[i].crop.width;
6653 crop[3] = crop_data->crop_info[i].crop.height;
6654 }
6655 roi_map.add(crop_data->crop_info[i].roi_map.left);
6656 roi_map.add(crop_data->crop_info[i].roi_map.top);
6657 roi_map.add(crop_data->crop_info[i].roi_map.width);
6658 roi_map.add(crop_data->crop_info[i].roi_map.height);
6659 streams_found++;
6660 LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
6661 crop[0], crop[1], crop[2], crop[3]);
6662 LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
6663 crop_data->crop_info[i].roi_map.left,
6664 crop_data->crop_info[i].roi_map.top,
6665 crop_data->crop_info[i].roi_map.width,
6666 crop_data->crop_info[i].roi_map.height);
6667 break;
6668
6669 }
6670 }
6671 camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
6672 &streams_found, 1);
6673 camMetadata.update(QCAMERA3_CROP_REPROCESS,
6674 crop, (size_t)(streams_found * 4));
6675 if (roi_map.array()) {
6676 camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
6677 roi_map.array(), roi_map.size());
6678 }
6679 }
6680 if (crop) {
6681 delete [] crop;
6682 }
6683 }
6684 }
6685 }
6686
6687 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
6688 // Regardless of CAC supports or not, CTS is expecting the CAC result to be non NULL and
6689 // so hardcoding the CAC result to OFF mode.
6690 uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
6691 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
6692 } else {
6693 IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
6694 int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
6695 *cacMode);
6696 if (NAME_NOT_FOUND != val) {
6697 uint8_t resultCacMode = (uint8_t)val;
6698 // check whether CAC result from CB is equal to Framework set CAC mode
6699 // If not equal then set the CAC mode came in corresponding request
6700 if (fwk_cacMode != resultCacMode) {
6701 resultCacMode = fwk_cacMode;
6702 }
6703 LOGD("fwk_cacMode=%d resultCacMode=%d", fwk_cacMode, resultCacMode);
6704 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
6705 } else {
6706 LOGE("Invalid CAC camera parameter: %d", *cacMode);
6707 }
6708 }
6709 }
6710
6711 // Post blob of cam_cds_data through vendor tag.
6712 IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
6713 uint8_t cnt = cdsInfo->num_of_streams;
6714 cam_cds_data_t cdsDataOverride;
6715 memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
6716 cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
6717 cdsDataOverride.num_of_streams = 1;
6718 if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
6719 uint32_t reproc_stream_id;
6720 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
6721 LOGD("No reprocessible stream found, ignore cds data");
6722 } else {
6723 for (size_t i = 0; i < cnt; i++) {
6724 if (cdsInfo->cds_info[i].stream_id ==
6725 reproc_stream_id) {
6726 cdsDataOverride.cds_info[0].cds_enable =
6727 cdsInfo->cds_info[i].cds_enable;
6728 break;
6729 }
6730 }
6731 }
6732 } else {
6733 LOGD("Invalid stream count %d in CDS_DATA", cnt);
6734 }
6735 camMetadata.update(QCAMERA3_CDS_INFO,
6736 (uint8_t *)&cdsDataOverride,
6737 sizeof(cam_cds_data_t));
6738 }
6739
6740 // Ldaf calibration data
6741 if (!mLdafCalibExist) {
6742 IF_META_AVAILABLE(uint32_t, ldafCalib,
6743 CAM_INTF_META_LDAF_EXIF, metadata) {
6744 mLdafCalibExist = true;
6745 mLdafCalib[0] = ldafCalib[0];
6746 mLdafCalib[1] = ldafCalib[1];
6747 LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
6748 ldafCalib[0], ldafCalib[1]);
6749 }
6750 }
6751
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07006752 // Reprocess and DDM debug data through vendor tag
6753 cam_reprocess_info_t repro_info;
6754 memset(&repro_info, 0, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07006755 IF_META_AVAILABLE(cam_stream_crop_info_t, sensorCropInfo,
6756 CAM_INTF_META_SNAP_CROP_INFO_SENSOR, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07006757 memcpy(&(repro_info.sensor_crop_info), sensorCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07006758 }
6759 IF_META_AVAILABLE(cam_stream_crop_info_t, camifCropInfo,
6760 CAM_INTF_META_SNAP_CROP_INFO_CAMIF, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07006761 memcpy(&(repro_info.camif_crop_info), camifCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07006762 }
6763 IF_META_AVAILABLE(cam_stream_crop_info_t, ispCropInfo,
6764 CAM_INTF_META_SNAP_CROP_INFO_ISP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07006765 memcpy(&(repro_info.isp_crop_info), ispCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07006766 }
6767 IF_META_AVAILABLE(cam_stream_crop_info_t, cppCropInfo,
6768 CAM_INTF_META_SNAP_CROP_INFO_CPP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07006769 memcpy(&(repro_info.cpp_crop_info), cppCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07006770 }
6771 IF_META_AVAILABLE(cam_focal_length_ratio_t, ratio,
6772 CAM_INTF_META_AF_FOCAL_LENGTH_RATIO, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07006773 memcpy(&(repro_info.af_focal_length_ratio), ratio, sizeof(cam_focal_length_ratio_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07006774 }
6775 IF_META_AVAILABLE(int32_t, flip, CAM_INTF_PARM_FLIP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07006776 memcpy(&(repro_info.pipeline_flip), flip, sizeof(int32_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07006777 }
6778 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
6779 CAM_INTF_PARM_ROTATION, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07006780 memcpy(&(repro_info.rotation_info), rotationInfo, sizeof(cam_rotation_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07006781 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07006782 IF_META_AVAILABLE(cam_area_t, afRoi, CAM_INTF_META_AF_ROI, metadata) {
6783 memcpy(&(repro_info.af_roi), afRoi, sizeof(cam_area_t));
6784 }
6785 IF_META_AVAILABLE(cam_dyn_img_data_t, dynMask, CAM_INTF_META_IMG_DYN_FEAT, metadata) {
6786 memcpy(&(repro_info.dyn_mask), dynMask, sizeof(cam_dyn_img_data_t));
6787 }
6788 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB,
6789 (uint8_t *)&repro_info, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07006790
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006791 // INSTANT AEC MODE
6792 IF_META_AVAILABLE(uint8_t, instant_aec_mode,
6793 CAM_INTF_PARM_INSTANT_AEC, metadata) {
6794 camMetadata.update(QCAMERA3_INSTANT_AEC_MODE, instant_aec_mode, 1);
6795 }
6796
Shuzhen Wange763e802016-03-31 10:24:29 -07006797 // AF scene change
6798 IF_META_AVAILABLE(uint8_t, afSceneChange, CAM_INTF_META_AF_SCENE_CHANGE, metadata) {
6799 camMetadata.update(NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE, afSceneChange, 1);
6800 }
6801
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006802 /* In batch mode, cache the first metadata in the batch */
6803 if (mBatchSize && firstMetadataInBatch) {
6804 mCachedMetadata.clear();
6805 mCachedMetadata = camMetadata;
6806 }
6807
Thierry Strudel3d639192016-09-09 11:52:26 -07006808 resultMetadata = camMetadata.release();
6809 return resultMetadata;
6810}
6811
/*===========================================================================
 * FUNCTION   : saveExifParams
 *
 * DESCRIPTION: Caches the per-frame 3A/stats EXIF debug blobs delivered in a
 *              metadata callback into mExifParams, so a later JPEG encode can
 *              embed them as EXIF debug data (retrieved via get3AExifParams).
 *
 * PARAMETERS :
 *   @metadata : metadata information from callback
 *
 * RETURN     : none
 *
 * NOTE       : Each blob is deep-copied only if mExifParams.debug_params has
 *              been allocated; otherwise the callback data is silently
 *              dropped. Only blobs present in this callback are refreshed —
 *              absent ones keep their previous value and valid flag.
 *==========================================================================*/
void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
{
    // IF_META_AVAILABLE(type, var, tag, meta) is a project macro: it runs its
    // body with `var` pointing at the entry only when `tag` is present in
    // `meta` (presumably — confirm against the macro definition in
    // mm_camera_interface headers).

    // Auto-exposure debug blob
    IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
            mExifParams.debug_params->ae_debug_params_valid = TRUE;
        }
    }
    // Auto-white-balance debug blob
    IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
            mExifParams.debug_params->awb_debug_params_valid = TRUE;
        }
    }
    // Auto-focus debug blob
    IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
            mExifParams.debug_params->af_debug_params_valid = TRUE;
        }
    }
    // Auto-scene-detection debug blob
    IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
            mExifParams.debug_params->asd_debug_params_valid = TRUE;
        }
    }
    // General stats buffer debug blob
    IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
            mExifParams.debug_params->stats_debug_params_valid = TRUE;
        }
    }
    // Bayer-exposure stats debug blob
    IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t,bestats_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_BESTATS, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
            mExifParams.debug_params->bestats_debug_params_valid = TRUE;
        }
    }
    // Bayer-histogram stats debug blob
    IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_BHIST, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
            mExifParams.debug_params->bhist_debug_params_valid = TRUE;
        }
    }
    // 3A tuning info debug blob
    IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_3A_TUNING, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
            mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
        }
    }
}
6882
/*===========================================================================
 * FUNCTION   : get3AExifParams
 *
 * DESCRIPTION: Returns (by value) the cached 3A EXIF parameters most recently
 *              stored by saveExifParams(), for embedding into JPEG EXIF
 *              debug data. The embedded debug_params pointer, when non-NULL,
 *              still refers to storage owned by this object.
 *
 * PARAMETERS : none
 *
 *
 * RETURN     : mm_jpeg_exif_params_t
 *
 *==========================================================================*/
mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
{
    return mExifParams;
}
6898
/*===========================================================================
 * FUNCTION   : translateCbUrgentMetadataToResultMetadata
 *
 * DESCRIPTION: Translates the "urgent" (partial, low-latency) subset of HAL
 *              metadata from a callback into framework result metadata:
 *              3A states, 3A triggers, AF/AWB modes, and the deduced
 *              ANDROID_CONTROL_AE_MODE. Also drives the instant-AEC
 *              frame-skip state machine.
 *
 * PARAMETERS :
 *   @metadata : metadata information from callback
 *
 * RETURN     : camera_metadata_t*
 *              metadata in a format specified by fwk; ownership passes to
 *              the caller (camMetadata.release()).
 *==========================================================================*/
camera_metadata_t*
QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
                                 (metadata_buffer_t *metadata)
{
    CameraMetadata camMetadata;
    camera_metadata_t *resultMetadata;


    // AWB state: HAL enum value is narrowed to the uint8_t the framework expects.
    IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
        uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
        camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
        LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
    }

    // AE precapture trigger echo: both the trigger and its id are reported back.
    IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
        camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
                &aecTrigger->trigger, 1);
        camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
                &aecTrigger->trigger_id, 1);
        LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
                 aecTrigger->trigger);
        LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
                aecTrigger->trigger_id);
    }

    // AE state (searching/converged/locked/...), narrowed for the framework.
    IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
        uint8_t fwk_ae_state = (uint8_t) *ae_state;
        camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
        LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
    }

    // AF mode: translated HAL->framework via FOCUS_MODES_MAP; unmapped values
    // are logged and omitted from the result.
    IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
        int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
        if (NAME_NOT_FOUND != val) {
            uint8_t fwkAfMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
            LOGD("urgent Metadata : ANDROID_CONTROL_AF_MODE %d", val);
        } else {
            LOGH("urgent Metadata not found : ANDROID_CONTROL_AF_MODE %d",
                    val);
        }
    }

    // AF trigger echo, analogous to the AE precapture trigger above.
    IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
        camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
                &af_trigger->trigger, 1);
        LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
                 af_trigger->trigger);
        camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &af_trigger->trigger_id, 1);
        LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
                af_trigger->trigger_id);
    }

    // AWB mode: translated via WHITE_BALANCE_MODES_MAP; unmapped values omitted.
    IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
        int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
                METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
        if (NAME_NOT_FOUND != val) {
            uint8_t fwkWhiteBalanceMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
            LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
        } else {
            LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
        }
    }

    // ANDROID_CONTROL_AE_MODE is not reported directly by the HAL: it is
    // deduced from three independent tags, gathered first with sentinel
    // defaults so "absent" is distinguishable from a real value.
    uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
    uint32_t aeMode = CAM_AE_MODE_MAX;
    int32_t flashMode = CAM_FLASH_MODE_MAX;
    int32_t redeye = -1;
    IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
        aeMode = *pAeMode;
    }
    IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
        flashMode = *pFlashMode;
    }
    IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
        redeye = *pRedeye;
    }

    // Precedence: red-eye reduction > auto/on flash > plain AE on/off.
    // If none of the tags resolve, no AE mode is published for this frame.
    if (1 == redeye) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
        int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
                flashMode);
        if (NAME_NOT_FOUND != val) {
            fwk_aeMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
        } else {
            LOGE("Unsupported flash mode %d", flashMode);
        }
    } else if (aeMode == CAM_AE_MODE_ON) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else if (aeMode == CAM_AE_MODE_OFF) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else {
        LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
              "flashMode:%d, aeMode:%u!!!",
                 redeye, flashMode, aeMode);
    }
    // Instant AEC: while active, count frames and stop skipping display frames
    // once AEC settles or the configured frame bound is exceeded. mResetInstantAEC
    // signals the reset to the capture path (handled elsewhere in this class).
    if (mInstantAEC) {
        // Increment frame Idx count untill a bound reached for instant AEC.
        mInstantAecFrameIdxCount++;
        IF_META_AVAILABLE(cam_3a_params_t, ae_params,
                CAM_INTF_META_AEC_INFO, metadata) {
            LOGH("ae_params->settled = %d",ae_params->settled);
            // If AEC settled, or if number of frames reached bound value,
            // should reset instant AEC.
            if (ae_params->settled ||
                    (mInstantAecFrameIdxCount > mAecSkipDisplayFrameBound)) {
                LOGH("AEC settled or Frames reached instantAEC bound, resetting instantAEC");
                mInstantAEC = false;
                mResetInstantAEC = true;
                mInstantAecFrameIdxCount = 0;
            }
        }
    }
    resultMetadata = camMetadata.release();
    return resultMetadata;
}
7032
7033/*===========================================================================
7034 * FUNCTION : dumpMetadataToFile
7035 *
7036 * DESCRIPTION: Dumps tuning metadata to file system
7037 *
7038 * PARAMETERS :
7039 * @meta : tuning metadata
7040 * @dumpFrameCount : current dump frame count
7041 * @enabled : Enable mask
7042 *
7043 *==========================================================================*/
void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
                                                   uint32_t &dumpFrameCount,
                                                   bool enabled,
                                                   const char *type,
                                                   uint32_t frameNumber)
{
    //Some sanity checks: every section size must fit inside its fixed-size
    //slot within meta.data, otherwise the section writes below would read
    //past the slot boundary.
    if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
        LOGE("Tuning sensor data size bigger than expected %d: %d",
              meta.tuning_sensor_data_size,
              TUNING_SENSOR_DATA_MAX);
        return;
    }

    if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
        LOGE("Tuning VFE data size bigger than expected %d: %d",
              meta.tuning_vfe_data_size,
              TUNING_VFE_DATA_MAX);
        return;
    }

    if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
        LOGE("Tuning CPP data size bigger than expected %d: %d",
              meta.tuning_cpp_data_size,
              TUNING_CPP_DATA_MAX);
        return;
    }

    if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
        LOGE("Tuning CAC data size bigger than expected %d: %d",
              meta.tuning_cac_data_size,
              TUNING_CAC_DATA_MAX);
        return;
    }
    //

    if(enabled){
        // Dump file: <QCAMERA_DUMP_FRM_LOCATION><timestamp><count>m_<type>_<frame>.bin
        char timeBuf[FILENAME_MAX];
        char buf[FILENAME_MAX];
        memset(buf, 0, sizeof(buf));
        memset(timeBuf, 0, sizeof(timeBuf));
        time_t current_time;
        struct tm * timeinfo;
        time (&current_time);
        timeinfo = localtime (&current_time);
        // localtime() can fail; in that case the path is just the dump dir.
        if (timeinfo != NULL) {
            strftime (timeBuf, sizeof(timeBuf),
                    QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
        }
        String8 filePath(timeBuf);
        snprintf(buf,
                sizeof(buf),
                "%dm_%s_%d.bin",
                dumpFrameCount,
                type,
                frameNumber);
        filePath.append(buf);
        int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
        if (file_fd >= 0) {
            // File layout: version word, five section-size words (mod3 is
            // forced to 0), then the four data sections copied from their
            // fixed offsets inside meta.data. The order of writes below IS
            // the on-disk format — do not reorder.
            // NOTE(review): write() results accumulate into written_len but
            // are never checked, so short/failed writes pass silently.
            ssize_t written_len = 0;
            meta.tuning_data_version = TUNING_DATA_VERSION;
            void *data = (void *)((uint8_t *)&meta.tuning_data_version);
            written_len += write(file_fd, data, sizeof(uint32_t));
            data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
            LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
            written_len += write(file_fd, data, sizeof(uint32_t));
            data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
            LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
            written_len += write(file_fd, data, sizeof(uint32_t));
            data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
            LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
            written_len += write(file_fd, data, sizeof(uint32_t));
            data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
            LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
            written_len += write(file_fd, data, sizeof(uint32_t));
            // mod3 section is not dumped; advertise it as empty.
            meta.tuning_mod3_data_size = 0;
            data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
            LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
            written_len += write(file_fd, data, sizeof(uint32_t));
            size_t total_size = meta.tuning_sensor_data_size;
            data = (void *)((uint8_t *)&meta.data);
            written_len += write(file_fd, data, total_size);
            total_size = meta.tuning_vfe_data_size;
            data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
            written_len += write(file_fd, data, total_size);
            total_size = meta.tuning_cpp_data_size;
            data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
            written_len += write(file_fd, data, total_size);
            total_size = meta.tuning_cac_data_size;
            data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
            written_len += write(file_fd, data, total_size);
            close(file_fd);
        }else {
            LOGE("fail to open file for metadata dumping");
        }
    }
}
7141
7142/*===========================================================================
7143 * FUNCTION : cleanAndSortStreamInfo
7144 *
7145 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
7146 * and sort them such that raw stream is at the end of the list
7147 * This is a workaround for camera daemon constraint.
7148 *
7149 * PARAMETERS : None
7150 *
7151 *==========================================================================*/
7152void QCamera3HardwareInterface::cleanAndSortStreamInfo()
7153{
7154 List<stream_info_t *> newStreamInfo;
7155
7156 /*clean up invalid streams*/
7157 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
7158 it != mStreamInfo.end();) {
7159 if(((*it)->status) == INVALID){
7160 QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
7161 delete channel;
7162 free(*it);
7163 it = mStreamInfo.erase(it);
7164 } else {
7165 it++;
7166 }
7167 }
7168
7169 // Move preview/video/callback/snapshot streams into newList
7170 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
7171 it != mStreamInfo.end();) {
7172 if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
7173 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
7174 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
7175 newStreamInfo.push_back(*it);
7176 it = mStreamInfo.erase(it);
7177 } else
7178 it++;
7179 }
7180 // Move raw streams into newList
7181 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
7182 it != mStreamInfo.end();) {
7183 newStreamInfo.push_back(*it);
7184 it = mStreamInfo.erase(it);
7185 }
7186
7187 mStreamInfo = newStreamInfo;
7188}
7189
7190/*===========================================================================
7191 * FUNCTION : extractJpegMetadata
7192 *
7193 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
7194 * JPEG metadata is cached in HAL, and return as part of capture
7195 * result when metadata is returned from camera daemon.
7196 *
7197 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
7198 * @request: capture request
7199 *
7200 *==========================================================================*/
7201void QCamera3HardwareInterface::extractJpegMetadata(
7202 CameraMetadata& jpegMetadata,
7203 const camera3_capture_request_t *request)
7204{
7205 CameraMetadata frame_settings;
7206 frame_settings = request->settings;
7207
7208 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
7209 jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
7210 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
7211 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
7212
7213 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
7214 jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
7215 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
7216 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
7217
7218 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
7219 jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
7220 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
7221 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
7222
7223 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
7224 jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
7225 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
7226 frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
7227
7228 if (frame_settings.exists(ANDROID_JPEG_QUALITY))
7229 jpegMetadata.update(ANDROID_JPEG_QUALITY,
7230 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
7231 frame_settings.find(ANDROID_JPEG_QUALITY).count);
7232
7233 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
7234 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
7235 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
7236 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
7237
7238 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
7239 int32_t thumbnail_size[2];
7240 thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
7241 thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
7242 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
7243 int32_t orientation =
7244 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007245 if ((!needJpegExifRotation()) && ((orientation == 90) || (orientation == 270))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07007246 //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
7247 int32_t temp;
7248 temp = thumbnail_size[0];
7249 thumbnail_size[0] = thumbnail_size[1];
7250 thumbnail_size[1] = temp;
7251 }
7252 }
7253 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
7254 thumbnail_size,
7255 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
7256 }
7257
7258}
7259
7260/*===========================================================================
7261 * FUNCTION : convertToRegions
7262 *
7263 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
7264 *
7265 * PARAMETERS :
7266 * @rect : cam_rect_t struct to convert
7267 * @region : int32_t destination array
7268 * @weight : if we are converting from cam_area_t, weight is valid
7269 * else weight = -1
7270 *
7271 *==========================================================================*/
7272void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
7273 int32_t *region, int weight)
7274{
7275 region[0] = rect.left;
7276 region[1] = rect.top;
7277 region[2] = rect.left + rect.width;
7278 region[3] = rect.top + rect.height;
7279 if (weight > -1) {
7280 region[4] = weight;
7281 }
7282}
7283
7284/*===========================================================================
7285 * FUNCTION : convertFromRegions
7286 *
7287 * DESCRIPTION: helper method to convert from array to cam_rect_t
7288 *
7289 * PARAMETERS :
7290 * @rect : cam_rect_t struct to convert
7291 * @region : int32_t destination array
7292 * @weight : if we are converting from cam_area_t, weight is valid
7293 * else weight = -1
7294 *
7295 *==========================================================================*/
7296void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
7297 const camera_metadata_t *settings, uint32_t tag)
7298{
7299 CameraMetadata frame_settings;
7300 frame_settings = settings;
7301 int32_t x_min = frame_settings.find(tag).data.i32[0];
7302 int32_t y_min = frame_settings.find(tag).data.i32[1];
7303 int32_t x_max = frame_settings.find(tag).data.i32[2];
7304 int32_t y_max = frame_settings.find(tag).data.i32[3];
7305 roi.weight = frame_settings.find(tag).data.i32[4];
7306 roi.rect.left = x_min;
7307 roi.rect.top = y_min;
7308 roi.rect.width = x_max - x_min;
7309 roi.rect.height = y_max - y_min;
7310}
7311
7312/*===========================================================================
7313 * FUNCTION : resetIfNeededROI
7314 *
7315 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
7316 * crop region
7317 *
7318 * PARAMETERS :
7319 * @roi : cam_area_t struct to resize
7320 * @scalerCropRegion : cam_crop_region_t region to compare against
7321 *
7322 *
7323 *==========================================================================*/
7324bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
7325 const cam_crop_region_t* scalerCropRegion)
7326{
7327 int32_t roi_x_max = roi->rect.width + roi->rect.left;
7328 int32_t roi_y_max = roi->rect.height + roi->rect.top;
7329 int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
7330 int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
7331
7332 /* According to spec weight = 0 is used to indicate roi needs to be disabled
7333 * without having this check the calculations below to validate if the roi
7334 * is inside scalar crop region will fail resulting in the roi not being
7335 * reset causing algorithm to continue to use stale roi window
7336 */
7337 if (roi->weight == 0) {
7338 return true;
7339 }
7340
7341 if ((roi_x_max < scalerCropRegion->left) ||
7342 // right edge of roi window is left of scalar crop's left edge
7343 (roi_y_max < scalerCropRegion->top) ||
7344 // bottom edge of roi window is above scalar crop's top edge
7345 (roi->rect.left > crop_x_max) ||
7346 // left edge of roi window is beyond(right) of scalar crop's right edge
7347 (roi->rect.top > crop_y_max)){
7348 // top edge of roi windo is above scalar crop's top edge
7349 return false;
7350 }
7351 if (roi->rect.left < scalerCropRegion->left) {
7352 roi->rect.left = scalerCropRegion->left;
7353 }
7354 if (roi->rect.top < scalerCropRegion->top) {
7355 roi->rect.top = scalerCropRegion->top;
7356 }
7357 if (roi_x_max > crop_x_max) {
7358 roi_x_max = crop_x_max;
7359 }
7360 if (roi_y_max > crop_y_max) {
7361 roi_y_max = crop_y_max;
7362 }
7363 roi->rect.width = roi_x_max - roi->rect.left;
7364 roi->rect.height = roi_y_max - roi->rect.top;
7365 return true;
7366}
7367
7368/*===========================================================================
7369 * FUNCTION : convertLandmarks
7370 *
7371 * DESCRIPTION: helper method to extract the landmarks from face detection info
7372 *
7373 * PARAMETERS :
7374 * @landmark_data : input landmark data to be converted
7375 * @landmarks : int32_t destination array
7376 *
7377 *
7378 *==========================================================================*/
7379void QCamera3HardwareInterface::convertLandmarks(
7380 cam_face_landmarks_info_t landmark_data,
7381 int32_t *landmarks)
7382{
Thierry Strudel04e026f2016-10-10 11:27:36 -07007383 if (landmark_data.is_left_eye_valid) {
7384 landmarks[LEFT_EYE_X] = (int32_t)landmark_data.left_eye_center.x;
7385 landmarks[LEFT_EYE_Y] = (int32_t)landmark_data.left_eye_center.y;
7386 } else {
7387 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
7388 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
7389 }
7390
7391 if (landmark_data.is_right_eye_valid) {
7392 landmarks[RIGHT_EYE_X] = (int32_t)landmark_data.right_eye_center.x;
7393 landmarks[RIGHT_EYE_Y] = (int32_t)landmark_data.right_eye_center.y;
7394 } else {
7395 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
7396 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
7397 }
7398
7399 if (landmark_data.is_mouth_valid) {
7400 landmarks[MOUTH_X] = (int32_t)landmark_data.mouth_center.x;
7401 landmarks[MOUTH_Y] = (int32_t)landmark_data.mouth_center.y;
7402 } else {
7403 landmarks[MOUTH_X] = FACE_INVALID_POINT;
7404 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
7405 }
7406}
7407
7408/*===========================================================================
7409 * FUNCTION : setInvalidLandmarks
7410 *
7411 * DESCRIPTION: helper method to set invalid landmarks
7412 *
7413 * PARAMETERS :
7414 * @landmarks : int32_t destination array
7415 *
7416 *
7417 *==========================================================================*/
7418void QCamera3HardwareInterface::setInvalidLandmarks(
7419 int32_t *landmarks)
7420{
7421 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
7422 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
7423 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
7424 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
7425 landmarks[MOUTH_X] = FACE_INVALID_POINT;
7426 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
Thierry Strudel3d639192016-09-09 11:52:26 -07007427}
7428
// Shorthand for fetching the mapped virtual address of buffer INDEX from a
// heap-memory object (e.g. QCamera3HeapMemory).
#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007430
7431/*===========================================================================
7432 * FUNCTION : getCapabilities
7433 *
7434 * DESCRIPTION: query camera capability from back-end
7435 *
7436 * PARAMETERS :
7437 * @ops : mm-interface ops structure
7438 * @cam_handle : camera handle for which we need capability
7439 *
7440 * RETURN : ptr type of capability structure
7441 * capability for success
7442 * NULL for failure
7443 *==========================================================================*/
7444cam_capability_t *QCamera3HardwareInterface::getCapabilities(mm_camera_ops_t *ops,
7445 uint32_t cam_handle)
7446{
7447 int rc = NO_ERROR;
7448 QCamera3HeapMemory *capabilityHeap = NULL;
7449 cam_capability_t *cap_ptr = NULL;
7450
7451 if (ops == NULL) {
7452 LOGE("Invalid arguments");
7453 return NULL;
7454 }
7455
7456 capabilityHeap = new QCamera3HeapMemory(1);
7457 if (capabilityHeap == NULL) {
7458 LOGE("creation of capabilityHeap failed");
7459 return NULL;
7460 }
7461
7462 /* Allocate memory for capability buffer */
7463 rc = capabilityHeap->allocate(sizeof(cam_capability_t));
7464 if(rc != OK) {
7465 LOGE("No memory for cappability");
7466 goto allocate_failed;
7467 }
7468
7469 /* Map memory for capability buffer */
7470 memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
7471
7472 rc = ops->map_buf(cam_handle,
7473 CAM_MAPPING_BUF_TYPE_CAPABILITY, capabilityHeap->getFd(0),
7474 sizeof(cam_capability_t), capabilityHeap->getPtr(0));
7475 if(rc < 0) {
7476 LOGE("failed to map capability buffer");
7477 rc = FAILED_TRANSACTION;
7478 goto map_failed;
7479 }
7480
7481 /* Query Capability */
7482 rc = ops->query_capability(cam_handle);
7483 if(rc < 0) {
7484 LOGE("failed to query capability");
7485 rc = FAILED_TRANSACTION;
7486 goto query_failed;
7487 }
7488
7489 cap_ptr = (cam_capability_t *)malloc(sizeof(cam_capability_t));
7490 if (cap_ptr == NULL) {
7491 LOGE("out of memory");
7492 rc = NO_MEMORY;
7493 goto query_failed;
7494 }
7495
7496 memset(cap_ptr, 0, sizeof(cam_capability_t));
7497 memcpy(cap_ptr, DATA_PTR(capabilityHeap, 0), sizeof(cam_capability_t));
7498
7499 int index;
7500 for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
7501 cam_analysis_info_t *p_analysis_info = &cap_ptr->analysis_info[index];
7502 p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
7503 p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
7504 }
7505
7506query_failed:
7507 ops->unmap_buf(cam_handle, CAM_MAPPING_BUF_TYPE_CAPABILITY);
7508map_failed:
7509 capabilityHeap->deallocate();
7510allocate_failed:
7511 delete capabilityHeap;
7512
7513 if (rc != NO_ERROR) {
7514 return NULL;
7515 } else {
7516 return cap_ptr;
7517 }
7518}
7519
Thierry Strudel3d639192016-09-09 11:52:26 -07007520/*===========================================================================
7521 * FUNCTION : initCapabilities
7522 *
7523 * DESCRIPTION: initialize camera capabilities in static data struct
7524 *
7525 * PARAMETERS :
7526 * @cameraId : camera Id
7527 *
7528 * RETURN : int32_t type of status
7529 * NO_ERROR -- success
7530 * none-zero failure code
7531 *==========================================================================*/
7532int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
7533{
7534 int rc = 0;
7535 mm_camera_vtbl_t *cameraHandle = NULL;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007536 uint32_t handle = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07007537
7538 rc = camera_open((uint8_t)cameraId, &cameraHandle);
7539 if (rc) {
7540 LOGE("camera_open failed. rc = %d", rc);
7541 goto open_failed;
7542 }
7543 if (!cameraHandle) {
7544 LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
7545 goto open_failed;
7546 }
7547
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007548 handle = get_main_camera_handle(cameraHandle->camera_handle);
7549 gCamCapability[cameraId] = getCapabilities(cameraHandle->ops, handle);
7550 if (gCamCapability[cameraId] == NULL) {
7551 rc = FAILED_TRANSACTION;
7552 goto failed_op;
Thierry Strudel3d639192016-09-09 11:52:26 -07007553 }
7554
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007555 gCamCapability[cameraId]->camera_index = cameraId;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007556 if (is_dual_camera_by_idx(cameraId)) {
7557 handle = get_aux_camera_handle(cameraHandle->camera_handle);
7558 gCamCapability[cameraId]->aux_cam_cap =
7559 getCapabilities(cameraHandle->ops, handle);
7560 if (gCamCapability[cameraId]->aux_cam_cap == NULL) {
7561 rc = FAILED_TRANSACTION;
7562 free(gCamCapability[cameraId]);
7563 goto failed_op;
7564 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08007565
7566 // Copy the main camera capability to main_cam_cap struct
7567 gCamCapability[cameraId]->main_cam_cap =
7568 (cam_capability_t *)malloc(sizeof(cam_capability_t));
7569 if (gCamCapability[cameraId]->main_cam_cap == NULL) {
7570 LOGE("out of memory");
7571 rc = NO_MEMORY;
7572 goto failed_op;
7573 }
7574 memcpy(gCamCapability[cameraId]->main_cam_cap, gCamCapability[cameraId],
7575 sizeof(cam_capability_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007576 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007577failed_op:
Thierry Strudel3d639192016-09-09 11:52:26 -07007578 cameraHandle->ops->close_camera(cameraHandle->camera_handle);
7579 cameraHandle = NULL;
7580open_failed:
7581 return rc;
7582}
7583
7584/*==========================================================================
 * FUNCTION   : get3AVersion
7586 *
7587 * DESCRIPTION: get the Q3A S/W version
7588 *
7589 * PARAMETERS :
7590 * @sw_version: Reference of Q3A structure which will hold version info upon
7591 * return
7592 *
7593 * RETURN : None
7594 *
7595 *==========================================================================*/
7596void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
7597{
7598 if(gCamCapability[mCameraId])
7599 sw_version = gCamCapability[mCameraId]->q3a_version;
7600 else
7601 LOGE("Capability structure NULL!");
7602}
7603
7604
7605/*===========================================================================
7606 * FUNCTION : initParameters
7607 *
7608 * DESCRIPTION: initialize camera parameters
7609 *
7610 * PARAMETERS :
7611 *
7612 * RETURN : int32_t type of status
7613 * NO_ERROR -- success
7614 * none-zero failure code
7615 *==========================================================================*/
7616int QCamera3HardwareInterface::initParameters()
7617{
7618 int rc = 0;
7619
7620 //Allocate Set Param Buffer
7621 mParamHeap = new QCamera3HeapMemory(1);
7622 rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
7623 if(rc != OK) {
7624 rc = NO_MEMORY;
7625 LOGE("Failed to allocate SETPARM Heap memory");
7626 delete mParamHeap;
7627 mParamHeap = NULL;
7628 return rc;
7629 }
7630
7631 //Map memory for parameters buffer
7632 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
7633 CAM_MAPPING_BUF_TYPE_PARM_BUF,
7634 mParamHeap->getFd(0),
7635 sizeof(metadata_buffer_t),
7636 (metadata_buffer_t *) DATA_PTR(mParamHeap,0));
7637 if(rc < 0) {
7638 LOGE("failed to map SETPARM buffer");
7639 rc = FAILED_TRANSACTION;
7640 mParamHeap->deallocate();
7641 delete mParamHeap;
7642 mParamHeap = NULL;
7643 return rc;
7644 }
7645
7646 mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
7647
7648 mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
7649 return rc;
7650}
7651
7652/*===========================================================================
7653 * FUNCTION : deinitParameters
7654 *
7655 * DESCRIPTION: de-initialize camera parameters
7656 *
7657 * PARAMETERS :
7658 *
7659 * RETURN : NONE
7660 *==========================================================================*/
void QCamera3HardwareInterface::deinitParameters()
{
    // Unmap the parameter buffer from the backend BEFORE releasing the
    // heap memory that backs it; the order of these calls matters.
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_PARM_BUF);

    mParamHeap->deallocate();
    delete mParamHeap;
    mParamHeap = NULL;

    // mParameters pointed into mParamHeap's storage; it is invalid now.
    mParameters = NULL;

    // mPrevParameters was a separate malloc'd copy (see initParameters).
    free(mPrevParameters);
    mPrevParameters = NULL;
}
7675
7676/*===========================================================================
7677 * FUNCTION : calcMaxJpegSize
7678 *
7679 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
7680 *
7681 * PARAMETERS :
7682 *
7683 * RETURN : max_jpeg_size
7684 *==========================================================================*/
7685size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
7686{
7687 size_t max_jpeg_size = 0;
7688 size_t temp_width, temp_height;
7689 size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
7690 MAX_SIZES_CNT);
7691 for (size_t i = 0; i < count; i++) {
7692 temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
7693 temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
7694 if (temp_width * temp_height > max_jpeg_size ) {
7695 max_jpeg_size = temp_width * temp_height;
7696 }
7697 }
7698 max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
7699 return max_jpeg_size;
7700}
7701
7702/*===========================================================================
7703 * FUNCTION : getMaxRawSize
7704 *
7705 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
7706 *
7707 * PARAMETERS :
7708 *
7709 * RETURN : Largest supported Raw Dimension
7710 *==========================================================================*/
7711cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
7712{
7713 int max_width = 0;
7714 cam_dimension_t maxRawSize;
7715
7716 memset(&maxRawSize, 0, sizeof(cam_dimension_t));
7717 for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
7718 if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
7719 max_width = gCamCapability[camera_id]->raw_dim[i].width;
7720 maxRawSize = gCamCapability[camera_id]->raw_dim[i];
7721 }
7722 }
7723 return maxRawSize;
7724}
7725
7726
7727/*===========================================================================
7728 * FUNCTION : calcMaxJpegDim
7729 *
7730 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
7731 *
7732 * PARAMETERS :
7733 *
7734 * RETURN : max_jpeg_dim
7735 *==========================================================================*/
7736cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
7737{
7738 cam_dimension_t max_jpeg_dim;
7739 cam_dimension_t curr_jpeg_dim;
7740 max_jpeg_dim.width = 0;
7741 max_jpeg_dim.height = 0;
7742 curr_jpeg_dim.width = 0;
7743 curr_jpeg_dim.height = 0;
7744 for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
7745 curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
7746 curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
7747 if (curr_jpeg_dim.width * curr_jpeg_dim.height >
7748 max_jpeg_dim.width * max_jpeg_dim.height ) {
7749 max_jpeg_dim.width = curr_jpeg_dim.width;
7750 max_jpeg_dim.height = curr_jpeg_dim.height;
7751 }
7752 }
7753 return max_jpeg_dim;
7754}
7755
7756/*===========================================================================
7757 * FUNCTION : addStreamConfig
7758 *
7759 * DESCRIPTION: adds the stream configuration to the array
7760 *
7761 * PARAMETERS :
7762 * @available_stream_configs : pointer to stream configuration array
7763 * @scalar_format : scalar format
7764 * @dim : configuration dimension
7765 * @config_type : input or output configuration type
7766 *
7767 * RETURN : NONE
7768 *==========================================================================*/
7769void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
7770 int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
7771{
7772 available_stream_configs.add(scalar_format);
7773 available_stream_configs.add(dim.width);
7774 available_stream_configs.add(dim.height);
7775 available_stream_configs.add(config_type);
7776}
7777
7778/*===========================================================================
 * FUNCTION   : supportBurstCapture
7780 *
7781 * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
7782 *
7783 * PARAMETERS :
7784 * @cameraId : camera Id
7785 *
7786 * RETURN : true if camera supports BURST_CAPTURE
7787 * false otherwise
7788 *==========================================================================*/
7789bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
7790{
7791 const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
7792 const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
7793 const int32_t highResWidth = 3264;
7794 const int32_t highResHeight = 2448;
7795
7796 if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
7797 // Maximum resolution images cannot be captured at >= 10fps
7798 // -> not supporting BURST_CAPTURE
7799 return false;
7800 }
7801
7802 if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
7803 // Maximum resolution images can be captured at >= 20fps
7804 // --> supporting BURST_CAPTURE
7805 return true;
7806 }
7807
7808 // Find the smallest highRes resolution, or largest resolution if there is none
7809 size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
7810 MAX_SIZES_CNT);
7811 size_t highRes = 0;
7812 while ((highRes + 1 < totalCnt) &&
7813 (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
7814 gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
7815 highResWidth * highResHeight)) {
7816 highRes++;
7817 }
7818 if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
7819 return true;
7820 } else {
7821 return false;
7822 }
7823}
7824
7825/*===========================================================================
7826 * FUNCTION : initStaticMetadata
7827 *
7828 * DESCRIPTION: initialize the static metadata
7829 *
7830 * PARAMETERS :
7831 * @cameraId : camera Id
7832 *
7833 * RETURN : int32_t type of status
7834 * 0 -- success
7835 * non-zero failure code
7836 *==========================================================================*/
7837int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
7838{
7839 int rc = 0;
7840 CameraMetadata staticInfo;
7841 size_t count = 0;
7842 bool limitedDevice = false;
7843 char prop[PROPERTY_VALUE_MAX];
7844 bool supportBurst = false;
7845
7846 supportBurst = supportBurstCapture(cameraId);
7847
7848 /* If sensor is YUV sensor (no raw support) or if per-frame control is not
7849 * guaranteed or if min fps of max resolution is less than 20 fps, its
7850 * advertised as limited device*/
7851 limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
7852 (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
7853 (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
7854 !supportBurst;
7855
7856 uint8_t supportedHwLvl = limitedDevice ?
7857 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007858#ifndef USE_HAL_3_3
7859 // LEVEL_3 - This device will support level 3.
7860 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
7861#else
Thierry Strudel3d639192016-09-09 11:52:26 -07007862 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007863#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007864
7865 staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
7866 &supportedHwLvl, 1);
7867
7868 bool facingBack = false;
7869 if ((gCamCapability[cameraId]->position == CAM_POSITION_BACK) ||
7870 (gCamCapability[cameraId]->position == CAM_POSITION_BACK_AUX)) {
7871 facingBack = true;
7872 }
7873 /*HAL 3 only*/
7874 staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
7875 &gCamCapability[cameraId]->min_focus_distance, 1);
7876
7877 staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
7878 &gCamCapability[cameraId]->hyper_focal_distance, 1);
7879
7880 /*should be using focal lengths but sensor doesn't provide that info now*/
7881 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
7882 &gCamCapability[cameraId]->focal_length,
7883 1);
7884
7885 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
7886 gCamCapability[cameraId]->apertures,
7887 MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
7888
7889 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
7890 gCamCapability[cameraId]->filter_densities,
7891 MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
7892
7893
7894 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
7895 (uint8_t *)gCamCapability[cameraId]->optical_stab_modes,
7896 MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count));
7897
7898 int32_t lens_shading_map_size[] = {
7899 MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
7900 MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
7901 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
7902 lens_shading_map_size,
7903 sizeof(lens_shading_map_size)/sizeof(int32_t));
7904
7905 staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
7906 gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
7907
7908 staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
7909 gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
7910
7911 staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
7912 &gCamCapability[cameraId]->max_frame_duration, 1);
7913
7914 camera_metadata_rational baseGainFactor = {
7915 gCamCapability[cameraId]->base_gain_factor.numerator,
7916 gCamCapability[cameraId]->base_gain_factor.denominator};
7917 staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
7918 &baseGainFactor, 1);
7919
7920 staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
7921 (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
7922
7923 int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
7924 gCamCapability[cameraId]->pixel_array_size.height};
7925 staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
7926 pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
7927
7928 int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
7929 gCamCapability[cameraId]->active_array_size.top,
7930 gCamCapability[cameraId]->active_array_size.width,
7931 gCamCapability[cameraId]->active_array_size.height};
7932 staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
7933 active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
7934
7935 staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
7936 &gCamCapability[cameraId]->white_level, 1);
7937
7938 staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
7939 gCamCapability[cameraId]->black_level_pattern, BLACK_LEVEL_PATTERN_CNT);
7940
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007941#ifndef USE_HAL_3_3
// Advertise sensor optical black regions (ANDROID_SENSOR_OPTICAL_BLACK_REGIONS).
// Compiled out for HAL 3.3 builds (USE_HAL_3_3); the tag is only published on
// newer HAL versions.
7942 bool hasBlackRegions = false;
// Clamp the vendor-reported region count so the fixed-size copy buffer below
// cannot be overrun; the capability struct itself is updated so later readers
// see the clamped value too.
7943 if (gCamCapability[cameraId]->optical_black_region_count > MAX_OPTICAL_BLACK_REGIONS) {
7944 LOGW("black_region_count: %d is bounded to %d",
7945 gCamCapability[cameraId]->optical_black_region_count, MAX_OPTICAL_BLACK_REGIONS);
7946 gCamCapability[cameraId]->optical_black_region_count = MAX_OPTICAL_BLACK_REGIONS;
7947 }
7948 if (gCamCapability[cameraId]->optical_black_region_count != 0) {
// Each region occupies 4 int32 values in the flat table (hence the * 4 below);
// copy them verbatim from the capability table into the metadata entry.
7949 int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
7950 for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i++) {
7951 opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
7952 }
7953 staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
7954 opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
// NOTE(review): hasBlackRegions is presumably consumed later in this function
// (outside this view) when building available-key lists — confirm downstream use.
7955 hasBlackRegions = true;
7956 }
7957#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007958 staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
7959 &gCamCapability[cameraId]->flash_charge_duration, 1);
7960
7961 staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
7962 &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
7963
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007964 // SOF timestamp is based on monotonic_boottime. So advertize REALTIME timesource
7965 // REALTIME defined in HAL3 API is same as linux's CLOCK_BOOTTIME
7966 // Ref: kernel/...../msm_isp_util.c: msm_isp_get_timestamp: get_monotonic_boottime
7967 uint8_t timestampSource = ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME;
Thierry Strudel3d639192016-09-09 11:52:26 -07007968 staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
7969 &timestampSource, 1);
7970
7971 staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
7972 &gCamCapability[cameraId]->histogram_size, 1);
7973
7974 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
7975 &gCamCapability[cameraId]->max_histogram_count, 1);
7976
7977 int32_t sharpness_map_size[] = {
7978 gCamCapability[cameraId]->sharpness_map_size.width,
7979 gCamCapability[cameraId]->sharpness_map_size.height};
7980
7981 staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
7982 sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
7983
7984 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
7985 &gCamCapability[cameraId]->max_sharpness_map_value, 1);
7986
7987 int32_t scalar_formats[] = {
7988 ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
7989 ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
7990 ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
7991 ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
7992 HAL_PIXEL_FORMAT_RAW10,
7993 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
7994 size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(int32_t);
7995 staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
7996 scalar_formats,
7997 scalar_formats_count);
7998
7999 int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
8000 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
8001 makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
8002 count, MAX_SIZES_CNT, available_processed_sizes);
8003 staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
8004 available_processed_sizes, count * 2);
8005
8006 int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
8007 count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
8008 makeTable(gCamCapability[cameraId]->raw_dim,
8009 count, MAX_SIZES_CNT, available_raw_sizes);
8010 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
8011 available_raw_sizes, count * 2);
8012
8013 int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
8014 count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
8015 makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
8016 count, MAX_SIZES_CNT, available_fps_ranges);
8017 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
8018 available_fps_ranges, count * 2);
8019
8020 camera_metadata_rational exposureCompensationStep = {
8021 gCamCapability[cameraId]->exp_compensation_step.numerator,
8022 gCamCapability[cameraId]->exp_compensation_step.denominator};
8023 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
8024 &exposureCompensationStep, 1);
8025
// Video stabilization: OFF is always advertised; ON is added only when all of
// the following hold: back-facing camera, the persist.camera.eis.enable
// property is set (defaults to "1"), and the sensor reports EIS 2.0 or 3.0
// among its supported IS types.
8026 Vector<uint8_t> availableVstabModes;
8027 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
8028 char eis_prop[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008029 bool eisSupported = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07008030 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008031 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07008032 uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
// Scan the (bounded) supported IS-type list for an EIS-capable entry.
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008033 count = IS_TYPE_MAX;
8034 count = MIN(gCamCapability[cameraId]->supported_is_types_cnt, count);
8035 for (size_t i = 0; i < count; i++) {
8036 if ((gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
8037 (gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
8038 eisSupported = true;
8039 break;
8040 }
8041 }
8042 if (facingBack && eis_prop_set && eisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07008043 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
8044 }
8045 staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
8046 availableVstabModes.array(), availableVstabModes.size());
8047
8048 /*HAL 1 and HAL 3 common*/
8049 uint32_t zoomSteps = gCamCapability[cameraId]->zoom_ratio_tbl_cnt;
8050 uint32_t maxZoomStep = gCamCapability[cameraId]->zoom_ratio_tbl[zoomSteps - 1];
8051 uint32_t minZoomStep = 100; //as per HAL1/API1 spec
8052 float maxZoom = maxZoomStep/minZoomStep;
8053 staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
8054 &maxZoom, 1);
8055
8056 uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
8057 staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
8058
8059 int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
8060 if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
8061 max3aRegions[2] = 0; /* AF not supported */
8062 staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
8063 max3aRegions, 3);
8064
8065 /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
8066 memset(prop, 0, sizeof(prop));
8067 property_get("persist.camera.facedetect", prop, "1");
8068 uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
8069 LOGD("Support face detection mode: %d",
8070 supportedFaceDetectMode);
8071
8072 int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
Thierry Strudel04e026f2016-10-10 11:27:36 -07008073 /* support mode should be OFF if max number of face is 0 */
8074 if (maxFaces <= 0) {
8075 supportedFaceDetectMode = 0;
8076 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008077 Vector<uint8_t> availableFaceDetectModes;
8078 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
8079 if (supportedFaceDetectMode == 1) {
8080 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
8081 } else if (supportedFaceDetectMode == 2) {
8082 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
8083 } else if (supportedFaceDetectMode == 3) {
8084 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
8085 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
8086 } else {
8087 maxFaces = 0;
8088 }
8089 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
8090 availableFaceDetectModes.array(),
8091 availableFaceDetectModes.size());
8092 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
8093 (int32_t *)&maxFaces, 1);
8094
8095 int32_t exposureCompensationRange[] = {
8096 gCamCapability[cameraId]->exposure_compensation_min,
8097 gCamCapability[cameraId]->exposure_compensation_max};
8098 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
8099 exposureCompensationRange,
8100 sizeof(exposureCompensationRange)/sizeof(int32_t));
8101
8102 uint8_t lensFacing = (facingBack) ?
8103 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
8104 staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
8105
8106 staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
8107 available_thumbnail_sizes,
8108 sizeof(available_thumbnail_sizes)/sizeof(int32_t));
8109
8110 /*all sizes will be clubbed into this tag*/
8111 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
8112 /*android.scaler.availableStreamConfigurations*/
8113 Vector<int32_t> available_stream_configs;
8114 cam_dimension_t active_array_dim;
8115 active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
8116 active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
8117 /* Add input/output stream configurations for each scalar formats*/
8118 for (size_t j = 0; j < scalar_formats_count; j++) {
8119 switch (scalar_formats[j]) {
8120 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
8121 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
8122 case HAL_PIXEL_FORMAT_RAW10:
8123 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
8124 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
8125 addStreamConfig(available_stream_configs, scalar_formats[j],
8126 gCamCapability[cameraId]->raw_dim[i],
8127 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
8128 }
8129 break;
8130 case HAL_PIXEL_FORMAT_BLOB:
8131 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
8132 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
8133 addStreamConfig(available_stream_configs, scalar_formats[j],
8134 gCamCapability[cameraId]->picture_sizes_tbl[i],
8135 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
8136 }
8137 break;
8138 case HAL_PIXEL_FORMAT_YCbCr_420_888:
8139 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
8140 default:
8141 cam_dimension_t largest_picture_size;
8142 memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
8143 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
8144 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
8145 addStreamConfig(available_stream_configs, scalar_formats[j],
8146 gCamCapability[cameraId]->picture_sizes_tbl[i],
8147 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
8148 /* Book keep largest */
8149 if (gCamCapability[cameraId]->picture_sizes_tbl[i].width
8150 >= largest_picture_size.width &&
8151 gCamCapability[cameraId]->picture_sizes_tbl[i].height
8152 >= largest_picture_size.height)
8153 largest_picture_size = gCamCapability[cameraId]->picture_sizes_tbl[i];
8154 }
8155 /*For below 2 formats we also support i/p streams for reprocessing advertise those*/
8156 if (scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
8157 scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) {
8158 addStreamConfig(available_stream_configs, scalar_formats[j],
8159 largest_picture_size,
8160 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
8161 }
8162 break;
8163 }
8164 }
8165
8166 staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
8167 available_stream_configs.array(), available_stream_configs.size());
8168 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
8169 staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
8170
8171 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
8172 staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
8173
8174 /* android.scaler.availableMinFrameDurations */
8175 Vector<int64_t> available_min_durations;
8176 for (size_t j = 0; j < scalar_formats_count; j++) {
8177 switch (scalar_formats[j]) {
8178 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
8179 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
8180 case HAL_PIXEL_FORMAT_RAW10:
8181 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
8182 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
8183 available_min_durations.add(scalar_formats[j]);
8184 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
8185 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
8186 available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
8187 }
8188 break;
8189 default:
8190 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
8191 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
8192 available_min_durations.add(scalar_formats[j]);
8193 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
8194 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
8195 available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
8196 }
8197 break;
8198 }
8199 }
8200 staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
8201 available_min_durations.array(), available_min_durations.size());
8202
8203 Vector<int32_t> available_hfr_configs;
8204 for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
8205 int32_t fps = 0;
8206 switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
8207 case CAM_HFR_MODE_60FPS:
8208 fps = 60;
8209 break;
8210 case CAM_HFR_MODE_90FPS:
8211 fps = 90;
8212 break;
8213 case CAM_HFR_MODE_120FPS:
8214 fps = 120;
8215 break;
8216 case CAM_HFR_MODE_150FPS:
8217 fps = 150;
8218 break;
8219 case CAM_HFR_MODE_180FPS:
8220 fps = 180;
8221 break;
8222 case CAM_HFR_MODE_210FPS:
8223 fps = 210;
8224 break;
8225 case CAM_HFR_MODE_240FPS:
8226 fps = 240;
8227 break;
8228 case CAM_HFR_MODE_480FPS:
8229 fps = 480;
8230 break;
8231 case CAM_HFR_MODE_OFF:
8232 case CAM_HFR_MODE_MAX:
8233 default:
8234 break;
8235 }
8236
8237 /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
8238 if (fps >= MIN_FPS_FOR_BATCH_MODE) {
8239 /* For each HFR frame rate, need to advertise one variable fps range
8240 * and one fixed fps range per dimension. Eg: for 120 FPS, advertise [30, 120]
8241 * and [120, 120]. While camcorder preview alone is running [30, 120] is
8242 * set by the app. When video recording is started, [120, 120] is
8243 * set. This way sensor configuration does not change when recording
8244 * is started */
8245
8246 /* (width, height, fps_min, fps_max, batch_size_max) */
8247 for (size_t j = 0; j < gCamCapability[cameraId]->hfr_tbl[i].dim_cnt &&
8248 j < MAX_SIZES_CNT; j++) {
8249 available_hfr_configs.add(
8250 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
8251 available_hfr_configs.add(
8252 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
8253 available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
8254 available_hfr_configs.add(fps);
8255 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
8256
8257 /* (width, height, fps_min, fps_max, batch_size_max) */
8258 available_hfr_configs.add(
8259 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
8260 available_hfr_configs.add(
8261 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
8262 available_hfr_configs.add(fps);
8263 available_hfr_configs.add(fps);
8264 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
8265 }
8266 }
8267 }
8268 //Advertise HFR capability only if the property is set
8269 memset(prop, 0, sizeof(prop));
8270 property_get("persist.camera.hal3hfr.enable", prop, "1");
8271 uint8_t hfrEnable = (uint8_t)atoi(prop);
8272
8273 if(hfrEnable && available_hfr_configs.array()) {
8274 staticInfo.update(
8275 ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
8276 available_hfr_configs.array(), available_hfr_configs.size());
8277 }
8278
8279 int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
8280 staticInfo.update(ANDROID_JPEG_MAX_SIZE,
8281 &max_jpeg_size, 1);
8282
8283 uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
8284 size_t size = 0;
8285 count = CAM_EFFECT_MODE_MAX;
8286 count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
8287 for (size_t i = 0; i < count; i++) {
8288 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
8289 gCamCapability[cameraId]->supported_effects[i]);
8290 if (NAME_NOT_FOUND != val) {
8291 avail_effects[size] = (uint8_t)val;
8292 size++;
8293 }
8294 }
8295 staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
8296 avail_effects,
8297 size);
8298
8299 uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
8300 uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
8301 size_t supported_scene_modes_cnt = 0;
8302 count = CAM_SCENE_MODE_MAX;
8303 count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
8304 for (size_t i = 0; i < count; i++) {
8305 if (gCamCapability[cameraId]->supported_scene_modes[i] !=
8306 CAM_SCENE_MODE_OFF) {
8307 int val = lookupFwkName(SCENE_MODES_MAP,
8308 METADATA_MAP_SIZE(SCENE_MODES_MAP),
8309 gCamCapability[cameraId]->supported_scene_modes[i]);
8310 if (NAME_NOT_FOUND != val) {
8311 avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
8312 supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
8313 supported_scene_modes_cnt++;
8314 }
8315 }
8316 }
8317 staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
8318 avail_scene_modes,
8319 supported_scene_modes_cnt);
8320
8321 uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
8322 makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
8323 supported_scene_modes_cnt,
8324 CAM_SCENE_MODE_MAX,
8325 scene_mode_overrides,
8326 supported_indexes,
8327 cameraId);
8328
8329 if (supported_scene_modes_cnt == 0) {
8330 supported_scene_modes_cnt = 1;
8331 avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
8332 }
8333
8334 staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
8335 scene_mode_overrides, supported_scene_modes_cnt * 3);
8336
8337 uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
8338 ANDROID_CONTROL_MODE_AUTO,
8339 ANDROID_CONTROL_MODE_USE_SCENE_MODE};
8340 staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
8341 available_control_modes,
8342 3);
8343
8344 uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
8345 size = 0;
8346 count = CAM_ANTIBANDING_MODE_MAX;
8347 count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
8348 for (size_t i = 0; i < count; i++) {
8349 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
8350 gCamCapability[cameraId]->supported_antibandings[i]);
8351 if (NAME_NOT_FOUND != val) {
8352 avail_antibanding_modes[size] = (uint8_t)val;
8353 size++;
8354 }
8355
8356 }
8357 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
8358 avail_antibanding_modes,
8359 size);
8360
8361 uint8_t avail_abberation_modes[] = {
8362 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
8363 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
8364 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
8365 count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
8366 count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
8367 if (0 == count) {
8368 // If no aberration correction modes are available for a device, this advertise OFF mode
8369 size = 1;
8370 } else {
8371 // If count is not zero then atleast one among the FAST or HIGH quality is supported
8372 // So, advertize all 3 modes if atleast any one mode is supported as per the
8373 // new M requirement
8374 size = 3;
8375 }
8376 staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
8377 avail_abberation_modes,
8378 size);
8379
8380 uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
8381 size = 0;
8382 count = CAM_FOCUS_MODE_MAX;
8383 count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
8384 for (size_t i = 0; i < count; i++) {
8385 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
8386 gCamCapability[cameraId]->supported_focus_modes[i]);
8387 if (NAME_NOT_FOUND != val) {
8388 avail_af_modes[size] = (uint8_t)val;
8389 size++;
8390 }
8391 }
8392 staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
8393 avail_af_modes,
8394 size);
8395
8396 uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
8397 size = 0;
8398 count = CAM_WB_MODE_MAX;
8399 count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
8400 for (size_t i = 0; i < count; i++) {
8401 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
8402 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
8403 gCamCapability[cameraId]->supported_white_balances[i]);
8404 if (NAME_NOT_FOUND != val) {
8405 avail_awb_modes[size] = (uint8_t)val;
8406 size++;
8407 }
8408 }
8409 staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
8410 avail_awb_modes,
8411 size);
8412
8413 uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
8414 count = CAM_FLASH_FIRING_LEVEL_MAX;
8415 count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
8416 count);
8417 for (size_t i = 0; i < count; i++) {
8418 available_flash_levels[i] =
8419 gCamCapability[cameraId]->supported_firing_levels[i];
8420 }
8421 staticInfo.update(ANDROID_FLASH_FIRING_POWER,
8422 available_flash_levels, count);
8423
8424 uint8_t flashAvailable;
8425 if (gCamCapability[cameraId]->flash_available)
8426 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
8427 else
8428 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
8429 staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
8430 &flashAvailable, 1);
8431
8432 Vector<uint8_t> avail_ae_modes;
8433 count = CAM_AE_MODE_MAX;
8434 count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
8435 for (size_t i = 0; i < count; i++) {
8436 avail_ae_modes.add(gCamCapability[cameraId]->supported_ae_modes[i]);
8437 }
8438 if (flashAvailable) {
8439 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
8440 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
8441 }
8442 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
8443 avail_ae_modes.array(),
8444 avail_ae_modes.size());
8445
8446 int32_t sensitivity_range[2];
8447 sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
8448 sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
8449 staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
8450 sensitivity_range,
8451 sizeof(sensitivity_range) / sizeof(int32_t));
8452
8453 staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
8454 &gCamCapability[cameraId]->max_analog_sensitivity,
8455 1);
8456
8457 int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
8458 staticInfo.update(ANDROID_SENSOR_ORIENTATION,
8459 &sensor_orientation,
8460 1);
8461
8462 int32_t max_output_streams[] = {
8463 MAX_STALLING_STREAMS,
8464 MAX_PROCESSED_STREAMS,
8465 MAX_RAW_STREAMS};
8466 staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
8467 max_output_streams,
8468 sizeof(max_output_streams)/sizeof(max_output_streams[0]));
8469
8470 uint8_t avail_leds = 0;
8471 staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
8472 &avail_leds, 0);
8473
8474 uint8_t focus_dist_calibrated;
8475 int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
8476 gCamCapability[cameraId]->focus_dist_calibrated);
8477 if (NAME_NOT_FOUND != val) {
8478 focus_dist_calibrated = (uint8_t)val;
8479 staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
8480 &focus_dist_calibrated, 1);
8481 }
8482
8483 int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
8484 size = 0;
8485 count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
8486 MAX_TEST_PATTERN_CNT);
8487 for (size_t i = 0; i < count; i++) {
8488 int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
8489 gCamCapability[cameraId]->supported_test_pattern_modes[i]);
8490 if (NAME_NOT_FOUND != testpatternMode) {
8491 avail_testpattern_modes[size] = testpatternMode;
8492 size++;
8493 }
8494 }
8495 staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
8496 avail_testpattern_modes,
8497 size);
8498
8499 uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
8500 staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
8501 &max_pipeline_depth,
8502 1);
8503
8504 int32_t partial_result_count = PARTIAL_RESULT_COUNT;
8505 staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
8506 &partial_result_count,
8507 1);
8508
8509 int32_t max_stall_duration = MAX_REPROCESS_STALL;
8510 staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
8511
// Build ANDROID_REQUEST_AVAILABLE_CAPABILITIES. Baseline capabilities are
// unconditional; BURST_CAPTURE, CONSTRAINED_HIGH_SPEED_VIDEO and RAW are
// gated on conditions computed earlier in this function.
8512 Vector<uint8_t> available_capabilities;
8513 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
8514 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
8515 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
8516 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
// NOTE(review): supportBurst is computed before this view — confirm its criteria.
8517 if (supportBurst) {
8518 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
8519 }
8520 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
8521 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
// High-speed video is advertised only when the HFR property is enabled and at
// least one HFR configuration was collected above (array() is non-null only
// when the vector has storage).
8522 if (hfrEnable && available_hfr_configs.array()) {
8523 available_capabilities.add(
8524 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
8525 }
8526
// YUV (non-bayer) sensors cannot produce RAW output, so skip the RAW capability.
8527 if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
8528 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
8529 }
8530 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
8531 available_capabilities.array(),
8532 available_capabilities.size());
8533
8534 //aeLockAvailable to be set to true if capabilities has MANUAL_SENSOR or BURST_CAPTURE
8535 //Assumption is that all bayer cameras support MANUAL_SENSOR.
8536 uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
8537 ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
8538
8539 staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
8540 &aeLockAvailable, 1);
8541
8542 //awbLockAvailable to be set to true if capabilities has MANUAL_POST_PROCESSING or
8543 //BURST_CAPTURE. Assumption is that all bayer cameras support MANUAL_POST_PROCESSING.
8544 uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
8545 ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
8546
8547 staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
8548 &awbLockAvailable, 1);
8549
8550 int32_t max_input_streams = 1;
8551 staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
8552 &max_input_streams,
8553 1);
8554
8555 /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
8556 int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
8557 HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
8558 HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
8559 HAL_PIXEL_FORMAT_YCbCr_420_888};
8560 staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
8561 io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));
8562
8563 int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
8564 staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
8565 &max_latency,
8566 1);
8567
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008568#ifndef USE_HAL_3_3
8569 int32_t isp_sensitivity_range[2];
8570 isp_sensitivity_range[0] =
8571 gCamCapability[cameraId]->isp_sensitivity_range.min_sensitivity;
8572 isp_sensitivity_range[1] =
8573 gCamCapability[cameraId]->isp_sensitivity_range.max_sensitivity;
8574 staticInfo.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
8575 isp_sensitivity_range,
8576 sizeof(isp_sensitivity_range) / sizeof(isp_sensitivity_range[0]));
8577#endif
8578
Thierry Strudel3d639192016-09-09 11:52:26 -07008579 uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
8580 ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
8581 staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
8582 available_hot_pixel_modes,
8583 sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));
8584
8585 uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
8586 ANDROID_SHADING_MODE_FAST,
8587 ANDROID_SHADING_MODE_HIGH_QUALITY};
8588 staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
8589 available_shading_modes,
8590 3);
8591
8592 uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
8593 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
8594 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
8595 available_lens_shading_map_modes,
8596 2);
8597
8598 uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
8599 ANDROID_EDGE_MODE_FAST,
8600 ANDROID_EDGE_MODE_HIGH_QUALITY,
8601 ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
8602 staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
8603 available_edge_modes,
8604 sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));
8605
8606 uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
8607 ANDROID_NOISE_REDUCTION_MODE_FAST,
8608 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
8609 ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
8610 ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
8611 staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
8612 available_noise_red_modes,
8613 sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));
8614
8615 uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
8616 ANDROID_TONEMAP_MODE_FAST,
8617 ANDROID_TONEMAP_MODE_HIGH_QUALITY};
8618 staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
8619 available_tonemap_modes,
8620 sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));
8621
8622 uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
8623 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
8624 available_hot_pixel_map_modes,
8625 sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
8626
8627 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
8628 gCamCapability[cameraId]->reference_illuminant1);
8629 if (NAME_NOT_FOUND != val) {
8630 uint8_t fwkReferenceIlluminant = (uint8_t)val;
8631 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
8632 }
8633
8634 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
8635 gCamCapability[cameraId]->reference_illuminant2);
8636 if (NAME_NOT_FOUND != val) {
8637 uint8_t fwkReferenceIlluminant = (uint8_t)val;
8638 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
8639 }
8640
8641 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
8642 (void *)gCamCapability[cameraId]->forward_matrix1,
8643 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
8644
8645 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
8646 (void *)gCamCapability[cameraId]->forward_matrix2,
8647 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
8648
8649 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
8650 (void *)gCamCapability[cameraId]->color_transform1,
8651 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
8652
8653 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
8654 (void *)gCamCapability[cameraId]->color_transform2,
8655 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
8656
8657 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
8658 (void *)gCamCapability[cameraId]->calibration_transform1,
8659 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
8660
8661 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
8662 (void *)gCamCapability[cameraId]->calibration_transform2,
8663 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
8664
8665 int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
8666 ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
8667 ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
8668 ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
8669 ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
8670 ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
8671 ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
8672 ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
8673 ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
8674 ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
8675 ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
8676 ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
8677 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
8678 ANDROID_JPEG_GPS_COORDINATES,
8679 ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
8680 ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
8681 ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
8682 ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
8683 ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
8684 ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
8685 ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
8686 ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
8687 ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
8688 ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008689#ifndef USE_HAL_3_3
8690 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
8691#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07008692 ANDROID_STATISTICS_FACE_DETECT_MODE,
8693 ANDROID_STATISTICS_HISTOGRAM_MODE, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
8694 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
8695 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Samuel Ha68ba5172016-12-15 18:41:12 -08008696 ANDROID_BLACK_LEVEL_LOCK,
8697 /* DevCamDebug metadata request_keys_basic */
8698 DEVCAMDEBUG_META_ENABLE,
8699 /* DevCamDebug metadata end */
8700 };
Thierry Strudel3d639192016-09-09 11:52:26 -07008701
8702 size_t request_keys_cnt =
8703 sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
8704 Vector<int32_t> available_request_keys;
8705 available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
8706 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
8707 available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
8708 }
8709
8710 staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
8711 available_request_keys.array(), available_request_keys.size());
8712
8713 int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
8714 ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
8715 ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
8716 ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
8717 ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
8718 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
8719 ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
8720 ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
8721 ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
8722 ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
8723 ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
8724 ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
8725 ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
8726 ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
8727 ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
8728 ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
8729 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
8730 ANDROID_STATISTICS_FACE_DETECT_MODE, ANDROID_STATISTICS_HISTOGRAM_MODE,
8731 ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
8732 ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
8733 ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008734 ANDROID_STATISTICS_FACE_SCORES,
8735#ifndef USE_HAL_3_3
8736 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
8737#endif
Shuzhen Wange763e802016-03-31 10:24:29 -07008738 NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE,
Samuel Ha68ba5172016-12-15 18:41:12 -08008739 // DevCamDebug metadata result_keys_basic
8740 DEVCAMDEBUG_META_ENABLE,
8741 // DevCamDebug metadata result_keys AF
8742 DEVCAMDEBUG_AF_LENS_POSITION,
8743 DEVCAMDEBUG_AF_TOF_CONFIDENCE,
8744 DEVCAMDEBUG_AF_TOF_DISTANCE,
8745 DEVCAMDEBUG_AF_LUMA,
8746 DEVCAMDEBUG_AF_HAF_STATE,
8747 DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
8748 DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
8749 DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
8750 DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
8751 DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
8752 DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
8753 DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
8754 DEVCAMDEBUG_AF_MONITOR_REFOCUS,
8755 DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
8756 DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
8757 DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
8758 DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
8759 DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
8760 DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
8761 DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
8762 DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
8763 DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
8764 DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
8765 DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
8766 DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
8767 DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
8768 DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
8769 // DevCamDebug metadata result_keys AEC
8770 DEVCAMDEBUG_AEC_TARGET_LUMA,
8771 DEVCAMDEBUG_AEC_COMP_LUMA,
8772 DEVCAMDEBUG_AEC_AVG_LUMA,
8773 DEVCAMDEBUG_AEC_CUR_LUMA,
8774 DEVCAMDEBUG_AEC_LINECOUNT,
8775 DEVCAMDEBUG_AEC_REAL_GAIN,
8776 DEVCAMDEBUG_AEC_EXP_INDEX,
8777 DEVCAMDEBUG_AEC_LUX_IDX,
8778 // DevCamDebug metadata result_keys AWB
8779 DEVCAMDEBUG_AWB_R_GAIN,
8780 DEVCAMDEBUG_AWB_G_GAIN,
8781 DEVCAMDEBUG_AWB_B_GAIN,
8782 DEVCAMDEBUG_AWB_CCT,
8783 DEVCAMDEBUG_AWB_DECISION,
8784 /* DevCamDebug metadata end */
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008785 };
8786
Thierry Strudel3d639192016-09-09 11:52:26 -07008787 size_t result_keys_cnt =
8788 sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
8789
8790 Vector<int32_t> available_result_keys;
8791 available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
8792 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
8793 available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
8794 }
8795 if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
8796 available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
8797 available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
8798 }
8799 if (supportedFaceDetectMode == 1) {
8800 available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
8801 available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
8802 } else if ((supportedFaceDetectMode == 2) ||
8803 (supportedFaceDetectMode == 3)) {
8804 available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
8805 available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
8806 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008807#ifndef USE_HAL_3_3
8808 if (hasBlackRegions) {
8809 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
8810 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL);
8811 }
8812#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07008813 staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
8814 available_result_keys.array(), available_result_keys.size());
8815
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008816 int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
Thierry Strudel3d639192016-09-09 11:52:26 -07008817 ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
8818 ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
8819 ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
8820 ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
8821 ANDROID_SCALER_CROPPING_TYPE,
8822 ANDROID_SYNC_MAX_LATENCY,
8823 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
8824 ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
8825 ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
8826 ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
8827 ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
8828 ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
8829 ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
8830 ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
8831 ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
8832 ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
8833 ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
8834 ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
8835 ANDROID_LENS_FACING,
8836 ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
8837 ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
8838 ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
8839 ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
8840 ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
8841 ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
8842 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
8843 /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
8844 ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
8845 ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
8846 ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
8847 ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
8848 ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
8849 ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
8850 ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
8851 ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
8852 ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
8853 ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
8854 ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
8855 ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
8856 ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
8857 ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
8858 ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
8859 ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
8860 ANDROID_EDGE_AVAILABLE_EDGE_MODES,
8861 ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
8862 ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
8863 ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
8864 ANDROID_TONEMAP_MAX_CURVE_POINTS,
8865 ANDROID_CONTROL_AVAILABLE_MODES,
8866 ANDROID_CONTROL_AE_LOCK_AVAILABLE,
8867 ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
8868 ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
8869 ANDROID_SHADING_AVAILABLE_MODES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008870 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
8871#ifndef USE_HAL_3_3
8872 ANDROID_SENSOR_OPAQUE_RAW_SIZE,
8873 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
8874#endif
8875 };
8876
8877 Vector<int32_t> available_characteristics_keys;
8878 available_characteristics_keys.appendArray(characteristics_keys_basic,
8879 sizeof(characteristics_keys_basic)/sizeof(int32_t));
8880#ifndef USE_HAL_3_3
8881 if (hasBlackRegions) {
8882 available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
8883 }
8884#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07008885 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008886 available_characteristics_keys.array(),
8887 available_characteristics_keys.size());
Thierry Strudel3d639192016-09-09 11:52:26 -07008888
8889 /*available stall durations depend on the hw + sw and will be different for different devices */
8890 /*have to add for raw after implementation*/
8891 int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
8892 size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
8893
8894 Vector<int64_t> available_stall_durations;
8895 for (uint32_t j = 0; j < stall_formats_count; j++) {
8896 if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
8897 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
8898 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
8899 available_stall_durations.add(stall_formats[j]);
8900 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
8901 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
8902 available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
8903 }
8904 } else {
8905 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
8906 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
8907 available_stall_durations.add(stall_formats[j]);
8908 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
8909 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
8910 available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
8911 }
8912 }
8913 }
8914 staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
8915 available_stall_durations.array(),
8916 available_stall_durations.size());
8917
8918 //QCAMERA3_OPAQUE_RAW
8919 uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
8920 cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
8921 switch (gCamCapability[cameraId]->opaque_raw_fmt) {
8922 case LEGACY_RAW:
8923 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
8924 fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
8925 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
8926 fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
8927 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
8928 fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
8929 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
8930 break;
8931 case MIPI_RAW:
8932 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
8933 fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
8934 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
8935 fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
8936 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
8937 fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
8938 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
8939 break;
8940 default:
8941 LOGE("unknown opaque_raw_format %d",
8942 gCamCapability[cameraId]->opaque_raw_fmt);
8943 break;
8944 }
8945 staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
8946
8947 Vector<int32_t> strides;
8948 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
8949 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
8950 cam_stream_buf_plane_info_t buf_planes;
8951 strides.add(gCamCapability[cameraId]->raw_dim[i].width);
8952 strides.add(gCamCapability[cameraId]->raw_dim[i].height);
8953 mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
8954 &gCamCapability[cameraId]->padding_info, &buf_planes);
8955 strides.add(buf_planes.plane_info.mp[0].stride);
8956 }
8957 staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
8958 strides.size());
8959
Thierry Strudel04e026f2016-10-10 11:27:36 -07008960 //Video HDR default
8961 if ((gCamCapability[cameraId]->qcom_supported_feature_mask) &
8962 (CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR |
8963 CAM_QCOM_FEATURE_ZIGZAG_VIDEO_HDR | CAM_QCOM_FEATURE_SENSOR_HDR)) {
8964 int32_t vhdr_mode[] = {
8965 QCAMERA3_VIDEO_HDR_MODE_OFF,
8966 QCAMERA3_VIDEO_HDR_MODE_ON};
8967
8968 size_t vhdr_mode_count = sizeof(vhdr_mode) / sizeof(int32_t);
8969 staticInfo.update(QCAMERA3_AVAILABLE_VIDEO_HDR_MODES,
8970 vhdr_mode, vhdr_mode_count);
8971 }
8972
Thierry Strudel3d639192016-09-09 11:52:26 -07008973 staticInfo.update(QCAMERA3_DUALCAM_CALIB_META_DATA_BLOB,
8974 (const uint8_t*)&gCamCapability[cameraId]->related_cam_calibration,
8975 sizeof(gCamCapability[cameraId]->related_cam_calibration));
8976
8977 uint8_t isMonoOnly =
8978 (gCamCapability[cameraId]->color_arrangement == CAM_FILTER_ARRANGEMENT_Y);
8979 staticInfo.update(QCAMERA3_SENSOR_IS_MONO_ONLY,
8980 &isMonoOnly, 1);
8981
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008982#ifndef USE_HAL_3_3
8983 Vector<int32_t> opaque_size;
8984 for (size_t j = 0; j < scalar_formats_count; j++) {
8985 if (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE) {
8986 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
8987 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
8988 cam_stream_buf_plane_info_t buf_planes;
8989
8990 rc = mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
8991 &gCamCapability[cameraId]->padding_info, &buf_planes);
8992
8993 if (rc == 0) {
8994 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].width);
8995 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].height);
8996 opaque_size.add(buf_planes.plane_info.frame_len);
8997 }else {
8998 LOGE("raw frame calculation failed!");
8999 }
9000 }
9001 }
9002 }
9003
9004 if ((opaque_size.size() > 0) &&
9005 (opaque_size.size() % PER_CONFIGURATION_SIZE_3 == 0))
9006 staticInfo.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, opaque_size.array(), opaque_size.size());
9007 else
9008 LOGW("Warning: ANDROID_SENSOR_OPAQUE_RAW_SIZE is using rough estimation(2 bytes/pixel)");
9009#endif
9010
Thierry Strudel04e026f2016-10-10 11:27:36 -07009011 if (gCamCapability[cameraId]->supported_ir_mode_cnt > 0) {
9012 int32_t avail_ir_modes[CAM_IR_MODE_MAX];
9013 size = 0;
9014 count = CAM_IR_MODE_MAX;
9015 count = MIN(gCamCapability[cameraId]->supported_ir_mode_cnt, count);
9016 for (size_t i = 0; i < count; i++) {
9017 int val = lookupFwkName(IR_MODES_MAP, METADATA_MAP_SIZE(IR_MODES_MAP),
9018 gCamCapability[cameraId]->supported_ir_modes[i]);
9019 if (NAME_NOT_FOUND != val) {
9020 avail_ir_modes[size] = (int32_t)val;
9021 size++;
9022 }
9023 }
9024 staticInfo.update(QCAMERA3_IR_AVAILABLE_MODES,
9025 avail_ir_modes, size);
9026 }
9027
Thierry Strudel295a0ca2016-11-03 18:38:47 -07009028 if (gCamCapability[cameraId]->supported_instant_aec_modes_cnt > 0) {
9029 int32_t available_instant_aec_modes[CAM_AEC_CONVERGENCE_MAX];
9030 size = 0;
9031 count = CAM_AEC_CONVERGENCE_MAX;
9032 count = MIN(gCamCapability[cameraId]->supported_instant_aec_modes_cnt, count);
9033 for (size_t i = 0; i < count; i++) {
9034 int val = lookupFwkName(INSTANT_AEC_MODES_MAP, METADATA_MAP_SIZE(INSTANT_AEC_MODES_MAP),
9035 gCamCapability[cameraId]->supported_instant_aec_modes[i]);
9036 if (NAME_NOT_FOUND != val) {
9037 available_instant_aec_modes[size] = (int32_t)val;
9038 size++;
9039 }
9040 }
9041 staticInfo.update(QCAMERA3_INSTANT_AEC_AVAILABLE_MODES,
9042 available_instant_aec_modes, size);
9043 }
9044
Thierry Strudel3d639192016-09-09 11:52:26 -07009045 gStaticMetadata[cameraId] = staticInfo.release();
9046 return rc;
9047}
9048
9049/*===========================================================================
9050 * FUNCTION : makeTable
9051 *
9052 * DESCRIPTION: make a table of sizes
9053 *
9054 * PARAMETERS :
9055 *
9056 *
9057 *==========================================================================*/
9058void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
9059 size_t max_size, int32_t *sizeTable)
9060{
9061 size_t j = 0;
9062 if (size > max_size) {
9063 size = max_size;
9064 }
9065 for (size_t i = 0; i < size; i++) {
9066 sizeTable[j] = dimTable[i].width;
9067 sizeTable[j+1] = dimTable[i].height;
9068 j+=2;
9069 }
9070}
9071
9072/*===========================================================================
9073 * FUNCTION : makeFPSTable
9074 *
9075 * DESCRIPTION: make a table of fps ranges
9076 *
9077 * PARAMETERS :
9078 *
9079 *==========================================================================*/
9080void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
9081 size_t max_size, int32_t *fpsRangesTable)
9082{
9083 size_t j = 0;
9084 if (size > max_size) {
9085 size = max_size;
9086 }
9087 for (size_t i = 0; i < size; i++) {
9088 fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
9089 fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
9090 j+=2;
9091 }
9092}
9093
9094/*===========================================================================
9095 * FUNCTION : makeOverridesList
9096 *
9097 * DESCRIPTION: make a list of scene mode overrides
9098 *
9099 * PARAMETERS :
9100 *
9101 *
9102 *==========================================================================*/
void QCamera3HardwareInterface::makeOverridesList(
        cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
        uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
{
    /*daemon will give a list of overrides for all scene modes.
      However we should send the fwk only the overrides for the scene modes
      supported by the framework*/
    // Output layout: one triple per supported scene mode:
    //   overridesList[j]   = AE mode override
    //   overridesList[j+1] = AWB mode override
    //   overridesList[j+2] = AF mode override
    size_t j = 0;
    // Clamp to the caller's buffer capacity (in triples).
    if (size > max_size) {
        size = max_size;
    }
    size_t focus_count = CAM_FOCUS_MODE_MAX;
    focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
            focus_count);
    for (size_t i = 0; i < size; i++) {
        bool supt = false;
        // supported_indexes maps the i-th framework scene mode back to its
        // row in the daemon's full overridesTable.
        size_t index = supported_indexes[i];
        // AE override depends only on flash availability, not on the table.
        overridesList[j] = gCamCapability[camera_id]->flash_available ?
                ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
        int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
                METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
                overridesTable[index].awb_mode);
        // NOTE(review): if the AWB lookup fails, overridesList[j+1] is left
        // unmodified (possibly uninitialized in the caller's buffer) —
        // confirm callers pre-initialize the list.
        if (NAME_NOT_FOUND != val) {
            overridesList[j+1] = (uint8_t)val;
        }
        uint8_t focus_override = overridesTable[index].af_mode;
        // The daemon's AF override is only forwarded if this sensor actually
        // supports that focus mode; otherwise fall back to AF OFF (fixed focus).
        for (size_t k = 0; k < focus_count; k++) {
            if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
                supt = true;
                break;
            }
        }
        if (supt) {
            val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
                    focus_override);
            if (NAME_NOT_FOUND != val) {
                overridesList[j+2] = (uint8_t)val;
            }
        } else {
            overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
        }
        j+=3;
    }
}
9147
9148/*===========================================================================
9149 * FUNCTION : filterJpegSizes
9150 *
9151 * DESCRIPTION: Returns the supported jpeg sizes based on the max dimension that
9152 * could be downscaled to
9153 *
9154 * PARAMETERS :
9155 *
9156 * RETURN : length of jpegSizes array
9157 *==========================================================================*/
9158
9159size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
9160 size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
9161 uint8_t downscale_factor)
9162{
9163 if (0 == downscale_factor) {
9164 downscale_factor = 1;
9165 }
9166
9167 int32_t min_width = active_array_size.width / downscale_factor;
9168 int32_t min_height = active_array_size.height / downscale_factor;
9169 size_t jpegSizesCnt = 0;
9170 if (processedSizesCnt > maxCount) {
9171 processedSizesCnt = maxCount;
9172 }
9173 for (size_t i = 0; i < processedSizesCnt; i+=2) {
9174 if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
9175 jpegSizes[jpegSizesCnt] = processedSizes[i];
9176 jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
9177 jpegSizesCnt += 2;
9178 }
9179 }
9180 return jpegSizesCnt;
9181}
9182
9183/*===========================================================================
9184 * FUNCTION : computeNoiseModelEntryS
9185 *
9186 * DESCRIPTION: function to map a given sensitivity to the S noise
9187 * model parameters in the DNG noise model.
9188 *
9189 * PARAMETERS : sens : the sensor sensitivity
9190 *
 * RETURN     : S (sensor amplification) noise
9192 *
9193 *==========================================================================*/
9194double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
9195 double s = gCamCapability[mCameraId]->gradient_S * sens +
9196 gCamCapability[mCameraId]->offset_S;
9197 return ((s < 0.0) ? 0.0 : s);
9198}
9199
9200/*===========================================================================
9201 * FUNCTION : computeNoiseModelEntryO
9202 *
9203 * DESCRIPTION: function to map a given sensitivity to the O noise
9204 * model parameters in the DNG noise model.
9205 *
9206 * PARAMETERS : sens : the sensor sensitivity
9207 *
 * RETURN     : O (sensor readout) noise
9209 *
9210 *==========================================================================*/
9211double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
9212 int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
9213 double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
9214 1.0 : (1.0 * sens / max_analog_sens);
9215 double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
9216 gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
9217 return ((o < 0.0) ? 0.0 : o);
9218}
9219
9220/*===========================================================================
9221 * FUNCTION : getSensorSensitivity
9222 *
9223 * DESCRIPTION: convert iso_mode to an integer value
9224 *
9225 * PARAMETERS : iso_mode : the iso_mode supported by sensor
9226 *
 * RETURN     : sensitivity supported by sensor
9228 *
9229 *==========================================================================*/
9230int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
9231{
9232 int32_t sensitivity;
9233
9234 switch (iso_mode) {
9235 case CAM_ISO_MODE_100:
9236 sensitivity = 100;
9237 break;
9238 case CAM_ISO_MODE_200:
9239 sensitivity = 200;
9240 break;
9241 case CAM_ISO_MODE_400:
9242 sensitivity = 400;
9243 break;
9244 case CAM_ISO_MODE_800:
9245 sensitivity = 800;
9246 break;
9247 case CAM_ISO_MODE_1600:
9248 sensitivity = 1600;
9249 break;
9250 default:
9251 sensitivity = -1;
9252 break;
9253 }
9254 return sensitivity;
9255}
9256
9257/*===========================================================================
9258 * FUNCTION : getCamInfo
9259 *
9260 * DESCRIPTION: query camera capabilities
9261 *
9262 * PARAMETERS :
9263 * @cameraId : camera Id
9264 * @info : camera info struct to be filled in with camera capabilities
9265 *
9266 * RETURN : int type of status
9267 * NO_ERROR -- success
9268 * none-zero failure code
9269 *==========================================================================*/
int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
        struct camera_info *info)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_GET_CAM_INFO);
    int rc = 0;

    // gCamLock guards the lazily-initialized per-camera capability and
    // static-metadata caches; hold it for the whole query so a concurrent
    // caller cannot observe a half-initialized entry.
    pthread_mutex_lock(&gCamLock);
    if (NULL == gCamCapability[cameraId]) {
        rc = initCapabilities(cameraId);
        if (rc < 0) {
            // Must release the lock on every early-return path.
            pthread_mutex_unlock(&gCamLock);
            return rc;
        }
    }

    if (NULL == gStaticMetadata[cameraId]) {
        rc = initStaticMetadata(cameraId);
        if (rc < 0) {
            pthread_mutex_unlock(&gCamLock);
            return rc;
        }
    }

    // Both AUX positions report the same facing as their main sensor.
    switch(gCamCapability[cameraId]->position) {
    case CAM_POSITION_BACK:
    case CAM_POSITION_BACK_AUX:
        info->facing = CAMERA_FACING_BACK;
        break;

    case CAM_POSITION_FRONT:
    case CAM_POSITION_FRONT_AUX:
        info->facing = CAMERA_FACING_FRONT;
        break;

    default:
        // Unknown mount position: report the error but keep filling in the
        // remaining fields before returning rc = -1 below.
        LOGE("Unknown position type %d for camera id:%d",
                gCamCapability[cameraId]->position, cameraId);
        rc = -1;
        break;
    }


    info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
#ifndef USE_HAL_3_3
    info->device_version = CAMERA_DEVICE_API_VERSION_3_4;
#else
    info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
#endif
    info->static_camera_characteristics = gStaticMetadata[cameraId];

    //For now assume both cameras can operate independently.
    info->conflicting_devices = NULL;
    info->conflicting_devices_length = 0;

    //resource cost is 100 * MIN(1.0, m/M),
    //where m is throughput requirement with maximum stream configuration
    //and M is CPP maximum throughput.
    // Find the highest max_fps across all advertised fps ranges.
    float max_fps = 0.0;
    for (uint32_t i = 0;
            i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
        if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
            max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
    }
    // Worst-case pixel throughput: all processed streams at full active
    // array resolution at the highest fps, relative to CPP bandwidth.
    float ratio = 1.0 * MAX_PROCESSED_STREAMS *
            gCamCapability[cameraId]->active_array_size.width *
            gCamCapability[cameraId]->active_array_size.height * max_fps /
            gCamCapability[cameraId]->max_pixel_bandwidth;
    info->resource_cost = 100 * MIN(1.0, ratio);
    LOGI("camera %d resource cost is %d", cameraId,
            info->resource_cost);

    pthread_mutex_unlock(&gCamLock);
    return rc;
}
9344
9345/*===========================================================================
9346 * FUNCTION : translateCapabilityToMetadata
9347 *
9348 * DESCRIPTION: translate the capability into camera_metadata_t
9349 *
9350 * PARAMETERS : type of the request
9351 *
9352 *
9353 * RETURN : success: camera_metadata_t*
9354 * failure: NULL
9355 *
9356 *==========================================================================*/
9357camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
9358{
9359 if (mDefaultMetadata[type] != NULL) {
9360 return mDefaultMetadata[type];
9361 }
9362 //first time we are handling this request
9363 //fill up the metadata structure using the wrapper class
9364 CameraMetadata settings;
9365 //translate from cam_capability_t to camera_metadata_tag_t
9366 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
9367 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
9368 int32_t defaultRequestID = 0;
9369 settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
9370
9371 /* OIS disable */
9372 char ois_prop[PROPERTY_VALUE_MAX];
9373 memset(ois_prop, 0, sizeof(ois_prop));
9374 property_get("persist.camera.ois.disable", ois_prop, "0");
9375 uint8_t ois_disable = (uint8_t)atoi(ois_prop);
9376
9377 /* Force video to use OIS */
9378 char videoOisProp[PROPERTY_VALUE_MAX];
9379 memset(videoOisProp, 0, sizeof(videoOisProp));
9380 property_get("persist.camera.ois.video", videoOisProp, "1");
9381 uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
Shuzhen Wang19463d72016-03-08 11:09:52 -08009382
9383 // Hybrid AE enable/disable
9384 char hybrid_ae_prop[PROPERTY_VALUE_MAX];
9385 memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
9386 property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
9387 const uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);
9388
Thierry Strudel3d639192016-09-09 11:52:26 -07009389 uint8_t controlIntent = 0;
9390 uint8_t focusMode;
9391 uint8_t vsMode;
9392 uint8_t optStabMode;
9393 uint8_t cacMode;
9394 uint8_t edge_mode;
9395 uint8_t noise_red_mode;
9396 uint8_t tonemap_mode;
9397 bool highQualityModeEntryAvailable = FALSE;
9398 bool fastModeEntryAvailable = FALSE;
9399 vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
9400 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
9401 switch (type) {
9402 case CAMERA3_TEMPLATE_PREVIEW:
9403 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
9404 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
9405 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
9406 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
9407 edge_mode = ANDROID_EDGE_MODE_FAST;
9408 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
9409 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
9410 break;
9411 case CAMERA3_TEMPLATE_STILL_CAPTURE:
9412 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
9413 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
9414 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
9415 edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
9416 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
9417 tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
9418 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
9419 // Order of priority for default CAC is HIGH Quality -> FAST -> OFF
9420 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
9421 if (gCamCapability[mCameraId]->aberration_modes[i] ==
9422 CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
9423 highQualityModeEntryAvailable = TRUE;
9424 } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
9425 CAM_COLOR_CORRECTION_ABERRATION_FAST) {
9426 fastModeEntryAvailable = TRUE;
9427 }
9428 }
9429 if (highQualityModeEntryAvailable) {
9430 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
9431 } else if (fastModeEntryAvailable) {
9432 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
9433 }
9434 break;
9435 case CAMERA3_TEMPLATE_VIDEO_RECORD:
9436 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
9437 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
9438 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -07009439 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
9440 edge_mode = ANDROID_EDGE_MODE_FAST;
9441 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
9442 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
9443 if (forceVideoOis)
9444 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
9445 break;
9446 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
9447 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
9448 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
9449 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -07009450 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
9451 edge_mode = ANDROID_EDGE_MODE_FAST;
9452 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
9453 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
9454 if (forceVideoOis)
9455 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
9456 break;
9457 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
9458 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
9459 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
9460 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
9461 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
9462 edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
9463 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
9464 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
9465 break;
9466 case CAMERA3_TEMPLATE_MANUAL:
9467 edge_mode = ANDROID_EDGE_MODE_FAST;
9468 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
9469 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
9470 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
9471 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
9472 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
9473 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
9474 break;
9475 default:
9476 edge_mode = ANDROID_EDGE_MODE_FAST;
9477 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
9478 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
9479 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
9480 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
9481 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
9482 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
9483 break;
9484 }
Thierry Strudel04e026f2016-10-10 11:27:36 -07009485 // Set CAC to OFF if underlying device doesn't support
9486 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
9487 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
9488 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009489 settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
9490 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
9491 settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
9492 if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
9493 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
9494 }
9495 settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
9496
9497 if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
9498 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
9499 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
9500 else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
9501 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
9502 || ois_disable)
9503 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
9504 settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
9505
9506 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
9507 &gCamCapability[mCameraId]->exposure_compensation_default, 1);
9508
9509 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
9510 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
9511
9512 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
9513 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
9514
9515 static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
9516 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
9517
9518 static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
9519 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
9520
9521 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
9522 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
9523
9524 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
9525 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
9526
9527 static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
9528 settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
9529
9530 /*flash*/
9531 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
9532 settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
9533
9534 static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
9535 settings.update(ANDROID_FLASH_FIRING_POWER,
9536 &flashFiringLevel, 1);
9537
9538 /* lens */
9539 float default_aperture = gCamCapability[mCameraId]->apertures[0];
9540 settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
9541
9542 if (gCamCapability[mCameraId]->filter_densities_count) {
9543 float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
9544 settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
9545 gCamCapability[mCameraId]->filter_densities_count);
9546 }
9547
9548 float default_focal_length = gCamCapability[mCameraId]->focal_length;
9549 settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
9550
9551 if (focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
9552 float default_focus_distance = 0;
9553 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &default_focus_distance, 1);
9554 }
9555
9556 static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
9557 settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
9558
9559 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
9560 settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
9561
9562 static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
9563 settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
9564
9565 /* face detection (default to OFF) */
9566 static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
9567 settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
9568
9569 static const uint8_t histogramMode = ANDROID_STATISTICS_HISTOGRAM_MODE_OFF;
9570 settings.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &histogramMode, 1);
9571
9572 static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
9573 settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
9574
9575 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
9576 settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
9577
9578 static const uint8_t lensShadingMode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
9579 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &lensShadingMode, 1);
9580
9581 static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
9582 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
9583
9584 /* Exposure time(Update the Min Exposure Time)*/
9585 int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
9586 settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
9587
9588 /* frame duration */
9589 static const int64_t default_frame_duration = NSEC_PER_33MSEC;
9590 settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
9591
9592 /* sensitivity */
9593 static const int32_t default_sensitivity = 100;
9594 settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009595#ifndef USE_HAL_3_3
9596 static const int32_t default_isp_sensitivity =
9597 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
9598 settings.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &default_isp_sensitivity, 1);
9599#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009600
9601 /*edge mode*/
9602 settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
9603
9604 /*noise reduction mode*/
9605 settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
9606
9607 /*color correction mode*/
9608 static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
9609 settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
9610
9611 /*transform matrix mode*/
9612 settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
9613
9614 int32_t scaler_crop_region[4];
9615 scaler_crop_region[0] = 0;
9616 scaler_crop_region[1] = 0;
9617 scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
9618 scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
9619 settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
9620
9621 static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
9622 settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
9623
9624 /*focus distance*/
9625 float focus_distance = 0.0;
9626 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
9627
9628 /*target fps range: use maximum range for picture, and maximum fixed range for video*/
Thierry Strudele80ad7c2016-12-06 10:16:27 -08009629 /* Restrict template max_fps to 30 */
Thierry Strudel3d639192016-09-09 11:52:26 -07009630 float max_range = 0.0;
9631 float max_fixed_fps = 0.0;
9632 int32_t fps_range[2] = {0, 0};
9633 for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
9634 i++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08009635 if (gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps >
9636 TEMPLATE_MAX_PREVIEW_FPS) {
9637 continue;
9638 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009639 float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
9640 gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
9641 if (type == CAMERA3_TEMPLATE_PREVIEW ||
9642 type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
9643 type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
9644 if (range > max_range) {
9645 fps_range[0] =
9646 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
9647 fps_range[1] =
9648 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
9649 max_range = range;
9650 }
9651 } else {
9652 if (range < 0.01 && max_fixed_fps <
9653 gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
9654 fps_range[0] =
9655 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
9656 fps_range[1] =
9657 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
9658 max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
9659 }
9660 }
9661 }
9662 settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
9663
9664 /*precapture trigger*/
9665 uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
9666 settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
9667
9668 /*af trigger*/
9669 uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
9670 settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
9671
9672 /* ae & af regions */
9673 int32_t active_region[] = {
9674 gCamCapability[mCameraId]->active_array_size.left,
9675 gCamCapability[mCameraId]->active_array_size.top,
9676 gCamCapability[mCameraId]->active_array_size.left +
9677 gCamCapability[mCameraId]->active_array_size.width,
9678 gCamCapability[mCameraId]->active_array_size.top +
9679 gCamCapability[mCameraId]->active_array_size.height,
9680 0};
9681 settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
9682 sizeof(active_region) / sizeof(active_region[0]));
9683 settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
9684 sizeof(active_region) / sizeof(active_region[0]));
9685
9686 /* black level lock */
9687 uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
9688 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
9689
9690 /* lens shading map mode */
9691 uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
9692 if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
9693 shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
9694 }
9695 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
9696
9697 //special defaults for manual template
9698 if (type == CAMERA3_TEMPLATE_MANUAL) {
9699 static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
9700 settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
9701
9702 static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
9703 settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
9704
9705 static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
9706 settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
9707
9708 static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
9709 settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
9710
9711 static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
9712 settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
9713
9714 static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
9715 settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
9716 }
9717
9718
9719 /* TNR
9720 * We'll use this location to determine which modes TNR will be set.
9721 * We will enable TNR to be on if either of the Preview/Video stream requires TNR
9722 * This is not to be confused with linking on a per stream basis that decision
9723 * is still on per-session basis and will be handled as part of config stream
9724 */
9725 uint8_t tnr_enable = 0;
9726
9727 if (m_bTnrPreview || m_bTnrVideo) {
9728
9729 switch (type) {
9730 case CAMERA3_TEMPLATE_VIDEO_RECORD:
9731 tnr_enable = 1;
9732 break;
9733
9734 default:
9735 tnr_enable = 0;
9736 break;
9737 }
9738
9739 int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
9740 settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
9741 settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
9742
9743 LOGD("TNR:%d with process plate %d for template:%d",
9744 tnr_enable, tnr_process_type, type);
9745 }
9746
9747 //Update Link tags to default
9748 int32_t sync_type = CAM_TYPE_STANDALONE;
9749 settings.update(QCAMERA3_DUALCAM_LINK_ENABLE, &sync_type, 1);
9750
9751 int32_t is_main = 0; //this doesn't matter as app should overwrite
9752 settings.update(QCAMERA3_DUALCAM_LINK_IS_MAIN, &is_main, 1);
9753
9754 settings.update(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID, &is_main, 1);
9755
9756 /* CDS default */
9757 char prop[PROPERTY_VALUE_MAX];
9758 memset(prop, 0, sizeof(prop));
9759 property_get("persist.camera.CDS", prop, "Auto");
9760 cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
9761 cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
9762 if (CAM_CDS_MODE_MAX == cds_mode) {
9763 cds_mode = CAM_CDS_MODE_AUTO;
9764 }
9765
9766 /* Disabling CDS in templates which have TNR enabled*/
9767 if (tnr_enable)
9768 cds_mode = CAM_CDS_MODE_OFF;
9769
9770 int32_t mode = cds_mode;
9771 settings.update(QCAMERA3_CDS_MODE, &mode, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -07009772
9773 int32_t hdr_mode = (int32_t)QCAMERA3_VIDEO_HDR_MODE_OFF;
9774 settings.update(QCAMERA3_VIDEO_HDR_MODE, &hdr_mode, 1);
9775
9776 /* IR Mode Default Off */
9777 int32_t ir_mode = (int32_t)QCAMERA3_IR_MODE_OFF;
9778 settings.update(QCAMERA3_IR_MODE, &ir_mode, 1);
9779
Thierry Strudel269c81a2016-10-12 12:13:59 -07009780 /* Manual Convergence AEC Speed is disabled by default*/
9781 float default_aec_speed = 0;
9782 settings.update(QCAMERA3_AEC_CONVERGENCE_SPEED, &default_aec_speed, 1);
9783
9784 /* Manual Convergence AWB Speed is disabled by default*/
9785 float default_awb_speed = 0;
9786 settings.update(QCAMERA3_AWB_CONVERGENCE_SPEED, &default_awb_speed, 1);
9787
Thierry Strudel295a0ca2016-11-03 18:38:47 -07009788 // Set instant AEC to normal convergence by default
9789 int32_t instant_aec_mode = (int32_t)QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE;
9790 settings.update(QCAMERA3_INSTANT_AEC_MODE, &instant_aec_mode, 1);
9791
Shuzhen Wang19463d72016-03-08 11:09:52 -08009792 /* hybrid ae */
9793 settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);
9794
Thierry Strudel3d639192016-09-09 11:52:26 -07009795 mDefaultMetadata[type] = settings.release();
9796
9797 return mDefaultMetadata[type];
9798}
9799
/*===========================================================================
 * FUNCTION   : setFrameParameters
 *
 * DESCRIPTION: set parameters per frame as requested in the metadata from
 *              framework
 *
 * PARAMETERS :
 *   @request          : request that needs to be serviced
 *   @streamsArray     : Stream ID of all the requested streams
 *   @blob_request     : Whether this request is a blob request or not
 *   @snapshotStreamId : Stream ID of the snapshot stream, forwarded to
 *                       translateToHalMetadata
 *
 * RETURN     : success: NO_ERROR
 *              failure:
 *==========================================================================*/
int QCamera3HardwareInterface::setFrameParameters(
        camera3_capture_request_t *request,
        cam_stream_ID_t streamsArray,
        int blob_request,
        uint32_t snapshotStreamId)
{
    /*translate from camera_metadata_t type to parm_type_t*/
    int rc = 0;
    int32_t hal_version = CAM_HAL_V3;

    // Start every frame from an empty parameter batch; entries below are
    // re-added each time so stale values never leak across requests.
    clear_metadata_buffer(mParameters);
    if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
        LOGE("Failed to set hal version in the parameters");
        return BAD_VALUE;
    }

    /*we need to update the frame number in the parameters*/
    if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
            request->frame_number)) {
        LOGE("Failed to set the frame number in the parameters");
        return BAD_VALUE;
    }

    /* Update stream id of all the requested buffers */
    if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
        LOGE("Failed to set stream type mask in the parameters");
        return BAD_VALUE;
    }

    // One-shot: ask the backend to re-read the debug-level property, then
    // clear the flag so it is not re-sent every frame.
    if (mUpdateDebugLevel) {
        uint32_t dummyDebugLevel = 0;
        /* The value of dummyDebugLevel is irrelevant. On
         * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, read debug property */
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
                dummyDebugLevel)) {
            LOGE("Failed to set UPDATE_DEBUG_LEVEL");
            return BAD_VALUE;
        }
        mUpdateDebugLevel = false;
    }

    // Translate framework settings only when supplied; a NULL settings
    // pointer means "repeat the previously applied settings" per camera3 API.
    // For blob (JPEG) requests, snapshot the translated parameters so the
    // reprocess path can reuse them later.
    if(request->settings != NULL){
        rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
        if (blob_request)
            memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
    }

    return rc;
}
9863
9864/*===========================================================================
9865 * FUNCTION : setReprocParameters
9866 *
9867 * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
9868 * return it.
9869 *
9870 * PARAMETERS :
9871 * @request : request that needs to be serviced
9872 *
9873 * RETURN : success: NO_ERROR
9874 * failure:
9875 *==========================================================================*/
9876int32_t QCamera3HardwareInterface::setReprocParameters(
9877 camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
9878 uint32_t snapshotStreamId)
9879{
9880 /*translate from camera_metadata_t type to parm_type_t*/
9881 int rc = 0;
9882
9883 if (NULL == request->settings){
9884 LOGE("Reprocess settings cannot be NULL");
9885 return BAD_VALUE;
9886 }
9887
9888 if (NULL == reprocParam) {
9889 LOGE("Invalid reprocessing metadata buffer");
9890 return BAD_VALUE;
9891 }
9892 clear_metadata_buffer(reprocParam);
9893
9894 /*we need to update the frame number in the parameters*/
9895 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
9896 request->frame_number)) {
9897 LOGE("Failed to set the frame number in the parameters");
9898 return BAD_VALUE;
9899 }
9900
9901 rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
9902 if (rc < 0) {
9903 LOGE("Failed to translate reproc request");
9904 return rc;
9905 }
9906
9907 CameraMetadata frame_settings;
9908 frame_settings = request->settings;
9909 if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
9910 frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
9911 int32_t *crop_count =
9912 frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
9913 int32_t *crop_data =
9914 frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
9915 int32_t *roi_map =
9916 frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
9917 if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
9918 cam_crop_data_t crop_meta;
9919 memset(&crop_meta, 0, sizeof(cam_crop_data_t));
9920 crop_meta.num_of_streams = 1;
9921 crop_meta.crop_info[0].crop.left = crop_data[0];
9922 crop_meta.crop_info[0].crop.top = crop_data[1];
9923 crop_meta.crop_info[0].crop.width = crop_data[2];
9924 crop_meta.crop_info[0].crop.height = crop_data[3];
9925
9926 crop_meta.crop_info[0].roi_map.left =
9927 roi_map[0];
9928 crop_meta.crop_info[0].roi_map.top =
9929 roi_map[1];
9930 crop_meta.crop_info[0].roi_map.width =
9931 roi_map[2];
9932 crop_meta.crop_info[0].roi_map.height =
9933 roi_map[3];
9934
9935 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
9936 rc = BAD_VALUE;
9937 }
9938 LOGD("Found reprocess crop data for stream %p %dx%d, %dx%d",
9939 request->input_buffer->stream,
9940 crop_meta.crop_info[0].crop.left,
9941 crop_meta.crop_info[0].crop.top,
9942 crop_meta.crop_info[0].crop.width,
9943 crop_meta.crop_info[0].crop.height);
9944 LOGD("Found reprocess roi map data for stream %p %dx%d, %dx%d",
9945 request->input_buffer->stream,
9946 crop_meta.crop_info[0].roi_map.left,
9947 crop_meta.crop_info[0].roi_map.top,
9948 crop_meta.crop_info[0].roi_map.width,
9949 crop_meta.crop_info[0].roi_map.height);
9950 } else {
9951 LOGE("Invalid reprocess crop count %d!", *crop_count);
9952 }
9953 } else {
9954 LOGE("No crop data from matching output stream");
9955 }
9956
9957 /* These settings are not needed for regular requests so handle them specially for
9958 reprocess requests; information needed for EXIF tags */
9959 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
9960 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
9961 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
9962 if (NAME_NOT_FOUND != val) {
9963 uint32_t flashMode = (uint32_t)val;
9964 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
9965 rc = BAD_VALUE;
9966 }
9967 } else {
9968 LOGE("Could not map fwk flash mode %d to correct hal flash mode",
9969 frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
9970 }
9971 } else {
9972 LOGH("No flash mode in reprocess settings");
9973 }
9974
9975 if (frame_settings.exists(ANDROID_FLASH_STATE)) {
9976 int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
9977 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
9978 rc = BAD_VALUE;
9979 }
9980 } else {
9981 LOGH("No flash state in reprocess settings");
9982 }
9983
9984 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS)) {
9985 uint8_t *reprocessFlags =
9986 frame_settings.find(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS).data.u8;
9987 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_REPROCESS_FLAGS,
9988 *reprocessFlags)) {
9989 rc = BAD_VALUE;
9990 }
9991 }
9992
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07009993 // Add metadata which reprocess needs
9994 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB)) {
9995 cam_reprocess_info_t *repro_info =
9996 (cam_reprocess_info_t *)frame_settings.find
9997 (QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB).data.u8;
Thierry Strudel3d639192016-09-09 11:52:26 -07009998 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_SENSOR,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07009999 repro_info->sensor_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070010000 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CAMIF,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070010001 repro_info->camif_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070010002 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_ISP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070010003 repro_info->isp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070010004 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CPP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070010005 repro_info->cpp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070010006 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_FOCAL_LENGTH_RATIO,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070010007 repro_info->af_focal_length_ratio);
Thierry Strudel3d639192016-09-09 11:52:26 -070010008 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_FLIP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070010009 repro_info->pipeline_flip);
10010 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_ROI,
10011 repro_info->af_roi);
10012 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_IMG_DYN_FEAT,
10013 repro_info->dyn_mask);
Thierry Strudel3d639192016-09-09 11:52:26 -070010014 /* If there is ANDROID_JPEG_ORIENTATION in frame setting,
10015 CAM_INTF_PARM_ROTATION metadata then has been added in
10016 translateToHalMetadata. HAL need to keep this new rotation
10017 metadata. Otherwise, the old rotation info saved in the vendor tag
10018 would be used */
10019 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
10020 CAM_INTF_PARM_ROTATION, reprocParam) {
10021 LOGD("CAM_INTF_PARM_ROTATION metadata is added in translateToHalMetadata");
10022 } else {
10023 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_ROTATION,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070010024 repro_info->rotation_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070010025 }
Thierry Strudel3d639192016-09-09 11:52:26 -070010026 }
10027
10028 /* Add additional JPEG cropping information. App add QCAMERA3_JPEG_ENCODE_CROP_RECT
10029 to ask for cropping and use ROI for downscale/upscale during HW JPEG encoding.
10030 roi.width and roi.height would be the final JPEG size.
10031 For now, HAL only checks this for reprocess request */
10032 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ENABLE) &&
10033 frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_RECT)) {
10034 uint8_t *enable =
10035 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ENABLE).data.u8;
10036 if (*enable == TRUE) {
10037 int32_t *crop_data =
10038 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_RECT).data.i32;
10039 cam_stream_crop_info_t crop_meta;
10040 memset(&crop_meta, 0, sizeof(cam_stream_crop_info_t));
10041 crop_meta.stream_id = 0;
10042 crop_meta.crop.left = crop_data[0];
10043 crop_meta.crop.top = crop_data[1];
10044 crop_meta.crop.width = crop_data[2];
10045 crop_meta.crop.height = crop_data[3];
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010046 // The JPEG crop roi should match cpp output size
10047 IF_META_AVAILABLE(cam_stream_crop_info_t, cpp_crop,
10048 CAM_INTF_META_SNAP_CROP_INFO_CPP, reprocParam) {
10049 crop_meta.roi_map.left = 0;
10050 crop_meta.roi_map.top = 0;
10051 crop_meta.roi_map.width = cpp_crop->crop.width;
10052 crop_meta.roi_map.height = cpp_crop->crop.height;
Thierry Strudel3d639192016-09-09 11:52:26 -070010053 }
10054 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_ENCODE_CROP,
10055 crop_meta);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010056 LOGH("Add JPEG encode crop left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070010057 crop_meta.crop.left, crop_meta.crop.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010058 crop_meta.crop.width, crop_meta.crop.height, mCameraId);
10059 LOGH("Add JPEG encode crop ROI left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070010060 crop_meta.roi_map.left, crop_meta.roi_map.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010061 crop_meta.roi_map.width, crop_meta.roi_map.height, mCameraId);
10062
10063 // Add JPEG scale information
10064 cam_dimension_t scale_dim;
10065 memset(&scale_dim, 0, sizeof(cam_dimension_t));
10066 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ROI)) {
10067 int32_t *roi =
10068 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ROI).data.i32;
10069 scale_dim.width = roi[2];
10070 scale_dim.height = roi[3];
10071 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_SCALE_DIMENSION,
10072 scale_dim);
10073 LOGH("Add JPEG encode scale width %d, height %d, mCameraId %d",
10074 scale_dim.width, scale_dim.height, mCameraId);
10075 }
Thierry Strudel3d639192016-09-09 11:52:26 -070010076 }
10077 }
10078
10079 return rc;
10080}
10081
10082/*===========================================================================
10083 * FUNCTION : saveRequestSettings
10084 *
10085 * DESCRIPTION: Add any settings that might have changed to the request settings
10086 * and save the settings to be applied on the frame
10087 *
10088 * PARAMETERS :
10089 * @jpegMetadata : the extracted and/or modified jpeg metadata
10090 * @request : request with initial settings
10091 *
10092 * RETURN :
10093 * camera_metadata_t* : pointer to the saved request settings
10094 *==========================================================================*/
10095camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
10096 const CameraMetadata &jpegMetadata,
10097 camera3_capture_request_t *request)
10098{
10099 camera_metadata_t *resultMetadata;
10100 CameraMetadata camMetadata;
10101 camMetadata = request->settings;
10102
10103 if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
10104 int32_t thumbnail_size[2];
10105 thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
10106 thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
10107 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
10108 jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
10109 }
10110
10111 if (request->input_buffer != NULL) {
10112 uint8_t reprocessFlags = 1;
10113 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
10114 (uint8_t*)&reprocessFlags,
10115 sizeof(reprocessFlags));
10116 }
10117
10118 resultMetadata = camMetadata.release();
10119 return resultMetadata;
10120}
10121
10122/*===========================================================================
10123 * FUNCTION : setHalFpsRange
10124 *
10125 * DESCRIPTION: set FPS range parameter
10126 *
10127 *
10128 * PARAMETERS :
10129 * @settings : Metadata from framework
10130 * @hal_metadata: Metadata buffer
10131 *
10132 *
10133 * RETURN : success: NO_ERROR
10134 * failure: BAD_VALUE
10135 *==========================================================================*/
10136int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
10137 metadata_buffer_t *hal_metadata)
10138{
10139 int32_t rc = NO_ERROR;
10140 cam_fps_range_t fps_range;
10141 fps_range.min_fps = (float)
10142 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
10143 fps_range.max_fps = (float)
10144 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
10145 fps_range.video_min_fps = fps_range.min_fps;
10146 fps_range.video_max_fps = fps_range.max_fps;
10147
10148 LOGD("aeTargetFpsRange fps: [%f %f]",
10149 fps_range.min_fps, fps_range.max_fps);
10150 /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
10151 * follows:
10152 * ---------------------------------------------------------------|
10153 * Video stream is absent in configure_streams |
10154 * (Camcorder preview before the first video record |
10155 * ---------------------------------------------------------------|
10156 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
10157 * | | | vid_min/max_fps|
10158 * ---------------------------------------------------------------|
10159 * NO | [ 30, 240] | 240 | [240, 240] |
10160 * |-------------|-------------|----------------|
10161 * | [240, 240] | 240 | [240, 240] |
10162 * ---------------------------------------------------------------|
10163 * Video stream is present in configure_streams |
10164 * ---------------------------------------------------------------|
10165 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
10166 * | | | vid_min/max_fps|
10167 * ---------------------------------------------------------------|
10168 * NO | [ 30, 240] | 240 | [240, 240] |
10169 * (camcorder prev |-------------|-------------|----------------|
10170 * after video rec | [240, 240] | 240 | [240, 240] |
10171 * is stopped) | | | |
10172 * ---------------------------------------------------------------|
10173 * YES | [ 30, 240] | 240 | [240, 240] |
10174 * |-------------|-------------|----------------|
10175 * | [240, 240] | 240 | [240, 240] |
10176 * ---------------------------------------------------------------|
10177 * When Video stream is absent in configure_streams,
10178 * preview fps = sensor_fps / batchsize
10179 * Eg: for 240fps at batchSize 4, preview = 60fps
10180 * for 120fps at batchSize 4, preview = 30fps
10181 *
10182 * When video stream is present in configure_streams, preview fps is as per
10183 * the ratio of preview buffers to video buffers requested in process
10184 * capture request
10185 */
10186 mBatchSize = 0;
10187 if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
10188 fps_range.min_fps = fps_range.video_max_fps;
10189 fps_range.video_min_fps = fps_range.video_max_fps;
10190 int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
10191 fps_range.max_fps);
10192 if (NAME_NOT_FOUND != val) {
10193 cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
10194 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
10195 return BAD_VALUE;
10196 }
10197
10198 if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
10199 /* If batchmode is currently in progress and the fps changes,
10200 * set the flag to restart the sensor */
10201 if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
10202 (mHFRVideoFps != fps_range.max_fps)) {
10203 mNeedSensorRestart = true;
10204 }
10205 mHFRVideoFps = fps_range.max_fps;
10206 mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
10207 if (mBatchSize > MAX_HFR_BATCH_SIZE) {
10208 mBatchSize = MAX_HFR_BATCH_SIZE;
10209 }
10210 }
10211 LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);
10212
10213 }
10214 } else {
10215 /* HFR mode is session param in backend/ISP. This should be reset when
10216 * in non-HFR mode */
10217 cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
10218 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
10219 return BAD_VALUE;
10220 }
10221 }
10222 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
10223 return BAD_VALUE;
10224 }
10225 LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
10226 fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
10227 return rc;
10228}
10229
10230/*===========================================================================
10231 * FUNCTION : translateToHalMetadata
10232 *
10233 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
10234 *
10235 *
10236 * PARAMETERS :
10237 * @request : request sent from framework
10238 *
10239 *
10240 * RETURN : success: NO_ERROR
10241 * failure: BAD_VALUE
10242 *==========================================================================*/
10243int QCamera3HardwareInterface::translateToHalMetadata
10244 (const camera3_capture_request_t *request,
10245 metadata_buffer_t *hal_metadata,
10246 uint32_t snapshotStreamId)
10247{
10248 int rc = 0;
10249 CameraMetadata frame_settings;
10250 frame_settings = request->settings;
10251
10252 /* Do not change the order of the following list unless you know what you are
10253 * doing.
10254 * The order is laid out in such a way that parameters in the front of the table
10255 * may be used to override the parameters later in the table. Examples are:
10256 * 1. META_MODE should precede AEC/AWB/AF MODE
10257 * 2. AEC MODE should precede EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
10258 * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
10259 * 4. Any mode should precede its corresponding settings
10260 */
10261 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
10262 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
10263 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
10264 rc = BAD_VALUE;
10265 }
10266 rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
10267 if (rc != NO_ERROR) {
10268 LOGE("extractSceneMode failed");
10269 }
10270 }
10271
10272 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
10273 uint8_t fwk_aeMode =
10274 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
10275 uint8_t aeMode;
10276 int32_t redeye;
10277
10278 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
10279 aeMode = CAM_AE_MODE_OFF;
10280 } else {
10281 aeMode = CAM_AE_MODE_ON;
10282 }
10283 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
10284 redeye = 1;
10285 } else {
10286 redeye = 0;
10287 }
10288
10289 int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
10290 fwk_aeMode);
10291 if (NAME_NOT_FOUND != val) {
10292 int32_t flashMode = (int32_t)val;
10293 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
10294 }
10295
10296 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
10297 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
10298 rc = BAD_VALUE;
10299 }
10300 }
10301
10302 if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
10303 uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
10304 int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
10305 fwk_whiteLevel);
10306 if (NAME_NOT_FOUND != val) {
10307 uint8_t whiteLevel = (uint8_t)val;
10308 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
10309 rc = BAD_VALUE;
10310 }
10311 }
10312 }
10313
10314 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
10315 uint8_t fwk_cacMode =
10316 frame_settings.find(
10317 ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
10318 int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
10319 fwk_cacMode);
10320 if (NAME_NOT_FOUND != val) {
10321 cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
10322 bool entryAvailable = FALSE;
10323 // Check whether Frameworks set CAC mode is supported in device or not
10324 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
10325 if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
10326 entryAvailable = TRUE;
10327 break;
10328 }
10329 }
10330 LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
10331 // If entry not found then set the device supported mode instead of frameworks mode i.e,
10332 // Only HW ISP CAC + NO SW CAC : Advertise all 3 with High doing same as fast by ISP
10333 // NO HW ISP CAC + Only SW CAC : Advertise all 3 with Fast doing the same as OFF
10334 if (entryAvailable == FALSE) {
10335 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
10336 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
10337 } else {
10338 if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
10339 // High is not supported and so set the FAST as spec say's underlying
10340 // device implementation can be the same for both modes.
10341 cacMode = CAM_COLOR_CORRECTION_ABERRATION_FAST;
10342 } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
10343 // Fast is not supported and so we cannot set HIGH or FAST but choose OFF
10344 // in order to avoid the fps drop due to high quality
10345 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
10346 } else {
10347 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
10348 }
10349 }
10350 }
10351 LOGD("Final cacMode is %d", cacMode);
10352 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
10353 rc = BAD_VALUE;
10354 }
10355 } else {
10356 LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
10357 }
10358 }
10359
10360 if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
10361 uint8_t fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
10362 int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
10363 fwk_focusMode);
10364 if (NAME_NOT_FOUND != val) {
10365 uint8_t focusMode = (uint8_t)val;
10366 LOGD("set focus mode %d", focusMode);
10367 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
10368 rc = BAD_VALUE;
10369 }
10370 }
10371 }
10372
10373 if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) {
10374 float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
10375 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
10376 focalDistance)) {
10377 rc = BAD_VALUE;
10378 }
10379 }
10380
10381 if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
10382 uint8_t fwk_antibandingMode =
10383 frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
10384 int val = lookupHalName(ANTIBANDING_MODES_MAP,
10385 METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
10386 if (NAME_NOT_FOUND != val) {
10387 uint32_t hal_antibandingMode = (uint32_t)val;
10388 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
10389 hal_antibandingMode)) {
10390 rc = BAD_VALUE;
10391 }
10392 }
10393 }
10394
10395 if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
10396 int32_t expCompensation = frame_settings.find(
10397 ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
10398 if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
10399 expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
10400 if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
10401 expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
Zhijun He426c4d92016-12-16 14:27:50 -080010402 ALOGV("CAM_DEBUG: Setting compensation:%d", expCompensation);
Thierry Strudel3d639192016-09-09 11:52:26 -070010403 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
10404 expCompensation)) {
10405 rc = BAD_VALUE;
10406 }
10407 }
10408
10409 if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
10410 uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
10411 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
10412 rc = BAD_VALUE;
10413 }
10414 }
10415 if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
10416 rc = setHalFpsRange(frame_settings, hal_metadata);
10417 if (rc != NO_ERROR) {
10418 LOGE("setHalFpsRange failed");
10419 }
10420 }
10421
10422 if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
10423 uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
10424 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
10425 rc = BAD_VALUE;
10426 }
10427 }
10428
10429 if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
10430 uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
10431 int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
10432 fwk_effectMode);
10433 if (NAME_NOT_FOUND != val) {
10434 uint8_t effectMode = (uint8_t)val;
10435 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
10436 rc = BAD_VALUE;
10437 }
10438 }
10439 }
10440
10441 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
10442 uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
10443 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
10444 colorCorrectMode)) {
10445 rc = BAD_VALUE;
10446 }
10447 }
10448
10449 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
10450 cam_color_correct_gains_t colorCorrectGains;
10451 for (size_t i = 0; i < CC_GAIN_MAX; i++) {
10452 colorCorrectGains.gains[i] =
10453 frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
10454 }
10455 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
10456 colorCorrectGains)) {
10457 rc = BAD_VALUE;
10458 }
10459 }
10460
10461 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
10462 cam_color_correct_matrix_t colorCorrectTransform;
10463 cam_rational_type_t transform_elem;
10464 size_t num = 0;
10465 for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
10466 for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
10467 transform_elem.numerator =
10468 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
10469 transform_elem.denominator =
10470 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
10471 colorCorrectTransform.transform_matrix[i][j] = transform_elem;
10472 num++;
10473 }
10474 }
10475 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
10476 colorCorrectTransform)) {
10477 rc = BAD_VALUE;
10478 }
10479 }
10480
10481 cam_trigger_t aecTrigger;
10482 aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
10483 aecTrigger.trigger_id = -1;
10484 if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
10485 frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
10486 aecTrigger.trigger =
10487 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
10488 aecTrigger.trigger_id =
10489 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
10490 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
10491 aecTrigger)) {
10492 rc = BAD_VALUE;
10493 }
10494 LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
10495 aecTrigger.trigger, aecTrigger.trigger_id);
10496 }
10497
10498 /*af_trigger must come with a trigger id*/
10499 if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
10500 frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
10501 cam_trigger_t af_trigger;
10502 af_trigger.trigger =
10503 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
10504 af_trigger.trigger_id =
10505 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
10506 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
10507 rc = BAD_VALUE;
10508 }
10509 LOGD("AfTrigger: %d AfTriggerID: %d",
10510 af_trigger.trigger, af_trigger.trigger_id);
10511 }
10512
10513 if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
10514 int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
10515 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
10516 rc = BAD_VALUE;
10517 }
10518 }
10519 if (frame_settings.exists(ANDROID_EDGE_MODE)) {
10520 cam_edge_application_t edge_application;
10521 edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
10522 if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
10523 edge_application.sharpness = 0;
10524 } else {
10525 edge_application.sharpness = gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
10526 }
10527 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
10528 rc = BAD_VALUE;
10529 }
10530 }
10531
10532 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
10533 int32_t respectFlashMode = 1;
10534 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
10535 uint8_t fwk_aeMode =
10536 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
10537 if (fwk_aeMode > ANDROID_CONTROL_AE_MODE_ON) {
10538 respectFlashMode = 0;
10539 LOGH("AE Mode controls flash, ignore android.flash.mode");
10540 }
10541 }
10542 if (respectFlashMode) {
10543 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
10544 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
10545 LOGH("flash mode after mapping %d", val);
10546 // To check: CAM_INTF_META_FLASH_MODE usage
10547 if (NAME_NOT_FOUND != val) {
10548 uint8_t flashMode = (uint8_t)val;
10549 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
10550 rc = BAD_VALUE;
10551 }
10552 }
10553 }
10554 }
10555
10556 if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
10557 uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
10558 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
10559 rc = BAD_VALUE;
10560 }
10561 }
10562
10563 if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
10564 int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
10565 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
10566 flashFiringTime)) {
10567 rc = BAD_VALUE;
10568 }
10569 }
10570
10571 if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
10572 uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
10573 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
10574 hotPixelMode)) {
10575 rc = BAD_VALUE;
10576 }
10577 }
10578
10579 if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
10580 float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
10581 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
10582 lensAperture)) {
10583 rc = BAD_VALUE;
10584 }
10585 }
10586
10587 if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
10588 float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
10589 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
10590 filterDensity)) {
10591 rc = BAD_VALUE;
10592 }
10593 }
10594
10595 if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
10596 float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
10597 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
10598 focalLength)) {
10599 rc = BAD_VALUE;
10600 }
10601 }
10602
10603 if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
10604 uint8_t optStabMode =
10605 frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
10606 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
10607 optStabMode)) {
10608 rc = BAD_VALUE;
10609 }
10610 }
10611
10612 if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
10613 uint8_t videoStabMode =
10614 frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
10615 LOGD("videoStabMode from APP = %d", videoStabMode);
10616 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_VIDEO_STAB_MODE,
10617 videoStabMode)) {
10618 rc = BAD_VALUE;
10619 }
10620 }
10621
10622
10623 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
10624 uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
10625 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
10626 noiseRedMode)) {
10627 rc = BAD_VALUE;
10628 }
10629 }
10630
10631 if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
10632 float reprocessEffectiveExposureFactor =
10633 frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
10634 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
10635 reprocessEffectiveExposureFactor)) {
10636 rc = BAD_VALUE;
10637 }
10638 }
10639
10640 cam_crop_region_t scalerCropRegion;
10641 bool scalerCropSet = false;
10642 if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
10643 scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
10644 scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
10645 scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
10646 scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
10647
10648 // Map coordinate system from active array to sensor output.
10649 mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
10650 scalerCropRegion.width, scalerCropRegion.height);
10651
10652 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
10653 scalerCropRegion)) {
10654 rc = BAD_VALUE;
10655 }
10656 scalerCropSet = true;
10657 }
10658
10659 if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
10660 int64_t sensorExpTime =
10661 frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
10662 LOGD("setting sensorExpTime %lld", sensorExpTime);
10663 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
10664 sensorExpTime)) {
10665 rc = BAD_VALUE;
10666 }
10667 }
10668
10669 if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
10670 int64_t sensorFrameDuration =
10671 frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
10672 int64_t minFrameDuration = getMinFrameDuration(request);
10673 sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
10674 if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
10675 sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
10676 LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
10677 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
10678 sensorFrameDuration)) {
10679 rc = BAD_VALUE;
10680 }
10681 }
10682
10683 if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
10684 int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
10685 if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
10686 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
10687 if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
10688 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
10689 LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
10690 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
10691 sensorSensitivity)) {
10692 rc = BAD_VALUE;
10693 }
10694 }
10695
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010696#ifndef USE_HAL_3_3
10697 if (frame_settings.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
10698 int32_t ispSensitivity =
10699 frame_settings.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST).data.i32[0];
10700 if (ispSensitivity <
10701 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity) {
10702 ispSensitivity =
10703 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
10704 LOGD("clamp ispSensitivity to %d", ispSensitivity);
10705 }
10706 if (ispSensitivity >
10707 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity) {
10708 ispSensitivity =
10709 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity;
10710 LOGD("clamp ispSensitivity to %d", ispSensitivity);
10711 }
10712 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_ISP_SENSITIVITY,
10713 ispSensitivity)) {
10714 rc = BAD_VALUE;
10715 }
10716 }
10717#endif
10718
Thierry Strudel3d639192016-09-09 11:52:26 -070010719 if (frame_settings.exists(ANDROID_SHADING_MODE)) {
10720 uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
10721 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
10722 rc = BAD_VALUE;
10723 }
10724 }
10725
10726 if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
10727 uint8_t fwk_facedetectMode =
10728 frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
10729
10730 int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
10731 fwk_facedetectMode);
10732
10733 if (NAME_NOT_FOUND != val) {
10734 uint8_t facedetectMode = (uint8_t)val;
10735 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
10736 facedetectMode)) {
10737 rc = BAD_VALUE;
10738 }
10739 }
10740 }
10741
10742 if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) {
10743 uint8_t histogramMode =
10744 frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0];
10745 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
10746 histogramMode)) {
10747 rc = BAD_VALUE;
10748 }
10749 }
10750
10751 if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
10752 uint8_t sharpnessMapMode =
10753 frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
10754 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
10755 sharpnessMapMode)) {
10756 rc = BAD_VALUE;
10757 }
10758 }
10759
10760 if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
10761 uint8_t tonemapMode =
10762 frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
10763 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
10764 rc = BAD_VALUE;
10765 }
10766 }
10767 /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
10768 /*All tonemap channels will have the same number of points*/
10769 if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
10770 frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
10771 frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
10772 cam_rgb_tonemap_curves tonemapCurves;
10773 tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
10774 if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
10775 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
10776 tonemapCurves.tonemap_points_cnt,
10777 CAM_MAX_TONEMAP_CURVE_SIZE);
10778 tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
10779 }
10780
10781 /* ch0 = G*/
10782 size_t point = 0;
10783 cam_tonemap_curve_t tonemapCurveGreen;
10784 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
10785 for (size_t j = 0; j < 2; j++) {
10786 tonemapCurveGreen.tonemap_points[i][j] =
10787 frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
10788 point++;
10789 }
10790 }
10791 tonemapCurves.curves[0] = tonemapCurveGreen;
10792
10793 /* ch 1 = B */
10794 point = 0;
10795 cam_tonemap_curve_t tonemapCurveBlue;
10796 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
10797 for (size_t j = 0; j < 2; j++) {
10798 tonemapCurveBlue.tonemap_points[i][j] =
10799 frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
10800 point++;
10801 }
10802 }
10803 tonemapCurves.curves[1] = tonemapCurveBlue;
10804
10805 /* ch 2 = R */
10806 point = 0;
10807 cam_tonemap_curve_t tonemapCurveRed;
10808 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
10809 for (size_t j = 0; j < 2; j++) {
10810 tonemapCurveRed.tonemap_points[i][j] =
10811 frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
10812 point++;
10813 }
10814 }
10815 tonemapCurves.curves[2] = tonemapCurveRed;
10816
10817 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
10818 tonemapCurves)) {
10819 rc = BAD_VALUE;
10820 }
10821 }
10822
10823 if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
10824 uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
10825 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
10826 captureIntent)) {
10827 rc = BAD_VALUE;
10828 }
10829 }
10830
10831 if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
10832 uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
10833 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
10834 blackLevelLock)) {
10835 rc = BAD_VALUE;
10836 }
10837 }
10838
10839 if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
10840 uint8_t lensShadingMapMode =
10841 frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
10842 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
10843 lensShadingMapMode)) {
10844 rc = BAD_VALUE;
10845 }
10846 }
10847
10848 if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
10849 cam_area_t roi;
10850 bool reset = true;
10851 convertFromRegions(roi, request->settings, ANDROID_CONTROL_AE_REGIONS);
10852
10853 // Map coordinate system from active array to sensor output.
10854 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
10855 roi.rect.height);
10856
10857 if (scalerCropSet) {
10858 reset = resetIfNeededROI(&roi, &scalerCropRegion);
10859 }
10860 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
10861 rc = BAD_VALUE;
10862 }
10863 }
10864
10865 if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
10866 cam_area_t roi;
10867 bool reset = true;
10868 convertFromRegions(roi, request->settings, ANDROID_CONTROL_AF_REGIONS);
10869
10870 // Map coordinate system from active array to sensor output.
10871 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
10872 roi.rect.height);
10873
10874 if (scalerCropSet) {
10875 reset = resetIfNeededROI(&roi, &scalerCropRegion);
10876 }
10877 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
10878 rc = BAD_VALUE;
10879 }
10880 }
10881
10882 // CDS for non-HFR non-video mode
10883 if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
10884 !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
10885 int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
10886 if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
10887 LOGE("Invalid CDS mode %d!", *fwk_cds);
10888 } else {
10889 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
10890 CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
10891 rc = BAD_VALUE;
10892 }
10893 }
10894 }
10895
Thierry Strudel04e026f2016-10-10 11:27:36 -070010896 // Video HDR
10897 if (frame_settings.exists(QCAMERA3_VIDEO_HDR_MODE)) {
10898 cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
10899 frame_settings.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
10900 rc = setVideoHdrMode(mParameters, vhdr);
10901 if (rc != NO_ERROR) {
10902 LOGE("setVideoHDR is failed");
10903 }
10904 }
10905
10906 //IR
10907 if(frame_settings.exists(QCAMERA3_IR_MODE)) {
10908 cam_ir_mode_type_t fwk_ir = (cam_ir_mode_type_t)
10909 frame_settings.find(QCAMERA3_IR_MODE).data.i32[0];
10910 if ((CAM_IR_MODE_MAX <= fwk_ir) || (0 > fwk_ir)) {
10911 LOGE("Invalid IR mode %d!", fwk_ir);
10912 } else {
10913 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
10914 CAM_INTF_META_IR_MODE, fwk_ir)) {
10915 rc = BAD_VALUE;
10916 }
10917 }
10918 }
10919
Thierry Strudel269c81a2016-10-12 12:13:59 -070010920 if (frame_settings.exists(QCAMERA3_AEC_CONVERGENCE_SPEED)) {
10921 float aec_speed;
10922 aec_speed = frame_settings.find(QCAMERA3_AEC_CONVERGENCE_SPEED).data.f[0];
10923 LOGD("AEC Speed :%f", aec_speed);
10924 if ( aec_speed < 0 ) {
10925 LOGE("Invalid AEC mode %f!", aec_speed);
10926 } else {
10927 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_CONVERGENCE_SPEED,
10928 aec_speed)) {
10929 rc = BAD_VALUE;
10930 }
10931 }
10932 }
10933
10934 if (frame_settings.exists(QCAMERA3_AWB_CONVERGENCE_SPEED)) {
10935 float awb_speed;
10936 awb_speed = frame_settings.find(QCAMERA3_AWB_CONVERGENCE_SPEED).data.f[0];
10937 LOGD("AWB Speed :%f", awb_speed);
10938 if ( awb_speed < 0 ) {
10939 LOGE("Invalid AWB mode %f!", awb_speed);
10940 } else {
10941 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AWB_CONVERGENCE_SPEED,
10942 awb_speed)) {
10943 rc = BAD_VALUE;
10944 }
10945 }
10946 }
10947
Thierry Strudel3d639192016-09-09 11:52:26 -070010948 // TNR
10949 if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
10950 frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
10951 uint8_t b_TnrRequested = 0;
10952 cam_denoise_param_t tnr;
10953 tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
10954 tnr.process_plates =
10955 (cam_denoise_process_type_t)frame_settings.find(
10956 QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
10957 b_TnrRequested = tnr.denoise_enable;
10958 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
10959 rc = BAD_VALUE;
10960 }
10961 }
10962
Thierry Strudel295a0ca2016-11-03 18:38:47 -070010963 if (frame_settings.exists(QCAMERA3_EXPOSURE_METERING_MODE)) {
10964 int32_t* exposure_metering_mode =
10965 frame_settings.find(QCAMERA3_EXPOSURE_METERING_MODE).data.i32;
10966 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_ALGO_TYPE,
10967 *exposure_metering_mode)) {
10968 rc = BAD_VALUE;
10969 }
10970 }
10971
Thierry Strudel3d639192016-09-09 11:52:26 -070010972 if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
10973 int32_t fwk_testPatternMode =
10974 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
10975 int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
10976 METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
10977
10978 if (NAME_NOT_FOUND != testPatternMode) {
10979 cam_test_pattern_data_t testPatternData;
10980 memset(&testPatternData, 0, sizeof(testPatternData));
10981 testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
10982 if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
10983 frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
10984 int32_t *fwk_testPatternData =
10985 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
10986 testPatternData.r = fwk_testPatternData[0];
10987 testPatternData.b = fwk_testPatternData[3];
10988 switch (gCamCapability[mCameraId]->color_arrangement) {
10989 case CAM_FILTER_ARRANGEMENT_RGGB:
10990 case CAM_FILTER_ARRANGEMENT_GRBG:
10991 testPatternData.gr = fwk_testPatternData[1];
10992 testPatternData.gb = fwk_testPatternData[2];
10993 break;
10994 case CAM_FILTER_ARRANGEMENT_GBRG:
10995 case CAM_FILTER_ARRANGEMENT_BGGR:
10996 testPatternData.gr = fwk_testPatternData[2];
10997 testPatternData.gb = fwk_testPatternData[1];
10998 break;
10999 default:
11000 LOGE("color arrangement %d is not supported",
11001 gCamCapability[mCameraId]->color_arrangement);
11002 break;
11003 }
11004 }
11005 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
11006 testPatternData)) {
11007 rc = BAD_VALUE;
11008 }
11009 } else {
11010 LOGE("Invalid framework sensor test pattern mode %d",
11011 fwk_testPatternMode);
11012 }
11013 }
11014
11015 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
11016 size_t count = 0;
11017 camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
11018 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
11019 gps_coords.data.d, gps_coords.count, count);
11020 if (gps_coords.count != count) {
11021 rc = BAD_VALUE;
11022 }
11023 }
11024
11025 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
11026 char gps_methods[GPS_PROCESSING_METHOD_SIZE];
11027 size_t count = 0;
11028 const char *gps_methods_src = (const char *)
11029 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
11030 memset(gps_methods, '\0', sizeof(gps_methods));
11031 strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
11032 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
11033 gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
11034 if (GPS_PROCESSING_METHOD_SIZE != count) {
11035 rc = BAD_VALUE;
11036 }
11037 }
11038
11039 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
11040 int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
11041 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
11042 gps_timestamp)) {
11043 rc = BAD_VALUE;
11044 }
11045 }
11046
11047 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
11048 int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
11049 cam_rotation_info_t rotation_info;
11050 if (orientation == 0) {
11051 rotation_info.rotation = ROTATE_0;
11052 } else if (orientation == 90) {
11053 rotation_info.rotation = ROTATE_90;
11054 } else if (orientation == 180) {
11055 rotation_info.rotation = ROTATE_180;
11056 } else if (orientation == 270) {
11057 rotation_info.rotation = ROTATE_270;
11058 }
11059 rotation_info.streamId = snapshotStreamId;
11060 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
11061 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
11062 rc = BAD_VALUE;
11063 }
11064 }
11065
11066 if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
11067 uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
11068 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
11069 rc = BAD_VALUE;
11070 }
11071 }
11072
11073 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
11074 uint32_t thumb_quality = (uint32_t)
11075 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
11076 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
11077 thumb_quality)) {
11078 rc = BAD_VALUE;
11079 }
11080 }
11081
11082 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
11083 cam_dimension_t dim;
11084 dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
11085 dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
11086 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
11087 rc = BAD_VALUE;
11088 }
11089 }
11090
11091 // Internal metadata
11092 if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
11093 size_t count = 0;
11094 camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
11095 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
11096 privatedata.data.i32, privatedata.count, count);
11097 if (privatedata.count != count) {
11098 rc = BAD_VALUE;
11099 }
11100 }
11101
Thierry Strudel295a0ca2016-11-03 18:38:47 -070011102 // ISO/Exposure Priority
11103 if (frame_settings.exists(QCAMERA3_USE_ISO_EXP_PRIORITY) &&
11104 frame_settings.exists(QCAMERA3_SELECT_PRIORITY)) {
11105 cam_priority_mode_t mode =
11106 (cam_priority_mode_t)frame_settings.find(QCAMERA3_SELECT_PRIORITY).data.i32[0];
11107 if((CAM_ISO_PRIORITY == mode) || (CAM_EXP_PRIORITY == mode)) {
11108 cam_intf_parm_manual_3a_t use_iso_exp_pty;
11109 use_iso_exp_pty.previewOnly = FALSE;
11110 uint64_t* ptr = (uint64_t*)frame_settings.find(QCAMERA3_USE_ISO_EXP_PRIORITY).data.i64;
11111 use_iso_exp_pty.value = *ptr;
11112
11113 if(CAM_ISO_PRIORITY == mode) {
11114 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ISO,
11115 use_iso_exp_pty)) {
11116 rc = BAD_VALUE;
11117 }
11118 }
11119 else {
11120 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_TIME,
11121 use_iso_exp_pty)) {
11122 rc = BAD_VALUE;
11123 }
11124 }
11125 }
11126 }
11127
11128 // Saturation
11129 if (frame_settings.exists(QCAMERA3_USE_SATURATION)) {
11130 int32_t* use_saturation =
11131 frame_settings.find(QCAMERA3_USE_SATURATION).data.i32;
11132 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_SATURATION, *use_saturation)) {
11133 rc = BAD_VALUE;
11134 }
11135 }
11136
Thierry Strudel3d639192016-09-09 11:52:26 -070011137 // EV step
11138 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
11139 gCamCapability[mCameraId]->exp_compensation_step)) {
11140 rc = BAD_VALUE;
11141 }
11142
11143 // CDS info
11144 if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
11145 cam_cds_data_t *cdsData = (cam_cds_data_t *)
11146 frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
11147
11148 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11149 CAM_INTF_META_CDS_DATA, *cdsData)) {
11150 rc = BAD_VALUE;
11151 }
11152 }
11153
Shuzhen Wang19463d72016-03-08 11:09:52 -080011154 // Hybrid AE
11155 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
11156 uint8_t *hybrid_ae = (uint8_t *)
11157 frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;
11158
11159 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11160 CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
11161 rc = BAD_VALUE;
11162 }
11163 }
11164
Thierry Strudel3d639192016-09-09 11:52:26 -070011165 return rc;
11166}
11167
11168/*===========================================================================
11169 * FUNCTION : captureResultCb
11170 *
11171 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
11172 *
11173 * PARAMETERS :
11174 * @frame : frame information from mm-camera-interface
11175 * @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
11176 * @userdata: userdata
11177 *
11178 * RETURN : NONE
11179 *==========================================================================*/
11180void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
11181 camera3_stream_buffer_t *buffer,
11182 uint32_t frame_number, bool isInputBuffer, void *userdata)
11183{
11184 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
11185 if (hw == NULL) {
11186 LOGE("Invalid hw %p", hw);
11187 return;
11188 }
11189
11190 hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
11191 return;
11192}
11193
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011194/*===========================================================================
11195 * FUNCTION : setBufferErrorStatus
11196 *
11197 * DESCRIPTION: Callback handler for channels to report any buffer errors
11198 *
11199 * PARAMETERS :
11200 * @ch : Channel on which buffer error is reported from
11201 * @frame_number : frame number on which buffer error is reported on
11202 * @buffer_status : buffer error status
11203 * @userdata: userdata
11204 *
11205 * RETURN : NONE
11206 *==========================================================================*/
11207void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
11208 uint32_t frame_number, camera3_buffer_status_t err, void *userdata)
11209{
11210 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
11211 if (hw == NULL) {
11212 LOGE("Invalid hw %p", hw);
11213 return;
11214 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011215
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011216 hw->setBufferErrorStatus(ch, frame_number, err);
11217 return;
11218}
11219
11220void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
11221 uint32_t frameNumber, camera3_buffer_status_t err)
11222{
11223 LOGD("channel: %p, frame# %d, buf err: %d", ch, frameNumber, err);
11224 pthread_mutex_lock(&mMutex);
11225
11226 for (auto& req : mPendingBuffersMap.mPendingBuffersInRequest) {
11227 if (req.frame_number != frameNumber)
11228 continue;
11229 for (auto& k : req.mPendingBufferList) {
11230 if(k.stream->priv == ch) {
11231 k.bufStatus = CAMERA3_BUFFER_STATUS_ERROR;
11232 }
11233 }
11234 }
11235
11236 pthread_mutex_unlock(&mMutex);
11237 return;
11238}
Thierry Strudel3d639192016-09-09 11:52:26 -070011239/*===========================================================================
11240 * FUNCTION : initialize
11241 *
11242 * DESCRIPTION: Pass framework callback pointers to HAL
11243 *
11244 * PARAMETERS :
11245 *
11246 *
11247 * RETURN : Success : 0
11248 * Failure: -ENODEV
11249 *==========================================================================*/
11250
11251int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
11252 const camera3_callback_ops_t *callback_ops)
11253{
11254 LOGD("E");
11255 QCamera3HardwareInterface *hw =
11256 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
11257 if (!hw) {
11258 LOGE("NULL camera device");
11259 return -ENODEV;
11260 }
11261
11262 int rc = hw->initialize(callback_ops);
11263 LOGD("X");
11264 return rc;
11265}
11266
11267/*===========================================================================
11268 * FUNCTION : configure_streams
11269 *
11270 * DESCRIPTION:
11271 *
11272 * PARAMETERS :
11273 *
11274 *
11275 * RETURN : Success: 0
11276 * Failure: -EINVAL (if stream configuration is invalid)
11277 * -ENODEV (fatal error)
11278 *==========================================================================*/
11279
11280int QCamera3HardwareInterface::configure_streams(
11281 const struct camera3_device *device,
11282 camera3_stream_configuration_t *stream_list)
11283{
11284 LOGD("E");
11285 QCamera3HardwareInterface *hw =
11286 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
11287 if (!hw) {
11288 LOGE("NULL camera device");
11289 return -ENODEV;
11290 }
11291 int rc = hw->configureStreams(stream_list);
11292 LOGD("X");
11293 return rc;
11294}
11295
11296/*===========================================================================
11297 * FUNCTION : construct_default_request_settings
11298 *
11299 * DESCRIPTION: Configure a settings buffer to meet the required use case
11300 *
11301 * PARAMETERS :
11302 *
11303 *
11304 * RETURN : Success: Return valid metadata
11305 * Failure: Return NULL
11306 *==========================================================================*/
11307const camera_metadata_t* QCamera3HardwareInterface::
11308 construct_default_request_settings(const struct camera3_device *device,
11309 int type)
11310{
11311
11312 LOGD("E");
11313 camera_metadata_t* fwk_metadata = NULL;
11314 QCamera3HardwareInterface *hw =
11315 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
11316 if (!hw) {
11317 LOGE("NULL camera device");
11318 return NULL;
11319 }
11320
11321 fwk_metadata = hw->translateCapabilityToMetadata(type);
11322
11323 LOGD("X");
11324 return fwk_metadata;
11325}
11326
11327/*===========================================================================
11328 * FUNCTION : process_capture_request
11329 *
11330 * DESCRIPTION:
11331 *
11332 * PARAMETERS :
11333 *
11334 *
11335 * RETURN :
11336 *==========================================================================*/
11337int QCamera3HardwareInterface::process_capture_request(
11338 const struct camera3_device *device,
11339 camera3_capture_request_t *request)
11340{
11341 LOGD("E");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011342 CAMSCOPE_UPDATE_FLAGS(CAMSCOPE_SECTION_HAL, kpi_camscope_flags);
Thierry Strudel3d639192016-09-09 11:52:26 -070011343 QCamera3HardwareInterface *hw =
11344 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
11345 if (!hw) {
11346 LOGE("NULL camera device");
11347 return -EINVAL;
11348 }
11349
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011350 int rc = hw->orchestrateRequest(request);
Thierry Strudel3d639192016-09-09 11:52:26 -070011351 LOGD("X");
11352 return rc;
11353}
11354
11355/*===========================================================================
11356 * FUNCTION : dump
11357 *
11358 * DESCRIPTION:
11359 *
11360 * PARAMETERS :
11361 *
11362 *
11363 * RETURN :
11364 *==========================================================================*/
11365
11366void QCamera3HardwareInterface::dump(
11367 const struct camera3_device *device, int fd)
11368{
11369 /* Log level property is read when "adb shell dumpsys media.camera" is
11370 called so that the log level can be controlled without restarting
11371 the media server */
11372 getLogLevel();
11373
11374 LOGD("E");
11375 QCamera3HardwareInterface *hw =
11376 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
11377 if (!hw) {
11378 LOGE("NULL camera device");
11379 return;
11380 }
11381
11382 hw->dump(fd);
11383 LOGD("X");
11384 return;
11385}
11386
11387/*===========================================================================
11388 * FUNCTION : flush
11389 *
11390 * DESCRIPTION:
11391 *
11392 * PARAMETERS :
11393 *
11394 *
11395 * RETURN :
11396 *==========================================================================*/
11397
11398int QCamera3HardwareInterface::flush(
11399 const struct camera3_device *device)
11400{
11401 int rc;
11402 LOGD("E");
11403 QCamera3HardwareInterface *hw =
11404 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
11405 if (!hw) {
11406 LOGE("NULL camera device");
11407 return -EINVAL;
11408 }
11409
11410 pthread_mutex_lock(&hw->mMutex);
11411 // Validate current state
11412 switch (hw->mState) {
11413 case STARTED:
11414 /* valid state */
11415 break;
11416
11417 case ERROR:
11418 pthread_mutex_unlock(&hw->mMutex);
11419 hw->handleCameraDeviceError();
11420 return -ENODEV;
11421
11422 default:
11423 LOGI("Flush returned during state %d", hw->mState);
11424 pthread_mutex_unlock(&hw->mMutex);
11425 return 0;
11426 }
11427 pthread_mutex_unlock(&hw->mMutex);
11428
11429 rc = hw->flush(true /* restart channels */ );
11430 LOGD("X");
11431 return rc;
11432}
11433
11434/*===========================================================================
11435 * FUNCTION : close_camera_device
11436 *
11437 * DESCRIPTION:
11438 *
11439 * PARAMETERS :
11440 *
11441 *
11442 * RETURN :
11443 *==========================================================================*/
11444int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
11445{
11446 int ret = NO_ERROR;
11447 QCamera3HardwareInterface *hw =
11448 reinterpret_cast<QCamera3HardwareInterface *>(
11449 reinterpret_cast<camera3_device_t *>(device)->priv);
11450 if (!hw) {
11451 LOGE("NULL camera device");
11452 return BAD_VALUE;
11453 }
11454
11455 LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
11456 delete hw;
11457 LOGI("[KPI Perf]: X");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011458 CAMSCOPE_DESTROY(CAMSCOPE_SECTION_HAL);
Thierry Strudel3d639192016-09-09 11:52:26 -070011459 return ret;
11460}
11461
11462/*===========================================================================
11463 * FUNCTION : getWaveletDenoiseProcessPlate
11464 *
11465 * DESCRIPTION: query wavelet denoise process plate
11466 *
11467 * PARAMETERS : None
11468 *
11469 * RETURN : WNR prcocess plate value
11470 *==========================================================================*/
11471cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
11472{
11473 char prop[PROPERTY_VALUE_MAX];
11474 memset(prop, 0, sizeof(prop));
11475 property_get("persist.denoise.process.plates", prop, "0");
11476 int processPlate = atoi(prop);
11477 switch(processPlate) {
11478 case 0:
11479 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
11480 case 1:
11481 return CAM_WAVELET_DENOISE_CBCR_ONLY;
11482 case 2:
11483 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
11484 case 3:
11485 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
11486 default:
11487 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
11488 }
11489}
11490
11491
11492/*===========================================================================
11493 * FUNCTION : getTemporalDenoiseProcessPlate
11494 *
11495 * DESCRIPTION: query temporal denoise process plate
11496 *
11497 * PARAMETERS : None
11498 *
11499 * RETURN : TNR prcocess plate value
11500 *==========================================================================*/
11501cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
11502{
11503 char prop[PROPERTY_VALUE_MAX];
11504 memset(prop, 0, sizeof(prop));
11505 property_get("persist.tnr.process.plates", prop, "0");
11506 int processPlate = atoi(prop);
11507 switch(processPlate) {
11508 case 0:
11509 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
11510 case 1:
11511 return CAM_WAVELET_DENOISE_CBCR_ONLY;
11512 case 2:
11513 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
11514 case 3:
11515 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
11516 default:
11517 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
11518 }
11519}
11520
11521
11522/*===========================================================================
11523 * FUNCTION : extractSceneMode
11524 *
11525 * DESCRIPTION: Extract scene mode from frameworks set metadata
11526 *
11527 * PARAMETERS :
11528 * @frame_settings: CameraMetadata reference
11529 * @metaMode: ANDROID_CONTORL_MODE
11530 * @hal_metadata: hal metadata structure
11531 *
11532 * RETURN : None
11533 *==========================================================================*/
11534int32_t QCamera3HardwareInterface::extractSceneMode(
11535 const CameraMetadata &frame_settings, uint8_t metaMode,
11536 metadata_buffer_t *hal_metadata)
11537{
11538 int32_t rc = NO_ERROR;
11539
11540 if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
11541 camera_metadata_ro_entry entry =
11542 frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
11543 if (0 == entry.count)
11544 return rc;
11545
11546 uint8_t fwk_sceneMode = entry.data.u8[0];
11547
11548 int val = lookupHalName(SCENE_MODES_MAP,
11549 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
11550 fwk_sceneMode);
11551 if (NAME_NOT_FOUND != val) {
11552 uint8_t sceneMode = (uint8_t)val;
11553 LOGD("sceneMode: %d", sceneMode);
11554 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11555 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
11556 rc = BAD_VALUE;
11557 }
11558 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011559
11560 if (fwk_sceneMode == ANDROID_CONTROL_SCENE_MODE_HDR) {
11561 cam_hdr_param_t hdr_params;
11562 hdr_params.hdr_enable = 1;
11563 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
11564 hdr_params.hdr_need_1x = false;
11565 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11566 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
11567 rc = BAD_VALUE;
11568 }
11569 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011570 } else if ((ANDROID_CONTROL_MODE_OFF == metaMode) ||
11571 (ANDROID_CONTROL_MODE_AUTO == metaMode)) {
11572 uint8_t sceneMode = CAM_SCENE_MODE_OFF;
11573 LOGD("sceneMode: %d", sceneMode);
11574 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11575 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
11576 rc = BAD_VALUE;
11577 }
11578 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011579
11580 if (mForceHdrSnapshot) {
11581 cam_hdr_param_t hdr_params;
11582 hdr_params.hdr_enable = 1;
11583 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
11584 hdr_params.hdr_need_1x = false;
11585 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11586 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
11587 rc = BAD_VALUE;
11588 }
11589 }
11590
Thierry Strudel3d639192016-09-09 11:52:26 -070011591 return rc;
11592}
11593
11594/*===========================================================================
Thierry Strudel04e026f2016-10-10 11:27:36 -070011595 * FUNCTION : setVideoHdrMode
11596 *
11597 * DESCRIPTION: Set Video HDR mode from frameworks set metadata
11598 *
11599 * PARAMETERS :
11600 * @hal_metadata: hal metadata structure
11601 * @metaMode: QCAMERA3_VIDEO_HDR_MODE
11602 *
11603 * RETURN : None
11604 *==========================================================================*/
11605int32_t QCamera3HardwareInterface::setVideoHdrMode(
11606 metadata_buffer_t *hal_metadata, cam_video_hdr_mode_t vhdr)
11607{
11608 int32_t rc = NO_ERROR;
11609 if ((CAM_VIDEO_HDR_MODE_MAX <= (vhdr)) || (0 > (vhdr))) {
11610 LOGE("%s: Invalid Video HDR mode %d!", __func__, vhdr);
11611 rc = BAD_VALUE;
11612 } else {
11613 cam_sensor_hdr_type_t vhdr_type = CAM_SENSOR_HDR_MAX;
11614 if(vhdr == QCAMERA3_VIDEO_HDR_MODE_OFF) {
11615 LOGD("Setting HDR mode Off");
11616 vhdr_type = CAM_SENSOR_HDR_OFF;
11617 } else {
11618 char video_hdr_prop[PROPERTY_VALUE_MAX];
11619 memset(video_hdr_prop, 0, sizeof(video_hdr_prop));
11620 property_get("persist.camera.hdr.video", video_hdr_prop, "3");
11621 uint8_t use_hdr_video = (uint8_t)atoi(video_hdr_prop);
11622 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask &
11623 CAM_QCOM_FEATURE_SENSOR_HDR) &&
11624 (use_hdr_video == CAM_SENSOR_HDR_IN_SENSOR)) {
11625 LOGD("Setting HDR mode In Sensor");
11626 vhdr_type = CAM_SENSOR_HDR_IN_SENSOR;
11627 }
11628 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask &
11629 CAM_QCOM_FEATURE_ZIGZAG_VIDEO_HDR) &&
11630 (use_hdr_video == CAM_SENSOR_HDR_ZIGZAG)) {
11631 LOGD("Setting HDR mode Zigzag");
11632 vhdr_type = CAM_SENSOR_HDR_ZIGZAG;
11633 }
11634 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask &
11635 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) &&
11636 (use_hdr_video == CAM_SENSOR_HDR_STAGGERED)) {
11637 LOGD("Setting HDR mode Staggered");
11638 vhdr_type = CAM_SENSOR_HDR_STAGGERED;
11639 }
11640 if(vhdr_type == CAM_SENSOR_HDR_MAX) {
11641 LOGD("HDR mode not supported");
11642 rc = BAD_VALUE;
11643 }
11644 }
11645 if(rc == NO_ERROR) {
11646 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11647 CAM_INTF_PARM_SENSOR_HDR, vhdr_type)) {
11648 rc = BAD_VALUE;
11649 }
11650 }
11651 }
11652 return rc;
11653}
11654
11655/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070011656 * FUNCTION : needRotationReprocess
11657 *
11658 * DESCRIPTION: if rotation needs to be done by reprocess in pp
11659 *
11660 * PARAMETERS : none
11661 *
11662 * RETURN : true: needed
11663 * false: no need
11664 *==========================================================================*/
11665bool QCamera3HardwareInterface::needRotationReprocess()
11666{
11667 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
11668 // current rotation is not zero, and pp has the capability to process rotation
11669 LOGH("need do reprocess for rotation");
11670 return true;
11671 }
11672
11673 return false;
11674}
11675
11676/*===========================================================================
11677 * FUNCTION : needReprocess
11678 *
11679 * DESCRIPTION: if reprocess in needed
11680 *
11681 * PARAMETERS : none
11682 *
11683 * RETURN : true: needed
11684 * false: no need
11685 *==========================================================================*/
11686bool QCamera3HardwareInterface::needReprocess(cam_feature_mask_t postprocess_mask)
11687{
11688 if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
11689 // TODO: add for ZSL HDR later
11690 // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
11691 if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
11692 LOGH("need do reprocess for ZSL WNR or min PP reprocess");
11693 return true;
11694 } else {
11695 LOGH("already post processed frame");
11696 return false;
11697 }
11698 }
11699 return needRotationReprocess();
11700}
11701
11702/*===========================================================================
11703 * FUNCTION : needJpegExifRotation
11704 *
11705 * DESCRIPTION: if rotation from jpeg is needed
11706 *
11707 * PARAMETERS : none
11708 *
11709 * RETURN : true: needed
11710 * false: no need
11711 *==========================================================================*/
11712bool QCamera3HardwareInterface::needJpegExifRotation()
11713{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011714 /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
Thierry Strudel3d639192016-09-09 11:52:26 -070011715 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
11716 LOGD("Need use Jpeg EXIF Rotation");
11717 return true;
11718 }
11719 return false;
11720}
11721
11722/*===========================================================================
11723 * FUNCTION : addOfflineReprocChannel
11724 *
11725 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
11726 * coming from input channel
11727 *
11728 * PARAMETERS :
11729 * @config : reprocess configuration
11730 * @inputChHandle : pointer to the input (source) channel
11731 *
11732 *
11733 * RETURN : Ptr to the newly created channel obj. NULL if failed.
11734 *==========================================================================*/
11735QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
11736 const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
11737{
11738 int32_t rc = NO_ERROR;
11739 QCamera3ReprocessChannel *pChannel = NULL;
11740
11741 pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011742 mChannelHandle, mCameraHandle->ops, captureResultCb, setBufferErrorStatus,
11743 config.padding, CAM_QCOM_FEATURE_NONE, this, inputChHandle);
Thierry Strudel3d639192016-09-09 11:52:26 -070011744 if (NULL == pChannel) {
11745 LOGE("no mem for reprocess channel");
11746 return NULL;
11747 }
11748
11749 rc = pChannel->initialize(IS_TYPE_NONE);
11750 if (rc != NO_ERROR) {
11751 LOGE("init reprocess channel failed, ret = %d", rc);
11752 delete pChannel;
11753 return NULL;
11754 }
11755
11756 // pp feature config
11757 cam_pp_feature_config_t pp_config;
11758 memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
11759
11760 pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
11761 if (gCamCapability[mCameraId]->qcom_supported_feature_mask
11762 & CAM_QCOM_FEATURE_DSDN) {
11763 //Use CPP CDS incase h/w supports it.
11764 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
11765 pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
11766 }
11767 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
11768 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
11769 }
11770
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011771 if (config.hdr_param.hdr_enable) {
11772 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
11773 pp_config.hdr_param = config.hdr_param;
11774 }
11775
11776 if (mForceHdrSnapshot) {
11777 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
11778 pp_config.hdr_param.hdr_enable = 1;
11779 pp_config.hdr_param.hdr_need_1x = 0;
11780 pp_config.hdr_param.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
11781 }
11782
Thierry Strudel3d639192016-09-09 11:52:26 -070011783 rc = pChannel->addReprocStreamsFromSource(pp_config,
11784 config,
11785 IS_TYPE_NONE,
11786 mMetadataChannel);
11787
11788 if (rc != NO_ERROR) {
11789 delete pChannel;
11790 return NULL;
11791 }
11792 return pChannel;
11793}
11794
11795/*===========================================================================
11796 * FUNCTION : getMobicatMask
11797 *
11798 * DESCRIPTION: returns mobicat mask
11799 *
11800 * PARAMETERS : none
11801 *
11802 * RETURN : mobicat mask
11803 *
11804 *==========================================================================*/
11805uint8_t QCamera3HardwareInterface::getMobicatMask()
11806{
11807 return m_MobicatMask;
11808}
11809
11810/*===========================================================================
11811 * FUNCTION : setMobicat
11812 *
11813 * DESCRIPTION: set Mobicat on/off.
11814 *
11815 * PARAMETERS :
11816 * @params : none
11817 *
11818 * RETURN : int32_t type of status
11819 * NO_ERROR -- success
11820 * none-zero failure code
11821 *==========================================================================*/
11822int32_t QCamera3HardwareInterface::setMobicat()
11823{
11824 char value [PROPERTY_VALUE_MAX];
11825 property_get("persist.camera.mobicat", value, "0");
11826 int32_t ret = NO_ERROR;
11827 uint8_t enableMobi = (uint8_t)atoi(value);
11828
11829 if (enableMobi) {
11830 tune_cmd_t tune_cmd;
11831 tune_cmd.type = SET_RELOAD_CHROMATIX;
11832 tune_cmd.module = MODULE_ALL;
11833 tune_cmd.value = TRUE;
11834 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
11835 CAM_INTF_PARM_SET_VFE_COMMAND,
11836 tune_cmd);
11837
11838 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
11839 CAM_INTF_PARM_SET_PP_COMMAND,
11840 tune_cmd);
11841 }
11842 m_MobicatMask = enableMobi;
11843
11844 return ret;
11845}
11846
11847/*===========================================================================
11848* FUNCTION : getLogLevel
11849*
11850* DESCRIPTION: Reads the log level property into a variable
11851*
11852* PARAMETERS :
11853* None
11854*
11855* RETURN :
11856* None
11857*==========================================================================*/
11858void QCamera3HardwareInterface::getLogLevel()
11859{
11860 char prop[PROPERTY_VALUE_MAX];
11861 uint32_t globalLogLevel = 0;
11862
11863 property_get("persist.camera.hal.debug", prop, "0");
11864 int val = atoi(prop);
11865 if (0 <= val) {
11866 gCamHal3LogLevel = (uint32_t)val;
11867 }
11868
Thierry Strudel9ec39c62016-12-28 11:30:05 -080011869 property_get("persist.camera.kpi.debug", prop, "0");
Thierry Strudel3d639192016-09-09 11:52:26 -070011870 gKpiDebugLevel = atoi(prop);
11871
11872 property_get("persist.camera.global.debug", prop, "0");
11873 val = atoi(prop);
11874 if (0 <= val) {
11875 globalLogLevel = (uint32_t)val;
11876 }
11877
11878 /* Highest log level among hal.logs and global.logs is selected */
11879 if (gCamHal3LogLevel < globalLogLevel)
11880 gCamHal3LogLevel = globalLogLevel;
11881
11882 return;
11883}
11884
11885/*===========================================================================
11886 * FUNCTION : validateStreamRotations
11887 *
11888 * DESCRIPTION: Check if the rotations requested are supported
11889 *
11890 * PARAMETERS :
11891 * @stream_list : streams to be configured
11892 *
11893 * RETURN : NO_ERROR on success
11894 * -EINVAL on failure
11895 *
11896 *==========================================================================*/
11897int QCamera3HardwareInterface::validateStreamRotations(
11898 camera3_stream_configuration_t *streamList)
11899{
11900 int rc = NO_ERROR;
11901
11902 /*
11903 * Loop through all streams requested in configuration
11904 * Check if unsupported rotations have been requested on any of them
11905 */
11906 for (size_t j = 0; j < streamList->num_streams; j++){
11907 camera3_stream_t *newStream = streamList->streams[j];
11908
11909 bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
11910 bool isImplDef = (newStream->format ==
11911 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
11912 bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
11913 isImplDef);
11914
11915 if (isRotated && (!isImplDef || isZsl)) {
11916 LOGE("Error: Unsupported rotation of %d requested for stream"
11917 "type:%d and stream format:%d",
11918 newStream->rotation, newStream->stream_type,
11919 newStream->format);
11920 rc = -EINVAL;
11921 break;
11922 }
11923 }
11924
11925 return rc;
11926}
11927
11928/*===========================================================================
11929* FUNCTION : getFlashInfo
11930*
11931* DESCRIPTION: Retrieve information about whether the device has a flash.
11932*
11933* PARAMETERS :
11934* @cameraId : Camera id to query
11935* @hasFlash : Boolean indicating whether there is a flash device
11936* associated with given camera
11937* @flashNode : If a flash device exists, this will be its device node.
11938*
11939* RETURN :
11940* None
11941*==========================================================================*/
11942void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
11943 bool& hasFlash,
11944 char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
11945{
11946 cam_capability_t* camCapability = gCamCapability[cameraId];
11947 if (NULL == camCapability) {
11948 hasFlash = false;
11949 flashNode[0] = '\0';
11950 } else {
11951 hasFlash = camCapability->flash_available;
11952 strlcpy(flashNode,
11953 (char*)camCapability->flash_dev_name,
11954 QCAMERA_MAX_FILEPATH_LENGTH);
11955 }
11956}
11957
11958/*===========================================================================
11959* FUNCTION : getEepromVersionInfo
11960*
11961* DESCRIPTION: Retrieve version info of the sensor EEPROM data
11962*
11963* PARAMETERS : None
11964*
11965* RETURN : string describing EEPROM version
11966* "\0" if no such info available
11967*==========================================================================*/
11968const char *QCamera3HardwareInterface::getEepromVersionInfo()
11969{
11970 return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
11971}
11972
11973/*===========================================================================
11974* FUNCTION : getLdafCalib
11975*
11976* DESCRIPTION: Retrieve Laser AF calibration data
11977*
11978* PARAMETERS : None
11979*
11980* RETURN : Two uint32_t describing laser AF calibration data
11981* NULL if none is available.
11982*==========================================================================*/
11983const uint32_t *QCamera3HardwareInterface::getLdafCalib()
11984{
11985 if (mLdafCalibExist) {
11986 return &mLdafCalib[0];
11987 } else {
11988 return NULL;
11989 }
11990}
11991
11992/*===========================================================================
11993 * FUNCTION : dynamicUpdateMetaStreamInfo
11994 *
11995 * DESCRIPTION: This function:
11996 * (1) stops all the channels
11997 * (2) returns error on pending requests and buffers
11998 * (3) sends metastream_info in setparams
11999 * (4) starts all channels
12000 * This is useful when sensor has to be restarted to apply any
12001 * settings such as frame rate from a different sensor mode
12002 *
12003 * PARAMETERS : None
12004 *
12005 * RETURN : NO_ERROR on success
12006 * Error codes on failure
12007 *
12008 *==========================================================================*/
12009int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
12010{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012011 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_DYN_UPDATE_META_STRM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070012012 int rc = NO_ERROR;
12013
12014 LOGD("E");
12015
12016 rc = stopAllChannels();
12017 if (rc < 0) {
12018 LOGE("stopAllChannels failed");
12019 return rc;
12020 }
12021
12022 rc = notifyErrorForPendingRequests();
12023 if (rc < 0) {
12024 LOGE("notifyErrorForPendingRequests failed");
12025 return rc;
12026 }
12027
12028 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
12029 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x"
12030 "Format:%d",
12031 mStreamConfigInfo.type[i],
12032 mStreamConfigInfo.stream_sizes[i].width,
12033 mStreamConfigInfo.stream_sizes[i].height,
12034 mStreamConfigInfo.postprocess_mask[i],
12035 mStreamConfigInfo.format[i]);
12036 }
12037
12038 /* Send meta stream info once again so that ISP can start */
12039 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
12040 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
12041 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
12042 mParameters);
12043 if (rc < 0) {
12044 LOGE("set Metastreaminfo failed. Sensor mode does not change");
12045 }
12046
12047 rc = startAllChannels();
12048 if (rc < 0) {
12049 LOGE("startAllChannels failed");
12050 return rc;
12051 }
12052
12053 LOGD("X");
12054 return rc;
12055}
12056
12057/*===========================================================================
12058 * FUNCTION : stopAllChannels
12059 *
12060 * DESCRIPTION: This function stops (equivalent to stream-off) all channels
12061 *
12062 * PARAMETERS : None
12063 *
12064 * RETURN : NO_ERROR on success
12065 * Error codes on failure
12066 *
12067 *==========================================================================*/
12068int32_t QCamera3HardwareInterface::stopAllChannels()
12069{
12070 int32_t rc = NO_ERROR;
12071
12072 LOGD("Stopping all channels");
12073 // Stop the Streams/Channels
12074 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
12075 it != mStreamInfo.end(); it++) {
12076 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
12077 if (channel) {
12078 channel->stop();
12079 }
12080 (*it)->status = INVALID;
12081 }
12082
12083 if (mSupportChannel) {
12084 mSupportChannel->stop();
12085 }
12086 if (mAnalysisChannel) {
12087 mAnalysisChannel->stop();
12088 }
12089 if (mRawDumpChannel) {
12090 mRawDumpChannel->stop();
12091 }
12092 if (mMetadataChannel) {
12093 /* If content of mStreamInfo is not 0, there is metadata stream */
12094 mMetadataChannel->stop();
12095 }
12096
12097 LOGD("All channels stopped");
12098 return rc;
12099}
12100
12101/*===========================================================================
12102 * FUNCTION : startAllChannels
12103 *
12104 * DESCRIPTION: This function starts (equivalent to stream-on) all channels
12105 *
12106 * PARAMETERS : None
12107 *
12108 * RETURN : NO_ERROR on success
12109 * Error codes on failure
12110 *
12111 *==========================================================================*/
12112int32_t QCamera3HardwareInterface::startAllChannels()
12113{
12114 int32_t rc = NO_ERROR;
12115
12116 LOGD("Start all channels ");
12117 // Start the Streams/Channels
12118 if (mMetadataChannel) {
12119 /* If content of mStreamInfo is not 0, there is metadata stream */
12120 rc = mMetadataChannel->start();
12121 if (rc < 0) {
12122 LOGE("META channel start failed");
12123 return rc;
12124 }
12125 }
12126 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
12127 it != mStreamInfo.end(); it++) {
12128 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
12129 if (channel) {
12130 rc = channel->start();
12131 if (rc < 0) {
12132 LOGE("channel start failed");
12133 return rc;
12134 }
12135 }
12136 }
12137 if (mAnalysisChannel) {
12138 mAnalysisChannel->start();
12139 }
12140 if (mSupportChannel) {
12141 rc = mSupportChannel->start();
12142 if (rc < 0) {
12143 LOGE("Support channel start failed");
12144 return rc;
12145 }
12146 }
12147 if (mRawDumpChannel) {
12148 rc = mRawDumpChannel->start();
12149 if (rc < 0) {
12150 LOGE("RAW dump channel start failed");
12151 return rc;
12152 }
12153 }
12154
12155 LOGD("All channels started");
12156 return rc;
12157}
12158
12159/*===========================================================================
12160 * FUNCTION : notifyErrorForPendingRequests
12161 *
12162 * DESCRIPTION: This function sends error for all the pending requests/buffers
12163 *
12164 * PARAMETERS : None
12165 *
12166 * RETURN : Error codes
12167 * NO_ERROR on success
12168 *
12169 *==========================================================================*/
12170int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
12171{
12172 int32_t rc = NO_ERROR;
12173 unsigned int frameNum = 0;
12174 camera3_capture_result_t result;
12175 camera3_stream_buffer_t *pStream_Buf = NULL;
12176
12177 memset(&result, 0, sizeof(camera3_capture_result_t));
12178
12179 if (mPendingRequestsList.size() > 0) {
12180 pendingRequestIterator i = mPendingRequestsList.begin();
12181 frameNum = i->frame_number;
12182 } else {
12183 /* There might still be pending buffers even though there are
12184 no pending requests. Setting the frameNum to MAX so that
12185 all the buffers with smaller frame numbers are returned */
12186 frameNum = UINT_MAX;
12187 }
12188
12189 LOGH("Oldest frame num on mPendingRequestsList = %u",
12190 frameNum);
12191
12192 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
12193 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); ) {
12194
12195 if (req->frame_number < frameNum) {
12196 // Send Error notify to frameworks for each buffer for which
12197 // metadata buffer is already sent
12198 LOGH("Sending ERROR BUFFER for frame %d for %d buffer(s)",
12199 req->frame_number, req->mPendingBufferList.size());
12200
12201 pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
12202 if (NULL == pStream_Buf) {
12203 LOGE("No memory for pending buffers array");
12204 return NO_MEMORY;
12205 }
12206 memset(pStream_Buf, 0,
12207 sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());
12208 result.result = NULL;
12209 result.frame_number = req->frame_number;
12210 result.num_output_buffers = req->mPendingBufferList.size();
12211 result.output_buffers = pStream_Buf;
12212
12213 size_t index = 0;
12214 for (auto info = req->mPendingBufferList.begin();
12215 info != req->mPendingBufferList.end(); ) {
12216
12217 camera3_notify_msg_t notify_msg;
12218 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
12219 notify_msg.type = CAMERA3_MSG_ERROR;
12220 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
12221 notify_msg.message.error.error_stream = info->stream;
12222 notify_msg.message.error.frame_number = req->frame_number;
12223 pStream_Buf[index].acquire_fence = -1;
12224 pStream_Buf[index].release_fence = -1;
12225 pStream_Buf[index].buffer = info->buffer;
12226 pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
12227 pStream_Buf[index].stream = info->stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012228 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070012229 index++;
12230 // Remove buffer from list
12231 info = req->mPendingBufferList.erase(info);
12232 }
12233
12234 // Remove this request from Map
12235 LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
12236 req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
12237 req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);
12238
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012239 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -070012240
12241 delete [] pStream_Buf;
12242 } else {
12243
12244 // Go through the pending requests info and send error request to framework
12245 pendingRequestIterator i = mPendingRequestsList.begin(); //make sure i is at the beginning
12246
12247 LOGH("Sending ERROR REQUEST for frame %d", req->frame_number);
12248
12249 // Send error notify to frameworks
12250 camera3_notify_msg_t notify_msg;
12251 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
12252 notify_msg.type = CAMERA3_MSG_ERROR;
12253 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
12254 notify_msg.message.error.error_stream = NULL;
12255 notify_msg.message.error.frame_number = req->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012256 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070012257
12258 pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
12259 if (NULL == pStream_Buf) {
12260 LOGE("No memory for pending buffers array");
12261 return NO_MEMORY;
12262 }
12263 memset(pStream_Buf, 0, sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());
12264
12265 result.result = NULL;
12266 result.frame_number = req->frame_number;
12267 result.input_buffer = i->input_buffer;
12268 result.num_output_buffers = req->mPendingBufferList.size();
12269 result.output_buffers = pStream_Buf;
12270
12271 size_t index = 0;
12272 for (auto info = req->mPendingBufferList.begin();
12273 info != req->mPendingBufferList.end(); ) {
12274 pStream_Buf[index].acquire_fence = -1;
12275 pStream_Buf[index].release_fence = -1;
12276 pStream_Buf[index].buffer = info->buffer;
12277 pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
12278 pStream_Buf[index].stream = info->stream;
12279 index++;
12280 // Remove buffer from list
12281 info = req->mPendingBufferList.erase(info);
12282 }
12283
12284 // Remove this request from Map
12285 LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
12286 req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
12287 req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);
12288
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012289 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -070012290 delete [] pStream_Buf;
12291 i = erasePendingRequest(i);
12292 }
12293 }
12294
12295 /* Reset pending frame Drop list and requests list */
12296 mPendingFrameDropList.clear();
12297
12298 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
12299 req.mPendingBufferList.clear();
12300 }
12301 mPendingBuffersMap.mPendingBuffersInRequest.clear();
12302 mPendingReprocessResultList.clear();
12303 LOGH("Cleared all the pending buffers ");
12304
12305 return rc;
12306}
12307
12308bool QCamera3HardwareInterface::isOnEncoder(
12309 const cam_dimension_t max_viewfinder_size,
12310 uint32_t width, uint32_t height)
12311{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012312 return ((width > (uint32_t)max_viewfinder_size.width) ||
12313 (height > (uint32_t)max_viewfinder_size.height) ||
12314 (width > (uint32_t)VIDEO_4K_WIDTH) ||
12315 (height > (uint32_t)VIDEO_4K_HEIGHT));
Thierry Strudel3d639192016-09-09 11:52:26 -070012316}
12317
12318/*===========================================================================
12319 * FUNCTION : setBundleInfo
12320 *
12321 * DESCRIPTION: Set bundle info for all streams that are bundle.
12322 *
12323 * PARAMETERS : None
12324 *
12325 * RETURN : NO_ERROR on success
12326 * Error codes on failure
12327 *==========================================================================*/
12328int32_t QCamera3HardwareInterface::setBundleInfo()
12329{
12330 int32_t rc = NO_ERROR;
12331
12332 if (mChannelHandle) {
12333 cam_bundle_config_t bundleInfo;
12334 memset(&bundleInfo, 0, sizeof(bundleInfo));
12335 rc = mCameraHandle->ops->get_bundle_info(
12336 mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
12337 if (rc != NO_ERROR) {
12338 LOGE("get_bundle_info failed");
12339 return rc;
12340 }
12341 if (mAnalysisChannel) {
12342 mAnalysisChannel->setBundleInfo(bundleInfo);
12343 }
12344 if (mSupportChannel) {
12345 mSupportChannel->setBundleInfo(bundleInfo);
12346 }
12347 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
12348 it != mStreamInfo.end(); it++) {
12349 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
12350 channel->setBundleInfo(bundleInfo);
12351 }
12352 if (mRawDumpChannel) {
12353 mRawDumpChannel->setBundleInfo(bundleInfo);
12354 }
12355 }
12356
12357 return rc;
12358}
12359
12360/*===========================================================================
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012361 * FUNCTION : setInstantAEC
12362 *
12363 * DESCRIPTION: Set Instant AEC related params.
12364 *
12365 * PARAMETERS :
12366 * @meta: CameraMetadata reference
12367 *
12368 * RETURN : NO_ERROR on success
12369 * Error codes on failure
12370 *==========================================================================*/
12371int32_t QCamera3HardwareInterface::setInstantAEC(const CameraMetadata &meta)
12372{
12373 int32_t rc = NO_ERROR;
12374 uint8_t val = 0;
12375 char prop[PROPERTY_VALUE_MAX];
12376
12377 // First try to configure instant AEC from framework metadata
12378 if (meta.exists(QCAMERA3_INSTANT_AEC_MODE)) {
12379 val = (uint8_t)meta.find(QCAMERA3_INSTANT_AEC_MODE).data.i32[0];
12380 }
12381
12382 // If framework did not set this value, try to read from set prop.
12383 if (val == 0) {
12384 memset(prop, 0, sizeof(prop));
12385 property_get("persist.camera.instant.aec", prop, "0");
12386 val = (uint8_t)atoi(prop);
12387 }
12388
12389 if ((val >= (uint8_t)CAM_AEC_NORMAL_CONVERGENCE) &&
12390 ( val < (uint8_t)CAM_AEC_CONVERGENCE_MAX)) {
12391 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_INSTANT_AEC, val);
12392 mInstantAEC = val;
12393 mInstantAECSettledFrameNumber = 0;
12394 mInstantAecFrameIdxCount = 0;
12395 LOGH("instantAEC value set %d",val);
12396 if (mInstantAEC) {
12397 memset(prop, 0, sizeof(prop));
12398 property_get("persist.camera.ae.instant.bound", prop, "10");
12399 int32_t aec_frame_skip_cnt = atoi(prop);
12400 if (aec_frame_skip_cnt >= 0) {
12401 mAecSkipDisplayFrameBound = (uint8_t)aec_frame_skip_cnt;
12402 } else {
12403 LOGE("Invalid prop for aec frame bound %d", aec_frame_skip_cnt);
12404 rc = BAD_VALUE;
12405 }
12406 }
12407 } else {
12408 LOGE("Bad instant aec value set %d", val);
12409 rc = BAD_VALUE;
12410 }
12411 return rc;
12412}
12413
12414/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070012415 * FUNCTION : get_num_overall_buffers
12416 *
12417 * DESCRIPTION: Estimate number of pending buffers across all requests.
12418 *
12419 * PARAMETERS : None
12420 *
12421 * RETURN : Number of overall pending buffers
12422 *
12423 *==========================================================================*/
12424uint32_t PendingBuffersMap::get_num_overall_buffers()
12425{
12426 uint32_t sum_buffers = 0;
12427 for (auto &req : mPendingBuffersInRequest) {
12428 sum_buffers += req.mPendingBufferList.size();
12429 }
12430 return sum_buffers;
12431}
12432
12433/*===========================================================================
12434 * FUNCTION : removeBuf
12435 *
12436 * DESCRIPTION: Remove a matching buffer from tracker.
12437 *
12438 * PARAMETERS : @buffer: image buffer for the callback
12439 *
12440 * RETURN : None
12441 *
12442 *==========================================================================*/
12443void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
12444{
12445 bool buffer_found = false;
12446 for (auto req = mPendingBuffersInRequest.begin();
12447 req != mPendingBuffersInRequest.end(); req++) {
12448 for (auto k = req->mPendingBufferList.begin();
12449 k != req->mPendingBufferList.end(); k++ ) {
12450 if (k->buffer == buffer) {
12451 LOGD("Frame %d: Found Frame buffer %p, take it out from mPendingBufferList",
12452 req->frame_number, buffer);
12453 k = req->mPendingBufferList.erase(k);
12454 if (req->mPendingBufferList.empty()) {
12455 // Remove this request from Map
12456 req = mPendingBuffersInRequest.erase(req);
12457 }
12458 buffer_found = true;
12459 break;
12460 }
12461 }
12462 if (buffer_found) {
12463 break;
12464 }
12465 }
12466 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
12467 get_num_overall_buffers());
12468}
12469
12470/*===========================================================================
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012471 * FUNCTION : getBufErrStatus
12472 *
12473 * DESCRIPTION: get buffer error status
12474 *
12475 * PARAMETERS : @buffer: buffer handle
12476 *
12477 * RETURN : Error status
12478 *
12479 *==========================================================================*/
12480int32_t PendingBuffersMap::getBufErrStatus(buffer_handle_t *buffer)
12481{
12482 for (auto& req : mPendingBuffersInRequest) {
12483 for (auto& k : req.mPendingBufferList) {
12484 if (k.buffer == buffer)
12485 return k.bufStatus;
12486 }
12487 }
12488 return CAMERA3_BUFFER_STATUS_OK;
12489}
12490
12491/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070012492 * FUNCTION : setPAAFSupport
12493 *
12494 * DESCRIPTION: Set the preview-assisted auto focus support bit in
12495 * feature mask according to stream type and filter
12496 * arrangement
12497 *
12498 * PARAMETERS : @feature_mask: current feature mask, which may be modified
12499 * @stream_type: stream type
12500 * @filter_arrangement: filter arrangement
12501 *
12502 * RETURN : None
12503 *==========================================================================*/
12504void QCamera3HardwareInterface::setPAAFSupport(
12505 cam_feature_mask_t& feature_mask,
12506 cam_stream_type_t stream_type,
12507 cam_color_filter_arrangement_t filter_arrangement)
12508{
12509 LOGD("feature_mask=0x%llx; stream_type=%d, filter_arrangement=%d",
12510 feature_mask, stream_type, filter_arrangement);
12511
12512 switch (filter_arrangement) {
12513 case CAM_FILTER_ARRANGEMENT_RGGB:
12514 case CAM_FILTER_ARRANGEMENT_GRBG:
12515 case CAM_FILTER_ARRANGEMENT_GBRG:
12516 case CAM_FILTER_ARRANGEMENT_BGGR:
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012517 if ((stream_type == CAM_STREAM_TYPE_PREVIEW) ||
12518 (stream_type == CAM_STREAM_TYPE_ANALYSIS) ||
Thierry Strudel3d639192016-09-09 11:52:26 -070012519 (stream_type == CAM_STREAM_TYPE_VIDEO)) {
12520 feature_mask |= CAM_QCOM_FEATURE_PAAF;
12521 }
12522 break;
12523 case CAM_FILTER_ARRANGEMENT_Y:
12524 if (stream_type == CAM_STREAM_TYPE_ANALYSIS) {
12525 feature_mask |= CAM_QCOM_FEATURE_PAAF;
12526 }
12527 break;
12528 default:
12529 break;
12530 }
12531}
12532
12533/*===========================================================================
12534* FUNCTION : getSensorMountAngle
12535*
12536* DESCRIPTION: Retrieve sensor mount angle
12537*
12538* PARAMETERS : None
12539*
12540* RETURN : sensor mount angle in uint32_t
12541*==========================================================================*/
12542uint32_t QCamera3HardwareInterface::getSensorMountAngle()
12543{
12544 return gCamCapability[mCameraId]->sensor_mount_angle;
12545}
12546
12547/*===========================================================================
12548* FUNCTION : getRelatedCalibrationData
12549*
12550* DESCRIPTION: Retrieve related system calibration data
12551*
12552* PARAMETERS : None
12553*
12554* RETURN : Pointer of related system calibration data
12555*==========================================================================*/
12556const cam_related_system_calibration_data_t *QCamera3HardwareInterface::getRelatedCalibrationData()
12557{
12558 return (const cam_related_system_calibration_data_t *)
12559 &(gCamCapability[mCameraId]->related_cam_calibration);
12560}
12561}; //end namespace qcamera