blob: 58599a693890f3cbc18d9ce4c37b260ddfb0a272 [file] [log] [blame]
Thierry Strudel3d639192016-09-09 11:52:26 -07001/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
2*
3* Redistribution and use in source and binary forms, with or without
4* modification, are permitted provided that the following conditions are
5* met:
6* * Redistributions of source code must retain the above copyright
7* notice, this list of conditions and the following disclaimer.
8* * Redistributions in binary form must reproduce the above
9* copyright notice, this list of conditions and the following
10* disclaimer in the documentation and/or other materials provided
11* with the distribution.
12* * Neither the name of The Linux Foundation nor the names of its
13* contributors may be used to endorse or promote products derived
14* from this software without specific prior written permission.
15*
16* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27*
28*/
29
30#define LOG_TAG "QCamera3HWI"
31//#define LOG_NDEBUG 0
32
33#define __STDC_LIMIT_MACROS
34
35// To remove
36#include <cutils/properties.h>
37
38// System dependencies
39#include <dlfcn.h>
40#include <fcntl.h>
41#include <stdio.h>
42#include <stdlib.h>
43#include "utils/Timers.h"
44#include "sys/ioctl.h"
45#include <sync/sync.h>
46#include "gralloc_priv.h"
Thierry Strudele80ad7c2016-12-06 10:16:27 -080047#include <map>
Thierry Strudel3d639192016-09-09 11:52:26 -070048
49// Display dependencies
50#include "qdMetaData.h"
51
52// Camera dependencies
53#include "android/QCamera3External.h"
54#include "util/QCameraFlash.h"
55#include "QCamera3HWI.h"
56#include "QCamera3VendorTags.h"
57#include "QCameraTrace.h"
58
59extern "C" {
60#include "mm_camera_dbg.h"
61}
62
using namespace android;

namespace qcamera {

// Convenience accessor for a buffer pointer inside a camera memory object.
#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )

#define EMPTY_PIPELINE_DELAY 2
#define PARTIAL_RESULT_COUNT 2
#define FRAME_SKIP_DELAY 0

// Maximum representable pixel values for the supported raw bit depths.
#define MAX_VALUE_8BIT ((1<<8)-1)
#define MAX_VALUE_10BIT ((1<<10)-1)
#define MAX_VALUE_12BIT ((1<<12)-1)

// UHD (4K) video dimensions used to detect 4K recording sessions.
#define VIDEO_4K_WIDTH 3840
#define VIDEO_4K_HEIGHT 2160

// Largest stream size for which EIS (electronic image stabilization) applies.
#define MAX_EIS_WIDTH 1920
#define MAX_EIS_HEIGHT 1080

// Stream count ceilings per configuration.
#define MAX_RAW_STREAMS 1
#define MAX_STALLING_STREAMS 1
#define MAX_PROCESSED_STREAMS 3
/* Batch mode is enabled only if FPS set is equal to or greater than this */
#define MIN_FPS_FOR_BATCH_MODE (120)
#define PREVIEW_FPS_FOR_HFR (30)
#define DEFAULT_VIDEO_FPS (30.0)
#define TEMPLATE_MAX_PREVIEW_FPS (30.0)
#define MAX_HFR_BATCH_SIZE (8)
// Number of int32 values describing one metering region (x1,y1,x2,y2,weight).
#define REGIONS_TUPLE_COUNT 5
#define HDR_PLUS_PERF_TIME_OUT (7000) // milliseconds
// Set a threshold for detection of missing buffers //seconds
#define MISSING_REQUEST_BUF_TIMEOUT 3
#define FLUSH_TIMEOUT 3
// Element count of a statically sized mapping table.
#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))

// Superset of post-processing features requested for HAL3 streams.
#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3 ( CAM_QCOM_FEATURE_DENOISE2D |\
                                            CAM_QCOM_FEATURE_CROP |\
                                            CAM_QCOM_FEATURE_ROTATION |\
                                            CAM_QCOM_FEATURE_SHARPNESS |\
                                            CAM_QCOM_FEATURE_SCALE |\
                                            CAM_QCOM_FEATURE_CAC |\
                                            CAM_QCOM_FEATURE_CDS )
/* Per configuration size for static metadata length*/
#define PER_CONFIGURATION_SIZE_3 (3)

#define TIMEOUT_NEVER -1

/* Face landmarks indices */
#define LEFT_EYE_X 0
#define LEFT_EYE_Y 1
#define RIGHT_EYE_X 2
#define RIGHT_EYE_Y 3
#define MOUTH_X 4
#define MOUTH_Y 5
#define TOTAL_LANDMARK_INDICES 6
119
// Per-sensor capability tables, filled at probe time (one slot per camera).
cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
// Cached static metadata handed to the framework, one per camera.
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
// Process-wide lock shared with other HAL translation units.
extern pthread_mutex_t gCamLock;
// Runtime-adjustable HAL3 log verbosity.
volatile uint32_t gCamHal3LogLevel = 1;
// Count of currently open camera sessions (maintained under gCamLock).
extern uint8_t gNumCameraSessions;

// String-to-enum table for the CDS (chroma down-sampling) property values.
const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
    {"On",  CAM_CDS_MODE_ON},
    {"Off", CAM_CDS_MODE_OFF},
    {"Auto",CAM_CDS_MODE_AUTO}
};
// Maps the vendor video-HDR metadata enum to the HAL video-HDR enum.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_video_hdr_mode_t,
        cam_video_hdr_mode_t> QCamera3HardwareInterface::VIDEO_HDR_MODES_MAP[] = {
    { QCAMERA3_VIDEO_HDR_MODE_OFF,  CAM_VIDEO_HDR_MODE_OFF },
    { QCAMERA3_VIDEO_HDR_MODE_ON,   CAM_VIDEO_HDR_MODE_ON }
};


// Maps the vendor IR (infrared) mode enum to the HAL IR mode enum.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_ir_mode_t,
        cam_ir_mode_type_t> QCamera3HardwareInterface::IR_MODES_MAP [] = {
    {QCAMERA3_IR_MODE_OFF,  CAM_IR_MODE_OFF},
    {QCAMERA3_IR_MODE_ON,   CAM_IR_MODE_ON},
    {QCAMERA3_IR_MODE_AUTO, CAM_IR_MODE_AUTO}
};
Thierry Strudel3d639192016-09-09 11:52:26 -0700146
// Maps ANDROID_CONTROL_EFFECT_MODE_* values to CAM_EFFECT_MODE_* values.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_effect_mode_t,
        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,        CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};

// Maps ANDROID_CONTROL_AWB_MODE_* values to CAM_WB_MODE_* values.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_awb_mode_t,
        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};

// Maps ANDROID_CONTROL_SCENE_MODE_* values to CAM_SCENE_MODE_* values.
// Note: STEADYPHOTO intentionally maps to the HAL's ANTISHAKE mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_scene_mode_t,
        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE}
};

// Maps ANDROID_CONTROL_AF_MODE_* values to CAM_FOCUS_MODE_* values.
// Note: ANDROID_CONTROL_AF_MODE_OFF appears twice — both CAM_FOCUS_MODE_OFF
// and CAM_FOCUS_MODE_FIXED translate to the same Android AF mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_af_mode_t,
        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};
207
// Maps ANDROID_COLOR_CORRECTION_ABERRATION_MODE_* to HAL CAC modes.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_color_correction_aberration_mode_t,
        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};

// Maps ANDROID_CONTROL_AE_ANTIBANDING_MODE_* to CAM_ANTIBANDING_MODE_*.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_antibanding_mode_t,
        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

// Maps ANDROID_CONTROL_AE_MODE_* to the flash mode implied by each AE mode.
// Note: both OFF and ON map to CAM_FLASH_MODE_OFF, and both AUTO_FLASH and
// AUTO_FLASH_REDEYE map to CAM_FLASH_MODE_AUTO.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};

// Maps ANDROID_FLASH_MODE_* to CAM_FLASH_MODE_*.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_flash_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF  },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};

// Maps ANDROID_STATISTICS_FACE_DETECT_MODE_* to CAM_FACE_DETECT_MODE_*.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_statistics_face_detect_mode_t,
        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL }
};
253
// Maps ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_* to HAL calibration enums.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
            CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
            CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
            CAM_FOCUS_CALIBRATED }
};

// Maps ANDROID_LENS_STATE_* to the HAL AF lens-state enums.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_state_t,
        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
    { ANDROID_LENS_STATE_STATIONARY,    CAM_AF_LENS_STATE_STATIONARY},
    { ANDROID_LENS_STATE_MOVING,        CAM_AF_LENS_STATE_MOVING}
};

// Supported JPEG thumbnail sizes as flat (width, height) pairs; the leading
// (0, 0) entry means "no thumbnail".
const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             240, 144,
                                             256, 144,
                                             240, 160,
                                             256, 154,
                                             240, 240,
                                             320, 240};

// Maps ANDROID_SENSOR_TEST_PATTERN_MODE_* to CAM_TEST_PATTERN_* values.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_test_pattern_mode_t,
        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,         CAM_TEST_PATTERN_OFF },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR, CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,  CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,         CAM_TEST_PATTERN_PN9 },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1,     CAM_TEST_PATTERN_CUSTOM1},
};
291
/* Since there is no mapping for all the options some Android enum are not listed.
 * Also, the order in this list is important because while mapping from HAL to Android it will
 * traverse from lower to higher index which means that for HAL values that are map to different
 * Android values, the traverse logic will select the first one found.
 */
// Maps ANDROID_SENSOR_REFERENCE_ILLUMINANT1_* to the HAL AWB illuminant enums.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_reference_illuminant1_t,
        cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT,            CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT,   CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A,             CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55,                    CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65,                    CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75,                    CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50,                    CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN,    CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT,               CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN,               CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER,           CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER,         CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE,                  CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT,  CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT,      CAM_AWB_COLD_FLO},
};

// Maps the requested frame rate (fps) to the HAL HFR (high frame rate) mode.
const QCamera3HardwareInterface::QCameraMap<
        int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
    { 60, CAM_HFR_MODE_60FPS},
    { 90, CAM_HFR_MODE_90FPS},
    { 120, CAM_HFR_MODE_120FPS},
    { 150, CAM_HFR_MODE_150FPS},
    { 180, CAM_HFR_MODE_180FPS},
    { 210, CAM_HFR_MODE_210FPS},
    { 240, CAM_HFR_MODE_240FPS},
    { 480, CAM_HFR_MODE_480FPS},
};

// Maps the vendor instant-AEC mode enum to the HAL AEC convergence enum.
const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_instant_aec_mode_t,
        cam_aec_convergence_type> QCamera3HardwareInterface::INSTANT_AEC_MODES_MAP[] = {
    { QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE, CAM_AEC_NORMAL_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_AGGRESSIVE_CONVERGENCE, CAM_AEC_AGGRESSIVE_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_FAST_CONVERGENCE, CAM_AEC_FAST_CONVERGENCE},
};
// camera3_device_ops_t vtable handed to the framework; each entry forwards to
// a static trampoline on QCamera3HardwareInterface. register_stream_buffers
// and get_metadata_vendor_tag_ops are deliberately NULL (deprecated in HAL3.2+).
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    .initialize                         = QCamera3HardwareInterface::initialize,
    .configure_streams                  = QCamera3HardwareInterface::configure_streams,
    .register_stream_buffers            = NULL,
    .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
    .process_capture_request            = QCamera3HardwareInterface::process_capture_request,
    .get_metadata_vendor_tag_ops        = NULL,
    .dump                               = QCamera3HardwareInterface::dump,
    .flush                              = QCamera3HardwareInterface::flush,
    .reserved                           = {0},
};

// initialise to some default value
// Backend session ids, one slot per camera; 0xDEADBEEF is the
// "not yet assigned" sentinel (filled in by get_session_id at open).
uint32_t QCamera3HardwareInterface::sessionId[] = {0xDEADBEEF, 0xDEADBEEF, 0xDEADBEEF};
351
/*===========================================================================
 * FUNCTION   : QCamera3HardwareInterface
 *
 * DESCRIPTION: constructor of QCamera3HardwareInterface. Initializes all
 *              members to safe defaults, wires up the camera3_device_t
 *              struct handed to the framework, creates the sync primitives,
 *              reads debug/tuning system properties, and probes the GPU
 *              library for the surface stride alignment.
 *
 * PARAMETERS :
 *   @cameraId  : camera ID
 *   @callbacks : framework module callbacks (stored, not invoked here)
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
        const camera_module_callbacks_t *callbacks)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mSupportChannel(NULL),
      mAnalysisChannel(NULL),
      mRawDumpChannel(NULL),
      mDummyBatchChannel(NULL),
      mPerfLockMgr(),
      mCommon(),
      mChannelHandle(0),
      mFirstConfiguration(true),
      mFlush(false),
      mFlushPerf(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mPrevParameters(NULL),
      m_bIsVideo(false),
      m_bIs4KVideo(false),
      m_bEisSupportedSize(false),
      m_bEisEnable(false),
      m_MobicatMask(0),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      mMetaFrameCount(0U),
      mUpdateDebugLevel(false),
      mCallbacks(callbacks),
      mCaptureIntent(0),
      mCacMode(0),
      /* DevCamDebug metadata internal m control*/
      mDevCamDebugMetaEnable(0),
      /* DevCamDebug metadata end */
      mBatchSize(0),
      mToBeQueuedVidBufs(0),
      mHFRVideoFps(DEFAULT_VIDEO_FPS),
      mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
      mFirstFrameNumberInBatch(0),
      mNeedSensorRestart(false),
      mPreviewStarted(false),
      mMinInFlightRequests(MIN_INFLIGHT_REQUESTS),
      mMaxInFlightRequests(MAX_INFLIGHT_REQUESTS),
      mInstantAEC(false),
      mResetInstantAEC(false),
      mInstantAECSettledFrameNumber(0),
      mAecSkipDisplayFrameBound(0),
      mInstantAecFrameIdxCount(0),
      mLdafCalibExist(false),
      mLastCustIntentFrmNum(-1),
      mState(CLOSED),
      mIsDeviceLinked(false),
      mIsMainCamera(true),
      mLinkedCameraId(0),
      m_pDualCamCmdHeap(NULL),
      m_pDualCamCmdPtr(NULL)
{
    getLogLevel();
    mCommon.init(gCamCapability[cameraId]);
    // Fill in the camera3_device_t the framework will use to talk to us.
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
#ifndef USE_HAL_3_3
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_4;
#else
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
#endif
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    pthread_cond_init(&mBuffersCond, NULL);

    pthread_cond_init(&mRequestCond, NULL);
    mPendingLiveRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

    // Getting system props of different kinds
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.raw.dump", prop, "0");
    mEnableRawDump = atoi(prop);
    property_get("persist.camera.hal3.force.hdr", prop, "0");
    mForceHdrSnapshot = atoi(prop);

    if (mEnableRawDump)
        LOGD("Raw dump from Camera HAL enabled");

    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
    memset(mLdafCalib, 0, sizeof(mLdafCalib));

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.preview", prop, "0");
    m_bTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    // Software TNR for preview defaults ON, unlike the other TNR knobs.
    property_get("persist.camera.swtnr.preview", prop, "1");
    m_bSwTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.video", prop, "0");
    m_bTnrVideo = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.avtimer.debug", prop, "0");
    m_debug_avtimer = (uint8_t)atoi(prop);
    LOGI("AV timer enabled: %d", m_debug_avtimer);

    //Load and read GPU library.
    // Query the Adreno GPU helper (if present) for the pixel alignment the
    // display stack expects; fall back to 32-pixel padding otherwise.
    lib_surface_utils = NULL;
    LINK_get_surface_pixel_alignment = NULL;
    mSurfaceStridePadding = CAM_PAD_TO_32;
    lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
    if (lib_surface_utils) {
        *(void **)&LINK_get_surface_pixel_alignment =
                dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
         if (LINK_get_surface_pixel_alignment) {
             mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
         }
         dlclose(lib_surface_utils);
    }
}
494
/*===========================================================================
 * FUNCTION   : ~QCamera3HardwareInterface
 *
 * DESCRIPTION: destructor of QCamera3HardwareInterface. Tears the session
 *              down in a strict order: unlink dual-cam, stop every stream
 *              channel, then delete channels, send the final unconfigure to
 *              the backend, close the camera, and finally release pending
 *              request bookkeeping and sync primitives.
 *
 * PARAMETERS : none
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    LOGD("E");

    int32_t rc = 0;

    // Disable power hint and enable the perf lock for close camera
    mPerfLockMgr.releasePerfLock(PERF_LOCK_POWERHINT_ENCODE);
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_CLOSE_CAMERA);

    // unlink of dualcam during close camera
    if (mIsDeviceLinked) {
        cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
                &m_pDualCamCmdPtr->bundle_info;
        m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
        m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
        // gCamLock guards sessionId[], shared across camera sessions.
        pthread_mutex_lock(&gCamLock);

        if (mIsMainCamera == 1) {
            m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            // related session id should be session id of linked session
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        } else {
            m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        }
        pthread_mutex_unlock(&gCamLock);

        rc = mCameraHandle->ops->set_dual_cam_cmd(
                mCameraHandle->camera_handle);
        if (rc < 0) {
            // Best effort: unlink failure must not block camera close.
            LOGE("Dualcam: Unlink failed, but still proceed to close");
        }
    }

    /* We need to stop all streams before deleting any stream */
    if (mRawDumpChannel) {
        mRawDumpChannel->stop();
    }

    // NOTE: 'camera3_stream_t *' objects are already freed at
    //        this stage by the framework
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel) {
            channel->stop();
        }
    }
    if (mSupportChannel)
        mSupportChannel->stop();

    if (mAnalysisChannel) {
        mAnalysisChannel->stop();
    }
    if (mMetadataChannel) {
        mMetadataChannel->stop();
    }
    if (mChannelHandle) {
        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGD("stopping channel %d", mChannelHandle);
    }

    // Second pass: all channels are stopped, now it is safe to delete them.
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel)
            delete channel;
        free (*it);
    }
    if (mSupportChannel) {
        delete mSupportChannel;
        mSupportChannel = NULL;
    }

    if (mAnalysisChannel) {
        delete mAnalysisChannel;
        mAnalysisChannel = NULL;
    }
    if (mRawDumpChannel) {
        delete mRawDumpChannel;
        mRawDumpChannel = NULL;
    }
    if (mDummyBatchChannel) {
        delete mDummyBatchChannel;
        mDummyBatchChannel = NULL;
    }

    // mPictureChannel is owned via mStreamInfo (freed above); just clear it.
    mPictureChannel = NULL;

    if (mMetadataChannel) {
        delete mMetadataChannel;
        mMetadataChannel = NULL;
    }

    /* Clean up all channels */
    if (mCameraInitialized) {
        if(!mFirstConfiguration){
            //send the last unconfigure
            cam_stream_size_info_t stream_config_info;
            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
            stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
            stream_config_info.buffer_info.max_buffers =
                    m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
            clear_metadata_buffer(mParameters);
            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
                    stream_config_info);
            int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
            if (rc < 0) {
                LOGE("set_parms failed for unconfigure");
            }
        }
        deinitParameters();
    }

    if (mChannelHandle) {
        mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGH("deleting channel %d", mChannelHandle);
        mChannelHandle = 0;
    }

    if (mState != CLOSED)
        closeCamera();

    // Drop all pending request/buffer bookkeeping and default templates.
    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        req.mPendingBufferList.clear();
    }
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    mPendingReprocessResultList.clear();
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end();) {
        i = erasePendingRequest(i);
    }
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    mPerfLockMgr.releasePerfLock(PERF_LOCK_CLOSE_CAMERA);

    pthread_cond_destroy(&mRequestCond);

    pthread_cond_destroy(&mBuffersCond);

    pthread_mutex_destroy(&mMutex);
    LOGD("X");
}
656
657/*===========================================================================
658 * FUNCTION : erasePendingRequest
659 *
660 * DESCRIPTION: function to erase a desired pending request after freeing any
661 * allocated memory
662 *
663 * PARAMETERS :
664 * @i : iterator pointing to pending request to be erased
665 *
666 * RETURN : iterator pointing to the next request
667 *==========================================================================*/
668QCamera3HardwareInterface::pendingRequestIterator
669 QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
670{
671 if (i->input_buffer != NULL) {
672 free(i->input_buffer);
673 i->input_buffer = NULL;
674 }
675 if (i->settings != NULL)
676 free_camera_metadata((camera_metadata_t*)i->settings);
677 return mPendingRequestsList.erase(i);
678}
679
680/*===========================================================================
681 * FUNCTION : camEvtHandle
682 *
683 * DESCRIPTION: Function registered to mm-camera-interface to handle events
684 *
685 * PARAMETERS :
686 * @camera_handle : interface layer camera handle
687 * @evt : ptr to event
688 * @user_data : user data ptr
689 *
690 * RETURN : none
691 *==========================================================================*/
692void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
693 mm_camera_event_t *evt,
694 void *user_data)
695{
696 QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
697 if (obj && evt) {
698 switch(evt->server_event_type) {
699 case CAM_EVENT_TYPE_DAEMON_DIED:
700 pthread_mutex_lock(&obj->mMutex);
701 obj->mState = ERROR;
702 pthread_mutex_unlock(&obj->mMutex);
703 LOGE("Fatal, camera daemon died");
704 break;
705
706 case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
707 LOGD("HAL got request pull from Daemon");
708 pthread_mutex_lock(&obj->mMutex);
709 obj->mWokenUpByDaemon = true;
710 obj->unblockRequestIfNecessary();
711 pthread_mutex_unlock(&obj->mMutex);
712 break;
713
714 default:
715 LOGW("Warning: Unhandled event %d",
716 evt->server_event_type);
717 break;
718 }
719 } else {
720 LOGE("NULL user_data/evt");
721 }
722}
723
724/*===========================================================================
725 * FUNCTION : openCamera
726 *
727 * DESCRIPTION: open camera
728 *
729 * PARAMETERS :
730 * @hw_device : double ptr for camera device struct
731 *
732 * RETURN : int32_t type of status
733 * NO_ERROR -- success
734 * none-zero failure code
735 *==========================================================================*/
736int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
737{
738 int rc = 0;
739 if (mState != CLOSED) {
740 *hw_device = NULL;
741 return PERMISSION_DENIED;
742 }
743
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800744 mPerfLockMgr.acquirePerfLock(PERF_LOCK_OPEN_CAMERA);
Thierry Strudel3d639192016-09-09 11:52:26 -0700745 LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
746 mCameraId);
747
748 rc = openCamera();
749 if (rc == 0) {
750 *hw_device = &mCameraDevice.common;
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800751 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -0700752 *hw_device = NULL;
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800753 }
Thierry Strudel3d639192016-09-09 11:52:26 -0700754
Thierry Strudel3d639192016-09-09 11:52:26 -0700755 LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
756 mCameraId, rc);
757
758 if (rc == NO_ERROR) {
759 mState = OPENED;
760 }
761 return rc;
762}
763
764/*===========================================================================
765 * FUNCTION : openCamera
766 *
767 * DESCRIPTION: open camera
768 *
769 * PARAMETERS : none
770 *
771 * RETURN : int32_t type of status
772 * NO_ERROR -- success
773 * none-zero failure code
774 *==========================================================================*/
775int QCamera3HardwareInterface::openCamera()
776{
777 int rc = 0;
778 char value[PROPERTY_VALUE_MAX];
779
Thierry Strudele80ad7c2016-12-06 10:16:27 -0800780 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_OPENCAMERA);
Thierry Strudel3d639192016-09-09 11:52:26 -0700781 if (mCameraHandle) {
782 LOGE("Failure: Camera already opened");
783 return ALREADY_EXISTS;
784 }
785
786 rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
787 if (rc < 0) {
788 LOGE("Failed to reserve flash for camera id: %d",
789 mCameraId);
790 return UNKNOWN_ERROR;
791 }
792
793 rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
794 if (rc) {
795 LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
796 return rc;
797 }
798
799 if (!mCameraHandle) {
800 LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
801 return -ENODEV;
802 }
803
804 rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
805 camEvtHandle, (void *)this);
806
807 if (rc < 0) {
808 LOGE("Error, failed to register event callback");
809 /* Not closing camera here since it is already handled in destructor */
810 return FAILED_TRANSACTION;
811 }
812
813 mExifParams.debug_params =
814 (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
815 if (mExifParams.debug_params) {
816 memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
817 } else {
818 LOGE("Out of Memory. Allocation failed for 3A debug exif params");
819 return NO_MEMORY;
820 }
821 mFirstConfiguration = true;
822
823 //Notify display HAL that a camera session is active.
824 //But avoid calling the same during bootup because camera service might open/close
825 //cameras at boot time during its initialization and display service will also internally
826 //wait for camera service to initialize first while calling this display API, resulting in a
827 //deadlock situation. Since boot time camera open/close calls are made only to fetch
828 //capabilities, no need of this display bw optimization.
829 //Use "service.bootanim.exit" property to know boot status.
830 property_get("service.bootanim.exit", value, "0");
831 if (atoi(value) == 1) {
832 pthread_mutex_lock(&gCamLock);
833 if (gNumCameraSessions++ == 0) {
834 setCameraLaunchStatus(true);
835 }
836 pthread_mutex_unlock(&gCamLock);
837 }
838
839 //fill the session id needed while linking dual cam
840 pthread_mutex_lock(&gCamLock);
841 rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
842 &sessionId[mCameraId]);
843 pthread_mutex_unlock(&gCamLock);
844
845 if (rc < 0) {
846 LOGE("Error, failed to get sessiion id");
847 return UNKNOWN_ERROR;
848 } else {
849 //Allocate related cam sync buffer
850 //this is needed for the payload that goes along with bundling cmd for related
851 //camera use cases
Thierry Strudel295a0ca2016-11-03 18:38:47 -0700852 m_pDualCamCmdHeap = new QCamera3HeapMemory(1);
853 rc = m_pDualCamCmdHeap->allocate(sizeof(cam_dual_camera_cmd_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -0700854 if(rc != OK) {
855 rc = NO_MEMORY;
856 LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
857 return NO_MEMORY;
858 }
859
860 //Map memory for related cam sync buffer
861 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
Thierry Strudel295a0ca2016-11-03 18:38:47 -0700862 CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF,
863 m_pDualCamCmdHeap->getFd(0),
864 sizeof(cam_dual_camera_cmd_info_t),
865 m_pDualCamCmdHeap->getPtr(0));
Thierry Strudel3d639192016-09-09 11:52:26 -0700866 if(rc < 0) {
867 LOGE("Dualcam: failed to map Related cam sync buffer");
868 rc = FAILED_TRANSACTION;
869 return NO_MEMORY;
870 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -0700871 m_pDualCamCmdPtr =
872 (cam_dual_camera_cmd_info_t*) DATA_PTR(m_pDualCamCmdHeap,0);
Thierry Strudel3d639192016-09-09 11:52:26 -0700873 }
874
875 LOGH("mCameraId=%d",mCameraId);
876
877 return NO_ERROR;
878}
879
880/*===========================================================================
881 * FUNCTION : closeCamera
882 *
883 * DESCRIPTION: close camera
884 *
885 * PARAMETERS : none
886 *
887 * RETURN : int32_t type of status
888 * NO_ERROR -- success
889 * none-zero failure code
890 *==========================================================================*/
891int QCamera3HardwareInterface::closeCamera()
892{
Thierry Strudele80ad7c2016-12-06 10:16:27 -0800893 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CLOSECAMERA);
Thierry Strudel3d639192016-09-09 11:52:26 -0700894 int rc = NO_ERROR;
895 char value[PROPERTY_VALUE_MAX];
896
897 LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
898 mCameraId);
Thierry Strudelcca4d9c2016-10-20 08:25:53 -0700899
900 // unmap memory for related cam sync buffer
901 mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
Thierry Strudele80ad7c2016-12-06 10:16:27 -0800902 CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF);
Thierry Strudel295a0ca2016-11-03 18:38:47 -0700903 if (NULL != m_pDualCamCmdHeap) {
904 m_pDualCamCmdHeap->deallocate();
905 delete m_pDualCamCmdHeap;
906 m_pDualCamCmdHeap = NULL;
907 m_pDualCamCmdPtr = NULL;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -0700908 }
909
Thierry Strudel3d639192016-09-09 11:52:26 -0700910 rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
911 mCameraHandle = NULL;
912
913 //reset session id to some invalid id
914 pthread_mutex_lock(&gCamLock);
915 sessionId[mCameraId] = 0xDEADBEEF;
916 pthread_mutex_unlock(&gCamLock);
917
918 //Notify display HAL that there is no active camera session
919 //but avoid calling the same during bootup. Refer to openCamera
920 //for more details.
921 property_get("service.bootanim.exit", value, "0");
922 if (atoi(value) == 1) {
923 pthread_mutex_lock(&gCamLock);
924 if (--gNumCameraSessions == 0) {
925 setCameraLaunchStatus(false);
926 }
927 pthread_mutex_unlock(&gCamLock);
928 }
929
Thierry Strudel3d639192016-09-09 11:52:26 -0700930 if (mExifParams.debug_params) {
931 free(mExifParams.debug_params);
932 mExifParams.debug_params = NULL;
933 }
934 if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
935 LOGW("Failed to release flash for camera id: %d",
936 mCameraId);
937 }
938 mState = CLOSED;
939 LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
940 mCameraId, rc);
941 return rc;
942}
943
944/*===========================================================================
945 * FUNCTION : initialize
946 *
947 * DESCRIPTION: Initialize frameworks callback functions
948 *
949 * PARAMETERS :
950 * @callback_ops : callback function to frameworks
951 *
952 * RETURN :
953 *
954 *==========================================================================*/
955int QCamera3HardwareInterface::initialize(
956 const struct camera3_callback_ops *callback_ops)
957{
Thierry Strudele80ad7c2016-12-06 10:16:27 -0800958 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_INIT);
Thierry Strudel3d639192016-09-09 11:52:26 -0700959 int rc;
960
961 LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
962 pthread_mutex_lock(&mMutex);
963
964 // Validate current state
965 switch (mState) {
966 case OPENED:
967 /* valid state */
968 break;
969 default:
970 LOGE("Invalid state %d", mState);
971 rc = -ENODEV;
972 goto err1;
973 }
974
975 rc = initParameters();
976 if (rc < 0) {
977 LOGE("initParamters failed %d", rc);
978 goto err1;
979 }
980 mCallbackOps = callback_ops;
981
982 mChannelHandle = mCameraHandle->ops->add_channel(
983 mCameraHandle->camera_handle, NULL, NULL, this);
984 if (mChannelHandle == 0) {
985 LOGE("add_channel failed");
986 rc = -ENOMEM;
987 pthread_mutex_unlock(&mMutex);
988 return rc;
989 }
990
991 pthread_mutex_unlock(&mMutex);
992 mCameraInitialized = true;
993 mState = INITIALIZED;
994 LOGI("X");
995 return 0;
996
997err1:
998 pthread_mutex_unlock(&mMutex);
999 return rc;
1000}
1001
1002/*===========================================================================
1003 * FUNCTION : validateStreamDimensions
1004 *
1005 * DESCRIPTION: Check if the configuration requested are those advertised
1006 *
1007 * PARAMETERS :
1008 * @stream_list : streams to be configured
1009 *
1010 * RETURN :
1011 *
1012 *==========================================================================*/
int QCamera3HardwareInterface::validateStreamDimensions(
        camera3_stream_configuration_t *streamList)
{
    int rc = NO_ERROR;
    size_t count = 0;

    camera3_stream_t *inputStream = NULL;
    /*
    * Loop through all streams to find input stream if it exists*
    */
    for (size_t i = 0; i< streamList->num_streams; i++) {
        if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
            if (inputStream != NULL) {
                // Only one input stream is permitted per configuration.
                LOGE("Error, Multiple input streams requested");
                return -EINVAL;
            }
            inputStream = streamList->streams[i];
        }
    }
    /*
    * Loop through all streams requested in configuration
    * Check if unsupported sizes have been requested on any of them
    */
    for (size_t j = 0; j < streamList->num_streams; j++) {
        bool sizeFound = false;
        camera3_stream_t *newStream = streamList->streams[j];

        uint32_t rotatedHeight = newStream->height;
        uint32_t rotatedWidth = newStream->width;
        // A 90/270-degree rotation transposes the buffer, so validate the
        // swapped dimensions against the capability tables.
        if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
                (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
            rotatedHeight = newStream->width;
            rotatedWidth = newStream->height;
        }

        /*
        * Sizes are different for each type of stream format check against
        * appropriate table.
        */
        switch (newStream->format) {
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
        case HAL_PIXEL_FORMAT_RAW10:
            // RAW streams must exactly match an advertised raw dimension.
            count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
                        (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_BLOB:
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
            /* Verify set size against generated sizes table */
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_YCbCr_420_888:
        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
        default:
            // ZSL/bidirectional/input streams at exactly the active array
            // size are accepted immediately.
            if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
                    || newStream->stream_type == CAMERA3_STREAM_INPUT
                    || IS_USAGE_ZSL(newStream->usage)) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->active_array_size.width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->active_array_size.height)) {
                    sizeFound = true;
                    break;
                }
                /* We could potentially break here to enforce ZSL stream
                 * set from frameworks always is full active array size
                 * but it is not clear from the spc if framework will always
                 * follow that, also we have logic to override to full array
                 * size, so keeping the logic lenient at the moment
                 */
            }
            // Otherwise fall back to the processed picture-size table.
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
                    MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        } /* End of switch(newStream->format) */

        /* We error out even if a single stream has unsupported size set */
        if (!sizeFound) {
            LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
                    rotatedWidth, rotatedHeight, newStream->format,
                    gCamCapability[mCameraId]->active_array_size.width,
                    gCamCapability[mCameraId]->active_array_size.height);
            rc = -EINVAL;
            break;
        }
    } /* End of for each stream */
    return rc;
}
1124
1125/*==============================================================================
1126 * FUNCTION : isSupportChannelNeeded
1127 *
1128 * DESCRIPTION: Simple heuristic func to determine if support channels is needed
1129 *
1130 * PARAMETERS :
1131 * @stream_list : streams to be configured
1132 * @stream_config_info : the config info for streams to be configured
1133 *
1134 * RETURN : Boolen true/false decision
1135 *
1136 *==========================================================================*/
1137bool QCamera3HardwareInterface::isSupportChannelNeeded(
1138 camera3_stream_configuration_t *streamList,
1139 cam_stream_size_info_t stream_config_info)
1140{
1141 uint32_t i;
1142 bool pprocRequested = false;
1143 /* Check for conditions where PProc pipeline does not have any streams*/
1144 for (i = 0; i < stream_config_info.num_streams; i++) {
1145 if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
1146 stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
1147 pprocRequested = true;
1148 break;
1149 }
1150 }
1151
1152 if (pprocRequested == false )
1153 return true;
1154
1155 /* Dummy stream needed if only raw or jpeg streams present */
1156 for (i = 0; i < streamList->num_streams; i++) {
1157 switch(streamList->streams[i]->format) {
1158 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1159 case HAL_PIXEL_FORMAT_RAW10:
1160 case HAL_PIXEL_FORMAT_RAW16:
1161 case HAL_PIXEL_FORMAT_BLOB:
1162 break;
1163 default:
1164 return false;
1165 }
1166 }
1167 return true;
1168}
1169
1170/*==============================================================================
1171 * FUNCTION : getSensorOutputSize
1172 *
1173 * DESCRIPTION: Get sensor output size based on current stream configuratoin
1174 *
1175 * PARAMETERS :
1176 * @sensor_dim : sensor output dimension (output)
1177 *
1178 * RETURN : int32_t type of status
1179 * NO_ERROR -- success
1180 * none-zero failure code
1181 *
1182 *==========================================================================*/
int32_t QCamera3HardwareInterface::getSensorOutputSize(cam_dimension_t &sensor_dim)
{
    int32_t rc = NO_ERROR;

    // The sensor must cover the largest configured stream in each axis, so
    // take the per-axis maximum over all configured stream sizes.
    cam_dimension_t max_dim = {0, 0};
    for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
        if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
            max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
        if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
            max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
    }

    clear_metadata_buffer(mParameters);

    // Tell the backend the max dimension it must support for this config.
    rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
            max_dim);
    if (rc != NO_ERROR) {
        LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
        return rc;
    }

    rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
    if (rc != NO_ERROR) {
        LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
        return rc;
    }

    // Query back the raw (sensor output) dimension the backend picked for
    // the max dimension set above.
    clear_metadata_buffer(mParameters);
    ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_RAW_DIMENSION);

    rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
            mParameters);
    if (rc != NO_ERROR) {
        LOGE("Failed to get CAM_INTF_PARM_RAW_DIMENSION");
        return rc;
    }

    READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_RAW_DIMENSION, sensor_dim);
    LOGH("sensor output dimension = %d x %d", sensor_dim.width, sensor_dim.height);

    return rc;
}
1225
1226/*==============================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07001227 * FUNCTION : addToPPFeatureMask
1228 *
1229 * DESCRIPTION: add additional features to pp feature mask based on
1230 * stream type and usecase
1231 *
1232 * PARAMETERS :
1233 * @stream_format : stream type for feature mask
1234 * @stream_idx : stream idx within postprocess_mask list to change
1235 *
1236 * RETURN : NULL
1237 *
1238 *==========================================================================*/
void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
        uint32_t stream_idx)
{
    char feature_mask_value[PROPERTY_VALUE_MAX];
    cam_feature_mask_t feature_mask;
    int args_converted;
    int property_len;

    /* Get feature mask from property */
#ifdef _LE_CAMERA_
    // On LE builds SW TNR is the default feature mask when the property is
    // not set.
    char swtnr_feature_mask_value[PROPERTY_VALUE_MAX];
    snprintf(swtnr_feature_mask_value, PROPERTY_VALUE_MAX, "%lld", CAM_QTI_FEATURE_SW_TNR);
    property_len = property_get("persist.camera.hal3.feature",
            feature_mask_value, swtnr_feature_mask_value);
#else
    property_len = property_get("persist.camera.hal3.feature",
            feature_mask_value, "0");
#endif
    // The property may be given in hex ("0x...") or decimal form.
    if ((property_len > 2) && (feature_mask_value[0] == '0') &&
            (feature_mask_value[1] == 'x')) {
        args_converted = sscanf(feature_mask_value, "0x%llx", &feature_mask);
    } else {
        args_converted = sscanf(feature_mask_value, "%lld", &feature_mask);
    }
    if (1 != args_converted) {
        // Unparsable property value: leave the stream's mask untouched.
        feature_mask = 0;
        LOGE("Wrong feature mask %s", feature_mask_value);
        return;
    }

    switch (stream_format) {
    case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
        /* Add LLVD to pp feature mask only if video hint is enabled */
        // SW TNR takes precedence over LLVD SeeMore when both are requested.
        if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
            mStreamConfigInfo.postprocess_mask[stream_idx]
                    |= CAM_QTI_FEATURE_SW_TNR;
            LOGH("Added SW TNR to pp feature mask");
        } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
            mStreamConfigInfo.postprocess_mask[stream_idx]
                    |= CAM_QCOM_FEATURE_LLVD;
            LOGH("Added LLVD SeeMore to pp feature mask");
        }
        // Staggered video HDR is added whenever the sensor advertises it.
        if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
                CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
            mStreamConfigInfo.postprocess_mask[stream_idx] |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
        }
        break;
    }
    default:
        break;
    }
    LOGD("PP feature mask %llx",
            mStreamConfigInfo.postprocess_mask[stream_idx]);
}
1293
1294/*==============================================================================
1295 * FUNCTION : updateFpsInPreviewBuffer
1296 *
1297 * DESCRIPTION: update FPS information in preview buffer.
1298 *
1299 * PARAMETERS :
1300 * @metadata : pointer to metadata buffer
1301 * @frame_number: frame_number to look for in pending buffer list
1302 *
1303 * RETURN : None
1304 *
1305 *==========================================================================*/
void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
        uint32_t frame_number)
{
    // Mark all pending buffers for this particular request
    // with corresponding framerate information
    for (List<PendingBuffersInRequest>::iterator req =
            mPendingBuffersMap.mPendingBuffersInRequest.begin();
            req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
        for(List<PendingBufferInfo>::iterator j =
                req->mPendingBufferList.begin();
                j != req->mPendingBufferList.end(); j++) {
            QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
            // Only preview-type streams of the matching request are updated.
            if ((req->frame_number == frame_number) &&
                (channel->getStreamTypeMask() &
                (1U << CAM_STREAM_TYPE_PREVIEW))) {
                // If the metadata carries an fps range, store its max fps in
                // the buffer's private handle so the display side can read it.
                IF_META_AVAILABLE(cam_fps_range_t, float_range,
                        CAM_INTF_PARM_FPS_RANGE, metadata) {
                    typeof (MetaData_t::refreshrate) cameraFps = float_range->max_fps;
                    struct private_handle_t *priv_handle =
                            (struct private_handle_t *)(*(j->buffer));
                    setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
                }
            }
        }
    }
}
1332
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001333/*==============================================================================
1334 * FUNCTION : updateTimeStampInPendingBuffers
1335 *
1336 * DESCRIPTION: update timestamp in display metadata for all pending buffers
1337 * of a frame number
1338 *
1339 * PARAMETERS :
1340 * @frame_number: frame_number. Timestamp will be set on pending buffers of this frame number
1341 * @timestamp : timestamp to be set
1342 *
1343 * RETURN : None
1344 *
1345 *==========================================================================*/
1346void QCamera3HardwareInterface::updateTimeStampInPendingBuffers(
1347 uint32_t frameNumber, nsecs_t timestamp)
1348{
1349 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
1350 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1351 if (req->frame_number != frameNumber)
1352 continue;
1353
1354 for (auto k = req->mPendingBufferList.begin();
1355 k != req->mPendingBufferList.end(); k++ ) {
1356 struct private_handle_t *priv_handle =
1357 (struct private_handle_t *) (*(k->buffer));
1358 setMetaData(priv_handle, SET_VT_TIMESTAMP, &timestamp);
1359 }
1360 }
1361 return;
1362}
1363
Thierry Strudel3d639192016-09-09 11:52:26 -07001364/*===========================================================================
1365 * FUNCTION : configureStreams
1366 *
1367 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1368 * and output streams.
1369 *
1370 * PARAMETERS :
1371 * @stream_list : streams to be configured
1372 *
1373 * RETURN :
1374 *
1375 *==========================================================================*/
1376int QCamera3HardwareInterface::configureStreams(
1377 camera3_stream_configuration_t *streamList)
1378{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001379 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS);
Thierry Strudel3d639192016-09-09 11:52:26 -07001380 int rc = 0;
1381
1382 // Acquire perfLock before configure streams
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001383 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001384 rc = configureStreamsPerfLocked(streamList);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001385 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001386
1387 return rc;
1388}
1389
1390/*===========================================================================
1391 * FUNCTION : configureStreamsPerfLocked
1392 *
1393 * DESCRIPTION: configureStreams while perfLock is held.
1394 *
1395 * PARAMETERS :
1396 * @stream_list : streams to be configured
1397 *
1398 * RETURN : int32_t type of status
1399 * NO_ERROR -- success
1400 * none-zero failure code
1401 *==========================================================================*/
1402int QCamera3HardwareInterface::configureStreamsPerfLocked(
1403 camera3_stream_configuration_t *streamList)
1404{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001405 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS_PERF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07001406 int rc = 0;
1407
1408 // Sanity check stream_list
1409 if (streamList == NULL) {
1410 LOGE("NULL stream configuration");
1411 return BAD_VALUE;
1412 }
1413 if (streamList->streams == NULL) {
1414 LOGE("NULL stream list");
1415 return BAD_VALUE;
1416 }
1417
1418 if (streamList->num_streams < 1) {
1419 LOGE("Bad number of streams requested: %d",
1420 streamList->num_streams);
1421 return BAD_VALUE;
1422 }
1423
1424 if (streamList->num_streams >= MAX_NUM_STREAMS) {
1425 LOGE("Maximum number of streams %d exceeded: %d",
1426 MAX_NUM_STREAMS, streamList->num_streams);
1427 return BAD_VALUE;
1428 }
1429
1430 mOpMode = streamList->operation_mode;
1431 LOGD("mOpMode: %d", mOpMode);
1432
1433 /* first invalidate all the steams in the mStreamList
1434 * if they appear again, they will be validated */
1435 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1436 it != mStreamInfo.end(); it++) {
1437 QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1438 if (channel) {
1439 channel->stop();
1440 }
1441 (*it)->status = INVALID;
1442 }
1443
1444 if (mRawDumpChannel) {
1445 mRawDumpChannel->stop();
1446 delete mRawDumpChannel;
1447 mRawDumpChannel = NULL;
1448 }
1449
1450 if (mSupportChannel)
1451 mSupportChannel->stop();
1452
1453 if (mAnalysisChannel) {
1454 mAnalysisChannel->stop();
1455 }
1456 if (mMetadataChannel) {
1457 /* If content of mStreamInfo is not 0, there is metadata stream */
1458 mMetadataChannel->stop();
1459 }
1460 if (mChannelHandle) {
1461 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
1462 mChannelHandle);
1463 LOGD("stopping channel %d", mChannelHandle);
1464 }
1465
1466 pthread_mutex_lock(&mMutex);
1467
1468 // Check state
1469 switch (mState) {
1470 case INITIALIZED:
1471 case CONFIGURED:
1472 case STARTED:
1473 /* valid state */
1474 break;
1475 default:
1476 LOGE("Invalid state %d", mState);
1477 pthread_mutex_unlock(&mMutex);
1478 return -ENODEV;
1479 }
1480
1481 /* Check whether we have video stream */
1482 m_bIs4KVideo = false;
1483 m_bIsVideo = false;
1484 m_bEisSupportedSize = false;
1485 m_bTnrEnabled = false;
1486 bool isZsl = false;
1487 uint32_t videoWidth = 0U;
1488 uint32_t videoHeight = 0U;
1489 size_t rawStreamCnt = 0;
1490 size_t stallStreamCnt = 0;
1491 size_t processedStreamCnt = 0;
1492 // Number of streams on ISP encoder path
1493 size_t numStreamsOnEncoder = 0;
1494 size_t numYuv888OnEncoder = 0;
1495 bool bYuv888OverrideJpeg = false;
1496 cam_dimension_t largeYuv888Size = {0, 0};
1497 cam_dimension_t maxViewfinderSize = {0, 0};
1498 bool bJpegExceeds4K = false;
1499 bool bJpegOnEncoder = false;
1500 bool bUseCommonFeatureMask = false;
1501 cam_feature_mask_t commonFeatureMask = 0;
1502 bool bSmallJpegSize = false;
1503 uint32_t width_ratio;
1504 uint32_t height_ratio;
1505 maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1506 camera3_stream_t *inputStream = NULL;
1507 bool isJpeg = false;
1508 cam_dimension_t jpegSize = {0, 0};
1509
1510 cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
1511
1512 /*EIS configuration*/
Thierry Strudel3d639192016-09-09 11:52:26 -07001513 bool oisSupported = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001514 uint8_t eis_prop_set;
1515 uint32_t maxEisWidth = 0;
1516 uint32_t maxEisHeight = 0;
1517
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001518 // Initialize all instant AEC related variables
1519 mInstantAEC = false;
1520 mResetInstantAEC = false;
1521 mInstantAECSettledFrameNumber = 0;
1522 mAecSkipDisplayFrameBound = 0;
1523 mInstantAecFrameIdxCount = 0;
1524
Thierry Strudel3d639192016-09-09 11:52:26 -07001525 memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1526
1527 size_t count = IS_TYPE_MAX;
1528 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1529 for (size_t i = 0; i < count; i++) {
1530 if ((gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001531 (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
1532 m_bEisSupported = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07001533 break;
1534 }
1535 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001536 count = CAM_OPT_STAB_MAX;
1537 count = MIN(gCamCapability[mCameraId]->optical_stab_modes_count, count);
1538 for (size_t i = 0; i < count; i++) {
1539 if (gCamCapability[mCameraId]->optical_stab_modes[i] == CAM_OPT_STAB_ON) {
1540 oisSupported = true;
1541 break;
1542 }
1543 }
1544
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001545 if (m_bEisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001546 maxEisWidth = MAX_EIS_WIDTH;
1547 maxEisHeight = MAX_EIS_HEIGHT;
1548 }
1549
1550 /* EIS setprop control */
1551 char eis_prop[PROPERTY_VALUE_MAX];
1552 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001553 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07001554 eis_prop_set = (uint8_t)atoi(eis_prop);
1555
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001556 m_bEisEnable = eis_prop_set && (!oisSupported && m_bEisSupported) &&
Thierry Strudel3d639192016-09-09 11:52:26 -07001557 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
1558
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001559 LOGD("m_bEisEnable: %d, eis_prop_set: %d, m_bEisSupported: %d, oisSupported:%d ",
1560 m_bEisEnable, eis_prop_set, m_bEisSupported, oisSupported);
1561
Thierry Strudel3d639192016-09-09 11:52:26 -07001562 /* stream configurations */
1563 for (size_t i = 0; i < streamList->num_streams; i++) {
1564 camera3_stream_t *newStream = streamList->streams[i];
1565 LOGI("stream[%d] type = %d, format = %d, width = %d, "
1566 "height = %d, rotation = %d, usage = 0x%x",
1567 i, newStream->stream_type, newStream->format,
1568 newStream->width, newStream->height, newStream->rotation,
1569 newStream->usage);
1570 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1571 newStream->stream_type == CAMERA3_STREAM_INPUT){
1572 isZsl = true;
1573 }
1574 if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1575 inputStream = newStream;
1576 }
1577
1578 if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
1579 isJpeg = true;
1580 jpegSize.width = newStream->width;
1581 jpegSize.height = newStream->height;
1582 if (newStream->width > VIDEO_4K_WIDTH ||
1583 newStream->height > VIDEO_4K_HEIGHT)
1584 bJpegExceeds4K = true;
1585 }
1586
1587 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1588 (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1589 m_bIsVideo = true;
1590 videoWidth = newStream->width;
1591 videoHeight = newStream->height;
1592 if ((VIDEO_4K_WIDTH <= newStream->width) &&
1593 (VIDEO_4K_HEIGHT <= newStream->height)) {
1594 m_bIs4KVideo = true;
1595 }
1596 m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
1597 (newStream->height <= maxEisHeight);
1598 }
1599 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1600 newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
1601 switch (newStream->format) {
1602 case HAL_PIXEL_FORMAT_BLOB:
1603 stallStreamCnt++;
1604 if (isOnEncoder(maxViewfinderSize, newStream->width,
1605 newStream->height)) {
1606 numStreamsOnEncoder++;
1607 bJpegOnEncoder = true;
1608 }
1609 width_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.width,
1610 newStream->width);
1611 height_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.height,
1612 newStream->height);;
1613 FATAL_IF(gCamCapability[mCameraId]->max_downscale_factor == 0,
1614 "FATAL: max_downscale_factor cannot be zero and so assert");
1615 if ( (width_ratio > gCamCapability[mCameraId]->max_downscale_factor) ||
1616 (height_ratio > gCamCapability[mCameraId]->max_downscale_factor)) {
1617 LOGH("Setting small jpeg size flag to true");
1618 bSmallJpegSize = true;
1619 }
1620 break;
1621 case HAL_PIXEL_FORMAT_RAW10:
1622 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1623 case HAL_PIXEL_FORMAT_RAW16:
1624 rawStreamCnt++;
1625 break;
1626 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1627 processedStreamCnt++;
1628 if (isOnEncoder(maxViewfinderSize, newStream->width,
1629 newStream->height)) {
1630 if (newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL &&
1631 !IS_USAGE_ZSL(newStream->usage)) {
1632 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1633 }
1634 numStreamsOnEncoder++;
1635 }
1636 break;
1637 case HAL_PIXEL_FORMAT_YCbCr_420_888:
1638 processedStreamCnt++;
1639 if (isOnEncoder(maxViewfinderSize, newStream->width,
1640 newStream->height)) {
1641 // If Yuv888 size is not greater than 4K, set feature mask
1642 // to SUPERSET so that it support concurrent request on
1643 // YUV and JPEG.
1644 if (newStream->width <= VIDEO_4K_WIDTH &&
1645 newStream->height <= VIDEO_4K_HEIGHT) {
1646 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1647 }
1648 numStreamsOnEncoder++;
1649 numYuv888OnEncoder++;
1650 largeYuv888Size.width = newStream->width;
1651 largeYuv888Size.height = newStream->height;
1652 }
1653 break;
1654 default:
1655 processedStreamCnt++;
1656 if (isOnEncoder(maxViewfinderSize, newStream->width,
1657 newStream->height)) {
1658 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1659 numStreamsOnEncoder++;
1660 }
1661 break;
1662 }
1663
1664 }
1665 }
1666
1667 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
1668 gCamCapability[mCameraId]->position == CAM_POSITION_FRONT_AUX ||
1669 !m_bIsVideo) {
1670 m_bEisEnable = false;
1671 }
1672
1673 /* Logic to enable/disable TNR based on specific config size/etc.*/
1674 if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
1675 ((videoWidth == 1920 && videoHeight == 1080) ||
1676 (videoWidth == 1280 && videoHeight == 720)) &&
1677 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
1678 m_bTnrEnabled = true;
1679
1680 /* Check if num_streams is sane */
1681 if (stallStreamCnt > MAX_STALLING_STREAMS ||
1682 rawStreamCnt > MAX_RAW_STREAMS ||
1683 processedStreamCnt > MAX_PROCESSED_STREAMS) {
1684 LOGE("Invalid stream configu: stall: %d, raw: %d, processed %d",
1685 stallStreamCnt, rawStreamCnt, processedStreamCnt);
1686 pthread_mutex_unlock(&mMutex);
1687 return -EINVAL;
1688 }
1689 /* Check whether we have zsl stream or 4k video case */
1690 if (isZsl && m_bIsVideo) {
1691 LOGE("Currently invalid configuration ZSL&Video!");
1692 pthread_mutex_unlock(&mMutex);
1693 return -EINVAL;
1694 }
1695 /* Check if stream sizes are sane */
1696 if (numStreamsOnEncoder > 2) {
1697 LOGE("Number of streams on ISP encoder path exceeds limits of 2");
1698 pthread_mutex_unlock(&mMutex);
1699 return -EINVAL;
1700 } else if (1 < numStreamsOnEncoder){
1701 bUseCommonFeatureMask = true;
1702 LOGH("Multiple streams above max viewfinder size, common mask needed");
1703 }
1704
1705 /* Check if BLOB size is greater than 4k in 4k recording case */
1706 if (m_bIs4KVideo && bJpegExceeds4K) {
1707 LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
1708 pthread_mutex_unlock(&mMutex);
1709 return -EINVAL;
1710 }
1711
1712 // When JPEG and preview streams share VFE output, CPP will not apply CAC2
1713 // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
1714 // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
1715 // is not true. Otherwise testMandatoryOutputCombinations will fail with following
1716 // configurations:
1717 // {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
1718 // {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
1719 // (These two configurations will not have CAC2 enabled even in HQ modes.)
1720 if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
1721 ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
1722 __func__);
1723 pthread_mutex_unlock(&mMutex);
1724 return -EINVAL;
1725 }
1726
1727 // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
1728 // the YUV stream's size is greater or equal to the JPEG size, set common
1729 // postprocess mask to NONE, so that we can take advantage of postproc bypass.
1730 if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
1731 jpegSize.width, jpegSize.height) &&
1732 largeYuv888Size.width > jpegSize.width &&
1733 largeYuv888Size.height > jpegSize.height) {
1734 bYuv888OverrideJpeg = true;
1735 } else if (!isJpeg && numStreamsOnEncoder > 1) {
1736 commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1737 }
1738
1739 LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %llx",
1740 maxViewfinderSize.width, maxViewfinderSize.height, isZsl, bUseCommonFeatureMask,
1741 commonFeatureMask);
1742 LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
1743 numStreamsOnEncoder, processedStreamCnt, stallStreamCnt, bSmallJpegSize);
1744
1745 rc = validateStreamDimensions(streamList);
1746 if (rc == NO_ERROR) {
1747 rc = validateStreamRotations(streamList);
1748 }
1749 if (rc != NO_ERROR) {
1750 LOGE("Invalid stream configuration requested!");
1751 pthread_mutex_unlock(&mMutex);
1752 return rc;
1753 }
1754
1755 camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
1756 for (size_t i = 0; i < streamList->num_streams; i++) {
1757 camera3_stream_t *newStream = streamList->streams[i];
1758 LOGH("newStream type = %d, stream format = %d "
1759 "stream size : %d x %d, stream rotation = %d",
1760 newStream->stream_type, newStream->format,
1761 newStream->width, newStream->height, newStream->rotation);
1762 //if the stream is in the mStreamList validate it
1763 bool stream_exists = false;
1764 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
1765 it != mStreamInfo.end(); it++) {
1766 if ((*it)->stream == newStream) {
1767 QCamera3ProcessingChannel *channel =
1768 (QCamera3ProcessingChannel*)(*it)->stream->priv;
1769 stream_exists = true;
1770 if (channel)
1771 delete channel;
1772 (*it)->status = VALID;
1773 (*it)->stream->priv = NULL;
1774 (*it)->channel = NULL;
1775 }
1776 }
1777 if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
1778 //new stream
1779 stream_info_t* stream_info;
1780 stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
1781 if (!stream_info) {
1782 LOGE("Could not allocate stream info");
1783 rc = -ENOMEM;
1784 pthread_mutex_unlock(&mMutex);
1785 return rc;
1786 }
1787 stream_info->stream = newStream;
1788 stream_info->status = VALID;
1789 stream_info->channel = NULL;
1790 mStreamInfo.push_back(stream_info);
1791 }
1792 /* Covers Opaque ZSL and API1 F/W ZSL */
1793 if (IS_USAGE_ZSL(newStream->usage)
1794 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
1795 if (zslStream != NULL) {
1796 LOGE("Multiple input/reprocess streams requested!");
1797 pthread_mutex_unlock(&mMutex);
1798 return BAD_VALUE;
1799 }
1800 zslStream = newStream;
1801 }
1802 /* Covers YUV reprocess */
1803 if (inputStream != NULL) {
1804 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
1805 && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
1806 && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
1807 && inputStream->width == newStream->width
1808 && inputStream->height == newStream->height) {
1809 if (zslStream != NULL) {
1810 /* This scenario indicates multiple YUV streams with same size
1811 * as input stream have been requested, since zsl stream handle
1812 * is solely use for the purpose of overriding the size of streams
1813 * which share h/w streams we will just make a guess here as to
1814 * which of the stream is a ZSL stream, this will be refactored
1815 * once we make generic logic for streams sharing encoder output
1816 */
1817 LOGH("Warning, Multiple ip/reprocess streams requested!");
1818 }
1819 zslStream = newStream;
1820 }
1821 }
1822 }
1823
1824 /* If a zsl stream is set, we know that we have configured at least one input or
1825 bidirectional stream */
1826 if (NULL != zslStream) {
1827 mInputStreamInfo.dim.width = (int32_t)zslStream->width;
1828 mInputStreamInfo.dim.height = (int32_t)zslStream->height;
1829 mInputStreamInfo.format = zslStream->format;
1830 mInputStreamInfo.usage = zslStream->usage;
1831 LOGD("Input stream configured! %d x %d, format %d, usage %d",
1832 mInputStreamInfo.dim.width,
1833 mInputStreamInfo.dim.height,
1834 mInputStreamInfo.format, mInputStreamInfo.usage);
1835 }
1836
1837 cleanAndSortStreamInfo();
1838 if (mMetadataChannel) {
1839 delete mMetadataChannel;
1840 mMetadataChannel = NULL;
1841 }
1842 if (mSupportChannel) {
1843 delete mSupportChannel;
1844 mSupportChannel = NULL;
1845 }
1846
1847 if (mAnalysisChannel) {
1848 delete mAnalysisChannel;
1849 mAnalysisChannel = NULL;
1850 }
1851
1852 if (mDummyBatchChannel) {
1853 delete mDummyBatchChannel;
1854 mDummyBatchChannel = NULL;
1855 }
1856
1857 //Create metadata channel and initialize it
1858 cam_feature_mask_t metadataFeatureMask = CAM_QCOM_FEATURE_NONE;
1859 setPAAFSupport(metadataFeatureMask, CAM_STREAM_TYPE_METADATA,
1860 gCamCapability[mCameraId]->color_arrangement);
1861 mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
1862 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001863 setBufferErrorStatus, &padding_info, metadataFeatureMask, this);
Thierry Strudel3d639192016-09-09 11:52:26 -07001864 if (mMetadataChannel == NULL) {
1865 LOGE("failed to allocate metadata channel");
1866 rc = -ENOMEM;
1867 pthread_mutex_unlock(&mMutex);
1868 return rc;
1869 }
1870 rc = mMetadataChannel->initialize(IS_TYPE_NONE);
1871 if (rc < 0) {
1872 LOGE("metadata channel initialization failed");
1873 delete mMetadataChannel;
1874 mMetadataChannel = NULL;
1875 pthread_mutex_unlock(&mMutex);
1876 return rc;
1877 }
1878
1879 // Create analysis stream all the time, even when h/w support is not available
1880 {
1881 cam_feature_mask_t analysisFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1882 setPAAFSupport(analysisFeatureMask, CAM_STREAM_TYPE_ANALYSIS,
1883 gCamCapability[mCameraId]->color_arrangement);
1884 cam_analysis_info_t analysisInfo;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001885 int32_t ret = NO_ERROR;
1886 ret = mCommon.getAnalysisInfo(
Thierry Strudel3d639192016-09-09 11:52:26 -07001887 FALSE,
1888 TRUE,
1889 analysisFeatureMask,
1890 &analysisInfo);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001891 if (ret == NO_ERROR) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001892 mAnalysisChannel = new QCamera3SupportChannel(
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001893 mCameraHandle->camera_handle,
1894 mChannelHandle,
1895 mCameraHandle->ops,
1896 &analysisInfo.analysis_padding_info,
1897 analysisFeatureMask,
1898 CAM_STREAM_TYPE_ANALYSIS,
1899 &analysisInfo.analysis_max_res,
1900 (analysisInfo.analysis_format
1901 == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
1902 : CAM_FORMAT_YUV_420_NV21),
1903 analysisInfo.hw_analysis_supported,
1904 gCamCapability[mCameraId]->color_arrangement,
1905 this,
1906 0); // force buffer count to 0
1907 } else {
1908 LOGW("getAnalysisInfo failed, ret = %d", ret);
1909 }
1910 if (!mAnalysisChannel) {
1911 LOGW("Analysis channel cannot be created");
Thierry Strudel3d639192016-09-09 11:52:26 -07001912 }
1913 }
1914
1915 bool isRawStreamRequested = false;
1916 memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
1917 /* Allocate channel objects for the requested streams */
1918 for (size_t i = 0; i < streamList->num_streams; i++) {
1919 camera3_stream_t *newStream = streamList->streams[i];
1920 uint32_t stream_usage = newStream->usage;
1921 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
1922 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
1923 struct camera_info *p_info = NULL;
1924 pthread_mutex_lock(&gCamLock);
1925 p_info = get_cam_info(mCameraId, &mStreamConfigInfo.sync_type);
1926 pthread_mutex_unlock(&gCamLock);
1927 if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
1928 || IS_USAGE_ZSL(newStream->usage)) &&
1929 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
1930 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
1931 if (bUseCommonFeatureMask) {
1932 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1933 commonFeatureMask;
1934 } else {
1935 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1936 CAM_QCOM_FEATURE_NONE;
1937 }
1938
1939 } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
1940 LOGH("Input stream configured, reprocess config");
1941 } else {
1942 //for non zsl streams find out the format
1943 switch (newStream->format) {
1944 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
1945 {
1946 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1947 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1948 /* add additional features to pp feature mask */
1949 addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
1950 mStreamConfigInfo.num_streams);
1951
1952 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
1953 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
1954 CAM_STREAM_TYPE_VIDEO;
1955 if (m_bTnrEnabled && m_bTnrVideo) {
1956 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
1957 CAM_QCOM_FEATURE_CPP_TNR;
1958 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
1959 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
1960 ~CAM_QCOM_FEATURE_CDS;
1961 }
1962 } else {
1963 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
1964 CAM_STREAM_TYPE_PREVIEW;
1965 if (m_bTnrEnabled && m_bTnrPreview) {
1966 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
1967 CAM_QCOM_FEATURE_CPP_TNR;
1968 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
1969 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
1970 ~CAM_QCOM_FEATURE_CDS;
1971 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001972 if(!m_bSwTnrPreview) {
1973 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
1974 ~CAM_QTI_FEATURE_SW_TNR;
1975 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001976 padding_info.width_padding = mSurfaceStridePadding;
1977 padding_info.height_padding = CAM_PAD_TO_2;
1978 }
1979 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
1980 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
1981 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
1982 newStream->height;
1983 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
1984 newStream->width;
1985 }
1986 }
1987 break;
1988 case HAL_PIXEL_FORMAT_YCbCr_420_888:
1989 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
1990 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
1991 if (bUseCommonFeatureMask)
1992 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1993 commonFeatureMask;
1994 else
1995 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1996 CAM_QCOM_FEATURE_NONE;
1997 } else {
1998 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1999 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2000 }
2001 break;
2002 case HAL_PIXEL_FORMAT_BLOB:
2003 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
2004 // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
2005 if ((m_bIs4KVideo && !isZsl) || (bSmallJpegSize && !isZsl)) {
2006 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2007 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2008 } else {
2009 if (bUseCommonFeatureMask &&
2010 isOnEncoder(maxViewfinderSize, newStream->width,
2011 newStream->height)) {
2012 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
2013 } else {
2014 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2015 }
2016 }
2017 if (isZsl) {
2018 if (zslStream) {
2019 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2020 (int32_t)zslStream->width;
2021 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2022 (int32_t)zslStream->height;
2023 } else {
2024 LOGE("Error, No ZSL stream identified");
2025 pthread_mutex_unlock(&mMutex);
2026 return -EINVAL;
2027 }
2028 } else if (m_bIs4KVideo) {
2029 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)videoWidth;
2030 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)videoHeight;
2031 } else if (bYuv888OverrideJpeg) {
2032 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2033 (int32_t)largeYuv888Size.width;
2034 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2035 (int32_t)largeYuv888Size.height;
2036 }
2037 break;
2038 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2039 case HAL_PIXEL_FORMAT_RAW16:
2040 case HAL_PIXEL_FORMAT_RAW10:
2041 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2042 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2043 isRawStreamRequested = true;
2044 break;
2045 default:
2046 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
2047 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2048 break;
2049 }
2050 }
2051
2052 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2053 (cam_stream_type_t) mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2054 gCamCapability[mCameraId]->color_arrangement);
2055
2056 if (newStream->priv == NULL) {
2057 //New stream, construct channel
2058 switch (newStream->stream_type) {
2059 case CAMERA3_STREAM_INPUT:
2060 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
2061 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
2062 break;
2063 case CAMERA3_STREAM_BIDIRECTIONAL:
2064 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
2065 GRALLOC_USAGE_HW_CAMERA_WRITE;
2066 break;
2067 case CAMERA3_STREAM_OUTPUT:
2068 /* For video encoding stream, set read/write rarely
2069 * flag so that they may be set to un-cached */
2070 if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
2071 newStream->usage |=
2072 (GRALLOC_USAGE_SW_READ_RARELY |
2073 GRALLOC_USAGE_SW_WRITE_RARELY |
2074 GRALLOC_USAGE_HW_CAMERA_WRITE);
2075 else if (IS_USAGE_ZSL(newStream->usage))
2076 {
2077 LOGD("ZSL usage flag skipping");
2078 }
2079 else if (newStream == zslStream
2080 || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
2081 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
2082 } else
2083 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
2084 break;
2085 default:
2086 LOGE("Invalid stream_type %d", newStream->stream_type);
2087 break;
2088 }
2089
2090 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
2091 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
2092 QCamera3ProcessingChannel *channel = NULL;
2093 switch (newStream->format) {
2094 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2095 if ((newStream->usage &
2096 private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
2097 (streamList->operation_mode ==
2098 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2099 ) {
2100 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2101 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002102 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002103 this,
2104 newStream,
2105 (cam_stream_type_t)
2106 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2107 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2108 mMetadataChannel,
2109 0); //heap buffers are not required for HFR video channel
2110 if (channel == NULL) {
2111 LOGE("allocation of channel failed");
2112 pthread_mutex_unlock(&mMutex);
2113 return -ENOMEM;
2114 }
2115 //channel->getNumBuffers() will return 0 here so use
2116 //MAX_INFLIGH_HFR_REQUESTS
2117 newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
2118 newStream->priv = channel;
2119 LOGI("num video buffers in HFR mode: %d",
2120 MAX_INFLIGHT_HFR_REQUESTS);
2121 } else {
2122 /* Copy stream contents in HFR preview only case to create
2123 * dummy batch channel so that sensor streaming is in
2124 * HFR mode */
2125 if (!m_bIsVideo && (streamList->operation_mode ==
2126 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
2127 mDummyBatchStream = *newStream;
2128 }
2129 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2130 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002131 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002132 this,
2133 newStream,
2134 (cam_stream_type_t)
2135 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2136 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2137 mMetadataChannel,
2138 MAX_INFLIGHT_REQUESTS);
2139 if (channel == NULL) {
2140 LOGE("allocation of channel failed");
2141 pthread_mutex_unlock(&mMutex);
2142 return -ENOMEM;
2143 }
2144 newStream->max_buffers = channel->getNumBuffers();
2145 newStream->priv = channel;
2146 }
2147 break;
2148 case HAL_PIXEL_FORMAT_YCbCr_420_888: {
2149 channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
2150 mChannelHandle,
2151 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002152 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002153 this,
2154 newStream,
2155 (cam_stream_type_t)
2156 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2157 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2158 mMetadataChannel);
2159 if (channel == NULL) {
2160 LOGE("allocation of YUV channel failed");
2161 pthread_mutex_unlock(&mMutex);
2162 return -ENOMEM;
2163 }
2164 newStream->max_buffers = channel->getNumBuffers();
2165 newStream->priv = channel;
2166 break;
2167 }
2168 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2169 case HAL_PIXEL_FORMAT_RAW16:
2170 case HAL_PIXEL_FORMAT_RAW10:
2171 mRawChannel = new QCamera3RawChannel(
2172 mCameraHandle->camera_handle, mChannelHandle,
2173 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002174 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002175 this, newStream,
2176 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2177 mMetadataChannel,
2178 (newStream->format == HAL_PIXEL_FORMAT_RAW16));
2179 if (mRawChannel == NULL) {
2180 LOGE("allocation of raw channel failed");
2181 pthread_mutex_unlock(&mMutex);
2182 return -ENOMEM;
2183 }
2184 newStream->max_buffers = mRawChannel->getNumBuffers();
2185 newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
2186 break;
2187 case HAL_PIXEL_FORMAT_BLOB:
2188 // Max live snapshot inflight buffer is 1. This is to mitigate
2189 // frame drop issues for video snapshot. The more buffers being
2190 // allocated, the more frame drops there are.
2191 mPictureChannel = new QCamera3PicChannel(
2192 mCameraHandle->camera_handle, mChannelHandle,
2193 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002194 setBufferErrorStatus, &padding_info, this, newStream,
Thierry Strudel3d639192016-09-09 11:52:26 -07002195 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2196 m_bIs4KVideo, isZsl, mMetadataChannel,
2197 (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
2198 if (mPictureChannel == NULL) {
2199 LOGE("allocation of channel failed");
2200 pthread_mutex_unlock(&mMutex);
2201 return -ENOMEM;
2202 }
2203 newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
2204 newStream->max_buffers = mPictureChannel->getNumBuffers();
2205 mPictureChannel->overrideYuvSize(
2206 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
2207 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
2208 break;
2209
2210 default:
2211 LOGE("not a supported format 0x%x", newStream->format);
2212 break;
2213 }
2214 } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
2215 newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
2216 } else {
2217 LOGE("Error, Unknown stream type");
2218 pthread_mutex_unlock(&mMutex);
2219 return -EINVAL;
2220 }
2221
2222 QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
2223 if (channel != NULL && channel->isUBWCEnabled()) {
2224 cam_format_t fmt = channel->getStreamDefaultFormat(
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07002225 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2226 newStream->width, newStream->height);
Thierry Strudel3d639192016-09-09 11:52:26 -07002227 if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
2228 newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
2229 }
2230 }
2231
2232 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2233 it != mStreamInfo.end(); it++) {
2234 if ((*it)->stream == newStream) {
2235 (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
2236 break;
2237 }
2238 }
2239 } else {
2240 // Channel already exists for this stream
2241 // Do nothing for now
2242 }
2243 padding_info = gCamCapability[mCameraId]->padding_info;
2244
2245 /* Do not add entries for input stream in metastream info
2246 * since there is no real stream associated with it
2247 */
2248 if (newStream->stream_type != CAMERA3_STREAM_INPUT)
2249 mStreamConfigInfo.num_streams++;
2250 }
2251
2252 //RAW DUMP channel
2253 if (mEnableRawDump && isRawStreamRequested == false){
2254 cam_dimension_t rawDumpSize;
2255 rawDumpSize = getMaxRawSize(mCameraId);
2256 cam_feature_mask_t rawDumpFeatureMask = CAM_QCOM_FEATURE_NONE;
2257 setPAAFSupport(rawDumpFeatureMask,
2258 CAM_STREAM_TYPE_RAW,
2259 gCamCapability[mCameraId]->color_arrangement);
2260 mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
2261 mChannelHandle,
2262 mCameraHandle->ops,
2263 rawDumpSize,
2264 &padding_info,
2265 this, rawDumpFeatureMask);
2266 if (!mRawDumpChannel) {
2267 LOGE("Raw Dump channel cannot be created");
2268 pthread_mutex_unlock(&mMutex);
2269 return -ENOMEM;
2270 }
2271 }
2272
2273
2274 if (mAnalysisChannel) {
2275 cam_analysis_info_t analysisInfo;
2276 memset(&analysisInfo, 0, sizeof(cam_analysis_info_t));
2277 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2278 CAM_STREAM_TYPE_ANALYSIS;
2279 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2280 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2281 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2282 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2283 gCamCapability[mCameraId]->color_arrangement);
2284 rc = mCommon.getAnalysisInfo(FALSE, TRUE,
2285 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2286 &analysisInfo);
2287 if (rc != NO_ERROR) {
2288 LOGE("getAnalysisInfo failed, ret = %d", rc);
2289 pthread_mutex_unlock(&mMutex);
2290 return rc;
2291 }
2292 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2293 analysisInfo.analysis_max_res;
2294 mStreamConfigInfo.num_streams++;
2295 }
2296
2297 if (isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
2298 cam_analysis_info_t supportInfo;
2299 memset(&supportInfo, 0, sizeof(cam_analysis_info_t));
2300 cam_feature_mask_t callbackFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2301 setPAAFSupport(callbackFeatureMask,
2302 CAM_STREAM_TYPE_CALLBACK,
2303 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002304 int32_t ret = NO_ERROR;
2305 ret = mCommon.getAnalysisInfo(FALSE, TRUE, callbackFeatureMask, &supportInfo);
2306 if (ret != NO_ERROR) {
2307 /* Ignore the error for Mono camera
2308 * because the PAAF bit mask is only set
2309 * for CAM_STREAM_TYPE_ANALYSIS stream type
2310 */
2311 if (gCamCapability[mCameraId]->color_arrangement != CAM_FILTER_ARRANGEMENT_Y) {
2312 LOGW("getAnalysisInfo failed, ret = %d", ret);
2313 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002314 }
2315 mSupportChannel = new QCamera3SupportChannel(
2316 mCameraHandle->camera_handle,
2317 mChannelHandle,
2318 mCameraHandle->ops,
2319 &gCamCapability[mCameraId]->padding_info,
2320 callbackFeatureMask,
2321 CAM_STREAM_TYPE_CALLBACK,
2322 &QCamera3SupportChannel::kDim,
2323 CAM_FORMAT_YUV_420_NV21,
2324 supportInfo.hw_analysis_supported,
2325 gCamCapability[mCameraId]->color_arrangement,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002326 this, 0);
Thierry Strudel3d639192016-09-09 11:52:26 -07002327 if (!mSupportChannel) {
2328 LOGE("dummy channel cannot be created");
2329 pthread_mutex_unlock(&mMutex);
2330 return -ENOMEM;
2331 }
2332 }
2333
2334 if (mSupportChannel) {
2335 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2336 QCamera3SupportChannel::kDim;
2337 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2338 CAM_STREAM_TYPE_CALLBACK;
2339 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2340 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2341 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2342 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2343 gCamCapability[mCameraId]->color_arrangement);
2344 mStreamConfigInfo.num_streams++;
2345 }
2346
2347 if (mRawDumpChannel) {
2348 cam_dimension_t rawSize;
2349 rawSize = getMaxRawSize(mCameraId);
2350 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2351 rawSize;
2352 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2353 CAM_STREAM_TYPE_RAW;
2354 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2355 CAM_QCOM_FEATURE_NONE;
2356 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2357 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2358 gCamCapability[mCameraId]->color_arrangement);
2359 mStreamConfigInfo.num_streams++;
2360 }
2361 /* In HFR mode, if video stream is not added, create a dummy channel so that
2362 * ISP can create a batch mode even for preview only case. This channel is
2363 * never 'start'ed (no stream-on), it is only 'initialized' */
2364 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2365 !m_bIsVideo) {
2366 cam_feature_mask_t dummyFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2367 setPAAFSupport(dummyFeatureMask,
2368 CAM_STREAM_TYPE_VIDEO,
2369 gCamCapability[mCameraId]->color_arrangement);
2370 mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2371 mChannelHandle,
2372 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002373 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002374 this,
2375 &mDummyBatchStream,
2376 CAM_STREAM_TYPE_VIDEO,
2377 dummyFeatureMask,
2378 mMetadataChannel);
2379 if (NULL == mDummyBatchChannel) {
2380 LOGE("creation of mDummyBatchChannel failed."
2381 "Preview will use non-hfr sensor mode ");
2382 }
2383 }
2384 if (mDummyBatchChannel) {
2385 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2386 mDummyBatchStream.width;
2387 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2388 mDummyBatchStream.height;
2389 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2390 CAM_STREAM_TYPE_VIDEO;
2391 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2392 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2393 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2394 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2395 gCamCapability[mCameraId]->color_arrangement);
2396 mStreamConfigInfo.num_streams++;
2397 }
2398
2399 mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
2400 mStreamConfigInfo.buffer_info.max_buffers =
2401 m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
2402
2403 /* Initialize mPendingRequestInfo and mPendingBuffersMap */
2404 for (pendingRequestIterator i = mPendingRequestsList.begin();
2405 i != mPendingRequestsList.end();) {
2406 i = erasePendingRequest(i);
2407 }
2408 mPendingFrameDropList.clear();
2409 // Initialize/Reset the pending buffers list
2410 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
2411 req.mPendingBufferList.clear();
2412 }
2413 mPendingBuffersMap.mPendingBuffersInRequest.clear();
2414
2415 mPendingReprocessResultList.clear();
2416
2417 mCurJpegMeta.clear();
2418 //Get min frame duration for this streams configuration
2419 deriveMinFrameDuration();
2420
2421 // Update state
2422 mState = CONFIGURED;
2423
2424 pthread_mutex_unlock(&mMutex);
2425
2426 return rc;
2427}
2428
2429/*===========================================================================
2430 * FUNCTION : validateCaptureRequest
2431 *
2432 * DESCRIPTION: validate a capture request from camera service
2433 *
2434 * PARAMETERS :
2435 *   @request : request from framework to process
2435 *   @internallyRequestedStreams : streams requested internally by the HAL
2435 *                (may satisfy the request when no framework output buffers
2435 *                are supplied)
2436 *
2437 * RETURN     : NO_ERROR (0) if the request is valid, BAD_VALUE otherwise
2438 *
2439 *==========================================================================*/
2440int QCamera3HardwareInterface::validateCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002441 camera3_capture_request_t *request,
2442 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07002443{
2444 ssize_t idx = 0;
2445 const camera3_stream_buffer_t *b;
2446 CameraMetadata meta;
2447
2448 /* Sanity check the request */
2449 if (request == NULL) {
2450 LOGE("NULL capture request");
2451 return BAD_VALUE;
2452 }
2453
2454 if ((request->settings == NULL) && (mState == CONFIGURED)) {
2455 /*settings cannot be null for the first request*/
2456 return BAD_VALUE;
2457 }
2458
2459 uint32_t frameNumber = request->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002460 if ((request->num_output_buffers < 1 || request->output_buffers == NULL)
2461 && (internallyRequestedStreams.size() == 0)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002462 LOGE("Request %d: No output buffers provided!",
2463 __FUNCTION__, frameNumber);
2464 return BAD_VALUE;
2465 }
2466 if (request->num_output_buffers >= MAX_NUM_STREAMS) {
2467 LOGE("Number of buffers %d equals or is greater than maximum number of streams!",
2468 request->num_output_buffers, MAX_NUM_STREAMS);
2469 return BAD_VALUE;
2470 }
2471 if (request->input_buffer != NULL) {
2472 b = request->input_buffer;
2473 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2474 LOGE("Request %d: Buffer %ld: Status not OK!",
2475 frameNumber, (long)idx);
2476 return BAD_VALUE;
2477 }
2478 if (b->release_fence != -1) {
2479 LOGE("Request %d: Buffer %ld: Has a release fence!",
2480 frameNumber, (long)idx);
2481 return BAD_VALUE;
2482 }
2483 if (b->buffer == NULL) {
2484 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
2485 frameNumber, (long)idx);
2486 return BAD_VALUE;
2487 }
2488 }
2489
2490 // Validate all buffers
2491 b = request->output_buffers;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002492 while (idx < (ssize_t)request->num_output_buffers) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002493 QCamera3ProcessingChannel *channel =
2494 static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
2495 if (channel == NULL) {
2496 LOGE("Request %d: Buffer %ld: Unconfigured stream!",
2497 frameNumber, (long)idx);
2498 return BAD_VALUE;
2499 }
2500 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2501 LOGE("Request %d: Buffer %ld: Status not OK!",
2502 frameNumber, (long)idx);
2503 return BAD_VALUE;
2504 }
2505 if (b->release_fence != -1) {
2506 LOGE("Request %d: Buffer %ld: Has a release fence!",
2507 frameNumber, (long)idx);
2508 return BAD_VALUE;
2509 }
2510 if (b->buffer == NULL) {
2511 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
2512 frameNumber, (long)idx);
2513 return BAD_VALUE;
2514 }
2515 if (*(b->buffer) == NULL) {
2516 LOGE("Request %d: Buffer %ld: NULL private handle!",
2517 frameNumber, (long)idx);
2518 return BAD_VALUE;
2519 }
2520 idx++;
2521 b = request->output_buffers + idx;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002522 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002523 return NO_ERROR;
2524}
2525
2526/*===========================================================================
2527 * FUNCTION : deriveMinFrameDuration
2528 *
2529 * DESCRIPTION: derive mininum processed, jpeg, and raw frame durations based
2530 * on currently configured streams.
2531 *
2532 * PARAMETERS : NONE
2533 *
2534 * RETURN : NONE
2535 *
2536 *==========================================================================*/
2537void QCamera3HardwareInterface::deriveMinFrameDuration()
2538{
2539 int32_t maxJpegDim, maxProcessedDim, maxRawDim;
2540
2541 maxJpegDim = 0;
2542 maxProcessedDim = 0;
2543 maxRawDim = 0;
2544
2545 // Figure out maximum jpeg, processed, and raw dimensions
2546 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
2547 it != mStreamInfo.end(); it++) {
2548
2549 // Input stream doesn't have valid stream_type
2550 if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
2551 continue;
2552
2553 int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
2554 if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
2555 if (dimension > maxJpegDim)
2556 maxJpegDim = dimension;
2557 } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
2558 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
2559 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
2560 if (dimension > maxRawDim)
2561 maxRawDim = dimension;
2562 } else {
2563 if (dimension > maxProcessedDim)
2564 maxProcessedDim = dimension;
2565 }
2566 }
2567
2568 size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
2569 MAX_SIZES_CNT);
2570
2571 //Assume all jpeg dimensions are in processed dimensions.
2572 if (maxJpegDim > maxProcessedDim)
2573 maxProcessedDim = maxJpegDim;
2574 //Find the smallest raw dimension that is greater or equal to jpeg dimension
2575 if (maxProcessedDim > maxRawDim) {
2576 maxRawDim = INT32_MAX;
2577
2578 for (size_t i = 0; i < count; i++) {
2579 int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
2580 gCamCapability[mCameraId]->raw_dim[i].height;
2581 if (dimension >= maxProcessedDim && dimension < maxRawDim)
2582 maxRawDim = dimension;
2583 }
2584 }
2585
2586 //Find minimum durations for processed, jpeg, and raw
2587 for (size_t i = 0; i < count; i++) {
2588 if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
2589 gCamCapability[mCameraId]->raw_dim[i].height) {
2590 mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
2591 break;
2592 }
2593 }
2594 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
2595 for (size_t i = 0; i < count; i++) {
2596 if (maxProcessedDim ==
2597 gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
2598 gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
2599 mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
2600 mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
2601 break;
2602 }
2603 }
2604}
2605
2606/*===========================================================================
2607 * FUNCTION : getMinFrameDuration
2608 *
2609 * DESCRIPTION: get minimum frame draution based on the current maximum frame durations
2610 * and current request configuration.
2611 *
2612 * PARAMETERS : @request: requset sent by the frameworks
2613 *
2614 * RETURN : min farme duration for a particular request
2615 *
2616 *==========================================================================*/
2617int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
2618{
2619 bool hasJpegStream = false;
2620 bool hasRawStream = false;
2621 for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
2622 const camera3_stream_t *stream = request->output_buffers[i].stream;
2623 if (stream->format == HAL_PIXEL_FORMAT_BLOB)
2624 hasJpegStream = true;
2625 else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
2626 stream->format == HAL_PIXEL_FORMAT_RAW10 ||
2627 stream->format == HAL_PIXEL_FORMAT_RAW16)
2628 hasRawStream = true;
2629 }
2630
2631 if (!hasJpegStream)
2632 return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
2633 else
2634 return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
2635}
2636
2637/*===========================================================================
2638 * FUNCTION : handleBuffersDuringFlushLock
2639 *
2640 * DESCRIPTION: Account for buffers returned from back-end during flush
2641 * This function is executed while mMutex is held by the caller.
2642 *
2643 * PARAMETERS :
2644 * @buffer: image buffer for the callback
2645 *
2646 * RETURN :
2647 *==========================================================================*/
2648void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
2649{
2650 bool buffer_found = false;
2651 for (List<PendingBuffersInRequest>::iterator req =
2652 mPendingBuffersMap.mPendingBuffersInRequest.begin();
2653 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
2654 for (List<PendingBufferInfo>::iterator i =
2655 req->mPendingBufferList.begin();
2656 i != req->mPendingBufferList.end(); i++) {
2657 if (i->buffer == buffer->buffer) {
2658 mPendingBuffersMap.numPendingBufsAtFlush--;
2659 LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
2660 buffer->buffer, req->frame_number,
2661 mPendingBuffersMap.numPendingBufsAtFlush);
2662 buffer_found = true;
2663 break;
2664 }
2665 }
2666 if (buffer_found) {
2667 break;
2668 }
2669 }
2670 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
2671 //signal the flush()
2672 LOGD("All buffers returned to HAL. Continue flush");
2673 pthread_cond_signal(&mBuffersCond);
2674 }
2675}
2676
2677
/*===========================================================================
 * FUNCTION   : handlePendingReprocResults
 *
 * DESCRIPTION: check and notify on any pending reprocess results
 *
 * PARAMETERS :
 *   @frame_number   : Pending request frame number
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success (also returned when no pending entry
 *                           matches frame_number)
 *              none-zero failure code
 *==========================================================================*/
int32_t QCamera3HardwareInterface::handlePendingReprocResults(uint32_t frame_number)
{
    // Scan the deferred reprocess results for this frame number. At most one
    // matching entry is handled per call (break after the first match).
    for (List<PendingReprocessResult>::iterator j = mPendingReprocessResultList.begin();
            j != mPendingReprocessResultList.end(); j++) {
        if (j->frame_number == frame_number) {
            // Deliver the notify message that was stashed with the result.
            orchestrateNotify(&j->notify_msg);

            LOGD("Delayed reprocess notify %d",
                    frame_number);

            // Find the matching pending request so its input buffer and
            // settings can be paired with the stashed output buffer.
            for (pendingRequestIterator k = mPendingRequestsList.begin();
                    k != mPendingRequestsList.end(); k++) {

                if (k->frame_number == j->frame_number) {
                    LOGD("Found reprocess frame number %d in pending reprocess List "
                            "Take it out!!",
                            k->frame_number);

                    // Assemble and send the final capture result: the single
                    // stashed output buffer plus the request's input buffer
                    // and settings, marked as the last partial result.
                    camera3_capture_result result;
                    memset(&result, 0, sizeof(camera3_capture_result));
                    result.frame_number = frame_number;
                    result.num_output_buffers = 1;
                    result.output_buffers = &j->buffer;
                    result.input_buffer = k->input_buffer;
                    result.result = k->settings;
                    result.partial_result = PARTIAL_RESULT_COUNT;
                    orchestrateResult(&result);

                    // The pending request is fully answered; drop it.
                    // NOTE: orchestrateResult() must complete before erasing,
                    // since result references k's fields.
                    erasePendingRequest(k);
                    break;
                }
            }
            // Remove the consumed reprocess entry. Safe because we break out
            // of the loop immediately after invalidating iterator j.
            mPendingReprocessResultList.erase(j);
            break;
        }
    }
    return NO_ERROR;
}
2728
/*===========================================================================
 * FUNCTION   : handleBatchMetadata
 *
 * DESCRIPTION: Handles metadata buffer callback in batch (HFR) mode. A single
 *              batch metadata covers several requests; this method fans it
 *              out into per-frame calls to handleMetadataWithLock, inferring
 *              each frame's number and timestamp.
 *
 * PARAMETERS : @metadata_buf: metadata buffer
 *              @free_and_bufdone_meta_buf: Buf done on the meta buf and free
 *                 the meta buf in this method
 *
 * RETURN     : NONE
 *
 *==========================================================================*/
void QCamera3HardwareInterface::handleBatchMetadata(
        mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BATCH_METADATA);

    if (NULL == metadata_buf) {
        LOGE("metadata_buf is NULL");
        return;
    }
    /* In batch mode, the metdata will contain the frame number and timestamp of
     * the last frame in the batch. Eg: a batch containing buffers from request
     * 5,6,7 and 8 will have frame number and timestamp corresponding to 8.
     * multiple process_capture_requests => 1 set_param => 1 handleBatchMetata =>
     * multiple process_capture_results */
    metadata_buffer_t *metadata =
            (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
    int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
    uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
    uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
    uint32_t frame_number = 0, urgent_frame_number = 0;
    int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
    bool invalid_metadata = false;
    size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
    size_t loopCount = 1;

    // Pull the raw frame-number/timestamp fields out of the batch metadata.
    int32_t *p_frame_number_valid =
            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
    uint32_t *p_frame_number =
            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
    int64_t *p_capture_time =
            POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
    int32_t *p_urgent_frame_number_valid =
            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
    uint32_t *p_urgent_frame_number =
            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);

    if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
            (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
            (NULL == p_urgent_frame_number)) {
        LOGE("Invalid metadata");
        // Even with invalid metadata we still run the fan-out loop below
        // (loopCount == 1) for pipeline depth accounting.
        invalid_metadata = true;
    } else {
        frame_number_valid = *p_frame_number_valid;
        last_frame_number = *p_frame_number;
        last_frame_capture_time = *p_capture_time;
        urgent_frame_number_valid = *p_urgent_frame_number_valid;
        last_urgent_frame_number = *p_urgent_frame_number;
    }

    /* In batchmode, when no video buffers are requested, set_parms are sent
     * for every capture_request. The difference between consecutive urgent
     * frame numbers and frame numbers should be used to interpolate the
     * corresponding frame numbers and time stamps */
    pthread_mutex_lock(&mMutex);
    if (urgent_frame_number_valid) {
        // Map the last urgent frame number of the batch back to the first
        // frame number of the batch via mPendingBatchMap.
        ssize_t idx = mPendingBatchMap.indexOfKey(last_urgent_frame_number);
        if(idx < 0) {
            LOGE("Invalid urgent frame number received: %d. Irrecoverable error",
                last_urgent_frame_number);
            mState = ERROR;
            pthread_mutex_unlock(&mMutex);
            return;
        }
        first_urgent_frame_number = mPendingBatchMap.valueAt(idx);
        urgentFrameNumDiff = last_urgent_frame_number + 1 -
                first_urgent_frame_number;

        LOGD("urgent_frm: valid: %d frm_num: %d - %d",
                 urgent_frame_number_valid,
                first_urgent_frame_number, last_urgent_frame_number);
    }

    if (frame_number_valid) {
        ssize_t idx = mPendingBatchMap.indexOfKey(last_frame_number);
        if(idx < 0) {
            LOGE("Invalid frame number received: %d. Irrecoverable error",
                last_frame_number);
            mState = ERROR;
            pthread_mutex_unlock(&mMutex);
            return;
        }
        first_frame_number = mPendingBatchMap.valueAt(idx);
        frameNumDiff = last_frame_number + 1 -
                first_frame_number;
        // Batch fully received: the map entry is only removed on the
        // (non-urgent) frame-number path.
        mPendingBatchMap.removeItem(last_frame_number);

        LOGD("frm: valid: %d frm_num: %d - %d",
                 frame_number_valid,
                first_frame_number, last_frame_number);

    }
    pthread_mutex_unlock(&mMutex);

    if (urgent_frame_number_valid || frame_number_valid) {
        // Fan out once per inferred frame; the larger of the two diffs wins.
        loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
        if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
            LOGE("urgentFrameNumDiff: %d urgentFrameNum: %d",
                    urgentFrameNumDiff, last_urgent_frame_number);
        if (frameNumDiff > MAX_HFR_BATCH_SIZE)
            LOGE("frameNumDiff: %d frameNum: %d",
                    frameNumDiff, last_frame_number);
    }

    for (size_t i = 0; i < loopCount; i++) {
        /* handleMetadataWithLock is called even for invalid_metadata for
         * pipeline depth calculation */
        if (!invalid_metadata) {
            /* Infer frame number. Batch metadata contains frame number of the
             * last frame */
            if (urgent_frame_number_valid) {
                if (i < urgentFrameNumDiff) {
                    urgent_frame_number =
                            first_urgent_frame_number + i;
                    LOGD("inferred urgent frame_number: %d",
                            urgent_frame_number);
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                            CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
                } else {
                    /* This is to handle when urgentFrameNumDiff < frameNumDiff */
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                            CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
                }
            }

            /* Infer frame number. Batch metadata contains frame number of the
             * last frame */
            if (frame_number_valid) {
                if (i < frameNumDiff) {
                    frame_number = first_frame_number + i;
                    LOGD("inferred frame_number: %d", frame_number);
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                            CAM_INTF_META_FRAME_NUMBER, frame_number);
                } else {
                    /* This is to handle when urgentFrameNumDiff > frameNumDiff */
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                             CAM_INTF_META_FRAME_NUMBER_VALID, 0);
                }
            }

            if (last_frame_capture_time) {
                //Infer timestamp: space frames evenly across the batch at the
                //HFR video frame interval, ending at the batch's timestamp.
                first_frame_capture_time = last_frame_capture_time -
                        (((loopCount - 1) * NSEC_PER_SEC) / (double) mHFRVideoFps);
                capture_time =
                        first_frame_capture_time + (i * NSEC_PER_SEC / (double) mHFRVideoFps);
                ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                        CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
                LOGD("batch capture_time: %lld, capture_time: %lld",
                        last_frame_capture_time, capture_time);
            }
        }
        // Per-frame delivery; buf-done/free is deferred to the end of the
        // batch, hence free_and_bufdone_meta_buf=false here.
        pthread_mutex_lock(&mMutex);
        handleMetadataWithLock(metadata_buf,
                false /* free_and_bufdone_meta_buf */,
                (i == 0) /* first metadata in the batch metadata */);
        pthread_mutex_unlock(&mMutex);
    }

    /* BufDone metadata buffer */
    if (free_and_bufdone_meta_buf) {
        mMetadataChannel->bufDone(metadata_buf);
        free(metadata_buf);
    }
}
2905
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002906void QCamera3HardwareInterface::notifyError(uint32_t frameNumber,
2907 camera3_error_msg_code_t errorCode)
2908{
2909 camera3_notify_msg_t notify_msg;
2910 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
2911 notify_msg.type = CAMERA3_MSG_ERROR;
2912 notify_msg.message.error.error_code = errorCode;
2913 notify_msg.message.error.error_stream = NULL;
2914 notify_msg.message.error.frame_number = frameNumber;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002915 orchestrateNotify(&notify_msg);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002916
2917 return;
2918}
Thierry Strudel3d639192016-09-09 11:52:26 -07002919/*===========================================================================
2920 * FUNCTION : handleMetadataWithLock
2921 *
2922 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
2923 *
2924 * PARAMETERS : @metadata_buf: metadata buffer
2925 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
2926 * the meta buf in this method
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002927 * @firstMetadataInBatch: Boolean to indicate whether this is the
2928 * first metadata in a batch. Valid only for batch mode
Thierry Strudel3d639192016-09-09 11:52:26 -07002929 *
2930 * RETURN :
2931 *
2932 *==========================================================================*/
2933void QCamera3HardwareInterface::handleMetadataWithLock(
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002934 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
2935 bool firstMetadataInBatch)
Thierry Strudel3d639192016-09-09 11:52:26 -07002936{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002937 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_METADATA_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07002938 if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
2939 //during flush do not send metadata from this thread
2940 LOGD("not sending metadata during flush or when mState is error");
2941 if (free_and_bufdone_meta_buf) {
2942 mMetadataChannel->bufDone(metadata_buf);
2943 free(metadata_buf);
2944 }
2945 return;
2946 }
2947
2948 //not in flush
2949 metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
2950 int32_t frame_number_valid, urgent_frame_number_valid;
2951 uint32_t frame_number, urgent_frame_number;
2952 int64_t capture_time;
2953 nsecs_t currentSysTime;
2954
2955 int32_t *p_frame_number_valid =
2956 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
2957 uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
2958 int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
2959 int32_t *p_urgent_frame_number_valid =
2960 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
2961 uint32_t *p_urgent_frame_number =
2962 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
2963 IF_META_AVAILABLE(cam_stream_ID_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
2964 metadata) {
2965 LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
2966 *p_frame_number_valid, *p_frame_number);
2967 }
2968
2969 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
2970 (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
2971 LOGE("Invalid metadata");
2972 if (free_and_bufdone_meta_buf) {
2973 mMetadataChannel->bufDone(metadata_buf);
2974 free(metadata_buf);
2975 }
2976 goto done_metadata;
2977 }
2978 frame_number_valid = *p_frame_number_valid;
2979 frame_number = *p_frame_number;
2980 capture_time = *p_capture_time;
2981 urgent_frame_number_valid = *p_urgent_frame_number_valid;
2982 urgent_frame_number = *p_urgent_frame_number;
2983 currentSysTime = systemTime(CLOCK_MONOTONIC);
2984
2985 // Detect if buffers from any requests are overdue
2986 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
2987 if ( (currentSysTime - req.timestamp) >
2988 s2ns(MISSING_REQUEST_BUF_TIMEOUT) ) {
2989 for (auto &missed : req.mPendingBufferList) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002990 assert(missed.stream->priv);
2991 if (missed.stream->priv) {
2992 QCamera3Channel *ch = (QCamera3Channel *)(missed.stream->priv);
2993 assert(ch->mStreams[0]);
2994 if (ch->mStreams[0]) {
2995 LOGE("Cancel missing frame = %d, buffer = %p,"
2996 "stream type = %d, stream format = %d",
2997 req.frame_number, missed.buffer,
2998 ch->mStreams[0]->getMyType(), missed.stream->format);
2999 ch->timeoutFrame(req.frame_number);
3000 }
3001 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003002 }
3003 }
3004 }
3005 //Partial result on process_capture_result for timestamp
3006 if (urgent_frame_number_valid) {
3007 LOGD("valid urgent frame_number = %u, capture_time = %lld",
3008 urgent_frame_number, capture_time);
3009
3010 //Recieved an urgent Frame Number, handle it
3011 //using partial results
3012 for (pendingRequestIterator i =
3013 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3014 LOGD("Iterator Frame = %d urgent frame = %d",
3015 i->frame_number, urgent_frame_number);
3016
3017 if ((!i->input_buffer) && (i->frame_number < urgent_frame_number) &&
3018 (i->partial_result_cnt == 0)) {
3019 LOGE("Error: HAL missed urgent metadata for frame number %d",
3020 i->frame_number);
3021 }
3022
3023 if (i->frame_number == urgent_frame_number &&
3024 i->bUrgentReceived == 0) {
3025
3026 camera3_capture_result_t result;
3027 memset(&result, 0, sizeof(camera3_capture_result_t));
3028
3029 i->partial_result_cnt++;
3030 i->bUrgentReceived = 1;
3031 // Extract 3A metadata
3032 result.result =
3033 translateCbUrgentMetadataToResultMetadata(metadata);
3034 // Populate metadata result
3035 result.frame_number = urgent_frame_number;
3036 result.num_output_buffers = 0;
3037 result.output_buffers = NULL;
3038 result.partial_result = i->partial_result_cnt;
3039
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003040 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07003041 LOGD("urgent frame_number = %u, capture_time = %lld",
3042 result.frame_number, capture_time);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003043 if (mResetInstantAEC && mInstantAECSettledFrameNumber == 0) {
3044 // Instant AEC settled for this frame.
3045 LOGH("instant AEC settled for frame number %d", urgent_frame_number);
3046 mInstantAECSettledFrameNumber = urgent_frame_number;
3047 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003048 free_camera_metadata((camera_metadata_t *)result.result);
3049 break;
3050 }
3051 }
3052 }
3053
3054 if (!frame_number_valid) {
3055 LOGD("Not a valid normal frame number, used as SOF only");
3056 if (free_and_bufdone_meta_buf) {
3057 mMetadataChannel->bufDone(metadata_buf);
3058 free(metadata_buf);
3059 }
3060 goto done_metadata;
3061 }
3062 LOGH("valid frame_number = %u, capture_time = %lld",
3063 frame_number, capture_time);
3064
3065 for (pendingRequestIterator i = mPendingRequestsList.begin();
3066 i != mPendingRequestsList.end() && i->frame_number <= frame_number;) {
3067 // Flush out all entries with less or equal frame numbers.
3068
3069 camera3_capture_result_t result;
3070 memset(&result, 0, sizeof(camera3_capture_result_t));
3071
3072 LOGD("frame_number in the list is %u", i->frame_number);
3073 i->partial_result_cnt++;
3074 result.partial_result = i->partial_result_cnt;
3075
3076 // Check whether any stream buffer corresponding to this is dropped or not
3077 // If dropped, then send the ERROR_BUFFER for the corresponding stream
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003078 // OR check if instant AEC is enabled, then need to drop frames untill AEC is settled.
3079 if (p_cam_frame_drop ||
3080 (mInstantAEC || i->frame_number < mInstantAECSettledFrameNumber)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003081 /* Clear notify_msg structure */
3082 camera3_notify_msg_t notify_msg;
3083 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3084 for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
3085 j != i->buffers.end(); j++) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003086 bool dropFrame = false;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003087 QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel *)j->stream->priv;
3088 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003089 if (p_cam_frame_drop) {
3090 for (uint32_t k = 0; k < p_cam_frame_drop->num_streams; k++) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003091 if (streamID == p_cam_frame_drop->stream_request[k].streamID) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003092 // Got the stream ID for drop frame.
3093 dropFrame = true;
3094 break;
3095 }
3096 }
3097 } else {
3098 // This is instant AEC case.
3099 // For instant AEC drop the stream untill AEC is settled.
3100 dropFrame = true;
3101 }
3102 if (dropFrame) {
3103 // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
3104 if (p_cam_frame_drop) {
3105 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003106 LOGE("Start of reporting error frame#=%u, streamID=%u",
3107 i->frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003108 } else {
3109 // For instant AEC, inform frame drop and frame number
3110 LOGH("Start of reporting error frame#=%u for instant AEC, streamID=%u, "
3111 "AEC settled frame number = %u",
3112 i->frame_number, streamID, mInstantAECSettledFrameNumber);
3113 }
3114 notify_msg.type = CAMERA3_MSG_ERROR;
3115 notify_msg.message.error.frame_number = i->frame_number;
3116 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
3117 notify_msg.message.error.error_stream = j->stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003118 orchestrateNotify(&notify_msg);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003119 if (p_cam_frame_drop) {
3120 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003121 LOGE("End of reporting error frame#=%u, streamID=%u",
3122 i->frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003123 } else {
3124 // For instant AEC, inform frame drop and frame number
3125 LOGH("End of reporting error frame#=%u for instant AEC, streamID=%u, "
3126 "AEC settled frame number = %u",
3127 i->frame_number, streamID, mInstantAECSettledFrameNumber);
3128 }
3129 PendingFrameDropInfo PendingFrameDrop;
3130 PendingFrameDrop.frame_number=i->frame_number;
3131 PendingFrameDrop.stream_ID = streamID;
3132 // Add the Frame drop info to mPendingFrameDropList
3133 mPendingFrameDropList.push_back(PendingFrameDrop);
3134 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003135 }
3136 }
3137
3138 // Send empty metadata with already filled buffers for dropped metadata
3139 // and send valid metadata with already filled buffers for current metadata
3140 /* we could hit this case when we either
3141 * 1. have a pending reprocess request or
3142 * 2. miss a metadata buffer callback */
3143 if (i->frame_number < frame_number) {
3144 if (i->input_buffer) {
3145 /* this will be handled in handleInputBufferWithLock */
3146 i++;
3147 continue;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003148 } else {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003149
3150 mPendingLiveRequest--;
3151
3152 CameraMetadata dummyMetadata;
3153 dummyMetadata.update(ANDROID_REQUEST_ID, &(i->request_id), 1);
3154 result.result = dummyMetadata.release();
3155
3156 notifyError(i->frame_number, CAMERA3_MSG_ERROR_RESULT);
Thierry Strudel3d639192016-09-09 11:52:26 -07003157 }
3158 } else {
3159 mPendingLiveRequest--;
3160 /* Clear notify_msg structure */
3161 camera3_notify_msg_t notify_msg;
3162 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3163
3164 // Send shutter notify to frameworks
3165 notify_msg.type = CAMERA3_MSG_SHUTTER;
3166 notify_msg.message.shutter.frame_number = i->frame_number;
3167 notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003168 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07003169
3170 i->timestamp = capture_time;
3171
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003172 /* Set the timestamp in display metadata so that clients aware of
3173 private_handle such as VT can use this un-modified timestamps.
3174 Camera framework is unaware of this timestamp and cannot change this */
3175 updateTimeStampInPendingBuffers(i->frame_number, i->timestamp);
3176
Thierry Strudel3d639192016-09-09 11:52:26 -07003177 // Find channel requiring metadata, meaning internal offline postprocess
3178 // is needed.
3179 //TODO: for now, we don't support two streams requiring metadata at the same time.
3180 // (because we are not making copies, and metadata buffer is not reference counted.
3181 bool internalPproc = false;
3182 for (pendingBufferIterator iter = i->buffers.begin();
3183 iter != i->buffers.end(); iter++) {
3184 if (iter->need_metadata) {
3185 internalPproc = true;
3186 QCamera3ProcessingChannel *channel =
3187 (QCamera3ProcessingChannel *)iter->stream->priv;
3188 channel->queueReprocMetadata(metadata_buf);
3189 break;
3190 }
3191 }
3192
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003193 for (auto itr = i->internalRequestList.begin();
3194 itr != i->internalRequestList.end(); itr++) {
3195 if (itr->need_metadata) {
3196 internalPproc = true;
3197 QCamera3ProcessingChannel *channel =
3198 (QCamera3ProcessingChannel *)itr->stream->priv;
3199 channel->queueReprocMetadata(metadata_buf);
3200 break;
3201 }
3202 }
3203
3204
Thierry Strudel3d639192016-09-09 11:52:26 -07003205 result.result = translateFromHalMetadata(metadata,
3206 i->timestamp, i->request_id, i->jpegMetadata, i->pipeline_depth,
Samuel Ha68ba5172016-12-15 18:41:12 -08003207 i->capture_intent,
3208 /* DevCamDebug metadata translateFromHalMetadata function call*/
3209 i->DevCamDebug_meta_enable,
3210 /* DevCamDebug metadata end */
3211 internalPproc, i->fwkCacMode,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003212 firstMetadataInBatch);
Thierry Strudel3d639192016-09-09 11:52:26 -07003213
3214 saveExifParams(metadata);
3215
3216 if (i->blob_request) {
3217 {
3218 //Dump tuning metadata if enabled and available
3219 char prop[PROPERTY_VALUE_MAX];
3220 memset(prop, 0, sizeof(prop));
3221 property_get("persist.camera.dumpmetadata", prop, "0");
3222 int32_t enabled = atoi(prop);
3223 if (enabled && metadata->is_tuning_params_valid) {
3224 dumpMetadataToFile(metadata->tuning_params,
3225 mMetaFrameCount,
3226 enabled,
3227 "Snapshot",
3228 frame_number);
3229 }
3230 }
3231 }
3232
3233 if (!internalPproc) {
3234 LOGD("couldn't find need_metadata for this metadata");
3235 // Return metadata buffer
3236 if (free_and_bufdone_meta_buf) {
3237 mMetadataChannel->bufDone(metadata_buf);
3238 free(metadata_buf);
3239 }
3240 }
3241 }
3242 if (!result.result) {
3243 LOGE("metadata is NULL");
3244 }
3245 result.frame_number = i->frame_number;
3246 result.input_buffer = i->input_buffer;
3247 result.num_output_buffers = 0;
3248 result.output_buffers = NULL;
3249 for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
3250 j != i->buffers.end(); j++) {
3251 if (j->buffer) {
3252 result.num_output_buffers++;
3253 }
3254 }
3255
3256 updateFpsInPreviewBuffer(metadata, i->frame_number);
3257
3258 if (result.num_output_buffers > 0) {
3259 camera3_stream_buffer_t *result_buffers =
3260 new camera3_stream_buffer_t[result.num_output_buffers];
3261 if (result_buffers != NULL) {
3262 size_t result_buffers_idx = 0;
3263 for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
3264 j != i->buffers.end(); j++) {
3265 if (j->buffer) {
3266 for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
3267 m != mPendingFrameDropList.end(); m++) {
3268 QCamera3Channel *channel = (QCamera3Channel *)j->buffer->stream->priv;
3269 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
3270 if((m->stream_ID == streamID) && (m->frame_number==frame_number)) {
3271 j->buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
3272 LOGE("Stream STATUS_ERROR frame_number=%u, streamID=%u",
3273 frame_number, streamID);
3274 m = mPendingFrameDropList.erase(m);
3275 break;
3276 }
3277 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003278 j->buffer->status |= mPendingBuffersMap.getBufErrStatus(j->buffer->buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -07003279 mPendingBuffersMap.removeBuf(j->buffer->buffer);
3280 result_buffers[result_buffers_idx++] = *(j->buffer);
3281 free(j->buffer);
3282 j->buffer = NULL;
3283 }
3284 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003285
Thierry Strudel3d639192016-09-09 11:52:26 -07003286 result.output_buffers = result_buffers;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003287 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07003288 LOGD("meta frame_number = %u, capture_time = %lld",
3289 result.frame_number, i->timestamp);
3290 free_camera_metadata((camera_metadata_t *)result.result);
3291 delete[] result_buffers;
3292 }else {
3293 LOGE("Fatal error: out of memory");
3294 }
3295 } else {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003296 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07003297 LOGD("meta frame_number = %u, capture_time = %lld",
3298 result.frame_number, i->timestamp);
3299 free_camera_metadata((camera_metadata_t *)result.result);
3300 }
3301
3302 i = erasePendingRequest(i);
3303
3304 if (!mPendingReprocessResultList.empty()) {
3305 handlePendingReprocResults(frame_number + 1);
3306 }
3307 }
3308
3309done_metadata:
3310 for (pendingRequestIterator i = mPendingRequestsList.begin();
3311 i != mPendingRequestsList.end() ;i++) {
3312 i->pipeline_depth++;
3313 }
3314 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
3315 unblockRequestIfNecessary();
3316}
3317
3318/*===========================================================================
3319 * FUNCTION : hdrPlusPerfLock
3320 *
3321 * DESCRIPTION: perf lock for HDR+ using custom intent
3322 *
3323 * PARAMETERS : @metadata_buf: Metadata super_buf pointer
3324 *
3325 * RETURN : None
3326 *
3327 *==========================================================================*/
3328void QCamera3HardwareInterface::hdrPlusPerfLock(
3329 mm_camera_super_buf_t *metadata_buf)
3330{
3331 if (NULL == metadata_buf) {
3332 LOGE("metadata_buf is NULL");
3333 return;
3334 }
3335 metadata_buffer_t *metadata =
3336 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3337 int32_t *p_frame_number_valid =
3338 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3339 uint32_t *p_frame_number =
3340 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3341
3342 if (p_frame_number_valid == NULL || p_frame_number == NULL) {
3343 LOGE("%s: Invalid metadata", __func__);
3344 return;
3345 }
3346
3347 //acquire perf lock for 5 sec after the last HDR frame is captured
3348 if ((p_frame_number_valid != NULL) && *p_frame_number_valid) {
3349 if ((p_frame_number != NULL) &&
3350 (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003351 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT, HDR_PLUS_PERF_TIME_OUT);
Thierry Strudel3d639192016-09-09 11:52:26 -07003352 }
3353 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003354}
3355
3356/*===========================================================================
3357 * FUNCTION : handleInputBufferWithLock
3358 *
3359 * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
3360 *
3361 * PARAMETERS : @frame_number: frame number of the input buffer
3362 *
3363 * RETURN :
3364 *
3365 *==========================================================================*/
3366void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
3367{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003368 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_IN_BUF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003369 pendingRequestIterator i = mPendingRequestsList.begin();
3370 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
3371 i++;
3372 }
3373 if (i != mPendingRequestsList.end() && i->input_buffer) {
3374 //found the right request
3375 if (!i->shutter_notified) {
3376 CameraMetadata settings;
3377 camera3_notify_msg_t notify_msg;
3378 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3379 nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
3380 if(i->settings) {
3381 settings = i->settings;
3382 if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
3383 capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
3384 } else {
3385 LOGE("No timestamp in input settings! Using current one.");
3386 }
3387 } else {
3388 LOGE("Input settings missing!");
3389 }
3390
3391 notify_msg.type = CAMERA3_MSG_SHUTTER;
3392 notify_msg.message.shutter.frame_number = frame_number;
3393 notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003394 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07003395 i->shutter_notified = true;
3396 LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
3397 i->frame_number, notify_msg.message.shutter.timestamp);
3398 }
3399
3400 if (i->input_buffer->release_fence != -1) {
3401 int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
3402 close(i->input_buffer->release_fence);
3403 if (rc != OK) {
3404 LOGE("input buffer sync wait failed %d", rc);
3405 }
3406 }
3407
3408 camera3_capture_result result;
3409 memset(&result, 0, sizeof(camera3_capture_result));
3410 result.frame_number = frame_number;
3411 result.result = i->settings;
3412 result.input_buffer = i->input_buffer;
3413 result.partial_result = PARTIAL_RESULT_COUNT;
3414
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003415 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07003416 LOGD("Input request metadata and input buffer frame_number = %u",
3417 i->frame_number);
3418 i = erasePendingRequest(i);
3419 } else {
3420 LOGE("Could not find input request for frame number %d", frame_number);
3421 }
3422}
3423
/*===========================================================================
 * FUNCTION   : handleBufferWithLock
 *
 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
 *
 * PARAMETERS : @buffer: image buffer for the callback
 *              @frame_number: frame number of the image buffer
 *
 * RETURN     : None
 *
 *==========================================================================*/
void QCamera3HardwareInterface::handleBufferWithLock(
        camera3_stream_buffer_t *buffer, uint32_t frame_number)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BUF_LKD);

    // A BLOB (JPEG) buffer coming back means the snapshot is done, so the
    // snapshot perf lock can be dropped.
    if (buffer->stream->format == HAL_PIXEL_FORMAT_BLOB) {
        mPerfLockMgr.releasePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
    }

    /* Nothing to be done during error state */
    if ((ERROR == mState) || (DEINIT == mState)) {
        return;
    }
    if (mFlushPerf) {
        handleBuffersDuringFlushLock(buffer);
        return;
    }
    //not in flush
    // If the frame number doesn't exist in the pending request list,
    // directly send the buffer to the frameworks, and update pending buffers map
    // Otherwise, book-keep the buffer.
    pendingRequestIterator i = mPendingRequestsList.begin();
    while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
        i++;
    }
    if (i == mPendingRequestsList.end()) {
        // Verify all pending requests frame_numbers are greater
        for (pendingRequestIterator j = mPendingRequestsList.begin();
                j != mPendingRequestsList.end(); j++) {
            if ((j->frame_number < frame_number) && !(j->input_buffer)) {
                LOGW("Error: pending live frame number %d is smaller than %d",
                         j->frame_number, frame_number);
            }
        }
        // Buffer-only result: no metadata is attached (partial_result = 0).
        camera3_capture_result_t result;
        memset(&result, 0, sizeof(camera3_capture_result_t));
        result.result = NULL;
        result.frame_number = frame_number;
        result.num_output_buffers = 1;
        result.partial_result = 0;
        // If this (stream, frame) pair was flagged for a frame drop, mark the
        // buffer as erroneous and retire the drop-list entry.
        for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
                m != mPendingFrameDropList.end(); m++) {
            QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
            uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
            if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
                buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
                LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
                         frame_number, streamID);
                m = mPendingFrameDropList.erase(m);
                break;
            }
        }
        // Merge any per-buffer error status recorded while the buffer was
        // tracked in the pending buffers map.
        buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
        result.output_buffers = buffer;
        LOGH("result frame_number = %d, buffer = %p",
                 frame_number, buffer->buffer);

        mPendingBuffersMap.removeBuf(buffer->buffer);

        orchestrateResult(&result);
    } else {
        if (i->input_buffer) {
            // Reprocess request: build the shutter notification (timestamp
            // taken from the input settings when present, else current time)
            // and return input + output buffer in one result.
            CameraMetadata settings;
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
            nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
            if(i->settings) {
                settings = i->settings;
                if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
                    capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
                } else {
                    LOGW("No timestamp in input settings! Using current one.");
                }
            } else {
                LOGE("Input settings missing!");
            }

            notify_msg.type = CAMERA3_MSG_SHUTTER;
            notify_msg.message.shutter.frame_number = frame_number;
            notify_msg.message.shutter.timestamp = (uint64_t)capture_time;

            // Wait for the producer to release the input buffer before
            // returning it to the framework.
            if (i->input_buffer->release_fence != -1) {
               int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
               close(i->input_buffer->release_fence);
               if (rc != OK) {
                   LOGE("input buffer sync wait failed %d", rc);
               }
            }
            buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
            mPendingBuffersMap.removeBuf(buffer->buffer);

            camera3_capture_result result;
            memset(&result, 0, sizeof(camera3_capture_result));
            result.frame_number = frame_number;
            result.result = i->settings;
            result.input_buffer = i->input_buffer;
            result.num_output_buffers = 1;
            result.output_buffers = buffer;
            result.partial_result = PARTIAL_RESULT_COUNT;

            // Notify must be delivered before the matching result.
            orchestrateNotify(&notify_msg);
            orchestrateResult(&result);
            LOGD("Notify reprocess now %d!", frame_number);
            i = erasePendingRequest(i);
        } else {
            // Live request whose metadata hasn't arrived yet: cache a copy of
            // the buffer on the pending request so it can be sent together
            // with the metadata result later (see handleMetadataWithLock).
            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                    j != i->buffers.end(); j++) {
                if (j->stream == buffer->stream) {
                    if (j->buffer != NULL) {
                        LOGE("Error: buffer is already set");
                    } else {
                        // malloc'd copy is freed when the result is dispatched.
                        j->buffer = (camera3_stream_buffer_t *)malloc(
                            sizeof(camera3_stream_buffer_t));
                        *(j->buffer) = *buffer;
                        LOGH("cache buffer %p at result frame_number %u",
                             buffer->buffer, frame_number);
                    }
                }
            }
        }
    }

    // First preview buffer after start: drop the startup perf locks and
    // switch to the steady-state preview power hint.
    if (mPreviewStarted == false) {
        QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
        if ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask()) {
            mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
            mPerfLockMgr.releasePerfLock(PERF_LOCK_OPEN_CAMERA);
            mPreviewStarted = true;

            // Set power hint for preview
            mPerfLockMgr.acquirePerfLock(PERF_LOCK_POWERHINT_ENCODE, 0);
        }
    }
}
3569
/*===========================================================================
 * FUNCTION   : unblockRequestIfNecessary
 *
 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
 *              that mMutex is held when this function is called.
 *
 * PARAMETERS : None
 *
 * RETURN     : None
 *
 *==========================================================================*/
void QCamera3HardwareInterface::unblockRequestIfNecessary()
{
    // Unblock process_capture_request
    // NOTE(review): signal (not broadcast) presumably suffices because only
    // one caller at a time blocks on mRequestCond in processCaptureRequest —
    // verify if multiple waiters ever become possible.
    pthread_cond_signal(&mRequestCond);
}
3586
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003587/*===========================================================================
3588 * FUNCTION : isHdrSnapshotRequest
3589 *
3590 * DESCRIPTION: Function to determine if the request is for a HDR snapshot
3591 *
3592 * PARAMETERS : camera3 request structure
3593 *
3594 * RETURN : boolean decision variable
3595 *
3596 *==========================================================================*/
3597bool QCamera3HardwareInterface::isHdrSnapshotRequest(camera3_capture_request *request)
3598{
3599 if (request == NULL) {
3600 LOGE("Invalid request handle");
3601 assert(0);
3602 return false;
3603 }
3604
3605 if (!mForceHdrSnapshot) {
3606 CameraMetadata frame_settings;
3607 frame_settings = request->settings;
3608
3609 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
3610 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
3611 if (metaMode != ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
3612 return false;
3613 }
3614 } else {
3615 return false;
3616 }
3617
3618 if (frame_settings.exists(ANDROID_CONTROL_SCENE_MODE)) {
3619 uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
3620 if (fwk_sceneMode != ANDROID_CONTROL_SCENE_MODE_HDR) {
3621 return false;
3622 }
3623 } else {
3624 return false;
3625 }
3626 }
3627
3628 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
3629 if (request->output_buffers[i].stream->format
3630 == HAL_PIXEL_FORMAT_BLOB) {
3631 return true;
3632 }
3633 }
3634
3635 return false;
3636}
/*===========================================================================
 * FUNCTION   : orchestrateRequest
 *
 * DESCRIPTION: Orchestrates a capture request from camera service. For an
 *              HDR snapshot (and no input buffer) the single framework
 *              request is expanded into a bracketed sequence of internal
 *              requests at -EV / 0 / +EV exposure compensation; only the
 *              request mapped to the original framework frame number
 *              produces results visible to the framework. All other
 *              requests pass straight through to processCaptureRequest.
 *
 * PARAMETERS :
 *   @request : request from framework to process
 *
 * RETURN     : Error status codes
 *
 *==========================================================================*/
int32_t QCamera3HardwareInterface::orchestrateRequest(
        camera3_capture_request_t *request)
{

    // Remember the framework's view of the request; fields are mutated
    // below and must be restored / remapped before returning.
    uint32_t originalFrameNumber = request->frame_number;
    uint32_t originalOutputCount = request->num_output_buffers;
    const camera_metadata_t *original_settings = request->settings;
    List<InternalRequest> internallyRequestedStreams;
    List<InternalRequest> emptyInternalList;

    if (isHdrSnapshotRequest(request) && request->input_buffer == NULL) {
        LOGD("Framework requested:%d buffers in HDR snapshot", request->num_output_buffers);
        uint32_t internalFrameNumber;
        CameraMetadata modified_meta;


        /* Add Blob channel to list of internally requested streams */
        for (uint32_t i = 0; i < request->num_output_buffers; i++) {
            if (request->output_buffers[i].stream->format
                    == HAL_PIXEL_FORMAT_BLOB) {
                InternalRequest streamRequested;
                streamRequested.meteringOnly = 1;
                streamRequested.need_metadata = 0;
                streamRequested.stream = request->output_buffers[i].stream;
                internallyRequestedStreams.push_back(streamRequested);
            }
        }
        request->num_output_buffers = 0;
        auto itr = internallyRequestedStreams.begin();

        /* Modify setting to set compensation */
        // First leg: negative compensation (settling + -2x frame), AE locked
        // so the bracket exposures stay stable.
        modified_meta = request->settings;
        int32_t expCompensation = GB_HDR_HALF_STEP_EV;
        uint8_t aeLock = 1;
        modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
        modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
        camera_metadata_t *modified_settings = modified_meta.release();
        request->settings = modified_settings;

        /* Capture Settling & -2x frame */
        // Internal-only request: its results are dropped by the orchestrator.
        _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
        request->frame_number = internalFrameNumber;
        processCaptureRequest(request, internallyRequestedStreams);

        // This is the one request whose results map back to the framework's
        // original frame number (full output buffer set restored).
        request->num_output_buffers = originalOutputCount;
        _orchestrationDb.allocStoreInternalFrameNumber(originalFrameNumber, internalFrameNumber);
        request->frame_number = internalFrameNumber;
        processCaptureRequest(request, emptyInternalList);
        request->num_output_buffers = 0;

        // Second leg: back to 0 EV compensation, AE still locked.
        // NOTE(review): assigning modified_settings back into modified_meta
        // copies the buffer; the previously released buffer appears to be
        // overwritten below without a free — confirm whether the
        // intermediate camera_metadata_t allocations leak here.
        modified_meta = modified_settings;
        expCompensation = 0;
        aeLock = 1;
        modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
        modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
        modified_settings = modified_meta.release();
        request->settings = modified_settings;

        /* Capture Settling & 0X frame */

        // Settling frame: metering only, no reprocess metadata needed.
        itr = internallyRequestedStreams.begin();
        if (itr == internallyRequestedStreams.end()) {
            LOGE("Error Internally Requested Stream list is empty");
            assert(0);
        } else {
            itr->need_metadata = 0;
            itr->meteringOnly = 1;
        }

        _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
        request->frame_number = internalFrameNumber;
        processCaptureRequest(request, internallyRequestedStreams);

        // Actual 0X capture: full frame with metadata for offline postproc.
        itr = internallyRequestedStreams.begin();
        if (itr == internallyRequestedStreams.end()) {
            ALOGE("Error Internally Requested Stream list is empty");
            assert(0);
        } else {
            itr->need_metadata = 1;
            itr->meteringOnly = 0;
        }

        _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
        request->frame_number = internalFrameNumber;
        processCaptureRequest(request, internallyRequestedStreams);

        /* Capture 2X frame*/
        // Third leg: positive compensation, same settle-then-capture pattern.
        modified_meta = modified_settings;
        expCompensation = GB_HDR_2X_STEP_EV;
        aeLock = 1;
        modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
        modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
        modified_settings = modified_meta.release();
        request->settings = modified_settings;

        itr = internallyRequestedStreams.begin();
        if (itr == internallyRequestedStreams.end()) {
            ALOGE("Error Internally Requested Stream list is empty");
            assert(0);
        } else {
            itr->need_metadata = 0;
            itr->meteringOnly = 1;
        }
        _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
        request->frame_number = internalFrameNumber;
        processCaptureRequest(request, internallyRequestedStreams);

        itr = internallyRequestedStreams.begin();
        if (itr == internallyRequestedStreams.end()) {
            ALOGE("Error Internally Requested Stream list is empty");
            assert(0);
        } else {
            itr->need_metadata = 1;
            itr->meteringOnly = 0;
        }

        _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
        request->frame_number = internalFrameNumber;
        processCaptureRequest(request, internallyRequestedStreams);


        /* Capture 2X on original streaming config*/
        internallyRequestedStreams.clear();

        /* Restore original settings pointer */
        request->settings = original_settings;
    } else {
        // Non-HDR (or reprocess) request: just remap the frame number and
        // forward unchanged.
        uint32_t internalFrameNumber;
        _orchestrationDb.allocStoreInternalFrameNumber(request->frame_number, internalFrameNumber);
        request->frame_number = internalFrameNumber;
        return processCaptureRequest(request, internallyRequestedStreams);
    }

    return NO_ERROR;
}
3783
3784/*===========================================================================
3785 * FUNCTION : orchestrateResult
3786 *
3787 * DESCRIPTION: Orchestrates a capture result to camera service
3788 *
3789 * PARAMETERS :
3790 * @request : request from framework to process
3791 *
3792 * RETURN :
3793 *
3794 *==========================================================================*/
3795void QCamera3HardwareInterface::orchestrateResult(
3796 camera3_capture_result_t *result)
3797{
3798 uint32_t frameworkFrameNumber;
3799 int32_t rc = _orchestrationDb.getFrameworkFrameNumber(result->frame_number,
3800 frameworkFrameNumber);
3801 if (rc != NO_ERROR) {
3802 LOGE("Cannot find translated frameworkFrameNumber");
3803 assert(0);
3804 } else {
3805 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
3806 LOGD("CAM_DEBUG Internal Request drop the result");
3807 } else {
3808 result->frame_number = frameworkFrameNumber;
3809 mCallbackOps->process_capture_result(mCallbackOps, result);
3810 }
3811 }
3812}
3813
3814/*===========================================================================
3815 * FUNCTION : orchestrateNotify
3816 *
3817 * DESCRIPTION: Orchestrates a notify to camera service
3818 *
3819 * PARAMETERS :
3820 * @request : request from framework to process
3821 *
3822 * RETURN :
3823 *
3824 *==========================================================================*/
3825void QCamera3HardwareInterface::orchestrateNotify(camera3_notify_msg_t *notify_msg)
3826{
3827 uint32_t frameworkFrameNumber;
3828 uint32_t internalFrameNumber = notify_msg->message.shutter.frame_number;
3829 int32_t rc = _orchestrationDb.getFrameworkFrameNumber(internalFrameNumber,
3830 frameworkFrameNumber);
3831 if (rc != NO_ERROR) {
3832 LOGE("Cannot find translated frameworkFrameNumber");
3833 assert(0);
3834 } else {
3835 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
3836 LOGE("CAM_DEBUG Internal Request drop the notifyCb");
3837 } else {
3838 notify_msg->message.shutter.frame_number = frameworkFrameNumber;
3839 mCallbackOps->notify(mCallbackOps, notify_msg);
3840 }
3841 }
3842}
3843
3844/*===========================================================================
3845 * FUNCTION : FrameNumberRegistry
3846 *
3847 * DESCRIPTION: Constructor
3848 *
3849 * PARAMETERS :
3850 *
3851 * RETURN :
3852 *
3853 *==========================================================================*/
3854FrameNumberRegistry::FrameNumberRegistry()
3855{
3856 _nextFreeInternalNumber = INTERNAL_FRAME_STARTING_NUMBER;
3857}
3858
/*===========================================================================
 * FUNCTION   : ~FrameNumberRegistry
 *
 * DESCRIPTION: Destructor — nothing to release; the member containers clean
 *              themselves up.
 *
 * PARAMETERS : None
 *
 * RETURN     : None
 *
 *==========================================================================*/
FrameNumberRegistry::~FrameNumberRegistry()
{
}
3872
3873/*===========================================================================
3874 * FUNCTION : PurgeOldEntriesLocked
3875 *
3876 * DESCRIPTION: Maintainance function to trigger LRU cleanup mechanism
3877 *
3878 * PARAMETERS :
3879 *
3880 * RETURN : NONE
3881 *
3882 *==========================================================================*/
3883void FrameNumberRegistry::purgeOldEntriesLocked()
3884{
3885 while (_register.begin() != _register.end()) {
3886 auto itr = _register.begin();
3887 if (itr->first < (_nextFreeInternalNumber - FRAME_REGISTER_LRU_SIZE)) {
3888 _register.erase(itr);
3889 } else {
3890 return;
3891 }
3892 }
3893}
3894
3895/*===========================================================================
3896 * FUNCTION : allocStoreInternalFrameNumber
3897 *
3898 * DESCRIPTION: Method to note down a framework request and associate a new
3899 * internal request number against it
3900 *
3901 * PARAMETERS :
3902 * @fFrameNumber: Identifier given by framework
3903 * @internalFN : Output parameter which will have the newly generated internal
3904 * entry
3905 *
3906 * RETURN : Error code
3907 *
3908 *==========================================================================*/
3909int32_t FrameNumberRegistry::allocStoreInternalFrameNumber(uint32_t frameworkFrameNumber,
3910 uint32_t &internalFrameNumber)
3911{
3912 Mutex::Autolock lock(mRegistryLock);
3913 internalFrameNumber = _nextFreeInternalNumber++;
3914 LOGD("Storing ff#:%d, with internal:%d", frameworkFrameNumber, internalFrameNumber);
3915 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, frameworkFrameNumber));
3916 purgeOldEntriesLocked();
3917 return NO_ERROR;
3918}
3919
3920/*===========================================================================
3921 * FUNCTION : generateStoreInternalFrameNumber
3922 *
3923 * DESCRIPTION: Method to associate a new internal request number independent
3924 * of any associate with framework requests
3925 *
3926 * PARAMETERS :
3927 * @internalFrame#: Output parameter which will have the newly generated internal
3928 *
3929 *
3930 * RETURN : Error code
3931 *
3932 *==========================================================================*/
3933int32_t FrameNumberRegistry::generateStoreInternalFrameNumber(uint32_t &internalFrameNumber)
3934{
3935 Mutex::Autolock lock(mRegistryLock);
3936 internalFrameNumber = _nextFreeInternalNumber++;
3937 LOGD("Generated internal framenumber:%d", internalFrameNumber);
3938 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, EMPTY_FRAMEWORK_FRAME_NUMBER));
3939 purgeOldEntriesLocked();
3940 return NO_ERROR;
3941}
3942
3943/*===========================================================================
3944 * FUNCTION : getFrameworkFrameNumber
3945 *
3946 * DESCRIPTION: Method to query the framework framenumber given an internal #
3947 *
3948 * PARAMETERS :
3949 * @internalFrame#: Internal reference
3950 * @frameworkframenumber: Output parameter holding framework frame entry
3951 *
3952 * RETURN : Error code
3953 *
3954 *==========================================================================*/
3955int32_t FrameNumberRegistry::getFrameworkFrameNumber(uint32_t internalFrameNumber,
3956 uint32_t &frameworkFrameNumber)
3957{
3958 Mutex::Autolock lock(mRegistryLock);
3959 auto itr = _register.find(internalFrameNumber);
3960 if (itr == _register.end()) {
3961 LOGE("CAM_DEBUG: Cannot find internal#: %d", internalFrameNumber);
3962 return -ENOENT;
3963 }
3964
3965 frameworkFrameNumber = itr->second;
3966 purgeOldEntriesLocked();
3967 return NO_ERROR;
3968}
Thierry Strudel3d639192016-09-09 11:52:26 -07003969
3970/*===========================================================================
3971 * FUNCTION : processCaptureRequest
3972 *
3973 * DESCRIPTION: process a capture request from camera service
3974 *
3975 * PARAMETERS :
3976 * @request : request from framework to process
3977 *
3978 * RETURN :
3979 *
3980 *==========================================================================*/
3981int QCamera3HardwareInterface::processCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003982 camera3_capture_request_t *request,
3983 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07003984{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003985 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PROC_CAP_REQ);
Thierry Strudel3d639192016-09-09 11:52:26 -07003986 int rc = NO_ERROR;
3987 int32_t request_id;
3988 CameraMetadata meta;
Thierry Strudel3d639192016-09-09 11:52:26 -07003989 bool isVidBufRequested = false;
3990 camera3_stream_buffer_t *pInputBuffer = NULL;
3991
3992 pthread_mutex_lock(&mMutex);
3993
3994 // Validate current state
3995 switch (mState) {
3996 case CONFIGURED:
3997 case STARTED:
3998 /* valid state */
3999 break;
4000
4001 case ERROR:
4002 pthread_mutex_unlock(&mMutex);
4003 handleCameraDeviceError();
4004 return -ENODEV;
4005
4006 default:
4007 LOGE("Invalid state %d", mState);
4008 pthread_mutex_unlock(&mMutex);
4009 return -ENODEV;
4010 }
4011
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004012 rc = validateCaptureRequest(request, internallyRequestedStreams);
Thierry Strudel3d639192016-09-09 11:52:26 -07004013 if (rc != NO_ERROR) {
4014 LOGE("incoming request is not valid");
4015 pthread_mutex_unlock(&mMutex);
4016 return rc;
4017 }
4018
4019 meta = request->settings;
4020
4021 // For first capture request, send capture intent, and
4022 // stream on all streams
4023 if (mState == CONFIGURED) {
4024 // send an unconfigure to the backend so that the isp
4025 // resources are deallocated
4026 if (!mFirstConfiguration) {
4027 cam_stream_size_info_t stream_config_info;
4028 int32_t hal_version = CAM_HAL_V3;
4029 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
4030 stream_config_info.buffer_info.min_buffers =
4031 MIN_INFLIGHT_REQUESTS;
4032 stream_config_info.buffer_info.max_buffers =
4033 m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
4034 clear_metadata_buffer(mParameters);
4035 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4036 CAM_INTF_PARM_HAL_VERSION, hal_version);
4037 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4038 CAM_INTF_META_STREAM_INFO, stream_config_info);
4039 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4040 mParameters);
4041 if (rc < 0) {
4042 LOGE("set_parms for unconfigure failed");
4043 pthread_mutex_unlock(&mMutex);
4044 return rc;
4045 }
4046 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004047 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07004048 /* get eis information for stream configuration */
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004049 cam_is_type_t isTypeVideo, isTypePreview, is_type=IS_TYPE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07004050 char is_type_value[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004051 property_get("persist.camera.is_type", is_type_value, "4");
4052 isTypeVideo = static_cast<cam_is_type_t>(atoi(is_type_value));
4053 // Make default value for preview IS_TYPE as IS_TYPE_EIS_2_0
4054 property_get("persist.camera.is_type_preview", is_type_value, "4");
4055 isTypePreview = static_cast<cam_is_type_t>(atoi(is_type_value));
4056 LOGD("isTypeVideo: %d isTypePreview: %d", isTypeVideo, isTypePreview);
Thierry Strudel3d639192016-09-09 11:52:26 -07004057
4058 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4059 int32_t hal_version = CAM_HAL_V3;
4060 uint8_t captureIntent =
4061 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4062 mCaptureIntent = captureIntent;
4063 clear_metadata_buffer(mParameters);
4064 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
4065 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
4066 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004067 if (mFirstConfiguration) {
4068 // configure instant AEC
4069 // Instant AEC is a session based parameter and it is needed only
4070 // once per complete session after open camera.
4071 // i.e. This is set only once for the first capture request, after open camera.
4072 setInstantAEC(meta);
4073 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004074 uint8_t fwkVideoStabMode=0;
4075 if (meta.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
4076 fwkVideoStabMode = meta.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
4077 }
4078
4079 // If EIS setprop is enabled & if first capture setting has EIS enabled then only
4080 // turn it on for video/preview
4081 bool setEis = m_bEisEnable && fwkVideoStabMode && m_bEisSupportedSize &&
4082 (isTypeVideo >= IS_TYPE_EIS_2_0);
Thierry Strudel3d639192016-09-09 11:52:26 -07004083 int32_t vsMode;
4084 vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
4085 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
4086 rc = BAD_VALUE;
4087 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004088 LOGD("setEis %d", setEis);
4089 bool eis3Supported = false;
4090 size_t count = IS_TYPE_MAX;
4091 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
4092 for (size_t i = 0; i < count; i++) {
4093 if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0) {
4094 eis3Supported = true;
4095 break;
4096 }
4097 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004098
4099 //IS type will be 0 unless EIS is supported. If EIS is supported
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004100 //it could either be 4 or 5 depending on the stream and video size
Thierry Strudel3d639192016-09-09 11:52:26 -07004101 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4102 if (setEis) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004103 if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_PREVIEW) {
4104 is_type = isTypePreview;
4105 } else if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_VIDEO ) {
4106 if ( (isTypeVideo == IS_TYPE_EIS_3_0) && (eis3Supported == FALSE) ) {
4107 LOGW(" EIS_3.0 is not supported and so setting EIS_2.0");
Thierry Strudel3d639192016-09-09 11:52:26 -07004108 is_type = IS_TYPE_EIS_2_0;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004109 } else {
4110 is_type = isTypeVideo;
Thierry Strudel3d639192016-09-09 11:52:26 -07004111 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004112 } else {
4113 is_type = IS_TYPE_NONE;
4114 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004115 mStreamConfigInfo.is_type[i] = is_type;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004116 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004117 mStreamConfigInfo.is_type[i] = IS_TYPE_NONE;
4118 }
4119 }
4120
4121 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4122 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
4123
4124 int32_t tintless_value = 1;
4125 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4126 CAM_INTF_PARM_TINTLESS, tintless_value);
4127 //Disable CDS for HFR mode or if DIS/EIS is on.
4128 //CDS is a session parameter in the backend/ISP, so need to be set/reset
4129 //after every configure_stream
4130 if ((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
4131 (m_bIsVideo)) {
4132 int32_t cds = CAM_CDS_MODE_OFF;
4133 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4134 CAM_INTF_PARM_CDS_MODE, cds))
4135 LOGE("Failed to disable CDS for HFR mode");
4136
4137 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004138
4139 if (m_debug_avtimer || meta.exists(QCAMERA3_USE_AV_TIMER)) {
4140 uint8_t* use_av_timer = NULL;
4141
4142 if (m_debug_avtimer){
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004143 LOGI(" Enabling AV timer through setprop");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004144 use_av_timer = &m_debug_avtimer;
4145 }
4146 else{
4147 use_av_timer =
4148 meta.find(QCAMERA3_USE_AV_TIMER).data.u8;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004149 if (use_av_timer) {
4150 LOGI("Enabling AV timer through Metadata: use_av_timer: %d", *use_av_timer);
4151 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004152 }
4153
4154 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
4155 rc = BAD_VALUE;
4156 }
4157 }
4158
Thierry Strudel3d639192016-09-09 11:52:26 -07004159 setMobicat();
4160
4161 /* Set fps and hfr mode while sending meta stream info so that sensor
4162 * can configure appropriate streaming mode */
4163 mHFRVideoFps = DEFAULT_VIDEO_FPS;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004164 mMinInFlightRequests = MIN_INFLIGHT_REQUESTS;
4165 mMaxInFlightRequests = MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004166 if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
4167 rc = setHalFpsRange(meta, mParameters);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004168 if (rc == NO_ERROR) {
4169 int32_t max_fps =
4170 (int32_t) meta.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
4171 if (max_fps == 60) {
4172 mMinInFlightRequests = MIN_INFLIGHT_60FPS_REQUESTS;
4173 }
4174 /* For HFR, more buffers are dequeued upfront to improve the performance */
4175 if (mBatchSize) {
4176 mMinInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
4177 mMaxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
4178 }
4179 }
4180 else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004181 LOGE("setHalFpsRange failed");
4182 }
4183 }
4184 if (meta.exists(ANDROID_CONTROL_MODE)) {
4185 uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
4186 rc = extractSceneMode(meta, metaMode, mParameters);
4187 if (rc != NO_ERROR) {
4188 LOGE("extractSceneMode failed");
4189 }
4190 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004191 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07004192
Thierry Strudel04e026f2016-10-10 11:27:36 -07004193 if (meta.exists(QCAMERA3_VIDEO_HDR_MODE)) {
4194 cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
4195 meta.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
4196 rc = setVideoHdrMode(mParameters, vhdr);
4197 if (rc != NO_ERROR) {
4198 LOGE("setVideoHDR is failed");
4199 }
4200 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004201
Thierry Strudel3d639192016-09-09 11:52:26 -07004202 //TODO: validate the arguments, HSV scenemode should have only the
4203 //advertised fps ranges
4204
4205 /*set the capture intent, hal version, tintless, stream info,
4206 *and DIS enable parameters to the backend*/
4207 LOGD("set_parms META_STREAM_INFO " );
4208 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4209 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x "
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004210 "Format:%d is_type: %d",
Thierry Strudel3d639192016-09-09 11:52:26 -07004211 mStreamConfigInfo.type[i],
4212 mStreamConfigInfo.stream_sizes[i].width,
4213 mStreamConfigInfo.stream_sizes[i].height,
4214 mStreamConfigInfo.postprocess_mask[i],
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004215 mStreamConfigInfo.format[i],
4216 mStreamConfigInfo.is_type[i]);
Thierry Strudel3d639192016-09-09 11:52:26 -07004217 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004218
Thierry Strudel3d639192016-09-09 11:52:26 -07004219 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4220 mParameters);
4221 if (rc < 0) {
4222 LOGE("set_parms failed for hal version, stream info");
4223 }
4224
4225 cam_dimension_t sensor_dim;
4226 memset(&sensor_dim, 0, sizeof(sensor_dim));
4227 rc = getSensorOutputSize(sensor_dim);
4228 if (rc != NO_ERROR) {
4229 LOGE("Failed to get sensor output size");
4230 pthread_mutex_unlock(&mMutex);
4231 goto error_exit;
4232 }
4233
4234 mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
4235 gCamCapability[mCameraId]->active_array_size.height,
4236 sensor_dim.width, sensor_dim.height);
4237
4238 /* Set batchmode before initializing channel. Since registerBuffer
4239 * internally initializes some of the channels, better set batchmode
4240 * even before first register buffer */
4241 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4242 it != mStreamInfo.end(); it++) {
4243 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
4244 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
4245 && mBatchSize) {
4246 rc = channel->setBatchSize(mBatchSize);
4247 //Disable per frame map unmap for HFR/batchmode case
4248 rc |= channel->setPerFrameMapUnmap(false);
4249 if (NO_ERROR != rc) {
4250 LOGE("Channel init failed %d", rc);
4251 pthread_mutex_unlock(&mMutex);
4252 goto error_exit;
4253 }
4254 }
4255 }
4256
4257 //First initialize all streams
4258 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4259 it != mStreamInfo.end(); it++) {
4260 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
4261 if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
4262 ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004263 setEis) {
4264 for (size_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4265 if ( (1U << mStreamConfigInfo.type[i]) == channel->getStreamTypeMask() ) {
4266 is_type = mStreamConfigInfo.is_type[i];
4267 break;
4268 }
4269 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004270 rc = channel->initialize(is_type);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004271 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004272 rc = channel->initialize(IS_TYPE_NONE);
4273 }
4274 if (NO_ERROR != rc) {
4275 LOGE("Channel initialization failed %d", rc);
4276 pthread_mutex_unlock(&mMutex);
4277 goto error_exit;
4278 }
4279 }
4280
4281 if (mRawDumpChannel) {
4282 rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
4283 if (rc != NO_ERROR) {
4284 LOGE("Error: Raw Dump Channel init failed");
4285 pthread_mutex_unlock(&mMutex);
4286 goto error_exit;
4287 }
4288 }
4289 if (mSupportChannel) {
4290 rc = mSupportChannel->initialize(IS_TYPE_NONE);
4291 if (rc < 0) {
4292 LOGE("Support channel initialization failed");
4293 pthread_mutex_unlock(&mMutex);
4294 goto error_exit;
4295 }
4296 }
4297 if (mAnalysisChannel) {
4298 rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
4299 if (rc < 0) {
4300 LOGE("Analysis channel initialization failed");
4301 pthread_mutex_unlock(&mMutex);
4302 goto error_exit;
4303 }
4304 }
4305 if (mDummyBatchChannel) {
4306 rc = mDummyBatchChannel->setBatchSize(mBatchSize);
4307 if (rc < 0) {
4308 LOGE("mDummyBatchChannel setBatchSize failed");
4309 pthread_mutex_unlock(&mMutex);
4310 goto error_exit;
4311 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004312 rc = mDummyBatchChannel->initialize(IS_TYPE_NONE);
Thierry Strudel3d639192016-09-09 11:52:26 -07004313 if (rc < 0) {
4314 LOGE("mDummyBatchChannel initialization failed");
4315 pthread_mutex_unlock(&mMutex);
4316 goto error_exit;
4317 }
4318 }
4319
4320 // Set bundle info
4321 rc = setBundleInfo();
4322 if (rc < 0) {
4323 LOGE("setBundleInfo failed %d", rc);
4324 pthread_mutex_unlock(&mMutex);
4325 goto error_exit;
4326 }
4327
4328 //update settings from app here
4329 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
4330 mIsDeviceLinked = meta.find(QCAMERA3_DUALCAM_LINK_ENABLE).data.u8[0];
4331 LOGH("Dualcam: setting On=%d id =%d", mIsDeviceLinked, mCameraId);
4332 }
4333 if (meta.exists(QCAMERA3_DUALCAM_LINK_IS_MAIN)) {
4334 mIsMainCamera = meta.find(QCAMERA3_DUALCAM_LINK_IS_MAIN).data.u8[0];
4335 LOGH("Dualcam: Is this main camera = %d id =%d", mIsMainCamera, mCameraId);
4336 }
4337 if (meta.exists(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID)) {
4338 mLinkedCameraId = meta.find(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID).data.u8[0];
4339 LOGH("Dualcam: Linked camera Id %d id =%d", mLinkedCameraId, mCameraId);
4340
4341 if ( (mLinkedCameraId >= MM_CAMERA_MAX_NUM_SENSORS) &&
4342 (mLinkedCameraId != mCameraId) ) {
4343 LOGE("Dualcam: mLinkedCameraId %d is invalid, current cam id = %d",
4344 mLinkedCameraId, mCameraId);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004345 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07004346 goto error_exit;
4347 }
4348 }
4349
4350 // add bundle related cameras
4351 LOGH("%s: Dualcam: id =%d, mIsDeviceLinked=%d", __func__,mCameraId, mIsDeviceLinked);
4352 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004353 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
4354 &m_pDualCamCmdPtr->bundle_info;
4355 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07004356 if (mIsDeviceLinked)
4357 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_ON;
4358 else
4359 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
4360
4361 pthread_mutex_lock(&gCamLock);
4362
4363 if (sessionId[mLinkedCameraId] == 0xDEADBEEF) {
4364 LOGE("Dualcam: Invalid Session Id ");
4365 pthread_mutex_unlock(&gCamLock);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004366 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07004367 goto error_exit;
4368 }
4369
4370 if (mIsMainCamera == 1) {
4371 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
4372 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07004373 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004374 m_pRelCamSyncBuf->cam_role = CAM_ROLE_BAYER;
Thierry Strudel3d639192016-09-09 11:52:26 -07004375 // related session id should be session id of linked session
4376 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
4377 } else {
4378 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
4379 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07004380 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004381 m_pRelCamSyncBuf->cam_role = CAM_ROLE_MONO;
Thierry Strudel3d639192016-09-09 11:52:26 -07004382 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
4383 }
4384 pthread_mutex_unlock(&gCamLock);
4385
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004386 rc = mCameraHandle->ops->set_dual_cam_cmd(
4387 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07004388 if (rc < 0) {
4389 LOGE("Dualcam: link failed");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004390 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07004391 goto error_exit;
4392 }
4393 }
4394
4395 //Then start them.
4396 LOGH("Start META Channel");
4397 rc = mMetadataChannel->start();
4398 if (rc < 0) {
4399 LOGE("META channel start failed");
4400 pthread_mutex_unlock(&mMutex);
4401 goto error_exit;
4402 }
4403
4404 if (mAnalysisChannel) {
4405 rc = mAnalysisChannel->start();
4406 if (rc < 0) {
4407 LOGE("Analysis channel start failed");
4408 mMetadataChannel->stop();
4409 pthread_mutex_unlock(&mMutex);
4410 goto error_exit;
4411 }
4412 }
4413
4414 if (mSupportChannel) {
4415 rc = mSupportChannel->start();
4416 if (rc < 0) {
4417 LOGE("Support channel start failed");
4418 mMetadataChannel->stop();
4419 /* Although support and analysis are mutually exclusive today
4420 adding it in any case for future proofing */
4421 if (mAnalysisChannel) {
4422 mAnalysisChannel->stop();
4423 }
4424 pthread_mutex_unlock(&mMutex);
4425 goto error_exit;
4426 }
4427 }
4428 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4429 it != mStreamInfo.end(); it++) {
4430 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
4431 LOGH("Start Processing Channel mask=%d",
4432 channel->getStreamTypeMask());
4433 rc = channel->start();
4434 if (rc < 0) {
4435 LOGE("channel start failed");
4436 pthread_mutex_unlock(&mMutex);
4437 goto error_exit;
4438 }
4439 }
4440
4441 if (mRawDumpChannel) {
4442 LOGD("Starting raw dump stream");
4443 rc = mRawDumpChannel->start();
4444 if (rc != NO_ERROR) {
4445 LOGE("Error Starting Raw Dump Channel");
4446 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4447 it != mStreamInfo.end(); it++) {
4448 QCamera3Channel *channel =
4449 (QCamera3Channel *)(*it)->stream->priv;
4450 LOGH("Stopping Processing Channel mask=%d",
4451 channel->getStreamTypeMask());
4452 channel->stop();
4453 }
4454 if (mSupportChannel)
4455 mSupportChannel->stop();
4456 if (mAnalysisChannel) {
4457 mAnalysisChannel->stop();
4458 }
4459 mMetadataChannel->stop();
4460 pthread_mutex_unlock(&mMutex);
4461 goto error_exit;
4462 }
4463 }
4464
4465 if (mChannelHandle) {
4466
4467 rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
4468 mChannelHandle);
4469 if (rc != NO_ERROR) {
4470 LOGE("start_channel failed %d", rc);
4471 pthread_mutex_unlock(&mMutex);
4472 goto error_exit;
4473 }
4474 }
4475
4476 goto no_error;
4477error_exit:
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004478 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07004479 return rc;
4480no_error:
Thierry Strudel3d639192016-09-09 11:52:26 -07004481 mWokenUpByDaemon = false;
4482 mPendingLiveRequest = 0;
4483 mFirstConfiguration = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07004484 }
4485
4486 uint32_t frameNumber = request->frame_number;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004487 cam_stream_ID_t streamsArray;
Thierry Strudel3d639192016-09-09 11:52:26 -07004488
4489 if (mFlushPerf) {
4490 //we cannot accept any requests during flush
4491 LOGE("process_capture_request cannot proceed during flush");
4492 pthread_mutex_unlock(&mMutex);
4493 return NO_ERROR; //should return an error
4494 }
4495
4496 if (meta.exists(ANDROID_REQUEST_ID)) {
4497 request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
4498 mCurrentRequestId = request_id;
4499 LOGD("Received request with id: %d", request_id);
4500 } else if (mState == CONFIGURED || mCurrentRequestId == -1){
4501 LOGE("Unable to find request id field, \
4502 & no previous id available");
4503 pthread_mutex_unlock(&mMutex);
4504 return NAME_NOT_FOUND;
4505 } else {
4506 LOGD("Re-using old request id");
4507 request_id = mCurrentRequestId;
4508 }
4509
4510 LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
4511 request->num_output_buffers,
4512 request->input_buffer,
4513 frameNumber);
4514 // Acquire all request buffers first
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004515 streamsArray.num_streams = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07004516 int blob_request = 0;
4517 uint32_t snapshotStreamId = 0;
4518 for (size_t i = 0; i < request->num_output_buffers; i++) {
4519 const camera3_stream_buffer_t& output = request->output_buffers[i];
4520 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
4521
4522 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004523 //FIXME??:Call function to store local copy of jpeg data for encode params.
Thierry Strudel3d639192016-09-09 11:52:26 -07004524 blob_request = 1;
4525 snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
4526 }
4527
4528 if (output.acquire_fence != -1) {
4529 rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
4530 close(output.acquire_fence);
4531 if (rc != OK) {
4532 LOGE("sync wait failed %d", rc);
4533 pthread_mutex_unlock(&mMutex);
4534 return rc;
4535 }
4536 }
4537
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004538 streamsArray.stream_request[streamsArray.num_streams++].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07004539 channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel3d639192016-09-09 11:52:26 -07004540
4541 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
4542 isVidBufRequested = true;
4543 }
4544 }
4545
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004546 //FIXME: Add checks to ensure no dups in validateCaptureRequest
4547 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
4548 itr++) {
4549 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
4550 streamsArray.stream_request[streamsArray.num_streams++].streamID =
4551 channel->getStreamID(channel->getStreamTypeMask());
4552
4553 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
4554 isVidBufRequested = true;
4555 }
4556 }
4557
Thierry Strudel3d639192016-09-09 11:52:26 -07004558 if (blob_request) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004559 KPI_ATRACE_CAMSCOPE_INT("SNAPSHOT", CAMSCOPE_HAL3_SNAPSHOT, 1);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004560 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
Thierry Strudel3d639192016-09-09 11:52:26 -07004561 }
4562 if (blob_request && mRawDumpChannel) {
4563 LOGD("Trigger Raw based on blob request if Raw dump is enabled");
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004564 streamsArray.stream_request[streamsArray.num_streams].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07004565 mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004566 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
Thierry Strudel3d639192016-09-09 11:52:26 -07004567 }
4568
4569 if(request->input_buffer == NULL) {
4570 /* Parse the settings:
4571 * - For every request in NORMAL MODE
4572 * - For every request in HFR mode during preview only case
4573 * - For first request of every batch in HFR mode during video
4574 * recording. In batchmode the same settings except frame number is
4575 * repeated in each request of the batch.
4576 */
4577 if (!mBatchSize ||
4578 (mBatchSize && !isVidBufRequested) ||
4579 (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004580 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
Thierry Strudel3d639192016-09-09 11:52:26 -07004581 if (rc < 0) {
4582 LOGE("fail to set frame parameters");
4583 pthread_mutex_unlock(&mMutex);
4584 return rc;
4585 }
4586 }
4587 /* For batchMode HFR, setFrameParameters is not called for every
4588 * request. But only frame number of the latest request is parsed.
4589 * Keep track of first and last frame numbers in a batch so that
4590 * metadata for the frame numbers of batch can be duplicated in
4591 * handleBatchMetadata */
4592 if (mBatchSize) {
4593 if (!mToBeQueuedVidBufs) {
4594 //start of the batch
4595 mFirstFrameNumberInBatch = request->frame_number;
4596 }
4597 if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4598 CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
4599 LOGE("Failed to set the frame number in the parameters");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004600 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07004601 return BAD_VALUE;
4602 }
4603 }
4604 if (mNeedSensorRestart) {
4605 /* Unlock the mutex as restartSensor waits on the channels to be
4606 * stopped, which in turn calls stream callback functions -
4607 * handleBufferWithLock and handleMetadataWithLock */
4608 pthread_mutex_unlock(&mMutex);
4609 rc = dynamicUpdateMetaStreamInfo();
4610 if (rc != NO_ERROR) {
4611 LOGE("Restarting the sensor failed");
4612 return BAD_VALUE;
4613 }
4614 mNeedSensorRestart = false;
4615 pthread_mutex_lock(&mMutex);
4616 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004617 if(mResetInstantAEC) {
4618 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4619 CAM_INTF_PARM_INSTANT_AEC, (uint8_t)CAM_AEC_NORMAL_CONVERGENCE);
4620 mResetInstantAEC = false;
4621 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004622 } else {
4623
4624 if (request->input_buffer->acquire_fence != -1) {
4625 rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
4626 close(request->input_buffer->acquire_fence);
4627 if (rc != OK) {
4628 LOGE("input buffer sync wait failed %d", rc);
4629 pthread_mutex_unlock(&mMutex);
4630 return rc;
4631 }
4632 }
4633 }
4634
4635 if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
4636 mLastCustIntentFrmNum = frameNumber;
4637 }
4638 /* Update pending request list and pending buffers map */
4639 PendingRequestInfo pendingRequest;
4640 pendingRequestIterator latestRequest;
4641 pendingRequest.frame_number = frameNumber;
4642 pendingRequest.num_buffers = request->num_output_buffers;
4643 pendingRequest.request_id = request_id;
4644 pendingRequest.blob_request = blob_request;
4645 pendingRequest.timestamp = 0;
4646 pendingRequest.bUrgentReceived = 0;
4647 if (request->input_buffer) {
4648 pendingRequest.input_buffer =
4649 (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
4650 *(pendingRequest.input_buffer) = *(request->input_buffer);
4651 pInputBuffer = pendingRequest.input_buffer;
4652 } else {
4653 pendingRequest.input_buffer = NULL;
4654 pInputBuffer = NULL;
4655 }
4656
4657 pendingRequest.pipeline_depth = 0;
4658 pendingRequest.partial_result_cnt = 0;
4659 extractJpegMetadata(mCurJpegMeta, request);
4660 pendingRequest.jpegMetadata = mCurJpegMeta;
4661 pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
4662 pendingRequest.shutter_notified = false;
4663
4664 //extract capture intent
4665 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4666 mCaptureIntent =
4667 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4668 }
4669 pendingRequest.capture_intent = mCaptureIntent;
Samuel Ha68ba5172016-12-15 18:41:12 -08004670 /* DevCamDebug metadata processCaptureRequest */
4671 if (meta.exists(DEVCAMDEBUG_META_ENABLE)) {
4672 mDevCamDebugMetaEnable =
4673 meta.find(DEVCAMDEBUG_META_ENABLE).data.u8[0];
4674 }
4675 pendingRequest.DevCamDebug_meta_enable = mDevCamDebugMetaEnable;
4676 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07004677
4678 //extract CAC info
4679 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
4680 mCacMode =
4681 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
4682 }
4683 pendingRequest.fwkCacMode = mCacMode;
4684
4685 PendingBuffersInRequest bufsForCurRequest;
4686 bufsForCurRequest.frame_number = frameNumber;
4687 // Mark current timestamp for the new request
4688 bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
4689
4690 for (size_t i = 0; i < request->num_output_buffers; i++) {
4691 RequestedBufferInfo requestedBuf;
4692 memset(&requestedBuf, 0, sizeof(requestedBuf));
4693 requestedBuf.stream = request->output_buffers[i].stream;
4694 requestedBuf.buffer = NULL;
4695 pendingRequest.buffers.push_back(requestedBuf);
4696
4697 // Add to buffer handle the pending buffers list
4698 PendingBufferInfo bufferInfo;
4699 bufferInfo.buffer = request->output_buffers[i].buffer;
4700 bufferInfo.stream = request->output_buffers[i].stream;
4701 bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
4702 QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
4703 LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
4704 frameNumber, bufferInfo.buffer,
4705 channel->getStreamTypeMask(), bufferInfo.stream->format);
4706 }
4707 // Add this request packet into mPendingBuffersMap
4708 mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
4709 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
4710 mPendingBuffersMap.get_num_overall_buffers());
4711
4712 latestRequest = mPendingRequestsList.insert(
4713 mPendingRequestsList.end(), pendingRequest);
4714 if(mFlush) {
4715 LOGI("mFlush is true");
4716 pthread_mutex_unlock(&mMutex);
4717 return NO_ERROR;
4718 }
4719
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004720 int indexUsed;
Thierry Strudel3d639192016-09-09 11:52:26 -07004721 // Notify metadata channel we receive a request
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004722 mMetadataChannel->request(NULL, frameNumber, indexUsed);
Thierry Strudel3d639192016-09-09 11:52:26 -07004723
4724 if(request->input_buffer != NULL){
4725 LOGD("Input request, frame_number %d", frameNumber);
4726 rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
4727 if (NO_ERROR != rc) {
4728 LOGE("fail to set reproc parameters");
4729 pthread_mutex_unlock(&mMutex);
4730 return rc;
4731 }
4732 }
4733
4734 // Call request on other streams
4735 uint32_t streams_need_metadata = 0;
4736 pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
4737 for (size_t i = 0; i < request->num_output_buffers; i++) {
4738 const camera3_stream_buffer_t& output = request->output_buffers[i];
4739 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
4740
4741 if (channel == NULL) {
4742 LOGW("invalid channel pointer for stream");
4743 continue;
4744 }
4745
4746 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
4747 LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
4748 output.buffer, request->input_buffer, frameNumber);
4749 if(request->input_buffer != NULL){
4750 rc = channel->request(output.buffer, frameNumber,
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004751 pInputBuffer, &mReprocMeta, indexUsed, false, false);
Thierry Strudel3d639192016-09-09 11:52:26 -07004752 if (rc < 0) {
4753 LOGE("Fail to request on picture channel");
4754 pthread_mutex_unlock(&mMutex);
4755 return rc;
4756 }
4757 } else {
4758 LOGD("snapshot request with buffer %p, frame_number %d",
4759 output.buffer, frameNumber);
4760 if (!request->settings) {
4761 rc = channel->request(output.buffer, frameNumber,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004762 NULL, mPrevParameters, indexUsed);
Thierry Strudel3d639192016-09-09 11:52:26 -07004763 } else {
4764 rc = channel->request(output.buffer, frameNumber,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004765 NULL, mParameters, indexUsed);
Thierry Strudel3d639192016-09-09 11:52:26 -07004766 }
4767 if (rc < 0) {
4768 LOGE("Fail to request on picture channel");
4769 pthread_mutex_unlock(&mMutex);
4770 return rc;
4771 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004772
4773 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
4774 uint32_t j = 0;
4775 for (j = 0; j < streamsArray.num_streams; j++) {
4776 if (streamsArray.stream_request[j].streamID == streamId) {
4777 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
4778 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
4779 else
4780 streamsArray.stream_request[j].buf_index = indexUsed;
4781 break;
4782 }
4783 }
4784 if (j == streamsArray.num_streams) {
4785 LOGE("Did not find matching stream to update index");
4786 assert(0);
4787 }
4788
Thierry Strudel3d639192016-09-09 11:52:26 -07004789 pendingBufferIter->need_metadata = true;
4790 streams_need_metadata++;
4791 }
4792 } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
4793 bool needMetadata = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07004794 QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
4795 rc = yuvChannel->request(output.buffer, frameNumber,
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004796 pInputBuffer, (pInputBuffer ? &mReprocMeta : mParameters),
4797 needMetadata, indexUsed, false, false);
Thierry Strudel3d639192016-09-09 11:52:26 -07004798 if (rc < 0) {
4799 LOGE("Fail to request on YUV channel");
4800 pthread_mutex_unlock(&mMutex);
4801 return rc;
4802 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004803
4804 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
4805 uint32_t j = 0;
4806 for (j = 0; j < streamsArray.num_streams; j++) {
4807 if (streamsArray.stream_request[j].streamID == streamId) {
4808 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
4809 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
4810 else
4811 streamsArray.stream_request[j].buf_index = indexUsed;
4812 break;
4813 }
4814 }
4815 if (j == streamsArray.num_streams) {
4816 LOGE("Did not find matching stream to update index");
4817 assert(0);
4818 }
4819
Thierry Strudel3d639192016-09-09 11:52:26 -07004820 pendingBufferIter->need_metadata = needMetadata;
4821 if (needMetadata)
4822 streams_need_metadata += 1;
4823 LOGD("calling YUV channel request, need_metadata is %d",
4824 needMetadata);
4825 } else {
4826 LOGD("request with buffer %p, frame_number %d",
4827 output.buffer, frameNumber);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004828
4829 rc = channel->request(output.buffer, frameNumber, indexUsed);
4830
4831 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
4832 uint32_t j = 0;
4833 for (j = 0; j < streamsArray.num_streams; j++) {
4834 if (streamsArray.stream_request[j].streamID == streamId) {
4835 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
4836 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
4837 else
4838 streamsArray.stream_request[j].buf_index = indexUsed;
4839 break;
4840 }
4841 }
4842 if (j == streamsArray.num_streams) {
4843 LOGE("Did not find matching stream to update index");
4844 assert(0);
4845 }
4846
Thierry Strudel3d639192016-09-09 11:52:26 -07004847 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
4848 && mBatchSize) {
4849 mToBeQueuedVidBufs++;
4850 if (mToBeQueuedVidBufs == mBatchSize) {
4851 channel->queueBatchBuf();
4852 }
4853 }
4854 if (rc < 0) {
4855 LOGE("request failed");
4856 pthread_mutex_unlock(&mMutex);
4857 return rc;
4858 }
4859 }
4860 pendingBufferIter++;
4861 }
4862
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004863 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
4864 itr++) {
4865 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
4866
4867 if (channel == NULL) {
4868 LOGE("invalid channel pointer for stream");
4869 assert(0);
4870 return BAD_VALUE;
4871 }
4872
4873 InternalRequest requestedStream;
4874 requestedStream = (*itr);
4875
4876
4877 if ((*itr).stream->format == HAL_PIXEL_FORMAT_BLOB) {
4878 LOGD("snapshot request internally input buffer %p, frame_number %d",
4879 request->input_buffer, frameNumber);
4880 if(request->input_buffer != NULL){
4881 rc = channel->request(NULL, frameNumber,
4882 pInputBuffer, &mReprocMeta, indexUsed, true, requestedStream.meteringOnly);
4883 if (rc < 0) {
4884 LOGE("Fail to request on picture channel");
4885 pthread_mutex_unlock(&mMutex);
4886 return rc;
4887 }
4888 } else {
4889 LOGD("snapshot request with frame_number %d", frameNumber);
4890 if (!request->settings) {
4891 rc = channel->request(NULL, frameNumber,
4892 NULL, mPrevParameters, indexUsed, true, requestedStream.meteringOnly);
4893 } else {
4894 rc = channel->request(NULL, frameNumber,
4895 NULL, mParameters, indexUsed, true, requestedStream.meteringOnly);
4896 }
4897 if (rc < 0) {
4898 LOGE("Fail to request on picture channel");
4899 pthread_mutex_unlock(&mMutex);
4900 return rc;
4901 }
4902
4903 if ((*itr).meteringOnly != 1) {
4904 requestedStream.need_metadata = 1;
4905 streams_need_metadata++;
4906 }
4907 }
4908
4909 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
4910 uint32_t j = 0;
4911 for (j = 0; j < streamsArray.num_streams; j++) {
4912 if (streamsArray.stream_request[j].streamID == streamId) {
4913 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
4914 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
4915 else
4916 streamsArray.stream_request[j].buf_index = indexUsed;
4917 break;
4918 }
4919 }
4920 if (j == streamsArray.num_streams) {
4921 LOGE("Did not find matching stream to update index");
4922 assert(0);
4923 }
4924
4925 } else {
4926 LOGE("Internal requests not supported on this stream type");
4927 assert(0);
4928 return INVALID_OPERATION;
4929 }
4930 latestRequest->internalRequestList.push_back(requestedStream);
4931 }
4932
Thierry Strudel3d639192016-09-09 11:52:26 -07004933 //If 2 streams have need_metadata set to true, fail the request, unless
4934 //we copy/reference count the metadata buffer
4935 if (streams_need_metadata > 1) {
4936 LOGE("not supporting request in which two streams requires"
4937 " 2 HAL metadata for reprocessing");
4938 pthread_mutex_unlock(&mMutex);
4939 return -EINVAL;
4940 }
4941
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004942 if (request->input_buffer == NULL) {
Thierry Strudel3d639192016-09-09 11:52:26 -07004943 /* Set the parameters to backend:
4944 * - For every request in NORMAL MODE
4945 * - For every request in HFR mode during preview only case
4946 * - Once every batch in HFR mode during video recording
4947 */
4948 if (!mBatchSize ||
4949 (mBatchSize && !isVidBufRequested) ||
4950 (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
4951 LOGD("set_parms batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
4952 mBatchSize, isVidBufRequested,
4953 mToBeQueuedVidBufs);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004954
4955 if(mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize)) {
4956 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
4957 uint32_t m = 0;
4958 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
4959 if (streamsArray.stream_request[k].streamID ==
4960 mBatchedStreamsArray.stream_request[m].streamID)
4961 break;
4962 }
4963 if (m == mBatchedStreamsArray.num_streams) {
4964 mBatchedStreamsArray.stream_request\
4965 [mBatchedStreamsArray.num_streams].streamID =
4966 streamsArray.stream_request[k].streamID;
4967 mBatchedStreamsArray.stream_request\
4968 [mBatchedStreamsArray.num_streams].buf_index =
4969 streamsArray.stream_request[k].buf_index;
4970 mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
4971 }
4972 }
4973 streamsArray = mBatchedStreamsArray;
4974 }
4975 /* Update stream id of all the requested buffers */
4976 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
4977 LOGE("Failed to set stream type mask in the parameters");
4978 return BAD_VALUE;
4979 }
4980
Thierry Strudel3d639192016-09-09 11:52:26 -07004981 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4982 mParameters);
4983 if (rc < 0) {
4984 LOGE("set_parms failed");
4985 }
4986 /* reset to zero coz, the batch is queued */
4987 mToBeQueuedVidBufs = 0;
4988 mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004989 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
4990 } else if (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs != mBatchSize)) {
4991 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
4992 uint32_t m = 0;
4993 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
4994 if (streamsArray.stream_request[k].streamID ==
4995 mBatchedStreamsArray.stream_request[m].streamID)
4996 break;
4997 }
4998 if (m == mBatchedStreamsArray.num_streams) {
4999 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].streamID =
5000 streamsArray.stream_request[k].streamID;
5001 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].buf_index =
5002 streamsArray.stream_request[k].buf_index;
5003 mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
5004 }
5005 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005006 }
5007 mPendingLiveRequest++;
5008 }
5009
5010 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
5011
5012 mState = STARTED;
5013 // Added a timed condition wait
5014 struct timespec ts;
5015 uint8_t isValidTimeout = 1;
5016 rc = clock_gettime(CLOCK_REALTIME, &ts);
5017 if (rc < 0) {
5018 isValidTimeout = 0;
5019 LOGE("Error reading the real time clock!!");
5020 }
5021 else {
5022 // Make timeout as 5 sec for request to be honored
5023 ts.tv_sec += 5;
5024 }
5025 //Block on conditional variable
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005026 while ((mPendingLiveRequest >= mMinInFlightRequests) && !pInputBuffer &&
Thierry Strudel3d639192016-09-09 11:52:26 -07005027 (mState != ERROR) && (mState != DEINIT)) {
5028 if (!isValidTimeout) {
5029 LOGD("Blocking on conditional wait");
5030 pthread_cond_wait(&mRequestCond, &mMutex);
5031 }
5032 else {
5033 LOGD("Blocking on timed conditional wait");
5034 rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
5035 if (rc == ETIMEDOUT) {
5036 rc = -ENODEV;
5037 LOGE("Unblocked on timeout!!!!");
5038 break;
5039 }
5040 }
5041 LOGD("Unblocked");
5042 if (mWokenUpByDaemon) {
5043 mWokenUpByDaemon = false;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005044 if (mPendingLiveRequest < mMaxInFlightRequests)
Thierry Strudel3d639192016-09-09 11:52:26 -07005045 break;
5046 }
5047 }
5048 pthread_mutex_unlock(&mMutex);
5049
5050 return rc;
5051}
5052
5053/*===========================================================================
5054 * FUNCTION : dump
5055 *
5056 * DESCRIPTION:
5057 *
5058 * PARAMETERS :
5059 *
5060 *
5061 * RETURN :
5062 *==========================================================================*/
5063void QCamera3HardwareInterface::dump(int fd)
5064{
5065 pthread_mutex_lock(&mMutex);
5066 dprintf(fd, "\n Camera HAL3 information Begin \n");
5067
5068 dprintf(fd, "\nNumber of pending requests: %zu \n",
5069 mPendingRequestsList.size());
5070 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
5071 dprintf(fd, " Frame | Number of Buffers | Req Id: | Blob Req | Input buffer present\n");
5072 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
5073 for(pendingRequestIterator i = mPendingRequestsList.begin();
5074 i != mPendingRequestsList.end(); i++) {
5075 dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
5076 i->frame_number, i->num_buffers, i->request_id, i->blob_request,
5077 i->input_buffer);
5078 }
5079 dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
5080 mPendingBuffersMap.get_num_overall_buffers());
5081 dprintf(fd, "-------+------------------\n");
5082 dprintf(fd, " Frame | Stream type mask \n");
5083 dprintf(fd, "-------+------------------\n");
5084 for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
5085 for(auto &j : req.mPendingBufferList) {
5086 QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
5087 dprintf(fd, " %5d | %11d \n",
5088 req.frame_number, channel->getStreamTypeMask());
5089 }
5090 }
5091 dprintf(fd, "-------+------------------\n");
5092
5093 dprintf(fd, "\nPending frame drop list: %zu\n",
5094 mPendingFrameDropList.size());
5095 dprintf(fd, "-------+-----------\n");
5096 dprintf(fd, " Frame | Stream ID \n");
5097 dprintf(fd, "-------+-----------\n");
5098 for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
5099 i != mPendingFrameDropList.end(); i++) {
5100 dprintf(fd, " %5d | %9d \n",
5101 i->frame_number, i->stream_ID);
5102 }
5103 dprintf(fd, "-------+-----------\n");
5104
5105 dprintf(fd, "\n Camera HAL3 information End \n");
5106
5107 /* use dumpsys media.camera as trigger to send update debug level event */
5108 mUpdateDebugLevel = true;
5109 pthread_mutex_unlock(&mMutex);
5110 return;
5111}
5112
5113/*===========================================================================
5114 * FUNCTION : flush
5115 *
5116 * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
5117 * conditionally restarts channels
5118 *
5119 * PARAMETERS :
5120 * @ restartChannels: re-start all channels
5121 *
5122 *
5123 * RETURN :
5124 * 0 on success
5125 * Error code on failure
5126 *==========================================================================*/
5127int QCamera3HardwareInterface::flush(bool restartChannels)
5128{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005129 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_FLUSH);
Thierry Strudel3d639192016-09-09 11:52:26 -07005130 int32_t rc = NO_ERROR;
5131
5132 LOGD("Unblocking Process Capture Request");
5133 pthread_mutex_lock(&mMutex);
5134 mFlush = true;
5135 pthread_mutex_unlock(&mMutex);
5136
5137 rc = stopAllChannels();
5138 // unlink of dualcam
5139 if (mIsDeviceLinked) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005140 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
5141 &m_pDualCamCmdPtr->bundle_info;
5142 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005143 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
5144 pthread_mutex_lock(&gCamLock);
5145
5146 if (mIsMainCamera == 1) {
5147 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
5148 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005149 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07005150 // related session id should be session id of linked session
5151 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5152 } else {
5153 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
5154 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005155 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07005156 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5157 }
5158 pthread_mutex_unlock(&gCamLock);
5159
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005160 rc = mCameraHandle->ops->set_dual_cam_cmd(
5161 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07005162 if (rc < 0) {
5163 LOGE("Dualcam: Unlink failed, but still proceed to close");
5164 }
5165 }
5166
5167 if (rc < 0) {
5168 LOGE("stopAllChannels failed");
5169 return rc;
5170 }
5171 if (mChannelHandle) {
5172 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
5173 mChannelHandle);
5174 }
5175
5176 // Reset bundle info
5177 rc = setBundleInfo();
5178 if (rc < 0) {
5179 LOGE("setBundleInfo failed %d", rc);
5180 return rc;
5181 }
5182
5183 // Mutex Lock
5184 pthread_mutex_lock(&mMutex);
5185
5186 // Unblock process_capture_request
5187 mPendingLiveRequest = 0;
5188 pthread_cond_signal(&mRequestCond);
5189
5190 rc = notifyErrorForPendingRequests();
5191 if (rc < 0) {
5192 LOGE("notifyErrorForPendingRequests failed");
5193 pthread_mutex_unlock(&mMutex);
5194 return rc;
5195 }
5196
5197 mFlush = false;
5198
5199 // Start the Streams/Channels
5200 if (restartChannels) {
5201 rc = startAllChannels();
5202 if (rc < 0) {
5203 LOGE("startAllChannels failed");
5204 pthread_mutex_unlock(&mMutex);
5205 return rc;
5206 }
5207 }
5208
5209 if (mChannelHandle) {
5210 mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
5211 mChannelHandle);
5212 if (rc < 0) {
5213 LOGE("start_channel failed");
5214 pthread_mutex_unlock(&mMutex);
5215 return rc;
5216 }
5217 }
5218
5219 pthread_mutex_unlock(&mMutex);
5220
5221 return 0;
5222}
5223
5224/*===========================================================================
5225 * FUNCTION : flushPerf
5226 *
5227 * DESCRIPTION: This is the performance optimization version of flush that does
5228 * not use stream off, rather flushes the system
5229 *
5230 * PARAMETERS :
5231 *
5232 *
5233 * RETURN : 0 : success
5234 * -EINVAL: input is malformed (device is not valid)
5235 * -ENODEV: if the device has encountered a serious error
5236 *==========================================================================*/
int QCamera3HardwareInterface::flushPerf()
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_FLUSH_PREF);
    int32_t rc = 0;
    struct timespec timeout;
    bool timed_wait = false;

    // Holds mMutex for the whole operation, including the condvar waits
    // (pthread_cond_*wait releases it while blocked).
    pthread_mutex_lock(&mMutex);
    mFlushPerf = true;
    // Snapshot how many buffers must come back before we may return.
    mPendingBuffersMap.numPendingBufsAtFlush =
            mPendingBuffersMap.get_num_overall_buffers();
    LOGD("Calling flush. Wait for %d buffers to return",
            mPendingBuffersMap.numPendingBufsAtFlush);

    /* send the flush event to the backend */
    rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
    if (rc < 0) {
        LOGE("Error in flush: IOCTL failure");
        mFlushPerf = false;
        pthread_mutex_unlock(&mMutex);
        return -ENODEV;
    }

    if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
        // Nothing outstanding; flush is trivially complete.
        LOGD("No pending buffers in HAL, return flush");
        mFlushPerf = false;
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    /* wait on a signal that buffers were received */
    // If the clock read fails, fall back to an untimed (indefinite) wait.
    rc = clock_gettime(CLOCK_REALTIME, &timeout);
    if (rc < 0) {
        LOGE("Error reading the real time clock, cannot use timed wait");
    } else {
        timeout.tv_sec += FLUSH_TIMEOUT;
        timed_wait = true;
    }

    //Block on conditional variable
    // mBuffersCond is signalled as buffers are returned; the counter is
    // decremented elsewhere under mMutex. pthread_cond_*wait return a plain
    // errno value (not -1), hence the rc != 0 checks.
    while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
        LOGD("Waiting on mBuffersCond");
        if (!timed_wait) {
            rc = pthread_cond_wait(&mBuffersCond, &mMutex);
            if (rc != 0) {
                LOGE("pthread_cond_wait failed due to rc = %s",
                        strerror(rc));
                break;
            }
        } else {
            rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
            if (rc != 0) {
                LOGE("pthread_cond_timedwait failed due to rc = %s",
                        strerror(rc));
                break;
            }
        }
    }
    if (rc != 0) {
        // Wait failed or timed out: report a serious device error.
        mFlushPerf = false;
        pthread_mutex_unlock(&mMutex);
        return -ENODEV;
    }

    LOGD("Received buffers, now safe to return them");

    //make sure the channels handle flush
    //currently only required for the picture channel to release snapshot resources
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3Channel *channel = (*it)->channel;
        if (channel) {
            rc = channel->flush();
            if (rc) {
                LOGE("Flushing the channels failed with error %d", rc);
                // even though the channel flush failed we need to continue and
                // return the buffers we have to the framework, however the return
                // value will be an error
                rc = -ENODEV;
            }
        }
    }

    /* notify the frameworks and send errored results */
    rc = notifyErrorForPendingRequests();
    if (rc < 0) {
        LOGE("notifyErrorForPendingRequests failed");
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    //unblock process_capture_request
    mPendingLiveRequest = 0;
    unblockRequestIfNecessary();

    mFlushPerf = false;
    pthread_mutex_unlock(&mMutex);
    LOGD ("Flush Operation complete. rc = %d", rc);
    return rc;
}
5337
5338/*===========================================================================
5339 * FUNCTION : handleCameraDeviceError
5340 *
5341 * DESCRIPTION: This function calls internal flush and notifies the error to
5342 * framework and updates the state variable.
5343 *
5344 * PARAMETERS : None
5345 *
5346 * RETURN : NO_ERROR on Success
5347 * Error code on failure
5348 *==========================================================================*/
int32_t QCamera3HardwareInterface::handleCameraDeviceError()
{
    int32_t rc = NO_ERROR;

    // Scoped block: mFlushLock serializes this error-handling flush against
    // any framework-initiated flush(). mMutex is only held for the short
    // state checks/updates, never across the flush() call itself.
    {
        Mutex::Autolock lock(mFlushLock);
        pthread_mutex_lock(&mMutex);
        if (mState != ERROR) {
            //if mState != ERROR, nothing to be done
            pthread_mutex_unlock(&mMutex);
            return NO_ERROR;
        }
        pthread_mutex_unlock(&mMutex);

        // Internal flush without channel restart; even on failure we still
        // proceed to DEINIT and notify the framework below.
        rc = flush(false /* restart channels */);
        if (NO_ERROR != rc) {
            LOGE("internal flush to handle mState = ERROR failed");
        }

        pthread_mutex_lock(&mMutex);
        mState = DEINIT;
        pthread_mutex_unlock(&mMutex);
    }

    // Tell the framework the device hit a fatal error (frame_number 0,
    // no specific stream).
    camera3_notify_msg_t notify_msg;
    memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
    notify_msg.type = CAMERA3_MSG_ERROR;
    notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
    notify_msg.message.error.error_stream = NULL;
    notify_msg.message.error.frame_number = 0;
    orchestrateNotify(&notify_msg);

    return rc;
}
5383
5384/*===========================================================================
5385 * FUNCTION : captureResultCb
5386 *
5387 * DESCRIPTION: Callback handler for all capture result
5388 * (streams, as well as metadata)
5389 *
5390 * PARAMETERS :
5391 * @metadata : metadata information
5392 * @buffer : actual gralloc buffer to be returned to frameworks.
5393 * NULL if metadata.
5394 *
5395 * RETURN : NONE
5396 *==========================================================================*/
5397void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
5398 camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
5399{
5400 if (metadata_buf) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005401 pthread_mutex_lock(&mMutex);
5402 uint8_t batchSize = mBatchSize;
5403 pthread_mutex_unlock(&mMutex);
5404 if (batchSize) {
Thierry Strudel3d639192016-09-09 11:52:26 -07005405 handleBatchMetadata(metadata_buf,
5406 true /* free_and_bufdone_meta_buf */);
5407 } else { /* mBatchSize = 0 */
5408 hdrPlusPerfLock(metadata_buf);
5409 pthread_mutex_lock(&mMutex);
5410 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005411 true /* free_and_bufdone_meta_buf */,
5412 false /* first frame of batch metadata */ );
Thierry Strudel3d639192016-09-09 11:52:26 -07005413 pthread_mutex_unlock(&mMutex);
5414 }
5415 } else if (isInputBuffer) {
5416 pthread_mutex_lock(&mMutex);
5417 handleInputBufferWithLock(frame_number);
5418 pthread_mutex_unlock(&mMutex);
5419 } else {
5420 pthread_mutex_lock(&mMutex);
5421 handleBufferWithLock(buffer, frame_number);
5422 pthread_mutex_unlock(&mMutex);
5423 }
5424 return;
5425}
5426
5427/*===========================================================================
5428 * FUNCTION : getReprocessibleOutputStreamId
5429 *
5430 * DESCRIPTION: Get source output stream id for the input reprocess stream
5431 * based on size and format, which would be the largest
5432 * output stream if an input stream exists.
5433 *
5434 * PARAMETERS :
5435 * @id : return the stream id if found
5436 *
5437 * RETURN : int32_t type of status
5438 * NO_ERROR -- success
5439 * none-zero failure code
5440 *==========================================================================*/
5441int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
5442{
5443 /* check if any output or bidirectional stream with the same size and format
5444 and return that stream */
5445 if ((mInputStreamInfo.dim.width > 0) &&
5446 (mInputStreamInfo.dim.height > 0)) {
5447 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5448 it != mStreamInfo.end(); it++) {
5449
5450 camera3_stream_t *stream = (*it)->stream;
5451 if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
5452 (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
5453 (stream->format == mInputStreamInfo.format)) {
5454 // Usage flag for an input stream and the source output stream
5455 // may be different.
5456 LOGD("Found reprocessible output stream! %p", *it);
5457 LOGD("input stream usage 0x%x, current stream usage 0x%x",
5458 stream->usage, mInputStreamInfo.usage);
5459
5460 QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
5461 if (channel != NULL && channel->mStreams[0]) {
5462 id = channel->mStreams[0]->getMyServerID();
5463 return NO_ERROR;
5464 }
5465 }
5466 }
5467 } else {
5468 LOGD("No input stream, so no reprocessible output stream");
5469 }
5470 return NAME_NOT_FOUND;
5471}
5472
5473/*===========================================================================
5474 * FUNCTION : lookupFwkName
5475 *
5476 * DESCRIPTION: In case the enum is not same in fwk and backend
 * make sure the parameter is correctly propagated
5478 *
5479 * PARAMETERS :
5480 * @arr : map between the two enums
5481 * @len : len of the map
5482 * @hal_name : name of the hal_parm to map
5483 *
5484 * RETURN : int type of status
5485 * fwk_name -- success
5486 * none-zero failure code
5487 *==========================================================================*/
5488template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
5489 size_t len, halType hal_name)
5490{
5491
5492 for (size_t i = 0; i < len; i++) {
5493 if (arr[i].hal_name == hal_name) {
5494 return arr[i].fwk_name;
5495 }
5496 }
5497
5498 /* Not able to find matching framework type is not necessarily
5499 * an error case. This happens when mm-camera supports more attributes
5500 * than the frameworks do */
5501 LOGH("Cannot find matching framework type");
5502 return NAME_NOT_FOUND;
5503}
5504
5505/*===========================================================================
5506 * FUNCTION : lookupHalName
5507 *
5508 * DESCRIPTION: In case the enum is not same in fwk and backend
 * make sure the parameter is correctly propagated
5510 *
5511 * PARAMETERS :
5512 * @arr : map between the two enums
5513 * @len : len of the map
5514 * @fwk_name : name of the hal_parm to map
5515 *
5516 * RETURN : int32_t type of status
5517 * hal_name -- success
5518 * none-zero failure code
5519 *==========================================================================*/
5520template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
5521 size_t len, fwkType fwk_name)
5522{
5523 for (size_t i = 0; i < len; i++) {
5524 if (arr[i].fwk_name == fwk_name) {
5525 return arr[i].hal_name;
5526 }
5527 }
5528
5529 LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
5530 return NAME_NOT_FOUND;
5531}
5532
5533/*===========================================================================
5534 * FUNCTION : lookupProp
5535 *
5536 * DESCRIPTION: lookup a value by its name
5537 *
5538 * PARAMETERS :
5539 * @arr : map between the two enums
5540 * @len : size of the map
5541 * @name : name to be looked up
5542 *
5543 * RETURN : Value if found
5544 * CAM_CDS_MODE_MAX if not found
5545 *==========================================================================*/
5546template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
5547 size_t len, const char *name)
5548{
5549 if (name) {
5550 for (size_t i = 0; i < len; i++) {
5551 if (!strcmp(arr[i].desc, name)) {
5552 return arr[i].val;
5553 }
5554 }
5555 }
5556 return CAM_CDS_MODE_MAX;
5557}
5558
5559/*===========================================================================
5560 *
5561 * DESCRIPTION:
5562 *
5563 * PARAMETERS :
5564 * @metadata : metadata information from callback
5565 * @timestamp: metadata buffer timestamp
5566 * @request_id: request id
5567 * @jpegMetadata: additional jpeg metadata
Samuel Ha68ba5172016-12-15 18:41:12 -08005568 * @DevCamDebug_meta_enable: enable DevCamDebug meta
5569 * // DevCamDebug metadata end
Thierry Strudel3d639192016-09-09 11:52:26 -07005570 * @pprocDone: whether internal offline postprocsesing is done
5571 *
5572 * RETURN : camera_metadata_t*
5573 * metadata in a format specified by fwk
5574 *==========================================================================*/
5575camera_metadata_t*
5576QCamera3HardwareInterface::translateFromHalMetadata(
5577 metadata_buffer_t *metadata,
5578 nsecs_t timestamp,
5579 int32_t request_id,
5580 const CameraMetadata& jpegMetadata,
5581 uint8_t pipeline_depth,
5582 uint8_t capture_intent,
Samuel Ha68ba5172016-12-15 18:41:12 -08005583 /* DevCamDebug metadata translateFromHalMetadata argument */
5584 uint8_t DevCamDebug_meta_enable,
5585 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07005586 bool pprocDone,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005587 uint8_t fwk_cacMode,
5588 bool firstMetadataInBatch)
Thierry Strudel3d639192016-09-09 11:52:26 -07005589{
5590 CameraMetadata camMetadata;
5591 camera_metadata_t *resultMetadata;
5592
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005593 if (mBatchSize && !firstMetadataInBatch) {
5594 /* In batch mode, use cached metadata from the first metadata
5595 in the batch */
5596 camMetadata.clear();
5597 camMetadata = mCachedMetadata;
5598 }
5599
Thierry Strudel3d639192016-09-09 11:52:26 -07005600 if (jpegMetadata.entryCount())
5601 camMetadata.append(jpegMetadata);
5602
5603 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
5604 camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
5605 camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pipeline_depth, 1);
5606 camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &capture_intent, 1);
Samuel Ha68ba5172016-12-15 18:41:12 -08005607 if (mBatchSize == 0) {
5608 // DevCamDebug metadata translateFromHalMetadata. Only update this one for non-HFR mode
5609 camMetadata.update(DEVCAMDEBUG_META_ENABLE, &DevCamDebug_meta_enable, 1);
5610 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005611
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005612 if (mBatchSize && !firstMetadataInBatch) {
5613 /* In batch mode, use cached metadata instead of parsing metadata buffer again */
5614 resultMetadata = camMetadata.release();
5615 return resultMetadata;
5616 }
5617
Samuel Ha68ba5172016-12-15 18:41:12 -08005618 // atrace_begin(ATRACE_TAG_ALWAYS, "DevCamDebugInfo");
5619 // Only update DevCameraDebug metadta conditionally: non-HFR mode and it is enabled.
5620 if (mBatchSize == 0 && DevCamDebug_meta_enable != 0) {
5621 // DevCamDebug metadata translateFromHalMetadata AF
5622 IF_META_AVAILABLE(int32_t, DevCamDebug_af_lens_position,
5623 CAM_INTF_META_DEV_CAM_AF_LENS_POSITION, metadata) {
5624 int32_t fwk_DevCamDebug_af_lens_position = *DevCamDebug_af_lens_position;
5625 camMetadata.update(DEVCAMDEBUG_AF_LENS_POSITION, &fwk_DevCamDebug_af_lens_position, 1);
5626 }
5627 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_confidence,
5628 CAM_INTF_META_DEV_CAM_AF_TOF_CONFIDENCE, metadata) {
5629 int32_t fwk_DevCamDebug_af_tof_confidence = *DevCamDebug_af_tof_confidence;
5630 camMetadata.update(DEVCAMDEBUG_AF_TOF_CONFIDENCE, &fwk_DevCamDebug_af_tof_confidence, 1);
5631 }
5632 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_distance,
5633 CAM_INTF_META_DEV_CAM_AF_TOF_DISTANCE, metadata) {
5634 int32_t fwk_DevCamDebug_af_tof_distance = *DevCamDebug_af_tof_distance;
5635 camMetadata.update(DEVCAMDEBUG_AF_TOF_DISTANCE, &fwk_DevCamDebug_af_tof_distance, 1);
5636 }
5637 IF_META_AVAILABLE(int32_t, DevCamDebug_af_luma,
5638 CAM_INTF_META_DEV_CAM_AF_LUMA, metadata) {
5639 int32_t fwk_DevCamDebug_af_luma = *DevCamDebug_af_luma;
5640 camMetadata.update(DEVCAMDEBUG_AF_LUMA, &fwk_DevCamDebug_af_luma, 1);
5641 }
5642 IF_META_AVAILABLE(int32_t, DevCamDebug_af_haf_state,
5643 CAM_INTF_META_DEV_CAM_AF_HAF_STATE, metadata) {
5644 int32_t fwk_DevCamDebug_af_haf_state = *DevCamDebug_af_haf_state;
5645 camMetadata.update(DEVCAMDEBUG_AF_HAF_STATE, &fwk_DevCamDebug_af_haf_state, 1);
5646 }
5647 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_target_pos,
5648 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_TARGET_POS, metadata) {
5649 int32_t fwk_DevCamDebug_af_monitor_pdaf_target_pos =
5650 *DevCamDebug_af_monitor_pdaf_target_pos;
5651 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
5652 &fwk_DevCamDebug_af_monitor_pdaf_target_pos, 1);
5653 }
5654 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_confidence,
5655 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_CONFIDENCE, metadata) {
5656 int32_t fwk_DevCamDebug_af_monitor_pdaf_confidence =
5657 *DevCamDebug_af_monitor_pdaf_confidence;
5658 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
5659 &fwk_DevCamDebug_af_monitor_pdaf_confidence, 1);
5660 }
5661 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_refocus,
5662 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_REFOCUS, metadata) {
5663 int32_t fwk_DevCamDebug_af_monitor_pdaf_refocus = *DevCamDebug_af_monitor_pdaf_refocus;
5664 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
5665 &fwk_DevCamDebug_af_monitor_pdaf_refocus, 1);
5666 }
5667 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_target_pos,
5668 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_TARGET_POS, metadata) {
5669 int32_t fwk_DevCamDebug_af_monitor_tof_target_pos =
5670 *DevCamDebug_af_monitor_tof_target_pos;
5671 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
5672 &fwk_DevCamDebug_af_monitor_tof_target_pos, 1);
5673 }
5674 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_confidence,
5675 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_CONFIDENCE, metadata) {
5676 int32_t fwk_DevCamDebug_af_monitor_tof_confidence =
5677 *DevCamDebug_af_monitor_tof_confidence;
5678 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
5679 &fwk_DevCamDebug_af_monitor_tof_confidence, 1);
5680 }
5681 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_refocus,
5682 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_REFOCUS, metadata) {
5683 int32_t fwk_DevCamDebug_af_monitor_tof_refocus = *DevCamDebug_af_monitor_tof_refocus;
5684 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
5685 &fwk_DevCamDebug_af_monitor_tof_refocus, 1);
5686 }
5687 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_type_select,
5688 CAM_INTF_META_DEV_CAM_AF_MONITOR_TYPE_SELECT, metadata) {
5689 int32_t fwk_DevCamDebug_af_monitor_type_select = *DevCamDebug_af_monitor_type_select;
5690 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
5691 &fwk_DevCamDebug_af_monitor_type_select, 1);
5692 }
5693 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_refocus,
5694 CAM_INTF_META_DEV_CAM_AF_MONITOR_REFOCUS, metadata) {
5695 int32_t fwk_DevCamDebug_af_monitor_refocus = *DevCamDebug_af_monitor_refocus;
5696 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_REFOCUS,
5697 &fwk_DevCamDebug_af_monitor_refocus, 1);
5698 }
5699 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_target_pos,
5700 CAM_INTF_META_DEV_CAM_AF_MONITOR_TARGET_POS, metadata) {
5701 int32_t fwk_DevCamDebug_af_monitor_target_pos = *DevCamDebug_af_monitor_target_pos;
5702 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
5703 &fwk_DevCamDebug_af_monitor_target_pos, 1);
5704 }
5705 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_target_pos,
5706 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_TARGET_POS, metadata) {
5707 int32_t fwk_DevCamDebug_af_search_pdaf_target_pos =
5708 *DevCamDebug_af_search_pdaf_target_pos;
5709 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
5710 &fwk_DevCamDebug_af_search_pdaf_target_pos, 1);
5711 }
5712 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_next_pos,
5713 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEXT_POS, metadata) {
5714 int32_t fwk_DevCamDebug_af_search_pdaf_next_pos = *DevCamDebug_af_search_pdaf_next_pos;
5715 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
5716 &fwk_DevCamDebug_af_search_pdaf_next_pos, 1);
5717 }
5718 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_near_pos,
5719 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEAR_POS, metadata) {
5720 int32_t fwk_DevCamDebug_af_search_pdaf_near_pos = *DevCamDebug_af_search_pdaf_near_pos;
5721 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
5722 &fwk_DevCamDebug_af_search_pdaf_near_pos, 1);
5723 }
5724 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_far_pos,
5725 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_FAR_POS, metadata) {
5726 int32_t fwk_DevCamDebug_af_search_pdaf_far_pos = *DevCamDebug_af_search_pdaf_far_pos;
5727 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
5728 &fwk_DevCamDebug_af_search_pdaf_far_pos, 1);
5729 }
5730 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_confidence,
5731 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_CONFIDENCE, metadata) {
5732 int32_t fwk_DevCamDebug_af_search_pdaf_confidence = *DevCamDebug_af_search_pdaf_confidence;
5733 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
5734 &fwk_DevCamDebug_af_search_pdaf_confidence, 1);
5735 }
5736 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_target_pos,
5737 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_TARGET_POS, metadata) {
5738 int32_t fwk_DevCamDebug_af_search_tof_target_pos =
5739 *DevCamDebug_af_search_tof_target_pos;
5740 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
5741 &fwk_DevCamDebug_af_search_tof_target_pos, 1);
5742 }
5743 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_next_pos,
5744 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEXT_POS, metadata) {
5745 int32_t fwk_DevCamDebug_af_search_tof_next_pos = *DevCamDebug_af_search_tof_next_pos;
5746 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
5747 &fwk_DevCamDebug_af_search_tof_next_pos, 1);
5748 }
5749 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_near_pos,
5750 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEAR_POS, metadata) {
5751 int32_t fwk_DevCamDebug_af_search_tof_near_pos = *DevCamDebug_af_search_tof_near_pos;
5752 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
5753 &fwk_DevCamDebug_af_search_tof_near_pos, 1);
5754 }
5755 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_far_pos,
5756 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_FAR_POS, metadata) {
5757 int32_t fwk_DevCamDebug_af_search_tof_far_pos = *DevCamDebug_af_search_tof_far_pos;
5758 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
5759 &fwk_DevCamDebug_af_search_tof_far_pos, 1);
5760 }
5761 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_confidence,
5762 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_CONFIDENCE, metadata) {
5763 int32_t fwk_DevCamDebug_af_search_tof_confidence = *DevCamDebug_af_search_tof_confidence;
5764 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
5765 &fwk_DevCamDebug_af_search_tof_confidence, 1);
5766 }
5767 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_type_select,
5768 CAM_INTF_META_DEV_CAM_AF_SEARCH_TYPE_SELECT, metadata) {
5769 int32_t fwk_DevCamDebug_af_search_type_select = *DevCamDebug_af_search_type_select;
5770 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
5771 &fwk_DevCamDebug_af_search_type_select, 1);
5772 }
5773 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_next_pos,
5774 CAM_INTF_META_DEV_CAM_AF_SEARCH_NEXT_POS, metadata) {
5775 int32_t fwk_DevCamDebug_af_search_next_pos = *DevCamDebug_af_search_next_pos;
5776 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
5777 &fwk_DevCamDebug_af_search_next_pos, 1);
5778 }
5779 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_target_pos,
5780 CAM_INTF_META_DEV_CAM_AF_SEARCH_TARGET_POS, metadata) {
5781 int32_t fwk_DevCamDebug_af_search_target_pos = *DevCamDebug_af_search_target_pos;
5782 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
5783 &fwk_DevCamDebug_af_search_target_pos, 1);
5784 }
5785 // DevCamDebug metadata translateFromHalMetadata AEC
5786 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_target_luma,
5787 CAM_INTF_META_DEV_CAM_AEC_TARGET_LUMA, metadata) {
5788 int32_t fwk_DevCamDebug_aec_target_luma = *DevCamDebug_aec_target_luma;
5789 camMetadata.update(DEVCAMDEBUG_AEC_TARGET_LUMA, &fwk_DevCamDebug_aec_target_luma, 1);
5790 }
5791 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_comp_luma,
5792 CAM_INTF_META_DEV_CAM_AEC_COMP_LUMA, metadata) {
5793 int32_t fwk_DevCamDebug_aec_comp_luma = *DevCamDebug_aec_comp_luma;
5794 camMetadata.update(DEVCAMDEBUG_AEC_COMP_LUMA, &fwk_DevCamDebug_aec_comp_luma, 1);
5795 }
5796 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_avg_luma,
5797 CAM_INTF_META_DEV_CAM_AEC_AVG_LUMA, metadata) {
5798 int32_t fwk_DevCamDebug_aec_avg_luma = *DevCamDebug_aec_avg_luma;
5799 camMetadata.update(DEVCAMDEBUG_AEC_AVG_LUMA, &fwk_DevCamDebug_aec_avg_luma, 1);
5800 }
5801 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_cur_luma,
5802 CAM_INTF_META_DEV_CAM_AEC_CUR_LUMA, metadata) {
5803 int32_t fwk_DevCamDebug_aec_cur_luma = *DevCamDebug_aec_cur_luma;
5804 camMetadata.update(DEVCAMDEBUG_AEC_CUR_LUMA, &fwk_DevCamDebug_aec_cur_luma, 1);
5805 }
5806 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_linecount,
5807 CAM_INTF_META_DEV_CAM_AEC_LINECOUNT, metadata) {
5808 int32_t fwk_DevCamDebug_aec_linecount = *DevCamDebug_aec_linecount;
5809 camMetadata.update(DEVCAMDEBUG_AEC_LINECOUNT, &fwk_DevCamDebug_aec_linecount, 1);
5810 }
5811 IF_META_AVAILABLE(float, DevCamDebug_aec_real_gain,
5812 CAM_INTF_META_DEV_CAM_AEC_REAL_GAIN, metadata) {
5813 float fwk_DevCamDebug_aec_real_gain = *DevCamDebug_aec_real_gain;
5814 camMetadata.update(DEVCAMDEBUG_AEC_REAL_GAIN, &fwk_DevCamDebug_aec_real_gain, 1);
5815 }
5816 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_exp_index,
5817 CAM_INTF_META_DEV_CAM_AEC_EXP_INDEX, metadata) {
5818 int32_t fwk_DevCamDebug_aec_exp_index = *DevCamDebug_aec_exp_index;
5819 camMetadata.update(DEVCAMDEBUG_AEC_EXP_INDEX, &fwk_DevCamDebug_aec_exp_index, 1);
5820 }
5821 IF_META_AVAILABLE(float, DevCamDebug_aec_lux_idx,
5822 CAM_INTF_META_DEV_CAM_AEC_LUX_IDX, metadata) {
5823 float fwk_DevCamDebug_aec_lux_idx = *DevCamDebug_aec_lux_idx;
5824 camMetadata.update(DEVCAMDEBUG_AEC_LUX_IDX, &fwk_DevCamDebug_aec_lux_idx, 1);
5825 }
5826 // DevCamDebug metadata translateFromHalMetadata AWB
5827 IF_META_AVAILABLE(float, DevCamDebug_awb_r_gain,
5828 CAM_INTF_META_DEV_CAM_AWB_R_GAIN, metadata) {
5829 float fwk_DevCamDebug_awb_r_gain = *DevCamDebug_awb_r_gain;
5830 camMetadata.update(DEVCAMDEBUG_AWB_R_GAIN, &fwk_DevCamDebug_awb_r_gain, 1);
5831 }
5832 IF_META_AVAILABLE(float, DevCamDebug_awb_g_gain,
5833 CAM_INTF_META_DEV_CAM_AWB_G_GAIN, metadata) {
5834 float fwk_DevCamDebug_awb_g_gain = *DevCamDebug_awb_g_gain;
5835 camMetadata.update(DEVCAMDEBUG_AWB_G_GAIN, &fwk_DevCamDebug_awb_g_gain, 1);
5836 }
5837 IF_META_AVAILABLE(float, DevCamDebug_awb_b_gain,
5838 CAM_INTF_META_DEV_CAM_AWB_B_GAIN, metadata) {
5839 float fwk_DevCamDebug_awb_b_gain = *DevCamDebug_awb_b_gain;
5840 camMetadata.update(DEVCAMDEBUG_AWB_B_GAIN, &fwk_DevCamDebug_awb_b_gain, 1);
5841 }
5842 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_cct,
5843 CAM_INTF_META_DEV_CAM_AWB_CCT, metadata) {
5844 int32_t fwk_DevCamDebug_awb_cct = *DevCamDebug_awb_cct;
5845 camMetadata.update(DEVCAMDEBUG_AWB_CCT, &fwk_DevCamDebug_awb_cct, 1);
5846 }
5847 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_decision,
5848 CAM_INTF_META_DEV_CAM_AWB_DECISION, metadata) {
5849 int32_t fwk_DevCamDebug_awb_decision = *DevCamDebug_awb_decision;
5850 camMetadata.update(DEVCAMDEBUG_AWB_DECISION, &fwk_DevCamDebug_awb_decision, 1);
5851 }
5852 }
5853 // atrace_end(ATRACE_TAG_ALWAYS);
5854
Thierry Strudel3d639192016-09-09 11:52:26 -07005855 IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
5856 int64_t fwk_frame_number = *frame_number;
5857 camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
5858 }
5859
5860 IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
5861 int32_t fps_range[2];
5862 fps_range[0] = (int32_t)float_range->min_fps;
5863 fps_range[1] = (int32_t)float_range->max_fps;
5864 camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
5865 fps_range, 2);
5866 LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
5867 fps_range[0], fps_range[1]);
5868 }
5869
5870 IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
5871 camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
5872 }
5873
5874 IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
5875 int val = (uint8_t)lookupFwkName(SCENE_MODES_MAP,
5876 METADATA_MAP_SIZE(SCENE_MODES_MAP),
5877 *sceneMode);
5878 if (NAME_NOT_FOUND != val) {
5879 uint8_t fwkSceneMode = (uint8_t)val;
5880 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
5881 LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
5882 fwkSceneMode);
5883 }
5884 }
5885
5886 IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
5887 uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
5888 camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
5889 }
5890
5891 IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
5892 uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
5893 camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
5894 }
5895
5896 IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
5897 uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
5898 camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
5899 }
5900
5901 IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
5902 CAM_INTF_META_EDGE_MODE, metadata) {
5903 camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
5904 }
5905
5906 IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
5907 uint8_t fwk_flashPower = (uint8_t) *flashPower;
5908 camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
5909 }
5910
5911 IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
5912 camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
5913 }
5914
5915 IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
5916 if (0 <= *flashState) {
5917 uint8_t fwk_flashState = (uint8_t) *flashState;
5918 if (!gCamCapability[mCameraId]->flash_available) {
5919 fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
5920 }
5921 camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
5922 }
5923 }
5924
5925 IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
5926 int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
5927 if (NAME_NOT_FOUND != val) {
5928 uint8_t fwk_flashMode = (uint8_t)val;
5929 camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
5930 }
5931 }
5932
5933 IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
5934 uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
5935 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
5936 }
5937
5938 IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
5939 camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
5940 }
5941
5942 IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
5943 camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
5944 }
5945
5946 IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
5947 camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
5948 }
5949
5950 IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
5951 uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
5952 camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
5953 }
5954
5955 IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
5956 uint8_t fwk_videoStab = (uint8_t) *videoStab;
5957 LOGD("fwk_videoStab = %d", fwk_videoStab);
5958 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
5959 } else {
5960 // Regardless of Video stab supports or not, CTS is expecting the EIS result to be non NULL
5961 // and so hardcoding the Video Stab result to OFF mode.
5962 uint8_t fwkVideoStabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
5963 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005964 LOGD("EIS result default to OFF mode");
Thierry Strudel3d639192016-09-09 11:52:26 -07005965 }
5966
5967 IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
5968 uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
5969 camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
5970 }
5971
5972 IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
5973 camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
5974 }
5975
5976 IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelSourcePattern,
5977 CAM_INTF_META_BLACK_LEVEL_SOURCE_PATTERN, metadata) {
5978
5979 LOGD("dynamicblackLevel = %f %f %f %f",
5980 blackLevelSourcePattern->cam_black_level[0],
5981 blackLevelSourcePattern->cam_black_level[1],
5982 blackLevelSourcePattern->cam_black_level[2],
5983 blackLevelSourcePattern->cam_black_level[3]);
5984 }
5985
5986 IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
5987 CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
5988 float fwk_blackLevelInd[4];
5989
5990 fwk_blackLevelInd[0] = blackLevelAppliedPattern->cam_black_level[0];
5991 fwk_blackLevelInd[1] = blackLevelAppliedPattern->cam_black_level[1];
5992 fwk_blackLevelInd[2] = blackLevelAppliedPattern->cam_black_level[2];
5993 fwk_blackLevelInd[3] = blackLevelAppliedPattern->cam_black_level[3];
5994
5995 LOGD("applied dynamicblackLevel = %f %f %f %f",
5996 blackLevelAppliedPattern->cam_black_level[0],
5997 blackLevelAppliedPattern->cam_black_level[1],
5998 blackLevelAppliedPattern->cam_black_level[2],
5999 blackLevelAppliedPattern->cam_black_level[3]);
6000 camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd, 4);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006001
6002#ifndef USE_HAL_3_3
6003 // Update the ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
6004 // Need convert the internal 16 bit depth to sensor 10 bit sensor raw
6005 // depth space.
6006 fwk_blackLevelInd[0] /= 64.0;
6007 fwk_blackLevelInd[1] /= 64.0;
6008 fwk_blackLevelInd[2] /= 64.0;
6009 fwk_blackLevelInd[3] /= 64.0;
6010 camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd, 4);
6011#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07006012 }
6013
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006014#ifndef USE_HAL_3_3
6015 // Fixed whitelevel is used by ISP/Sensor
6016 camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
6017 &gCamCapability[mCameraId]->white_level, 1);
6018#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07006019
6020 IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
6021 CAM_INTF_META_SCALER_CROP_REGION, metadata) {
6022 int32_t scalerCropRegion[4];
6023 scalerCropRegion[0] = hScalerCropRegion->left;
6024 scalerCropRegion[1] = hScalerCropRegion->top;
6025 scalerCropRegion[2] = hScalerCropRegion->width;
6026 scalerCropRegion[3] = hScalerCropRegion->height;
6027
6028 // Adjust crop region from sensor output coordinate system to active
6029 // array coordinate system.
6030 mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
6031 scalerCropRegion[2], scalerCropRegion[3]);
6032
6033 camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
6034 }
6035
6036 IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
6037 LOGD("sensorExpTime = %lld", *sensorExpTime);
6038 camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
6039 }
6040
6041 IF_META_AVAILABLE(int64_t, sensorFameDuration,
6042 CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
6043 LOGD("sensorFameDuration = %lld", *sensorFameDuration);
6044 camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
6045 }
6046
6047 IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
6048 CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
6049 LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
6050 camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
6051 sensorRollingShutterSkew, 1);
6052 }
6053
6054 IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
6055 LOGD("sensorSensitivity = %d", *sensorSensitivity);
6056 camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
6057
6058 //calculate the noise profile based on sensitivity
6059 double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
6060 double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
6061 double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
6062 for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
6063 noise_profile[i] = noise_profile_S;
6064 noise_profile[i+1] = noise_profile_O;
6065 }
6066 LOGD("noise model entry (S, O) is (%f, %f)",
6067 noise_profile_S, noise_profile_O);
6068 camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
6069 (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
6070 }
6071
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006072#ifndef USE_HAL_3_3
6073 IF_META_AVAILABLE(int32_t, ispSensitivity, CAM_INTF_META_ISP_SENSITIVITY, metadata) {
6074 int32_t fwk_ispSensitivity = (int32_t) *ispSensitivity;
6075 camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &fwk_ispSensitivity, 1);
6076 }
6077#endif
6078
Thierry Strudel3d639192016-09-09 11:52:26 -07006079 IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
6080 uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
6081 camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
6082 }
6083
6084 IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
6085 int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
6086 *faceDetectMode);
6087 if (NAME_NOT_FOUND != val) {
6088 uint8_t fwk_faceDetectMode = (uint8_t)val;
6089 camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
6090
6091 if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
6092 IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
6093 CAM_INTF_META_FACE_DETECTION, metadata) {
6094 uint8_t numFaces = MIN(
6095 faceDetectionInfo->num_faces_detected, MAX_ROI);
6096 int32_t faceIds[MAX_ROI];
6097 uint8_t faceScores[MAX_ROI];
6098 int32_t faceRectangles[MAX_ROI * 4];
6099 int32_t faceLandmarks[MAX_ROI * 6];
6100 size_t j = 0, k = 0;
6101
6102 for (size_t i = 0; i < numFaces; i++) {
6103 faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
6104 // Adjust crop region from sensor output coordinate system to active
6105 // array coordinate system.
6106 cam_rect_t& rect = faceDetectionInfo->faces[i].face_boundary;
6107 mCropRegionMapper.toActiveArray(rect.left, rect.top,
6108 rect.width, rect.height);
6109
6110 convertToRegions(faceDetectionInfo->faces[i].face_boundary,
6111 faceRectangles+j, -1);
6112
6113 j+= 4;
6114 }
6115 if (numFaces <= 0) {
6116 memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
6117 memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
6118 memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
6119 memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
6120 }
6121
6122 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
6123 numFaces);
6124 camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
6125 faceRectangles, numFaces * 4U);
6126 if (fwk_faceDetectMode ==
6127 ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
6128 IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
6129 CAM_INTF_META_FACE_LANDMARK, metadata) {
6130
6131 for (size_t i = 0; i < numFaces; i++) {
6132 // Map the co-ordinate sensor output coordinate system to active
6133 // array coordinate system.
6134 mCropRegionMapper.toActiveArray(
6135 landmarks->face_landmarks[i].left_eye_center.x,
6136 landmarks->face_landmarks[i].left_eye_center.y);
6137 mCropRegionMapper.toActiveArray(
6138 landmarks->face_landmarks[i].right_eye_center.x,
6139 landmarks->face_landmarks[i].right_eye_center.y);
6140 mCropRegionMapper.toActiveArray(
6141 landmarks->face_landmarks[i].mouth_center.x,
6142 landmarks->face_landmarks[i].mouth_center.y);
6143
6144 convertLandmarks(landmarks->face_landmarks[i], faceLandmarks+k);
Thierry Strudel04e026f2016-10-10 11:27:36 -07006145 k+= TOTAL_LANDMARK_INDICES;
6146 }
6147 } else {
6148 for (size_t i = 0; i < numFaces; i++) {
6149 setInvalidLandmarks(faceLandmarks+k);
6150 k+= TOTAL_LANDMARK_INDICES;
Thierry Strudel3d639192016-09-09 11:52:26 -07006151 }
6152 }
6153
6154 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
6155 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
6156 faceLandmarks, numFaces * 6U);
6157 }
6158 }
6159 }
6160 }
6161 }
6162
6163 IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
6164 uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
6165 camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &fwk_histogramMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006166
6167 if (fwk_histogramMode == ANDROID_STATISTICS_HISTOGRAM_MODE_ON) {
6168 IF_META_AVAILABLE(cam_hist_stats_t, stats_data, CAM_INTF_META_HISTOGRAM, metadata) {
6169 // process histogram statistics info
6170 uint32_t hist_buf[3][CAM_HISTOGRAM_STATS_SIZE];
6171 uint32_t hist_size = sizeof(cam_histogram_data_t::hist_buf);
6172 cam_histogram_data_t rHistData, gHistData, bHistData;
6173 memset(&rHistData, 0, sizeof(rHistData));
6174 memset(&gHistData, 0, sizeof(gHistData));
6175 memset(&bHistData, 0, sizeof(bHistData));
6176
6177 switch (stats_data->type) {
6178 case CAM_HISTOGRAM_TYPE_BAYER:
6179 switch (stats_data->bayer_stats.data_type) {
6180 case CAM_STATS_CHANNEL_GR:
6181 rHistData = gHistData = bHistData = stats_data->bayer_stats.gr_stats;
6182 break;
6183 case CAM_STATS_CHANNEL_GB:
6184 rHistData = gHistData = bHistData = stats_data->bayer_stats.gb_stats;
6185 break;
6186 case CAM_STATS_CHANNEL_B:
6187 rHistData = gHistData = bHistData = stats_data->bayer_stats.b_stats;
6188 break;
6189 case CAM_STATS_CHANNEL_ALL:
6190 rHistData = stats_data->bayer_stats.r_stats;
6191 //Framework expects only 3 channels. So, for now,
6192 //use gb stats for G channel.
6193 gHistData = stats_data->bayer_stats.gb_stats;
6194 bHistData = stats_data->bayer_stats.b_stats;
6195 break;
6196 case CAM_STATS_CHANNEL_Y:
6197 case CAM_STATS_CHANNEL_R:
6198 default:
6199 rHistData = gHistData = bHistData = stats_data->bayer_stats.r_stats;
6200 break;
6201 }
6202 break;
6203 case CAM_HISTOGRAM_TYPE_YUV:
6204 rHistData = gHistData = bHistData = stats_data->yuv_stats;
6205 break;
6206 }
6207
6208 memcpy(hist_buf, rHistData.hist_buf, hist_size);
6209 memcpy(hist_buf[1], gHistData.hist_buf, hist_size);
6210 memcpy(hist_buf[2], bHistData.hist_buf, hist_size);
6211
6212 camMetadata.update(ANDROID_STATISTICS_HISTOGRAM, (int32_t*)hist_buf, hist_size*3);
6213 }
6214 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006215 }
6216
6217 IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
6218 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
6219 uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
6220 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
6221 }
6222
6223 IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
6224 CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
6225 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
6226 CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
6227 }
6228
6229 IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
6230 CAM_INTF_META_LENS_SHADING_MAP, metadata) {
6231 size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
6232 CAM_MAX_SHADING_MAP_HEIGHT);
6233 size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
6234 CAM_MAX_SHADING_MAP_WIDTH);
6235 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
6236 lensShadingMap->lens_shading, 4U * map_width * map_height);
6237 }
6238
6239 IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
6240 uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
6241 camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
6242 }
6243
6244 IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
6245 //Populate CAM_INTF_META_TONEMAP_CURVES
6246 /* ch0 = G, ch 1 = B, ch 2 = R*/
6247 if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
6248 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
6249 tonemap->tonemap_points_cnt,
6250 CAM_MAX_TONEMAP_CURVE_SIZE);
6251 tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
6252 }
6253
6254 camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
6255 &tonemap->curves[0].tonemap_points[0][0],
6256 tonemap->tonemap_points_cnt * 2);
6257
6258 camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
6259 &tonemap->curves[1].tonemap_points[0][0],
6260 tonemap->tonemap_points_cnt * 2);
6261
6262 camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
6263 &tonemap->curves[2].tonemap_points[0][0],
6264 tonemap->tonemap_points_cnt * 2);
6265 }
6266
6267 IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
6268 CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
6269 camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
6270 CC_GAIN_MAX);
6271 }
6272
6273 IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
6274 CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
6275 camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
6276 (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
6277 CC_MATRIX_COLS * CC_MATRIX_ROWS);
6278 }
6279
6280 IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
6281 CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
6282 if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
6283 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
6284 toneCurve->tonemap_points_cnt,
6285 CAM_MAX_TONEMAP_CURVE_SIZE);
6286 toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
6287 }
6288 camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
6289 (float*)toneCurve->curve.tonemap_points,
6290 toneCurve->tonemap_points_cnt * 2);
6291 }
6292
6293 IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
6294 CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
6295 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
6296 predColorCorrectionGains->gains, 4);
6297 }
6298
6299 IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
6300 CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
6301 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
6302 (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
6303 CC_MATRIX_ROWS * CC_MATRIX_COLS);
6304 }
6305
6306 IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
6307 camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
6308 }
6309
6310 IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
6311 uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
6312 camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
6313 }
6314
6315 IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
6316 uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
6317 camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
6318 }
6319
6320 IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
6321 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
6322 *effectMode);
6323 if (NAME_NOT_FOUND != val) {
6324 uint8_t fwk_effectMode = (uint8_t)val;
6325 camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
6326 }
6327 }
6328
6329 IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
6330 CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
6331 int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
6332 METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
6333 if (NAME_NOT_FOUND != fwk_testPatternMode) {
6334 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
6335 }
6336 int32_t fwk_testPatternData[4];
6337 fwk_testPatternData[0] = testPatternData->r;
6338 fwk_testPatternData[3] = testPatternData->b;
6339 switch (gCamCapability[mCameraId]->color_arrangement) {
6340 case CAM_FILTER_ARRANGEMENT_RGGB:
6341 case CAM_FILTER_ARRANGEMENT_GRBG:
6342 fwk_testPatternData[1] = testPatternData->gr;
6343 fwk_testPatternData[2] = testPatternData->gb;
6344 break;
6345 case CAM_FILTER_ARRANGEMENT_GBRG:
6346 case CAM_FILTER_ARRANGEMENT_BGGR:
6347 fwk_testPatternData[2] = testPatternData->gr;
6348 fwk_testPatternData[1] = testPatternData->gb;
6349 break;
6350 default:
6351 LOGE("color arrangement %d is not supported",
6352 gCamCapability[mCameraId]->color_arrangement);
6353 break;
6354 }
6355 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
6356 }
6357
6358 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
6359 camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
6360 }
6361
6362 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
6363 String8 str((const char *)gps_methods);
6364 camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
6365 }
6366
6367 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
6368 camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
6369 }
6370
6371 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
6372 camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
6373 }
6374
6375 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
6376 uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
6377 camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
6378 }
6379
6380 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
6381 uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
6382 camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
6383 }
6384
6385 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
6386 int32_t fwk_thumb_size[2];
6387 fwk_thumb_size[0] = thumb_size->width;
6388 fwk_thumb_size[1] = thumb_size->height;
6389 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
6390 }
6391
6392 IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
6393 camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
6394 privateData,
6395 MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
6396 }
6397
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006398 IF_META_AVAILABLE(int32_t, meteringMode, CAM_INTF_PARM_AEC_ALGO_TYPE, metadata) {
6399 camMetadata.update(QCAMERA3_EXPOSURE_METERING_MODE,
6400 meteringMode, 1);
6401 }
6402
Thierry Strudel3d639192016-09-09 11:52:26 -07006403 if (metadata->is_tuning_params_valid) {
6404 uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
6405 uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
6406 metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
6407
6408
6409 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
6410 sizeof(uint32_t));
6411 data += sizeof(uint32_t);
6412
6413 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
6414 sizeof(uint32_t));
6415 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
6416 data += sizeof(uint32_t);
6417
6418 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
6419 sizeof(uint32_t));
6420 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
6421 data += sizeof(uint32_t);
6422
6423 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
6424 sizeof(uint32_t));
6425 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
6426 data += sizeof(uint32_t);
6427
6428 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
6429 sizeof(uint32_t));
6430 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
6431 data += sizeof(uint32_t);
6432
6433 metadata->tuning_params.tuning_mod3_data_size = 0;
6434 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
6435 sizeof(uint32_t));
6436 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
6437 data += sizeof(uint32_t);
6438
6439 size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
6440 TUNING_SENSOR_DATA_MAX);
6441 memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
6442 count);
6443 data += count;
6444
6445 count = MIN(metadata->tuning_params.tuning_vfe_data_size,
6446 TUNING_VFE_DATA_MAX);
6447 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
6448 count);
6449 data += count;
6450
6451 count = MIN(metadata->tuning_params.tuning_cpp_data_size,
6452 TUNING_CPP_DATA_MAX);
6453 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
6454 count);
6455 data += count;
6456
6457 count = MIN(metadata->tuning_params.tuning_cac_data_size,
6458 TUNING_CAC_DATA_MAX);
6459 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
6460 count);
6461 data += count;
6462
6463 camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
6464 (int32_t *)(void *)tuning_meta_data_blob,
6465 (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
6466 }
6467
6468 IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
6469 CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
6470 camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
6471 (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
6472 NEUTRAL_COL_POINTS);
6473 }
6474
6475 IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
6476 uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
6477 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
6478 }
6479
6480 IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
6481 int32_t aeRegions[REGIONS_TUPLE_COUNT];
6482 // Adjust crop region from sensor output coordinate system to active
6483 // array coordinate system.
6484 mCropRegionMapper.toActiveArray(hAeRegions->rect.left, hAeRegions->rect.top,
6485 hAeRegions->rect.width, hAeRegions->rect.height);
6486
6487 convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
6488 camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
6489 REGIONS_TUPLE_COUNT);
6490 LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
6491 aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
6492 hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width,
6493 hAeRegions->rect.height);
6494 }
6495
6496 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
6497 uint8_t fwk_afState = (uint8_t) *afState;
6498 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
6499 LOGD("urgent Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
6500 }
6501
6502 IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
6503 camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
6504 }
6505
6506 IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
6507 camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
6508 }
6509
6510 IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
6511 uint8_t fwk_lensState = *lensState;
6512 camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
6513 }
6514
6515 IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
6516 /*af regions*/
6517 int32_t afRegions[REGIONS_TUPLE_COUNT];
6518 // Adjust crop region from sensor output coordinate system to active
6519 // array coordinate system.
6520 mCropRegionMapper.toActiveArray(hAfRegions->rect.left, hAfRegions->rect.top,
6521 hAfRegions->rect.width, hAfRegions->rect.height);
6522
6523 convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
6524 camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
6525 REGIONS_TUPLE_COUNT);
6526 LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
6527 afRegions[0], afRegions[1], afRegions[2], afRegions[3],
6528 hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width,
6529 hAfRegions->rect.height);
6530 }
6531
6532 IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
6533 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
6534 *hal_ab_mode);
6535 if (NAME_NOT_FOUND != val) {
6536 uint8_t fwk_ab_mode = (uint8_t)val;
6537 camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
6538 }
6539 }
6540
6541 IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
6542 int val = lookupFwkName(SCENE_MODES_MAP,
6543 METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
6544 if (NAME_NOT_FOUND != val) {
6545 uint8_t fwkBestshotMode = (uint8_t)val;
6546 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
6547 LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
6548 } else {
6549 LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
6550 }
6551 }
6552
6553 IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
6554 uint8_t fwk_mode = (uint8_t) *mode;
6555 camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
6556 }
6557
6558 /* Constant metadata values to be update*/
6559 uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
6560 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
6561
6562 uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
6563 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
6564
6565 int32_t hotPixelMap[2];
6566 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
6567
6568 // CDS
6569 IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
6570 camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
6571 }
6572
Thierry Strudel04e026f2016-10-10 11:27:36 -07006573 IF_META_AVAILABLE(cam_sensor_hdr_type_t, vhdr, CAM_INTF_PARM_SENSOR_HDR, metadata) {
6574 int32_t fwk_hdr;
6575 if(*vhdr == CAM_SENSOR_HDR_OFF) {
6576 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_OFF;
6577 } else {
6578 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_ON;
6579 }
6580 camMetadata.update(QCAMERA3_VIDEO_HDR_MODE, &fwk_hdr, 1);
6581 }
6582
6583 IF_META_AVAILABLE(cam_ir_mode_type_t, ir, CAM_INTF_META_IR_MODE, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07006584 int32_t fwk_ir = (int32_t) *ir;
6585 camMetadata.update(QCAMERA3_IR_MODE, &fwk_ir, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -07006586 }
6587
Thierry Strudel269c81a2016-10-12 12:13:59 -07006588 // AEC SPEED
6589 IF_META_AVAILABLE(float, aec, CAM_INTF_META_AEC_CONVERGENCE_SPEED, metadata) {
6590 camMetadata.update(QCAMERA3_AEC_CONVERGENCE_SPEED, aec, 1);
6591 }
6592
6593 // AWB SPEED
6594 IF_META_AVAILABLE(float, awb, CAM_INTF_META_AWB_CONVERGENCE_SPEED, metadata) {
6595 camMetadata.update(QCAMERA3_AWB_CONVERGENCE_SPEED, awb, 1);
6596 }
6597
Thierry Strudel3d639192016-09-09 11:52:26 -07006598 // TNR
6599 IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
6600 uint8_t tnr_enable = tnr->denoise_enable;
6601 int32_t tnr_process_type = (int32_t)tnr->process_plates;
6602
6603 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
6604 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
6605 }
6606
6607 // Reprocess crop data
6608 IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
6609 uint8_t cnt = crop_data->num_of_streams;
6610 if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
6611 // mm-qcamera-daemon only posts crop_data for streams
6612 // not linked to pproc. So no valid crop metadata is not
6613 // necessarily an error case.
6614 LOGD("No valid crop metadata entries");
6615 } else {
6616 uint32_t reproc_stream_id;
6617 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
6618 LOGD("No reprocessible stream found, ignore crop data");
6619 } else {
6620 int rc = NO_ERROR;
6621 Vector<int32_t> roi_map;
6622 int32_t *crop = new int32_t[cnt*4];
6623 if (NULL == crop) {
6624 rc = NO_MEMORY;
6625 }
6626 if (NO_ERROR == rc) {
6627 int32_t streams_found = 0;
6628 for (size_t i = 0; i < cnt; i++) {
6629 if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
6630 if (pprocDone) {
6631 // HAL already does internal reprocessing,
6632 // either via reprocessing before JPEG encoding,
6633 // or offline postprocessing for pproc bypass case.
6634 crop[0] = 0;
6635 crop[1] = 0;
6636 crop[2] = mInputStreamInfo.dim.width;
6637 crop[3] = mInputStreamInfo.dim.height;
6638 } else {
6639 crop[0] = crop_data->crop_info[i].crop.left;
6640 crop[1] = crop_data->crop_info[i].crop.top;
6641 crop[2] = crop_data->crop_info[i].crop.width;
6642 crop[3] = crop_data->crop_info[i].crop.height;
6643 }
6644 roi_map.add(crop_data->crop_info[i].roi_map.left);
6645 roi_map.add(crop_data->crop_info[i].roi_map.top);
6646 roi_map.add(crop_data->crop_info[i].roi_map.width);
6647 roi_map.add(crop_data->crop_info[i].roi_map.height);
6648 streams_found++;
6649 LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
6650 crop[0], crop[1], crop[2], crop[3]);
6651 LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
6652 crop_data->crop_info[i].roi_map.left,
6653 crop_data->crop_info[i].roi_map.top,
6654 crop_data->crop_info[i].roi_map.width,
6655 crop_data->crop_info[i].roi_map.height);
6656 break;
6657
6658 }
6659 }
6660 camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
6661 &streams_found, 1);
6662 camMetadata.update(QCAMERA3_CROP_REPROCESS,
6663 crop, (size_t)(streams_found * 4));
6664 if (roi_map.array()) {
6665 camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
6666 roi_map.array(), roi_map.size());
6667 }
6668 }
6669 if (crop) {
6670 delete [] crop;
6671 }
6672 }
6673 }
6674 }
6675
6676 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
6677 // Regardless of CAC supports or not, CTS is expecting the CAC result to be non NULL and
6678 // so hardcoding the CAC result to OFF mode.
6679 uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
6680 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
6681 } else {
6682 IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
6683 int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
6684 *cacMode);
6685 if (NAME_NOT_FOUND != val) {
6686 uint8_t resultCacMode = (uint8_t)val;
6687 // check whether CAC result from CB is equal to Framework set CAC mode
6688 // If not equal then set the CAC mode came in corresponding request
6689 if (fwk_cacMode != resultCacMode) {
6690 resultCacMode = fwk_cacMode;
6691 }
6692 LOGD("fwk_cacMode=%d resultCacMode=%d", fwk_cacMode, resultCacMode);
6693 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
6694 } else {
6695 LOGE("Invalid CAC camera parameter: %d", *cacMode);
6696 }
6697 }
6698 }
6699
6700 // Post blob of cam_cds_data through vendor tag.
6701 IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
6702 uint8_t cnt = cdsInfo->num_of_streams;
6703 cam_cds_data_t cdsDataOverride;
6704 memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
6705 cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
6706 cdsDataOverride.num_of_streams = 1;
6707 if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
6708 uint32_t reproc_stream_id;
6709 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
6710 LOGD("No reprocessible stream found, ignore cds data");
6711 } else {
6712 for (size_t i = 0; i < cnt; i++) {
6713 if (cdsInfo->cds_info[i].stream_id ==
6714 reproc_stream_id) {
6715 cdsDataOverride.cds_info[0].cds_enable =
6716 cdsInfo->cds_info[i].cds_enable;
6717 break;
6718 }
6719 }
6720 }
6721 } else {
6722 LOGD("Invalid stream count %d in CDS_DATA", cnt);
6723 }
6724 camMetadata.update(QCAMERA3_CDS_INFO,
6725 (uint8_t *)&cdsDataOverride,
6726 sizeof(cam_cds_data_t));
6727 }
6728
6729 // Ldaf calibration data
6730 if (!mLdafCalibExist) {
6731 IF_META_AVAILABLE(uint32_t, ldafCalib,
6732 CAM_INTF_META_LDAF_EXIF, metadata) {
6733 mLdafCalibExist = true;
6734 mLdafCalib[0] = ldafCalib[0];
6735 mLdafCalib[1] = ldafCalib[1];
6736 LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
6737 ldafCalib[0], ldafCalib[1]);
6738 }
6739 }
6740
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07006741 // Reprocess and DDM debug data through vendor tag
6742 cam_reprocess_info_t repro_info;
6743 memset(&repro_info, 0, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07006744 IF_META_AVAILABLE(cam_stream_crop_info_t, sensorCropInfo,
6745 CAM_INTF_META_SNAP_CROP_INFO_SENSOR, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07006746 memcpy(&(repro_info.sensor_crop_info), sensorCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07006747 }
6748 IF_META_AVAILABLE(cam_stream_crop_info_t, camifCropInfo,
6749 CAM_INTF_META_SNAP_CROP_INFO_CAMIF, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07006750 memcpy(&(repro_info.camif_crop_info), camifCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07006751 }
6752 IF_META_AVAILABLE(cam_stream_crop_info_t, ispCropInfo,
6753 CAM_INTF_META_SNAP_CROP_INFO_ISP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07006754 memcpy(&(repro_info.isp_crop_info), ispCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07006755 }
6756 IF_META_AVAILABLE(cam_stream_crop_info_t, cppCropInfo,
6757 CAM_INTF_META_SNAP_CROP_INFO_CPP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07006758 memcpy(&(repro_info.cpp_crop_info), cppCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07006759 }
6760 IF_META_AVAILABLE(cam_focal_length_ratio_t, ratio,
6761 CAM_INTF_META_AF_FOCAL_LENGTH_RATIO, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07006762 memcpy(&(repro_info.af_focal_length_ratio), ratio, sizeof(cam_focal_length_ratio_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07006763 }
6764 IF_META_AVAILABLE(int32_t, flip, CAM_INTF_PARM_FLIP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07006765 memcpy(&(repro_info.pipeline_flip), flip, sizeof(int32_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07006766 }
6767 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
6768 CAM_INTF_PARM_ROTATION, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07006769 memcpy(&(repro_info.rotation_info), rotationInfo, sizeof(cam_rotation_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07006770 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07006771 IF_META_AVAILABLE(cam_area_t, afRoi, CAM_INTF_META_AF_ROI, metadata) {
6772 memcpy(&(repro_info.af_roi), afRoi, sizeof(cam_area_t));
6773 }
6774 IF_META_AVAILABLE(cam_dyn_img_data_t, dynMask, CAM_INTF_META_IMG_DYN_FEAT, metadata) {
6775 memcpy(&(repro_info.dyn_mask), dynMask, sizeof(cam_dyn_img_data_t));
6776 }
6777 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB,
6778 (uint8_t *)&repro_info, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07006779
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006780 // INSTANT AEC MODE
6781 IF_META_AVAILABLE(uint8_t, instant_aec_mode,
6782 CAM_INTF_PARM_INSTANT_AEC, metadata) {
6783 camMetadata.update(QCAMERA3_INSTANT_AEC_MODE, instant_aec_mode, 1);
6784 }
6785
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006786 /* In batch mode, cache the first metadata in the batch */
6787 if (mBatchSize && firstMetadataInBatch) {
6788 mCachedMetadata.clear();
6789 mCachedMetadata = camMetadata;
6790 }
6791
Thierry Strudel3d639192016-09-09 11:52:26 -07006792 resultMetadata = camMetadata.release();
6793 return resultMetadata;
6794}
6795
6796/*===========================================================================
6797 * FUNCTION : saveExifParams
6798 *
 * DESCRIPTION: Caches the per-frame 3A/stats EXIF debug blobs (AE, AWB, AF,
 *              ASD, stats, BE-stats, bayer-histogram, 3A tuning) carried in a
 *              metadata callback into mExifParams for later JPEG encoding.
6800 *
6801 * PARAMETERS :
6802 * @metadata : metadata information from callback
6803 *
6804 * RETURN : none
6805 *
6806 *==========================================================================*/
void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
{
    // Copy every EXIF debug section present in this metadata buffer into the
    // cached mExifParams.debug_params container. Each section is copied only
    // when the container has been allocated, and its matching *_valid flag is
    // raised so the JPEG encoder knows the cached copy is usable.

    // AE (auto-exposure) debug blob.
    IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
            mExifParams.debug_params->ae_debug_params_valid = TRUE;
        }
    }
    // AWB (auto-white-balance) debug blob.
    IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
            mExifParams.debug_params->awb_debug_params_valid = TRUE;
        }
    }
    // AF (auto-focus) debug blob.
    IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
            mExifParams.debug_params->af_debug_params_valid = TRUE;
        }
    }
    // ASD (auto scene detection) debug blob.
    IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
            mExifParams.debug_params->asd_debug_params_valid = TRUE;
        }
    }
    // General stats buffer debug blob.
    IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
            mExifParams.debug_params->stats_debug_params_valid = TRUE;
        }
    }
    // Bayer-exposure stats debug blob.
    IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t,bestats_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_BESTATS, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
            mExifParams.debug_params->bestats_debug_params_valid = TRUE;
        }
    }
    // Bayer-histogram debug blob.
    IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_BHIST, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
            mExifParams.debug_params->bhist_debug_params_valid = TRUE;
        }
    }
    // 3A tuning info debug blob.
    IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_3A_TUNING, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
            mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
        }
    }
}
6866
6867/*===========================================================================
6868 * FUNCTION : get3AExifParams
6869 *
 * DESCRIPTION: Returns a copy of the cached 3A EXIF parameters previously
 *              collected from metadata callbacks (see saveExifParams).
6871 *
6872 * PARAMETERS : none
6873 *
6874 *
6875 * RETURN : mm_jpeg_exif_params_t
6876 *
6877 *==========================================================================*/
mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
{
    // Returns the cached 3A EXIF parameters by value; the cache is filled
    // incrementally by saveExifParams() as metadata callbacks arrive.
    return mExifParams;
}
6882
6883/*===========================================================================
6884 * FUNCTION : translateCbUrgentMetadataToResultMetadata
6885 *
 * DESCRIPTION: Translates the "urgent" (partial-result) subset of HAL
 *              metadata — 3A states, triggers and modes — into framework
 *              camera_metadata_t form for early delivery to the framework.
6887 *
6888 * PARAMETERS :
6889 * @metadata : metadata information from callback
6890 *
6891 * RETURN : camera_metadata_t*
6892 * metadata in a format specified by fwk
6893 *==========================================================================*/
camera_metadata_t*
QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
                                 (metadata_buffer_t *metadata)
{
    CameraMetadata camMetadata;
    camera_metadata_t *resultMetadata;


    // AWB state -> ANDROID_CONTROL_AWB_STATE (narrowed to the uint8 enum the
    // framework expects).
    IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
        uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
        camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
        LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
    }

    // AE precapture trigger and its id are reported back as two separate
    // framework tags.
    IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
        camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
                &aecTrigger->trigger, 1);
        camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
                &aecTrigger->trigger_id, 1);
        LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
                 aecTrigger->trigger);
        LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
                aecTrigger->trigger_id);
    }

    // AE state -> ANDROID_CONTROL_AE_STATE.
    IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
        uint8_t fwk_ae_state = (uint8_t) *ae_state;
        camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
        LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
    }

    // HAL focus mode -> ANDROID_CONTROL_AF_MODE via the FOCUS_MODES_MAP
    // lookup table; unknown values are logged and dropped.
    IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
        int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
        if (NAME_NOT_FOUND != val) {
            uint8_t fwkAfMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
            LOGD("urgent Metadata : ANDROID_CONTROL_AF_MODE %d", val);
        } else {
            LOGH("urgent Metadata not found : ANDROID_CONTROL_AF_MODE %d",
                    val);
        }
    }

    // AF trigger and its id.
    IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
        camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
                &af_trigger->trigger, 1);
        LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
                 af_trigger->trigger);
        camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &af_trigger->trigger_id, 1);
        LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
                af_trigger->trigger_id);
    }

    // HAL white-balance mode -> ANDROID_CONTROL_AWB_MODE via lookup table.
    IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
        int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
                METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
        if (NAME_NOT_FOUND != val) {
            uint8_t fwkWhiteBalanceMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
            LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
        } else {
            LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
        }
    }

    // Deduce ANDROID_CONTROL_AE_MODE from three independent HAL parameters.
    // Sentinel initial values (CAM_*_MAX / -1) mark "not reported".
    uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
    uint32_t aeMode = CAM_AE_MODE_MAX;
    int32_t flashMode = CAM_FLASH_MODE_MAX;
    int32_t redeye = -1;
    IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
        aeMode = *pAeMode;
    }
    IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
        flashMode = *pFlashMode;
    }
    IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
        redeye = *pRedeye;
    }

    // Priority order: red-eye reduction wins over flash mode, which wins over
    // the plain AE on/off mode.
    if (1 == redeye) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
        int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
                flashMode);
        if (NAME_NOT_FOUND != val) {
            fwk_aeMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
        } else {
            LOGE("Unsupported flash mode %d", flashMode);
        }
    } else if (aeMode == CAM_AE_MODE_ON) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else if (aeMode == CAM_AE_MODE_OFF) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else {
        // None of the three parameters gave a usable value; AE mode is not
        // reported for this frame.
        LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
              "flashMode:%d, aeMode:%u!!!",
                 redeye, flashMode, aeMode);
    }
    if (mInstantAEC) {
        // Increment frame Idx count until a bound reached for instant AEC.
        mInstantAecFrameIdxCount++;
        IF_META_AVAILABLE(cam_3a_params_t, ae_params,
                CAM_INTF_META_AEC_INFO, metadata) {
            LOGH("ae_params->settled = %d",ae_params->settled);
            // If AEC settled, or if number of frames reached bound value,
            // should reset instant AEC.
            if (ae_params->settled ||
                    (mInstantAecFrameIdxCount > mAecSkipDisplayFrameBound)) {
                LOGH("AEC settled or Frames reached instantAEC bound, resetting instantAEC");
                mInstantAEC = false;
                mResetInstantAEC = true;
                mInstantAecFrameIdxCount = 0;
            }
        }
    }
    // Ownership of the underlying camera_metadata_t transfers to the caller.
    resultMetadata = camMetadata.release();
    return resultMetadata;
}
7016
7017/*===========================================================================
7018 * FUNCTION : dumpMetadataToFile
7019 *
7020 * DESCRIPTION: Dumps tuning metadata to file system
7021 *
7022 * PARAMETERS :
7023 * @meta : tuning metadata
7024 * @dumpFrameCount : current dump frame count
7025 * @enabled : Enable mask
7026 *
7027 *==========================================================================*/
7028void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
7029 uint32_t &dumpFrameCount,
7030 bool enabled,
7031 const char *type,
7032 uint32_t frameNumber)
7033{
7034 //Some sanity checks
7035 if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
7036 LOGE("Tuning sensor data size bigger than expected %d: %d",
7037 meta.tuning_sensor_data_size,
7038 TUNING_SENSOR_DATA_MAX);
7039 return;
7040 }
7041
7042 if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
7043 LOGE("Tuning VFE data size bigger than expected %d: %d",
7044 meta.tuning_vfe_data_size,
7045 TUNING_VFE_DATA_MAX);
7046 return;
7047 }
7048
7049 if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
7050 LOGE("Tuning CPP data size bigger than expected %d: %d",
7051 meta.tuning_cpp_data_size,
7052 TUNING_CPP_DATA_MAX);
7053 return;
7054 }
7055
7056 if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
7057 LOGE("Tuning CAC data size bigger than expected %d: %d",
7058 meta.tuning_cac_data_size,
7059 TUNING_CAC_DATA_MAX);
7060 return;
7061 }
7062 //
7063
7064 if(enabled){
7065 char timeBuf[FILENAME_MAX];
7066 char buf[FILENAME_MAX];
7067 memset(buf, 0, sizeof(buf));
7068 memset(timeBuf, 0, sizeof(timeBuf));
7069 time_t current_time;
7070 struct tm * timeinfo;
7071 time (&current_time);
7072 timeinfo = localtime (&current_time);
7073 if (timeinfo != NULL) {
7074 strftime (timeBuf, sizeof(timeBuf),
7075 QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
7076 }
7077 String8 filePath(timeBuf);
7078 snprintf(buf,
7079 sizeof(buf),
7080 "%dm_%s_%d.bin",
7081 dumpFrameCount,
7082 type,
7083 frameNumber);
7084 filePath.append(buf);
7085 int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
7086 if (file_fd >= 0) {
7087 ssize_t written_len = 0;
7088 meta.tuning_data_version = TUNING_DATA_VERSION;
7089 void *data = (void *)((uint8_t *)&meta.tuning_data_version);
7090 written_len += write(file_fd, data, sizeof(uint32_t));
7091 data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
7092 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
7093 written_len += write(file_fd, data, sizeof(uint32_t));
7094 data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
7095 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
7096 written_len += write(file_fd, data, sizeof(uint32_t));
7097 data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
7098 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
7099 written_len += write(file_fd, data, sizeof(uint32_t));
7100 data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
7101 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
7102 written_len += write(file_fd, data, sizeof(uint32_t));
7103 meta.tuning_mod3_data_size = 0;
7104 data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
7105 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
7106 written_len += write(file_fd, data, sizeof(uint32_t));
7107 size_t total_size = meta.tuning_sensor_data_size;
7108 data = (void *)((uint8_t *)&meta.data);
7109 written_len += write(file_fd, data, total_size);
7110 total_size = meta.tuning_vfe_data_size;
7111 data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
7112 written_len += write(file_fd, data, total_size);
7113 total_size = meta.tuning_cpp_data_size;
7114 data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
7115 written_len += write(file_fd, data, total_size);
7116 total_size = meta.tuning_cac_data_size;
7117 data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
7118 written_len += write(file_fd, data, total_size);
7119 close(file_fd);
7120 }else {
7121 LOGE("fail to open file for metadata dumping");
7122 }
7123 }
7124}
7125
7126/*===========================================================================
7127 * FUNCTION : cleanAndSortStreamInfo
7128 *
7129 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
7130 * and sort them such that raw stream is at the end of the list
7131 * This is a workaround for camera daemon constraint.
7132 *
7133 * PARAMETERS : None
7134 *
7135 *==========================================================================*/
7136void QCamera3HardwareInterface::cleanAndSortStreamInfo()
7137{
7138 List<stream_info_t *> newStreamInfo;
7139
7140 /*clean up invalid streams*/
7141 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
7142 it != mStreamInfo.end();) {
7143 if(((*it)->status) == INVALID){
7144 QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
7145 delete channel;
7146 free(*it);
7147 it = mStreamInfo.erase(it);
7148 } else {
7149 it++;
7150 }
7151 }
7152
7153 // Move preview/video/callback/snapshot streams into newList
7154 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
7155 it != mStreamInfo.end();) {
7156 if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
7157 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
7158 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
7159 newStreamInfo.push_back(*it);
7160 it = mStreamInfo.erase(it);
7161 } else
7162 it++;
7163 }
7164 // Move raw streams into newList
7165 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
7166 it != mStreamInfo.end();) {
7167 newStreamInfo.push_back(*it);
7168 it = mStreamInfo.erase(it);
7169 }
7170
7171 mStreamInfo = newStreamInfo;
7172}
7173
7174/*===========================================================================
7175 * FUNCTION : extractJpegMetadata
7176 *
7177 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
7178 * JPEG metadata is cached in HAL, and return as part of capture
7179 * result when metadata is returned from camera daemon.
7180 *
7181 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
7182 * @request: capture request
7183 *
7184 *==========================================================================*/
7185void QCamera3HardwareInterface::extractJpegMetadata(
7186 CameraMetadata& jpegMetadata,
7187 const camera3_capture_request_t *request)
7188{
7189 CameraMetadata frame_settings;
7190 frame_settings = request->settings;
7191
7192 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
7193 jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
7194 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
7195 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
7196
7197 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
7198 jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
7199 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
7200 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
7201
7202 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
7203 jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
7204 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
7205 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
7206
7207 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
7208 jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
7209 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
7210 frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
7211
7212 if (frame_settings.exists(ANDROID_JPEG_QUALITY))
7213 jpegMetadata.update(ANDROID_JPEG_QUALITY,
7214 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
7215 frame_settings.find(ANDROID_JPEG_QUALITY).count);
7216
7217 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
7218 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
7219 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
7220 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
7221
7222 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
7223 int32_t thumbnail_size[2];
7224 thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
7225 thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
7226 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
7227 int32_t orientation =
7228 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007229 if ((!needJpegExifRotation()) && ((orientation == 90) || (orientation == 270))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07007230 //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
7231 int32_t temp;
7232 temp = thumbnail_size[0];
7233 thumbnail_size[0] = thumbnail_size[1];
7234 thumbnail_size[1] = temp;
7235 }
7236 }
7237 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
7238 thumbnail_size,
7239 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
7240 }
7241
7242}
7243
7244/*===========================================================================
7245 * FUNCTION : convertToRegions
7246 *
7247 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
7248 *
7249 * PARAMETERS :
7250 * @rect : cam_rect_t struct to convert
7251 * @region : int32_t destination array
7252 * @weight : if we are converting from cam_area_t, weight is valid
7253 * else weight = -1
7254 *
7255 *==========================================================================*/
7256void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
7257 int32_t *region, int weight)
7258{
7259 region[0] = rect.left;
7260 region[1] = rect.top;
7261 region[2] = rect.left + rect.width;
7262 region[3] = rect.top + rect.height;
7263 if (weight > -1) {
7264 region[4] = weight;
7265 }
7266}
7267
7268/*===========================================================================
7269 * FUNCTION : convertFromRegions
7270 *
 * DESCRIPTION: helper method to convert a metadata region array into cam_area_t
 *
 * PARAMETERS :
 *   @roi      : cam_area_t destination filled from the request metadata
 *   @settings : capture request metadata containing the region tag
 *   @tag      : metadata tag whose [xMin, yMin, xMax, yMax, weight]
 *               entry is converted
7277 * else weight = -1
7278 *
7279 *==========================================================================*/
7280void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
7281 const camera_metadata_t *settings, uint32_t tag)
7282{
7283 CameraMetadata frame_settings;
7284 frame_settings = settings;
7285 int32_t x_min = frame_settings.find(tag).data.i32[0];
7286 int32_t y_min = frame_settings.find(tag).data.i32[1];
7287 int32_t x_max = frame_settings.find(tag).data.i32[2];
7288 int32_t y_max = frame_settings.find(tag).data.i32[3];
7289 roi.weight = frame_settings.find(tag).data.i32[4];
7290 roi.rect.left = x_min;
7291 roi.rect.top = y_min;
7292 roi.rect.width = x_max - x_min;
7293 roi.rect.height = y_max - y_min;
7294}
7295
7296/*===========================================================================
7297 * FUNCTION : resetIfNeededROI
7298 *
7299 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
7300 * crop region
7301 *
7302 * PARAMETERS :
7303 * @roi : cam_area_t struct to resize
7304 * @scalerCropRegion : cam_crop_region_t region to compare against
7305 *
7306 *
7307 *==========================================================================*/
7308bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
7309 const cam_crop_region_t* scalerCropRegion)
7310{
7311 int32_t roi_x_max = roi->rect.width + roi->rect.left;
7312 int32_t roi_y_max = roi->rect.height + roi->rect.top;
7313 int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
7314 int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
7315
7316 /* According to spec weight = 0 is used to indicate roi needs to be disabled
7317 * without having this check the calculations below to validate if the roi
7318 * is inside scalar crop region will fail resulting in the roi not being
7319 * reset causing algorithm to continue to use stale roi window
7320 */
7321 if (roi->weight == 0) {
7322 return true;
7323 }
7324
7325 if ((roi_x_max < scalerCropRegion->left) ||
7326 // right edge of roi window is left of scalar crop's left edge
7327 (roi_y_max < scalerCropRegion->top) ||
7328 // bottom edge of roi window is above scalar crop's top edge
7329 (roi->rect.left > crop_x_max) ||
7330 // left edge of roi window is beyond(right) of scalar crop's right edge
7331 (roi->rect.top > crop_y_max)){
7332 // top edge of roi windo is above scalar crop's top edge
7333 return false;
7334 }
7335 if (roi->rect.left < scalerCropRegion->left) {
7336 roi->rect.left = scalerCropRegion->left;
7337 }
7338 if (roi->rect.top < scalerCropRegion->top) {
7339 roi->rect.top = scalerCropRegion->top;
7340 }
7341 if (roi_x_max > crop_x_max) {
7342 roi_x_max = crop_x_max;
7343 }
7344 if (roi_y_max > crop_y_max) {
7345 roi_y_max = crop_y_max;
7346 }
7347 roi->rect.width = roi_x_max - roi->rect.left;
7348 roi->rect.height = roi_y_max - roi->rect.top;
7349 return true;
7350}
7351
7352/*===========================================================================
7353 * FUNCTION : convertLandmarks
7354 *
7355 * DESCRIPTION: helper method to extract the landmarks from face detection info
7356 *
7357 * PARAMETERS :
7358 * @landmark_data : input landmark data to be converted
7359 * @landmarks : int32_t destination array
7360 *
7361 *
7362 *==========================================================================*/
7363void QCamera3HardwareInterface::convertLandmarks(
7364 cam_face_landmarks_info_t landmark_data,
7365 int32_t *landmarks)
7366{
Thierry Strudel04e026f2016-10-10 11:27:36 -07007367 if (landmark_data.is_left_eye_valid) {
7368 landmarks[LEFT_EYE_X] = (int32_t)landmark_data.left_eye_center.x;
7369 landmarks[LEFT_EYE_Y] = (int32_t)landmark_data.left_eye_center.y;
7370 } else {
7371 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
7372 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
7373 }
7374
7375 if (landmark_data.is_right_eye_valid) {
7376 landmarks[RIGHT_EYE_X] = (int32_t)landmark_data.right_eye_center.x;
7377 landmarks[RIGHT_EYE_Y] = (int32_t)landmark_data.right_eye_center.y;
7378 } else {
7379 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
7380 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
7381 }
7382
7383 if (landmark_data.is_mouth_valid) {
7384 landmarks[MOUTH_X] = (int32_t)landmark_data.mouth_center.x;
7385 landmarks[MOUTH_Y] = (int32_t)landmark_data.mouth_center.y;
7386 } else {
7387 landmarks[MOUTH_X] = FACE_INVALID_POINT;
7388 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
7389 }
7390}
7391
7392/*===========================================================================
7393 * FUNCTION : setInvalidLandmarks
7394 *
7395 * DESCRIPTION: helper method to set invalid landmarks
7396 *
7397 * PARAMETERS :
7398 * @landmarks : int32_t destination array
7399 *
7400 *
7401 *==========================================================================*/
7402void QCamera3HardwareInterface::setInvalidLandmarks(
7403 int32_t *landmarks)
7404{
7405 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
7406 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
7407 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
7408 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
7409 landmarks[MOUTH_X] = FACE_INVALID_POINT;
7410 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
Thierry Strudel3d639192016-09-09 11:52:26 -07007411}
7412
7413#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007414
7415/*===========================================================================
7416 * FUNCTION : getCapabilities
7417 *
7418 * DESCRIPTION: query camera capability from back-end
7419 *
7420 * PARAMETERS :
7421 * @ops : mm-interface ops structure
7422 * @cam_handle : camera handle for which we need capability
7423 *
7424 * RETURN : ptr type of capability structure
7425 * capability for success
7426 * NULL for failure
7427 *==========================================================================*/
cam_capability_t *QCamera3HardwareInterface::getCapabilities(mm_camera_ops_t *ops,
        uint32_t cam_handle)
{
    int rc = NO_ERROR;
    QCamera3HeapMemory *capabilityHeap = NULL;
    cam_capability_t *cap_ptr = NULL;

    if (ops == NULL) {
        LOGE("Invalid arguments");
        return NULL;
    }

    // Temporary ION-backed heap used only to receive the capability data
    // from the backend; it is torn down before returning.
    capabilityHeap = new QCamera3HeapMemory(1);
    if (capabilityHeap == NULL) {
        LOGE("creation of capabilityHeap failed");
        return NULL;
    }

    /* Allocate memory for capability buffer */
    rc = capabilityHeap->allocate(sizeof(cam_capability_t));
    if(rc != OK) {
        LOGE("No memory for cappability");
        goto allocate_failed;
    }

    /* Map memory for capability buffer */
    memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));

    // Share the buffer with the backend so query_capability() can fill it.
    rc = ops->map_buf(cam_handle,
            CAM_MAPPING_BUF_TYPE_CAPABILITY, capabilityHeap->getFd(0),
            sizeof(cam_capability_t), capabilityHeap->getPtr(0));
    if(rc < 0) {
        LOGE("failed to map capability buffer");
        rc = FAILED_TRANSACTION;
        goto map_failed;
    }

    /* Query Capability */
    rc = ops->query_capability(cam_handle);
    if(rc < 0) {
        LOGE("failed to query capability");
        rc = FAILED_TRANSACTION;
        goto query_failed;
    }

    // Heap copy returned to the caller; caller owns it (free()).
    cap_ptr = (cam_capability_t *)malloc(sizeof(cam_capability_t));
    if (cap_ptr == NULL) {
        LOGE("out of memory");
        rc = NO_MEMORY;
        goto query_failed;
    }

    memset(cap_ptr, 0, sizeof(cam_capability_t));
    memcpy(cap_ptr, DATA_PTR(capabilityHeap, 0), sizeof(cam_capability_t));

    // Zero out the analysis-stream padding offsets in the local copy.
    int index;
    for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
        cam_analysis_info_t *p_analysis_info = &cap_ptr->analysis_info[index];
        p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
        p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
    }

// Cleanup ladder: each label undoes the setup steps completed before the
// corresponding failure point (and the success path falls through all of it).
query_failed:
    ops->unmap_buf(cam_handle, CAM_MAPPING_BUF_TYPE_CAPABILITY);
map_failed:
    capabilityHeap->deallocate();
allocate_failed:
    delete capabilityHeap;

    // rc is only non-NO_ERROR on a failure path, where cap_ptr is NULL anyway.
    if (rc != NO_ERROR) {
        return NULL;
    } else {
        return cap_ptr;
    }
}
7503
Thierry Strudel3d639192016-09-09 11:52:26 -07007504/*===========================================================================
7505 * FUNCTION : initCapabilities
7506 *
7507 * DESCRIPTION: initialize camera capabilities in static data struct
7508 *
7509 * PARAMETERS :
7510 * @cameraId : camera Id
7511 *
7512 * RETURN : int32_t type of status
7513 * NO_ERROR -- success
7514 * none-zero failure code
7515 *==========================================================================*/
7516int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
7517{
7518 int rc = 0;
7519 mm_camera_vtbl_t *cameraHandle = NULL;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007520 uint32_t handle = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07007521
7522 rc = camera_open((uint8_t)cameraId, &cameraHandle);
7523 if (rc) {
7524 LOGE("camera_open failed. rc = %d", rc);
7525 goto open_failed;
7526 }
7527 if (!cameraHandle) {
7528 LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
7529 goto open_failed;
7530 }
7531
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007532 handle = get_main_camera_handle(cameraHandle->camera_handle);
7533 gCamCapability[cameraId] = getCapabilities(cameraHandle->ops, handle);
7534 if (gCamCapability[cameraId] == NULL) {
7535 rc = FAILED_TRANSACTION;
7536 goto failed_op;
Thierry Strudel3d639192016-09-09 11:52:26 -07007537 }
7538
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007539 gCamCapability[cameraId]->camera_index = cameraId;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007540 if (is_dual_camera_by_idx(cameraId)) {
7541 handle = get_aux_camera_handle(cameraHandle->camera_handle);
7542 gCamCapability[cameraId]->aux_cam_cap =
7543 getCapabilities(cameraHandle->ops, handle);
7544 if (gCamCapability[cameraId]->aux_cam_cap == NULL) {
7545 rc = FAILED_TRANSACTION;
7546 free(gCamCapability[cameraId]);
7547 goto failed_op;
7548 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08007549
7550 // Copy the main camera capability to main_cam_cap struct
7551 gCamCapability[cameraId]->main_cam_cap =
7552 (cam_capability_t *)malloc(sizeof(cam_capability_t));
7553 if (gCamCapability[cameraId]->main_cam_cap == NULL) {
7554 LOGE("out of memory");
7555 rc = NO_MEMORY;
7556 goto failed_op;
7557 }
7558 memcpy(gCamCapability[cameraId]->main_cam_cap, gCamCapability[cameraId],
7559 sizeof(cam_capability_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007560 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007561failed_op:
Thierry Strudel3d639192016-09-09 11:52:26 -07007562 cameraHandle->ops->close_camera(cameraHandle->camera_handle);
7563 cameraHandle = NULL;
7564open_failed:
7565 return rc;
7566}
7567
7568/*==========================================================================
7569 * FUNCTION : get3Aversion
7570 *
7571 * DESCRIPTION: get the Q3A S/W version
7572 *
7573 * PARAMETERS :
7574 * @sw_version: Reference of Q3A structure which will hold version info upon
7575 * return
7576 *
7577 * RETURN : None
7578 *
7579 *==========================================================================*/
7580void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
7581{
7582 if(gCamCapability[mCameraId])
7583 sw_version = gCamCapability[mCameraId]->q3a_version;
7584 else
7585 LOGE("Capability structure NULL!");
7586}
7587
7588
7589/*===========================================================================
7590 * FUNCTION : initParameters
7591 *
7592 * DESCRIPTION: initialize camera parameters
7593 *
7594 * PARAMETERS :
7595 *
7596 * RETURN : int32_t type of status
7597 * NO_ERROR -- success
7598 * none-zero failure code
7599 *==========================================================================*/
7600int QCamera3HardwareInterface::initParameters()
7601{
7602 int rc = 0;
7603
7604 //Allocate Set Param Buffer
7605 mParamHeap = new QCamera3HeapMemory(1);
7606 rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
7607 if(rc != OK) {
7608 rc = NO_MEMORY;
7609 LOGE("Failed to allocate SETPARM Heap memory");
7610 delete mParamHeap;
7611 mParamHeap = NULL;
7612 return rc;
7613 }
7614
7615 //Map memory for parameters buffer
7616 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
7617 CAM_MAPPING_BUF_TYPE_PARM_BUF,
7618 mParamHeap->getFd(0),
7619 sizeof(metadata_buffer_t),
7620 (metadata_buffer_t *) DATA_PTR(mParamHeap,0));
7621 if(rc < 0) {
7622 LOGE("failed to map SETPARM buffer");
7623 rc = FAILED_TRANSACTION;
7624 mParamHeap->deallocate();
7625 delete mParamHeap;
7626 mParamHeap = NULL;
7627 return rc;
7628 }
7629
7630 mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
7631
7632 mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
7633 return rc;
7634}
7635
7636/*===========================================================================
7637 * FUNCTION : deinitParameters
7638 *
7639 * DESCRIPTION: de-initialize camera parameters
7640 *
7641 * PARAMETERS :
7642 *
7643 * RETURN : NONE
7644 *==========================================================================*/
7645void QCamera3HardwareInterface::deinitParameters()
7646{
7647 mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
7648 CAM_MAPPING_BUF_TYPE_PARM_BUF);
7649
7650 mParamHeap->deallocate();
7651 delete mParamHeap;
7652 mParamHeap = NULL;
7653
7654 mParameters = NULL;
7655
7656 free(mPrevParameters);
7657 mPrevParameters = NULL;
7658}
7659
7660/*===========================================================================
7661 * FUNCTION : calcMaxJpegSize
7662 *
7663 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
7664 *
7665 * PARAMETERS :
7666 *
7667 * RETURN : max_jpeg_size
7668 *==========================================================================*/
7669size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
7670{
7671 size_t max_jpeg_size = 0;
7672 size_t temp_width, temp_height;
7673 size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
7674 MAX_SIZES_CNT);
7675 for (size_t i = 0; i < count; i++) {
7676 temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
7677 temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
7678 if (temp_width * temp_height > max_jpeg_size ) {
7679 max_jpeg_size = temp_width * temp_height;
7680 }
7681 }
7682 max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
7683 return max_jpeg_size;
7684}
7685
7686/*===========================================================================
7687 * FUNCTION : getMaxRawSize
7688 *
7689 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
7690 *
7691 * PARAMETERS :
7692 *
7693 * RETURN : Largest supported Raw Dimension
7694 *==========================================================================*/
7695cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
7696{
7697 int max_width = 0;
7698 cam_dimension_t maxRawSize;
7699
7700 memset(&maxRawSize, 0, sizeof(cam_dimension_t));
7701 for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
7702 if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
7703 max_width = gCamCapability[camera_id]->raw_dim[i].width;
7704 maxRawSize = gCamCapability[camera_id]->raw_dim[i];
7705 }
7706 }
7707 return maxRawSize;
7708}
7709
7710
7711/*===========================================================================
7712 * FUNCTION : calcMaxJpegDim
7713 *
7714 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
7715 *
7716 * PARAMETERS :
7717 *
7718 * RETURN : max_jpeg_dim
7719 *==========================================================================*/
7720cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
7721{
7722 cam_dimension_t max_jpeg_dim;
7723 cam_dimension_t curr_jpeg_dim;
7724 max_jpeg_dim.width = 0;
7725 max_jpeg_dim.height = 0;
7726 curr_jpeg_dim.width = 0;
7727 curr_jpeg_dim.height = 0;
7728 for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
7729 curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
7730 curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
7731 if (curr_jpeg_dim.width * curr_jpeg_dim.height >
7732 max_jpeg_dim.width * max_jpeg_dim.height ) {
7733 max_jpeg_dim.width = curr_jpeg_dim.width;
7734 max_jpeg_dim.height = curr_jpeg_dim.height;
7735 }
7736 }
7737 return max_jpeg_dim;
7738}
7739
7740/*===========================================================================
7741 * FUNCTION : addStreamConfig
7742 *
7743 * DESCRIPTION: adds the stream configuration to the array
7744 *
7745 * PARAMETERS :
7746 * @available_stream_configs : pointer to stream configuration array
7747 * @scalar_format : scalar format
7748 * @dim : configuration dimension
7749 * @config_type : input or output configuration type
7750 *
7751 * RETURN : NONE
7752 *==========================================================================*/
7753void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
7754 int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
7755{
7756 available_stream_configs.add(scalar_format);
7757 available_stream_configs.add(dim.width);
7758 available_stream_configs.add(dim.height);
7759 available_stream_configs.add(config_type);
7760}
7761
7762/*===========================================================================
 * FUNCTION   : supportBurstCapture
7764 *
7765 * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
7766 *
7767 * PARAMETERS :
7768 * @cameraId : camera Id
7769 *
7770 * RETURN : true if camera supports BURST_CAPTURE
7771 * false otherwise
7772 *==========================================================================*/
7773bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
7774{
7775 const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
7776 const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
7777 const int32_t highResWidth = 3264;
7778 const int32_t highResHeight = 2448;
7779
7780 if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
7781 // Maximum resolution images cannot be captured at >= 10fps
7782 // -> not supporting BURST_CAPTURE
7783 return false;
7784 }
7785
7786 if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
7787 // Maximum resolution images can be captured at >= 20fps
7788 // --> supporting BURST_CAPTURE
7789 return true;
7790 }
7791
7792 // Find the smallest highRes resolution, or largest resolution if there is none
7793 size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
7794 MAX_SIZES_CNT);
7795 size_t highRes = 0;
7796 while ((highRes + 1 < totalCnt) &&
7797 (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
7798 gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
7799 highResWidth * highResHeight)) {
7800 highRes++;
7801 }
7802 if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
7803 return true;
7804 } else {
7805 return false;
7806 }
7807}
7808
7809/*===========================================================================
7810 * FUNCTION : initStaticMetadata
7811 *
7812 * DESCRIPTION: initialize the static metadata
7813 *
7814 * PARAMETERS :
7815 * @cameraId : camera Id
7816 *
7817 * RETURN : int32_t type of status
7818 * 0 -- success
7819 * non-zero failure code
7820 *==========================================================================*/
7821int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
7822{
7823 int rc = 0;
7824 CameraMetadata staticInfo;
7825 size_t count = 0;
7826 bool limitedDevice = false;
7827 char prop[PROPERTY_VALUE_MAX];
7828 bool supportBurst = false;
7829
7830 supportBurst = supportBurstCapture(cameraId);
7831
7832 /* If sensor is YUV sensor (no raw support) or if per-frame control is not
7833 * guaranteed or if min fps of max resolution is less than 20 fps, its
7834 * advertised as limited device*/
7835 limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
7836 (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
7837 (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
7838 !supportBurst;
7839
7840 uint8_t supportedHwLvl = limitedDevice ?
7841 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007842#ifndef USE_HAL_3_3
7843 // LEVEL_3 - This device will support level 3.
7844 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
7845#else
Thierry Strudel3d639192016-09-09 11:52:26 -07007846 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007847#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007848
7849 staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
7850 &supportedHwLvl, 1);
7851
7852 bool facingBack = false;
7853 if ((gCamCapability[cameraId]->position == CAM_POSITION_BACK) ||
7854 (gCamCapability[cameraId]->position == CAM_POSITION_BACK_AUX)) {
7855 facingBack = true;
7856 }
7857 /*HAL 3 only*/
7858 staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
7859 &gCamCapability[cameraId]->min_focus_distance, 1);
7860
7861 staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
7862 &gCamCapability[cameraId]->hyper_focal_distance, 1);
7863
7864 /*should be using focal lengths but sensor doesn't provide that info now*/
7865 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
7866 &gCamCapability[cameraId]->focal_length,
7867 1);
7868
7869 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
7870 gCamCapability[cameraId]->apertures,
7871 MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
7872
7873 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
7874 gCamCapability[cameraId]->filter_densities,
7875 MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
7876
7877
7878 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
7879 (uint8_t *)gCamCapability[cameraId]->optical_stab_modes,
7880 MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count));
7881
7882 int32_t lens_shading_map_size[] = {
7883 MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
7884 MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
7885 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
7886 lens_shading_map_size,
7887 sizeof(lens_shading_map_size)/sizeof(int32_t));
7888
7889 staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
7890 gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
7891
7892 staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
7893 gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
7894
7895 staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
7896 &gCamCapability[cameraId]->max_frame_duration, 1);
7897
7898 camera_metadata_rational baseGainFactor = {
7899 gCamCapability[cameraId]->base_gain_factor.numerator,
7900 gCamCapability[cameraId]->base_gain_factor.denominator};
7901 staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
7902 &baseGainFactor, 1);
7903
7904 staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
7905 (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
7906
7907 int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
7908 gCamCapability[cameraId]->pixel_array_size.height};
7909 staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
7910 pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
7911
7912 int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
7913 gCamCapability[cameraId]->active_array_size.top,
7914 gCamCapability[cameraId]->active_array_size.width,
7915 gCamCapability[cameraId]->active_array_size.height};
7916 staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
7917 active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
7918
7919 staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
7920 &gCamCapability[cameraId]->white_level, 1);
7921
7922 staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
7923 gCamCapability[cameraId]->black_level_pattern, BLACK_LEVEL_PATTERN_CNT);
7924
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007925#ifndef USE_HAL_3_3
7926 bool hasBlackRegions = false;
7927 if (gCamCapability[cameraId]->optical_black_region_count > MAX_OPTICAL_BLACK_REGIONS) {
7928 LOGW("black_region_count: %d is bounded to %d",
7929 gCamCapability[cameraId]->optical_black_region_count, MAX_OPTICAL_BLACK_REGIONS);
7930 gCamCapability[cameraId]->optical_black_region_count = MAX_OPTICAL_BLACK_REGIONS;
7931 }
7932 if (gCamCapability[cameraId]->optical_black_region_count != 0) {
7933 int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
7934 for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i++) {
7935 opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
7936 }
7937 staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
7938 opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
7939 hasBlackRegions = true;
7940 }
7941#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007942 staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
7943 &gCamCapability[cameraId]->flash_charge_duration, 1);
7944
7945 staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
7946 &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
7947
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007948 // SOF timestamp is based on monotonic_boottime. So advertise REALTIME time source
7949 // REALTIME as defined in the HAL3 API is the same as Linux's CLOCK_BOOTTIME
7950 // Ref: kernel/...../msm_isp_util.c: msm_isp_get_timestamp: get_monotonic_boottime
7951 uint8_t timestampSource = ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME;
Thierry Strudel3d639192016-09-09 11:52:26 -07007952 staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
7953 &timestampSource, 1);
7954
7955 staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
7956 &gCamCapability[cameraId]->histogram_size, 1);
7957
7958 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
7959 &gCamCapability[cameraId]->max_histogram_count, 1);
7960
7961 int32_t sharpness_map_size[] = {
7962 gCamCapability[cameraId]->sharpness_map_size.width,
7963 gCamCapability[cameraId]->sharpness_map_size.height};
7964
7965 staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
7966 sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
7967
7968 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
7969 &gCamCapability[cameraId]->max_sharpness_map_value, 1);
7970
7971 int32_t scalar_formats[] = {
7972 ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
7973 ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
7974 ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
7975 ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
7976 HAL_PIXEL_FORMAT_RAW10,
7977 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
7978 size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(int32_t);
7979 staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
7980 scalar_formats,
7981 scalar_formats_count);
7982
7983 int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
7984 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
7985 makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
7986 count, MAX_SIZES_CNT, available_processed_sizes);
7987 staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
7988 available_processed_sizes, count * 2);
7989
7990 int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
7991 count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
7992 makeTable(gCamCapability[cameraId]->raw_dim,
7993 count, MAX_SIZES_CNT, available_raw_sizes);
7994 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
7995 available_raw_sizes, count * 2);
7996
7997 int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
7998 count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
7999 makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
8000 count, MAX_SIZES_CNT, available_fps_ranges);
8001 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
8002 available_fps_ranges, count * 2);
8003
8004 camera_metadata_rational exposureCompensationStep = {
8005 gCamCapability[cameraId]->exp_compensation_step.numerator,
8006 gCamCapability[cameraId]->exp_compensation_step.denominator};
8007 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
8008 &exposureCompensationStep, 1);
8009
8010 Vector<uint8_t> availableVstabModes;
8011 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
8012 char eis_prop[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008013 bool eisSupported = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07008014 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008015 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07008016 uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008017 count = IS_TYPE_MAX;
8018 count = MIN(gCamCapability[cameraId]->supported_is_types_cnt, count);
8019 for (size_t i = 0; i < count; i++) {
8020 if ((gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
8021 (gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
8022 eisSupported = true;
8023 break;
8024 }
8025 }
8026 if (facingBack && eis_prop_set && eisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07008027 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
8028 }
8029 staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
8030 availableVstabModes.array(), availableVstabModes.size());
8031
8032 /*HAL 1 and HAL 3 common*/
8033 uint32_t zoomSteps = gCamCapability[cameraId]->zoom_ratio_tbl_cnt;
8034 uint32_t maxZoomStep = gCamCapability[cameraId]->zoom_ratio_tbl[zoomSteps - 1];
8035 uint32_t minZoomStep = 100; //as per HAL1/API1 spec
8036 float maxZoom = maxZoomStep/minZoomStep;
8037 staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
8038 &maxZoom, 1);
8039
8040 uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
8041 staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
8042
8043 int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
8044 if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
8045 max3aRegions[2] = 0; /* AF not supported */
8046 staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
8047 max3aRegions, 3);
8048
8049 /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
8050 memset(prop, 0, sizeof(prop));
8051 property_get("persist.camera.facedetect", prop, "1");
8052 uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
8053 LOGD("Support face detection mode: %d",
8054 supportedFaceDetectMode);
8055
8056 int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
Thierry Strudel04e026f2016-10-10 11:27:36 -07008057 /* support mode should be OFF if max number of face is 0 */
8058 if (maxFaces <= 0) {
8059 supportedFaceDetectMode = 0;
8060 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008061 Vector<uint8_t> availableFaceDetectModes;
8062 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
8063 if (supportedFaceDetectMode == 1) {
8064 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
8065 } else if (supportedFaceDetectMode == 2) {
8066 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
8067 } else if (supportedFaceDetectMode == 3) {
8068 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
8069 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
8070 } else {
8071 maxFaces = 0;
8072 }
8073 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
8074 availableFaceDetectModes.array(),
8075 availableFaceDetectModes.size());
8076 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
8077 (int32_t *)&maxFaces, 1);
8078
8079 int32_t exposureCompensationRange[] = {
8080 gCamCapability[cameraId]->exposure_compensation_min,
8081 gCamCapability[cameraId]->exposure_compensation_max};
8082 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
8083 exposureCompensationRange,
8084 sizeof(exposureCompensationRange)/sizeof(int32_t));
8085
8086 uint8_t lensFacing = (facingBack) ?
8087 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
8088 staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
8089
8090 staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
8091 available_thumbnail_sizes,
8092 sizeof(available_thumbnail_sizes)/sizeof(int32_t));
8093
8094 /*all sizes will be clubbed into this tag*/
8095 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
8096 /*android.scaler.availableStreamConfigurations*/
8097 Vector<int32_t> available_stream_configs;
8098 cam_dimension_t active_array_dim;
8099 active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
8100 active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
8101 /* Add input/output stream configurations for each scalar formats*/
8102 for (size_t j = 0; j < scalar_formats_count; j++) {
8103 switch (scalar_formats[j]) {
8104 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
8105 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
8106 case HAL_PIXEL_FORMAT_RAW10:
8107 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
8108 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
8109 addStreamConfig(available_stream_configs, scalar_formats[j],
8110 gCamCapability[cameraId]->raw_dim[i],
8111 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
8112 }
8113 break;
8114 case HAL_PIXEL_FORMAT_BLOB:
8115 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
8116 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
8117 addStreamConfig(available_stream_configs, scalar_formats[j],
8118 gCamCapability[cameraId]->picture_sizes_tbl[i],
8119 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
8120 }
8121 break;
8122 case HAL_PIXEL_FORMAT_YCbCr_420_888:
8123 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
8124 default:
8125 cam_dimension_t largest_picture_size;
8126 memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
8127 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
8128 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
8129 addStreamConfig(available_stream_configs, scalar_formats[j],
8130 gCamCapability[cameraId]->picture_sizes_tbl[i],
8131 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
8132 /* Book keep largest */
8133 if (gCamCapability[cameraId]->picture_sizes_tbl[i].width
8134 >= largest_picture_size.width &&
8135 gCamCapability[cameraId]->picture_sizes_tbl[i].height
8136 >= largest_picture_size.height)
8137 largest_picture_size = gCamCapability[cameraId]->picture_sizes_tbl[i];
8138 }
8139 /* For the below 2 formats we also support i/p (reprocessing) streams; advertise those */
8140 if (scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
8141 scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) {
8142 addStreamConfig(available_stream_configs, scalar_formats[j],
8143 largest_picture_size,
8144 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
8145 }
8146 break;
8147 }
8148 }
8149
8150 staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
8151 available_stream_configs.array(), available_stream_configs.size());
8152 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
8153 staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
8154
8155 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
8156 staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
8157
8158 /* android.scaler.availableMinFrameDurations */
8159 Vector<int64_t> available_min_durations;
8160 for (size_t j = 0; j < scalar_formats_count; j++) {
8161 switch (scalar_formats[j]) {
8162 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
8163 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
8164 case HAL_PIXEL_FORMAT_RAW10:
8165 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
8166 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
8167 available_min_durations.add(scalar_formats[j]);
8168 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
8169 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
8170 available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
8171 }
8172 break;
8173 default:
8174 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
8175 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
8176 available_min_durations.add(scalar_formats[j]);
8177 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
8178 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
8179 available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
8180 }
8181 break;
8182 }
8183 }
8184 staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
8185 available_min_durations.array(), available_min_durations.size());
8186
8187 Vector<int32_t> available_hfr_configs;
8188 for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
8189 int32_t fps = 0;
8190 switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
8191 case CAM_HFR_MODE_60FPS:
8192 fps = 60;
8193 break;
8194 case CAM_HFR_MODE_90FPS:
8195 fps = 90;
8196 break;
8197 case CAM_HFR_MODE_120FPS:
8198 fps = 120;
8199 break;
8200 case CAM_HFR_MODE_150FPS:
8201 fps = 150;
8202 break;
8203 case CAM_HFR_MODE_180FPS:
8204 fps = 180;
8205 break;
8206 case CAM_HFR_MODE_210FPS:
8207 fps = 210;
8208 break;
8209 case CAM_HFR_MODE_240FPS:
8210 fps = 240;
8211 break;
8212 case CAM_HFR_MODE_480FPS:
8213 fps = 480;
8214 break;
8215 case CAM_HFR_MODE_OFF:
8216 case CAM_HFR_MODE_MAX:
8217 default:
8218 break;
8219 }
8220
8221 /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
8222 if (fps >= MIN_FPS_FOR_BATCH_MODE) {
8223 /* For each HFR frame rate, need to advertise one variable fps range
8224 * and one fixed fps range per dimension. Eg: for 120 FPS, advertise [30, 120]
8225 * and [120, 120]. While camcorder preview alone is running [30, 120] is
8226 * set by the app. When video recording is started, [120, 120] is
8227 * set. This way sensor configuration does not change when recording
8228 * is started */
8229
8230 /* (width, height, fps_min, fps_max, batch_size_max) */
8231 for (size_t j = 0; j < gCamCapability[cameraId]->hfr_tbl[i].dim_cnt &&
8232 j < MAX_SIZES_CNT; j++) {
8233 available_hfr_configs.add(
8234 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
8235 available_hfr_configs.add(
8236 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
8237 available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
8238 available_hfr_configs.add(fps);
8239 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
8240
8241 /* (width, height, fps_min, fps_max, batch_size_max) */
8242 available_hfr_configs.add(
8243 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
8244 available_hfr_configs.add(
8245 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
8246 available_hfr_configs.add(fps);
8247 available_hfr_configs.add(fps);
8248 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
8249 }
8250 }
8251 }
8252 //Advertise HFR capability only if the property is set
8253 memset(prop, 0, sizeof(prop));
8254 property_get("persist.camera.hal3hfr.enable", prop, "1");
8255 uint8_t hfrEnable = (uint8_t)atoi(prop);
8256
8257 if(hfrEnable && available_hfr_configs.array()) {
8258 staticInfo.update(
8259 ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
8260 available_hfr_configs.array(), available_hfr_configs.size());
8261 }
8262
8263 int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
8264 staticInfo.update(ANDROID_JPEG_MAX_SIZE,
8265 &max_jpeg_size, 1);
8266
8267 uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
8268 size_t size = 0;
8269 count = CAM_EFFECT_MODE_MAX;
8270 count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
8271 for (size_t i = 0; i < count; i++) {
8272 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
8273 gCamCapability[cameraId]->supported_effects[i]);
8274 if (NAME_NOT_FOUND != val) {
8275 avail_effects[size] = (uint8_t)val;
8276 size++;
8277 }
8278 }
8279 staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
8280 avail_effects,
8281 size);
8282
8283 uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
8284 uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
8285 size_t supported_scene_modes_cnt = 0;
8286 count = CAM_SCENE_MODE_MAX;
8287 count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
8288 for (size_t i = 0; i < count; i++) {
8289 if (gCamCapability[cameraId]->supported_scene_modes[i] !=
8290 CAM_SCENE_MODE_OFF) {
8291 int val = lookupFwkName(SCENE_MODES_MAP,
8292 METADATA_MAP_SIZE(SCENE_MODES_MAP),
8293 gCamCapability[cameraId]->supported_scene_modes[i]);
8294 if (NAME_NOT_FOUND != val) {
8295 avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
8296 supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
8297 supported_scene_modes_cnt++;
8298 }
8299 }
8300 }
8301 staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
8302 avail_scene_modes,
8303 supported_scene_modes_cnt);
8304
8305 uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
8306 makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
8307 supported_scene_modes_cnt,
8308 CAM_SCENE_MODE_MAX,
8309 scene_mode_overrides,
8310 supported_indexes,
8311 cameraId);
8312
8313 if (supported_scene_modes_cnt == 0) {
8314 supported_scene_modes_cnt = 1;
8315 avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
8316 }
8317
8318 staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
8319 scene_mode_overrides, supported_scene_modes_cnt * 3);
8320
8321 uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
8322 ANDROID_CONTROL_MODE_AUTO,
8323 ANDROID_CONTROL_MODE_USE_SCENE_MODE};
8324 staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
8325 available_control_modes,
8326 3);
8327
8328 uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
8329 size = 0;
8330 count = CAM_ANTIBANDING_MODE_MAX;
8331 count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
8332 for (size_t i = 0; i < count; i++) {
8333 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
8334 gCamCapability[cameraId]->supported_antibandings[i]);
8335 if (NAME_NOT_FOUND != val) {
8336 avail_antibanding_modes[size] = (uint8_t)val;
8337 size++;
8338 }
8339
8340 }
8341 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
8342 avail_antibanding_modes,
8343 size);
8344
8345 uint8_t avail_abberation_modes[] = {
8346 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
8347 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
8348 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
8349 count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
8350 count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
8351 if (0 == count) {
8352 // If no aberration correction modes are available for a device, advertise only the OFF mode
8353 size = 1;
8354 } else {
8355 // If count is not zero then at least one among FAST or HIGH quality is supported
8356 // So, advertise all 3 modes if at least any one mode is supported, as per the
8357 // new M requirement
8358 size = 3;
8359 }
8360 staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
8361 avail_abberation_modes,
8362 size);
8363
8364 uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
8365 size = 0;
8366 count = CAM_FOCUS_MODE_MAX;
8367 count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
8368 for (size_t i = 0; i < count; i++) {
8369 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
8370 gCamCapability[cameraId]->supported_focus_modes[i]);
8371 if (NAME_NOT_FOUND != val) {
8372 avail_af_modes[size] = (uint8_t)val;
8373 size++;
8374 }
8375 }
8376 staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
8377 avail_af_modes,
8378 size);
8379
8380 uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
8381 size = 0;
8382 count = CAM_WB_MODE_MAX;
8383 count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
8384 for (size_t i = 0; i < count; i++) {
8385 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
8386 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
8387 gCamCapability[cameraId]->supported_white_balances[i]);
8388 if (NAME_NOT_FOUND != val) {
8389 avail_awb_modes[size] = (uint8_t)val;
8390 size++;
8391 }
8392 }
8393 staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
8394 avail_awb_modes,
8395 size);
8396
8397 uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
8398 count = CAM_FLASH_FIRING_LEVEL_MAX;
8399 count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
8400 count);
8401 for (size_t i = 0; i < count; i++) {
8402 available_flash_levels[i] =
8403 gCamCapability[cameraId]->supported_firing_levels[i];
8404 }
8405 staticInfo.update(ANDROID_FLASH_FIRING_POWER,
8406 available_flash_levels, count);
8407
8408 uint8_t flashAvailable;
8409 if (gCamCapability[cameraId]->flash_available)
8410 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
8411 else
8412 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
8413 staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
8414 &flashAvailable, 1);
8415
8416 Vector<uint8_t> avail_ae_modes;
8417 count = CAM_AE_MODE_MAX;
8418 count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
8419 for (size_t i = 0; i < count; i++) {
8420 avail_ae_modes.add(gCamCapability[cameraId]->supported_ae_modes[i]);
8421 }
8422 if (flashAvailable) {
8423 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
8424 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
8425 }
8426 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
8427 avail_ae_modes.array(),
8428 avail_ae_modes.size());
8429
8430 int32_t sensitivity_range[2];
8431 sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
8432 sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
8433 staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
8434 sensitivity_range,
8435 sizeof(sensitivity_range) / sizeof(int32_t));
8436
8437 staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
8438 &gCamCapability[cameraId]->max_analog_sensitivity,
8439 1);
8440
8441 int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
8442 staticInfo.update(ANDROID_SENSOR_ORIENTATION,
8443 &sensor_orientation,
8444 1);
8445
8446 int32_t max_output_streams[] = {
8447 MAX_STALLING_STREAMS,
8448 MAX_PROCESSED_STREAMS,
8449 MAX_RAW_STREAMS};
8450 staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
8451 max_output_streams,
8452 sizeof(max_output_streams)/sizeof(max_output_streams[0]));
8453
8454 uint8_t avail_leds = 0;
8455 staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
8456 &avail_leds, 0);
8457
8458 uint8_t focus_dist_calibrated;
8459 int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
8460 gCamCapability[cameraId]->focus_dist_calibrated);
8461 if (NAME_NOT_FOUND != val) {
8462 focus_dist_calibrated = (uint8_t)val;
8463 staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
8464 &focus_dist_calibrated, 1);
8465 }
8466
8467 int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
8468 size = 0;
8469 count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
8470 MAX_TEST_PATTERN_CNT);
8471 for (size_t i = 0; i < count; i++) {
8472 int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
8473 gCamCapability[cameraId]->supported_test_pattern_modes[i]);
8474 if (NAME_NOT_FOUND != testpatternMode) {
8475 avail_testpattern_modes[size] = testpatternMode;
8476 size++;
8477 }
8478 }
8479 staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
8480 avail_testpattern_modes,
8481 size);
8482
8483 uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
8484 staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
8485 &max_pipeline_depth,
8486 1);
8487
8488 int32_t partial_result_count = PARTIAL_RESULT_COUNT;
8489 staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
8490 &partial_result_count,
8491 1);
8492
8493 int32_t max_stall_duration = MAX_REPROCESS_STALL;
8494 staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
8495
8496 Vector<uint8_t> available_capabilities;
8497 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
8498 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
8499 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
8500 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
8501 if (supportBurst) {
8502 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
8503 }
8504 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
8505 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
8506 if (hfrEnable && available_hfr_configs.array()) {
8507 available_capabilities.add(
8508 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
8509 }
8510
8511 if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
8512 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
8513 }
8514 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
8515 available_capabilities.array(),
8516 available_capabilities.size());
8517
8518 //aeLockAvailable to be set to true if capabilities include MANUAL_SENSOR or BURST_CAPTURE
8519 //Assumption is that all bayer cameras support MANUAL_SENSOR.
8520 uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
8521 ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
8522
8523 staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
8524 &aeLockAvailable, 1);
8525
8526 //awbLockAvailable to be set to true if capabilities include MANUAL_POST_PROCESSING or
8527 //BURST_CAPTURE. Assumption is that all bayer cameras support MANUAL_POST_PROCESSING.
8528 uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
8529 ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
8530
8531 staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
8532 &awbLockAvailable, 1);
8533
8534 int32_t max_input_streams = 1;
8535 staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
8536 &max_input_streams,
8537 1);
8538
8539 /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
8540 int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
8541 HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
8542 HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
8543 HAL_PIXEL_FORMAT_YCbCr_420_888};
8544 staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
8545 io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));
8546
8547 int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
8548 staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
8549 &max_latency,
8550 1);
8551
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008552#ifndef USE_HAL_3_3
8553 int32_t isp_sensitivity_range[2];
8554 isp_sensitivity_range[0] =
8555 gCamCapability[cameraId]->isp_sensitivity_range.min_sensitivity;
8556 isp_sensitivity_range[1] =
8557 gCamCapability[cameraId]->isp_sensitivity_range.max_sensitivity;
8558 staticInfo.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
8559 isp_sensitivity_range,
8560 sizeof(isp_sensitivity_range) / sizeof(isp_sensitivity_range[0]));
8561#endif
8562
Thierry Strudel3d639192016-09-09 11:52:26 -07008563 uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
8564 ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
8565 staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
8566 available_hot_pixel_modes,
8567 sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));
8568
8569 uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
8570 ANDROID_SHADING_MODE_FAST,
8571 ANDROID_SHADING_MODE_HIGH_QUALITY};
8572 staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
8573 available_shading_modes,
8574 3);
8575
8576 uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
8577 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
8578 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
8579 available_lens_shading_map_modes,
8580 2);
8581
8582 uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
8583 ANDROID_EDGE_MODE_FAST,
8584 ANDROID_EDGE_MODE_HIGH_QUALITY,
8585 ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
8586 staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
8587 available_edge_modes,
8588 sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));
8589
8590 uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
8591 ANDROID_NOISE_REDUCTION_MODE_FAST,
8592 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
8593 ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
8594 ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
8595 staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
8596 available_noise_red_modes,
8597 sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));
8598
8599 uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
8600 ANDROID_TONEMAP_MODE_FAST,
8601 ANDROID_TONEMAP_MODE_HIGH_QUALITY};
8602 staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
8603 available_tonemap_modes,
8604 sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));
8605
8606 uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
8607 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
8608 available_hot_pixel_map_modes,
8609 sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
8610
8611 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
8612 gCamCapability[cameraId]->reference_illuminant1);
8613 if (NAME_NOT_FOUND != val) {
8614 uint8_t fwkReferenceIlluminant = (uint8_t)val;
8615 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
8616 }
8617
8618 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
8619 gCamCapability[cameraId]->reference_illuminant2);
8620 if (NAME_NOT_FOUND != val) {
8621 uint8_t fwkReferenceIlluminant = (uint8_t)val;
8622 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
8623 }
8624
8625 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
8626 (void *)gCamCapability[cameraId]->forward_matrix1,
8627 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
8628
8629 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
8630 (void *)gCamCapability[cameraId]->forward_matrix2,
8631 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
8632
8633 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
8634 (void *)gCamCapability[cameraId]->color_transform1,
8635 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
8636
8637 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
8638 (void *)gCamCapability[cameraId]->color_transform2,
8639 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
8640
8641 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
8642 (void *)gCamCapability[cameraId]->calibration_transform1,
8643 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
8644
8645 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
8646 (void *)gCamCapability[cameraId]->calibration_transform2,
8647 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
8648
8649 int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
8650 ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
8651 ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
8652 ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
8653 ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
8654 ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
8655 ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
8656 ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
8657 ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
8658 ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
8659 ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
8660 ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
8661 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
8662 ANDROID_JPEG_GPS_COORDINATES,
8663 ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
8664 ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
8665 ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
8666 ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
8667 ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
8668 ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
8669 ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
8670 ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
8671 ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
8672 ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008673#ifndef USE_HAL_3_3
8674 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
8675#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07008676 ANDROID_STATISTICS_FACE_DETECT_MODE,
8677 ANDROID_STATISTICS_HISTOGRAM_MODE, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
8678 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
8679 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Samuel Ha68ba5172016-12-15 18:41:12 -08008680 ANDROID_BLACK_LEVEL_LOCK,
8681 /* DevCamDebug metadata request_keys_basic */
8682 DEVCAMDEBUG_META_ENABLE,
8683 /* DevCamDebug metadata end */
8684 };
Thierry Strudel3d639192016-09-09 11:52:26 -07008685
8686 size_t request_keys_cnt =
8687 sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
8688 Vector<int32_t> available_request_keys;
8689 available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
8690 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
8691 available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
8692 }
8693
8694 staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
8695 available_request_keys.array(), available_request_keys.size());
8696
8697 int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
8698 ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
8699 ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
8700 ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
8701 ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
8702 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
8703 ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
8704 ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
8705 ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
8706 ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
8707 ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
8708 ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
8709 ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
8710 ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
8711 ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
8712 ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
8713 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
8714 ANDROID_STATISTICS_FACE_DETECT_MODE, ANDROID_STATISTICS_HISTOGRAM_MODE,
8715 ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
8716 ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
8717 ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008718 ANDROID_STATISTICS_FACE_SCORES,
8719#ifndef USE_HAL_3_3
8720 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
8721#endif
Samuel Ha68ba5172016-12-15 18:41:12 -08008722 // DevCamDebug metadata result_keys_basic
8723 DEVCAMDEBUG_META_ENABLE,
8724 // DevCamDebug metadata result_keys AF
8725 DEVCAMDEBUG_AF_LENS_POSITION,
8726 DEVCAMDEBUG_AF_TOF_CONFIDENCE,
8727 DEVCAMDEBUG_AF_TOF_DISTANCE,
8728 DEVCAMDEBUG_AF_LUMA,
8729 DEVCAMDEBUG_AF_HAF_STATE,
8730 DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
8731 DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
8732 DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
8733 DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
8734 DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
8735 DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
8736 DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
8737 DEVCAMDEBUG_AF_MONITOR_REFOCUS,
8738 DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
8739 DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
8740 DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
8741 DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
8742 DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
8743 DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
8744 DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
8745 DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
8746 DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
8747 DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
8748 DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
8749 DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
8750 DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
8751 DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
8752 // DevCamDebug metadata result_keys AEC
8753 DEVCAMDEBUG_AEC_TARGET_LUMA,
8754 DEVCAMDEBUG_AEC_COMP_LUMA,
8755 DEVCAMDEBUG_AEC_AVG_LUMA,
8756 DEVCAMDEBUG_AEC_CUR_LUMA,
8757 DEVCAMDEBUG_AEC_LINECOUNT,
8758 DEVCAMDEBUG_AEC_REAL_GAIN,
8759 DEVCAMDEBUG_AEC_EXP_INDEX,
8760 DEVCAMDEBUG_AEC_LUX_IDX,
8761 // DevCamDebug metadata result_keys AWB
8762 DEVCAMDEBUG_AWB_R_GAIN,
8763 DEVCAMDEBUG_AWB_G_GAIN,
8764 DEVCAMDEBUG_AWB_B_GAIN,
8765 DEVCAMDEBUG_AWB_CCT,
8766 DEVCAMDEBUG_AWB_DECISION,
8767 /* DevCamDebug metadata end */
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008768 };
8769
Thierry Strudel3d639192016-09-09 11:52:26 -07008770 size_t result_keys_cnt =
8771 sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
8772
8773 Vector<int32_t> available_result_keys;
8774 available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
8775 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
8776 available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
8777 }
8778 if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
8779 available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
8780 available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
8781 }
8782 if (supportedFaceDetectMode == 1) {
8783 available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
8784 available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
8785 } else if ((supportedFaceDetectMode == 2) ||
8786 (supportedFaceDetectMode == 3)) {
8787 available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
8788 available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
8789 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008790#ifndef USE_HAL_3_3
8791 if (hasBlackRegions) {
8792 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
8793 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL);
8794 }
8795#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07008796 staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
8797 available_result_keys.array(), available_result_keys.size());
8798
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008799 int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
Thierry Strudel3d639192016-09-09 11:52:26 -07008800 ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
8801 ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
8802 ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
8803 ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
8804 ANDROID_SCALER_CROPPING_TYPE,
8805 ANDROID_SYNC_MAX_LATENCY,
8806 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
8807 ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
8808 ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
8809 ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
8810 ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
8811 ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
8812 ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
8813 ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
8814 ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
8815 ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
8816 ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
8817 ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
8818 ANDROID_LENS_FACING,
8819 ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
8820 ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
8821 ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
8822 ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
8823 ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
8824 ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
8825 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
8826 /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
8827 ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
8828 ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
8829 ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
8830 ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
8831 ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
8832 ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
8833 ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
8834 ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
8835 ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
8836 ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
8837 ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
8838 ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
8839 ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
8840 ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
8841 ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
8842 ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
8843 ANDROID_EDGE_AVAILABLE_EDGE_MODES,
8844 ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
8845 ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
8846 ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
8847 ANDROID_TONEMAP_MAX_CURVE_POINTS,
8848 ANDROID_CONTROL_AVAILABLE_MODES,
8849 ANDROID_CONTROL_AE_LOCK_AVAILABLE,
8850 ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
8851 ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
8852 ANDROID_SHADING_AVAILABLE_MODES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008853 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
8854#ifndef USE_HAL_3_3
8855 ANDROID_SENSOR_OPAQUE_RAW_SIZE,
8856 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
8857#endif
8858 };
8859
8860 Vector<int32_t> available_characteristics_keys;
8861 available_characteristics_keys.appendArray(characteristics_keys_basic,
8862 sizeof(characteristics_keys_basic)/sizeof(int32_t));
8863#ifndef USE_HAL_3_3
8864 if (hasBlackRegions) {
8865 available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
8866 }
8867#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07008868 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008869 available_characteristics_keys.array(),
8870 available_characteristics_keys.size());
Thierry Strudel3d639192016-09-09 11:52:26 -07008871
8872 /*available stall durations depend on the hw + sw and will be different for different devices */
8873 /*have to add for raw after implementation*/
8874 int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
8875 size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
8876
8877 Vector<int64_t> available_stall_durations;
8878 for (uint32_t j = 0; j < stall_formats_count; j++) {
8879 if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
8880 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
8881 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
8882 available_stall_durations.add(stall_formats[j]);
8883 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
8884 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
8885 available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
8886 }
8887 } else {
8888 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
8889 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
8890 available_stall_durations.add(stall_formats[j]);
8891 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
8892 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
8893 available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
8894 }
8895 }
8896 }
8897 staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
8898 available_stall_durations.array(),
8899 available_stall_durations.size());
8900
8901 //QCAMERA3_OPAQUE_RAW
8902 uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
8903 cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
8904 switch (gCamCapability[cameraId]->opaque_raw_fmt) {
8905 case LEGACY_RAW:
8906 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
8907 fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
8908 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
8909 fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
8910 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
8911 fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
8912 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
8913 break;
8914 case MIPI_RAW:
8915 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
8916 fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
8917 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
8918 fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
8919 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
8920 fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
8921 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
8922 break;
8923 default:
8924 LOGE("unknown opaque_raw_format %d",
8925 gCamCapability[cameraId]->opaque_raw_fmt);
8926 break;
8927 }
8928 staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
8929
8930 Vector<int32_t> strides;
8931 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
8932 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
8933 cam_stream_buf_plane_info_t buf_planes;
8934 strides.add(gCamCapability[cameraId]->raw_dim[i].width);
8935 strides.add(gCamCapability[cameraId]->raw_dim[i].height);
8936 mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
8937 &gCamCapability[cameraId]->padding_info, &buf_planes);
8938 strides.add(buf_planes.plane_info.mp[0].stride);
8939 }
8940 staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
8941 strides.size());
8942
Thierry Strudel04e026f2016-10-10 11:27:36 -07008943 //Video HDR default
8944 if ((gCamCapability[cameraId]->qcom_supported_feature_mask) &
8945 (CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR |
8946 CAM_QCOM_FEATURE_ZIGZAG_VIDEO_HDR | CAM_QCOM_FEATURE_SENSOR_HDR)) {
8947 int32_t vhdr_mode[] = {
8948 QCAMERA3_VIDEO_HDR_MODE_OFF,
8949 QCAMERA3_VIDEO_HDR_MODE_ON};
8950
8951 size_t vhdr_mode_count = sizeof(vhdr_mode) / sizeof(int32_t);
8952 staticInfo.update(QCAMERA3_AVAILABLE_VIDEO_HDR_MODES,
8953 vhdr_mode, vhdr_mode_count);
8954 }
8955
Thierry Strudel3d639192016-09-09 11:52:26 -07008956 staticInfo.update(QCAMERA3_DUALCAM_CALIB_META_DATA_BLOB,
8957 (const uint8_t*)&gCamCapability[cameraId]->related_cam_calibration,
8958 sizeof(gCamCapability[cameraId]->related_cam_calibration));
8959
8960 uint8_t isMonoOnly =
8961 (gCamCapability[cameraId]->color_arrangement == CAM_FILTER_ARRANGEMENT_Y);
8962 staticInfo.update(QCAMERA3_SENSOR_IS_MONO_ONLY,
8963 &isMonoOnly, 1);
8964
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008965#ifndef USE_HAL_3_3
8966 Vector<int32_t> opaque_size;
8967 for (size_t j = 0; j < scalar_formats_count; j++) {
8968 if (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE) {
8969 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
8970 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
8971 cam_stream_buf_plane_info_t buf_planes;
8972
8973 rc = mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
8974 &gCamCapability[cameraId]->padding_info, &buf_planes);
8975
8976 if (rc == 0) {
8977 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].width);
8978 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].height);
8979 opaque_size.add(buf_planes.plane_info.frame_len);
8980 }else {
8981 LOGE("raw frame calculation failed!");
8982 }
8983 }
8984 }
8985 }
8986
8987 if ((opaque_size.size() > 0) &&
8988 (opaque_size.size() % PER_CONFIGURATION_SIZE_3 == 0))
8989 staticInfo.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, opaque_size.array(), opaque_size.size());
8990 else
8991 LOGW("Warning: ANDROID_SENSOR_OPAQUE_RAW_SIZE is using rough estimation(2 bytes/pixel)");
8992#endif
8993
Thierry Strudel04e026f2016-10-10 11:27:36 -07008994 if (gCamCapability[cameraId]->supported_ir_mode_cnt > 0) {
8995 int32_t avail_ir_modes[CAM_IR_MODE_MAX];
8996 size = 0;
8997 count = CAM_IR_MODE_MAX;
8998 count = MIN(gCamCapability[cameraId]->supported_ir_mode_cnt, count);
8999 for (size_t i = 0; i < count; i++) {
9000 int val = lookupFwkName(IR_MODES_MAP, METADATA_MAP_SIZE(IR_MODES_MAP),
9001 gCamCapability[cameraId]->supported_ir_modes[i]);
9002 if (NAME_NOT_FOUND != val) {
9003 avail_ir_modes[size] = (int32_t)val;
9004 size++;
9005 }
9006 }
9007 staticInfo.update(QCAMERA3_IR_AVAILABLE_MODES,
9008 avail_ir_modes, size);
9009 }
9010
Thierry Strudel295a0ca2016-11-03 18:38:47 -07009011 if (gCamCapability[cameraId]->supported_instant_aec_modes_cnt > 0) {
9012 int32_t available_instant_aec_modes[CAM_AEC_CONVERGENCE_MAX];
9013 size = 0;
9014 count = CAM_AEC_CONVERGENCE_MAX;
9015 count = MIN(gCamCapability[cameraId]->supported_instant_aec_modes_cnt, count);
9016 for (size_t i = 0; i < count; i++) {
9017 int val = lookupFwkName(INSTANT_AEC_MODES_MAP, METADATA_MAP_SIZE(INSTANT_AEC_MODES_MAP),
9018 gCamCapability[cameraId]->supported_instant_aec_modes[i]);
9019 if (NAME_NOT_FOUND != val) {
9020 available_instant_aec_modes[size] = (int32_t)val;
9021 size++;
9022 }
9023 }
9024 staticInfo.update(QCAMERA3_INSTANT_AEC_AVAILABLE_MODES,
9025 available_instant_aec_modes, size);
9026 }
9027
Thierry Strudel3d639192016-09-09 11:52:26 -07009028 gStaticMetadata[cameraId] = staticInfo.release();
9029 return rc;
9030}
9031
9032/*===========================================================================
9033 * FUNCTION : makeTable
9034 *
9035 * DESCRIPTION: make a table of sizes
9036 *
9037 * PARAMETERS :
9038 *
9039 *
9040 *==========================================================================*/
9041void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
9042 size_t max_size, int32_t *sizeTable)
9043{
9044 size_t j = 0;
9045 if (size > max_size) {
9046 size = max_size;
9047 }
9048 for (size_t i = 0; i < size; i++) {
9049 sizeTable[j] = dimTable[i].width;
9050 sizeTable[j+1] = dimTable[i].height;
9051 j+=2;
9052 }
9053}
9054
9055/*===========================================================================
9056 * FUNCTION : makeFPSTable
9057 *
9058 * DESCRIPTION: make a table of fps ranges
9059 *
9060 * PARAMETERS :
9061 *
9062 *==========================================================================*/
9063void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
9064 size_t max_size, int32_t *fpsRangesTable)
9065{
9066 size_t j = 0;
9067 if (size > max_size) {
9068 size = max_size;
9069 }
9070 for (size_t i = 0; i < size; i++) {
9071 fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
9072 fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
9073 j+=2;
9074 }
9075}
9076
9077/*===========================================================================
9078 * FUNCTION : makeOverridesList
9079 *
9080 * DESCRIPTION: make a list of scene mode overrides
9081 *
9082 * PARAMETERS :
9083 *
9084 *
9085 *==========================================================================*/
9086void QCamera3HardwareInterface::makeOverridesList(
9087 cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
9088 uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
9089{
9090 /*daemon will give a list of overrides for all scene modes.
9091 However we should send the fwk only the overrides for the scene modes
9092 supported by the framework*/
9093 size_t j = 0;
9094 if (size > max_size) {
9095 size = max_size;
9096 }
9097 size_t focus_count = CAM_FOCUS_MODE_MAX;
9098 focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
9099 focus_count);
9100 for (size_t i = 0; i < size; i++) {
9101 bool supt = false;
9102 size_t index = supported_indexes[i];
9103 overridesList[j] = gCamCapability[camera_id]->flash_available ?
9104 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
9105 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
9106 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
9107 overridesTable[index].awb_mode);
9108 if (NAME_NOT_FOUND != val) {
9109 overridesList[j+1] = (uint8_t)val;
9110 }
9111 uint8_t focus_override = overridesTable[index].af_mode;
9112 for (size_t k = 0; k < focus_count; k++) {
9113 if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
9114 supt = true;
9115 break;
9116 }
9117 }
9118 if (supt) {
9119 val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
9120 focus_override);
9121 if (NAME_NOT_FOUND != val) {
9122 overridesList[j+2] = (uint8_t)val;
9123 }
9124 } else {
9125 overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
9126 }
9127 j+=3;
9128 }
9129}
9130
9131/*===========================================================================
9132 * FUNCTION : filterJpegSizes
9133 *
9134 * DESCRIPTION: Returns the supported jpeg sizes based on the max dimension that
9135 * could be downscaled to
9136 *
9137 * PARAMETERS :
9138 *
9139 * RETURN : length of jpegSizes array
9140 *==========================================================================*/
9141
9142size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
9143 size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
9144 uint8_t downscale_factor)
9145{
9146 if (0 == downscale_factor) {
9147 downscale_factor = 1;
9148 }
9149
9150 int32_t min_width = active_array_size.width / downscale_factor;
9151 int32_t min_height = active_array_size.height / downscale_factor;
9152 size_t jpegSizesCnt = 0;
9153 if (processedSizesCnt > maxCount) {
9154 processedSizesCnt = maxCount;
9155 }
9156 for (size_t i = 0; i < processedSizesCnt; i+=2) {
9157 if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
9158 jpegSizes[jpegSizesCnt] = processedSizes[i];
9159 jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
9160 jpegSizesCnt += 2;
9161 }
9162 }
9163 return jpegSizesCnt;
9164}
9165
9166/*===========================================================================
9167 * FUNCTION : computeNoiseModelEntryS
9168 *
9169 * DESCRIPTION: function to map a given sensitivity to the S noise
9170 * model parameters in the DNG noise model.
9171 *
9172 * PARAMETERS : sens : the sensor sensitivity
9173 *
9174 ** RETURN : S (sensor amplification) noise
9175 *
9176 *==========================================================================*/
9177double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
9178 double s = gCamCapability[mCameraId]->gradient_S * sens +
9179 gCamCapability[mCameraId]->offset_S;
9180 return ((s < 0.0) ? 0.0 : s);
9181}
9182
9183/*===========================================================================
9184 * FUNCTION : computeNoiseModelEntryO
9185 *
9186 * DESCRIPTION: function to map a given sensitivity to the O noise
9187 * model parameters in the DNG noise model.
9188 *
9189 * PARAMETERS : sens : the sensor sensitivity
9190 *
9191 ** RETURN : O (sensor readout) noise
9192 *
9193 *==========================================================================*/
9194double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
9195 int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
9196 double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
9197 1.0 : (1.0 * sens / max_analog_sens);
9198 double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
9199 gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
9200 return ((o < 0.0) ? 0.0 : o);
9201}
9202
9203/*===========================================================================
9204 * FUNCTION : getSensorSensitivity
9205 *
9206 * DESCRIPTION: convert iso_mode to an integer value
9207 *
9208 * PARAMETERS : iso_mode : the iso_mode supported by sensor
9209 *
9210 ** RETURN : sensitivity supported by sensor
9211 *
9212 *==========================================================================*/
9213int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
9214{
9215 int32_t sensitivity;
9216
9217 switch (iso_mode) {
9218 case CAM_ISO_MODE_100:
9219 sensitivity = 100;
9220 break;
9221 case CAM_ISO_MODE_200:
9222 sensitivity = 200;
9223 break;
9224 case CAM_ISO_MODE_400:
9225 sensitivity = 400;
9226 break;
9227 case CAM_ISO_MODE_800:
9228 sensitivity = 800;
9229 break;
9230 case CAM_ISO_MODE_1600:
9231 sensitivity = 1600;
9232 break;
9233 default:
9234 sensitivity = -1;
9235 break;
9236 }
9237 return sensitivity;
9238}
9239
9240/*===========================================================================
9241 * FUNCTION : getCamInfo
9242 *
9243 * DESCRIPTION: query camera capabilities
9244 *
9245 * PARAMETERS :
9246 * @cameraId : camera Id
9247 * @info : camera info struct to be filled in with camera capabilities
9248 *
9249 * RETURN : int type of status
9250 * NO_ERROR -- success
9251 * none-zero failure code
9252 *==========================================================================*/
int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
        struct camera_info *info)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_GET_CAM_INFO);
    int rc = 0;

    // gCamLock serializes the lazy one-time initialization of the shared
    // per-camera capability and static-metadata tables across callers.
    pthread_mutex_lock(&gCamLock);
    if (NULL == gCamCapability[cameraId]) {
        rc = initCapabilities(cameraId);
        if (rc < 0) {
            // unlock on every early-return path so the global lock is
            // never left held
            pthread_mutex_unlock(&gCamLock);
            return rc;
        }
    }

    // static metadata is derived from the capabilities, so it must be
    // initialized second
    if (NULL == gStaticMetadata[cameraId]) {
        rc = initStaticMetadata(cameraId);
        if (rc < 0) {
            pthread_mutex_unlock(&gCamLock);
            return rc;
        }
    }

    // map the HAL sensor position (including aux cameras on dual-camera
    // modules) onto the framework's two-value facing enum
    switch(gCamCapability[cameraId]->position) {
    case CAM_POSITION_BACK:
    case CAM_POSITION_BACK_AUX:
        info->facing = CAMERA_FACING_BACK;
        break;

    case CAM_POSITION_FRONT:
    case CAM_POSITION_FRONT_AUX:
        info->facing = CAMERA_FACING_FRONT;
        break;

    default:
        // record the failure but keep populating the rest of the struct;
        // rc = -1 is returned after the lock is released below
        LOGE("Unknown position type %d for camera id:%d",
                gCamCapability[cameraId]->position, cameraId);
        rc = -1;
        break;
    }


    info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
    // advertised device API version depends on the HAL build flavor
#ifndef USE_HAL_3_3
    info->device_version = CAMERA_DEVICE_API_VERSION_3_4;
#else
    info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
#endif
    info->static_camera_characteristics = gStaticMetadata[cameraId];

    //For now assume both cameras can operate independently.
    info->conflicting_devices = NULL;
    info->conflicting_devices_length = 0;

    //resource cost is 100 * MIN(1.0, m/M),
    //where m is throughput requirement with maximum stream configuration
    //and M is CPP maximum throughput.
    float max_fps = 0.0;
    for (uint32_t i = 0;
            i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
        if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
            max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
    }
    // m = max processed streams at full active-array size at the highest fps
    float ratio = 1.0 * MAX_PROCESSED_STREAMS *
            gCamCapability[cameraId]->active_array_size.width *
            gCamCapability[cameraId]->active_array_size.height * max_fps /
            gCamCapability[cameraId]->max_pixel_bandwidth;
    info->resource_cost = 100 * MIN(1.0, ratio);
    LOGI("camera %d resource cost is %d", cameraId,
            info->resource_cost);

    pthread_mutex_unlock(&gCamLock);
    return rc;
}
9327
9328/*===========================================================================
9329 * FUNCTION : translateCapabilityToMetadata
9330 *
9331 * DESCRIPTION: translate the capability into camera_metadata_t
9332 *
9333 * PARAMETERS : type of the request
9334 *
9335 *
9336 * RETURN : success: camera_metadata_t*
9337 * failure: NULL
9338 *
9339 *==========================================================================*/
9340camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
9341{
9342 if (mDefaultMetadata[type] != NULL) {
9343 return mDefaultMetadata[type];
9344 }
9345 //first time we are handling this request
9346 //fill up the metadata structure using the wrapper class
9347 CameraMetadata settings;
9348 //translate from cam_capability_t to camera_metadata_tag_t
9349 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
9350 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
9351 int32_t defaultRequestID = 0;
9352 settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
9353
9354 /* OIS disable */
9355 char ois_prop[PROPERTY_VALUE_MAX];
9356 memset(ois_prop, 0, sizeof(ois_prop));
9357 property_get("persist.camera.ois.disable", ois_prop, "0");
9358 uint8_t ois_disable = (uint8_t)atoi(ois_prop);
9359
9360 /* Force video to use OIS */
9361 char videoOisProp[PROPERTY_VALUE_MAX];
9362 memset(videoOisProp, 0, sizeof(videoOisProp));
9363 property_get("persist.camera.ois.video", videoOisProp, "1");
9364 uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
Shuzhen Wang19463d72016-03-08 11:09:52 -08009365
9366 // Hybrid AE enable/disable
9367 char hybrid_ae_prop[PROPERTY_VALUE_MAX];
9368 memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
9369 property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
9370 const uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);
9371
Thierry Strudel3d639192016-09-09 11:52:26 -07009372 uint8_t controlIntent = 0;
9373 uint8_t focusMode;
9374 uint8_t vsMode;
9375 uint8_t optStabMode;
9376 uint8_t cacMode;
9377 uint8_t edge_mode;
9378 uint8_t noise_red_mode;
9379 uint8_t tonemap_mode;
9380 bool highQualityModeEntryAvailable = FALSE;
9381 bool fastModeEntryAvailable = FALSE;
9382 vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
9383 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
9384 switch (type) {
9385 case CAMERA3_TEMPLATE_PREVIEW:
9386 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
9387 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
9388 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
9389 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
9390 edge_mode = ANDROID_EDGE_MODE_FAST;
9391 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
9392 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
9393 break;
9394 case CAMERA3_TEMPLATE_STILL_CAPTURE:
9395 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
9396 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
9397 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
9398 edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
9399 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
9400 tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
9401 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
9402 // Order of priority for default CAC is HIGH Quality -> FAST -> OFF
9403 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
9404 if (gCamCapability[mCameraId]->aberration_modes[i] ==
9405 CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
9406 highQualityModeEntryAvailable = TRUE;
9407 } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
9408 CAM_COLOR_CORRECTION_ABERRATION_FAST) {
9409 fastModeEntryAvailable = TRUE;
9410 }
9411 }
9412 if (highQualityModeEntryAvailable) {
9413 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
9414 } else if (fastModeEntryAvailable) {
9415 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
9416 }
9417 break;
9418 case CAMERA3_TEMPLATE_VIDEO_RECORD:
9419 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
9420 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
9421 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -07009422 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
9423 edge_mode = ANDROID_EDGE_MODE_FAST;
9424 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
9425 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
9426 if (forceVideoOis)
9427 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
9428 break;
9429 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
9430 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
9431 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
9432 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -07009433 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
9434 edge_mode = ANDROID_EDGE_MODE_FAST;
9435 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
9436 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
9437 if (forceVideoOis)
9438 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
9439 break;
9440 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
9441 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
9442 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
9443 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
9444 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
9445 edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
9446 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
9447 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
9448 break;
9449 case CAMERA3_TEMPLATE_MANUAL:
9450 edge_mode = ANDROID_EDGE_MODE_FAST;
9451 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
9452 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
9453 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
9454 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
9455 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
9456 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
9457 break;
9458 default:
9459 edge_mode = ANDROID_EDGE_MODE_FAST;
9460 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
9461 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
9462 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
9463 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
9464 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
9465 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
9466 break;
9467 }
Thierry Strudel04e026f2016-10-10 11:27:36 -07009468 // Set CAC to OFF if underlying device doesn't support
9469 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
9470 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
9471 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009472 settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
9473 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
9474 settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
9475 if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
9476 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
9477 }
9478 settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
9479
9480 if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
9481 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
9482 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
9483 else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
9484 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
9485 || ois_disable)
9486 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
9487 settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
9488
9489 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
9490 &gCamCapability[mCameraId]->exposure_compensation_default, 1);
9491
9492 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
9493 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
9494
9495 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
9496 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
9497
9498 static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
9499 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
9500
9501 static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
9502 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
9503
9504 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
9505 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
9506
9507 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
9508 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
9509
9510 static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
9511 settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
9512
9513 /*flash*/
9514 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
9515 settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
9516
9517 static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
9518 settings.update(ANDROID_FLASH_FIRING_POWER,
9519 &flashFiringLevel, 1);
9520
9521 /* lens */
9522 float default_aperture = gCamCapability[mCameraId]->apertures[0];
9523 settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
9524
9525 if (gCamCapability[mCameraId]->filter_densities_count) {
9526 float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
9527 settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
9528 gCamCapability[mCameraId]->filter_densities_count);
9529 }
9530
9531 float default_focal_length = gCamCapability[mCameraId]->focal_length;
9532 settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
9533
9534 if (focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
9535 float default_focus_distance = 0;
9536 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &default_focus_distance, 1);
9537 }
9538
9539 static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
9540 settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
9541
9542 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
9543 settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
9544
9545 static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
9546 settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
9547
9548 /* face detection (default to OFF) */
9549 static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
9550 settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
9551
9552 static const uint8_t histogramMode = ANDROID_STATISTICS_HISTOGRAM_MODE_OFF;
9553 settings.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &histogramMode, 1);
9554
9555 static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
9556 settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
9557
9558 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
9559 settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
9560
9561 static const uint8_t lensShadingMode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
9562 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &lensShadingMode, 1);
9563
9564 static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
9565 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
9566
9567 /* Exposure time(Update the Min Exposure Time)*/
9568 int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
9569 settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
9570
9571 /* frame duration */
9572 static const int64_t default_frame_duration = NSEC_PER_33MSEC;
9573 settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
9574
9575 /* sensitivity */
9576 static const int32_t default_sensitivity = 100;
9577 settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009578#ifndef USE_HAL_3_3
9579 static const int32_t default_isp_sensitivity =
9580 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
9581 settings.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &default_isp_sensitivity, 1);
9582#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009583
9584 /*edge mode*/
9585 settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
9586
9587 /*noise reduction mode*/
9588 settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
9589
9590 /*color correction mode*/
9591 static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
9592 settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
9593
9594 /*transform matrix mode*/
9595 settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
9596
9597 int32_t scaler_crop_region[4];
9598 scaler_crop_region[0] = 0;
9599 scaler_crop_region[1] = 0;
9600 scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
9601 scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
9602 settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
9603
9604 static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
9605 settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
9606
9607 /*focus distance*/
9608 float focus_distance = 0.0;
9609 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
9610
9611 /*target fps range: use maximum range for picture, and maximum fixed range for video*/
Thierry Strudele80ad7c2016-12-06 10:16:27 -08009612 /* Restrict template max_fps to 30 */
Thierry Strudel3d639192016-09-09 11:52:26 -07009613 float max_range = 0.0;
9614 float max_fixed_fps = 0.0;
9615 int32_t fps_range[2] = {0, 0};
9616 for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
9617 i++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08009618 if (gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps >
9619 TEMPLATE_MAX_PREVIEW_FPS) {
9620 continue;
9621 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009622 float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
9623 gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
9624 if (type == CAMERA3_TEMPLATE_PREVIEW ||
9625 type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
9626 type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
9627 if (range > max_range) {
9628 fps_range[0] =
9629 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
9630 fps_range[1] =
9631 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
9632 max_range = range;
9633 }
9634 } else {
9635 if (range < 0.01 && max_fixed_fps <
9636 gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
9637 fps_range[0] =
9638 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
9639 fps_range[1] =
9640 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
9641 max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
9642 }
9643 }
9644 }
9645 settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
9646
9647 /*precapture trigger*/
9648 uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
9649 settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
9650
9651 /*af trigger*/
9652 uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
9653 settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
9654
9655 /* ae & af regions */
9656 int32_t active_region[] = {
9657 gCamCapability[mCameraId]->active_array_size.left,
9658 gCamCapability[mCameraId]->active_array_size.top,
9659 gCamCapability[mCameraId]->active_array_size.left +
9660 gCamCapability[mCameraId]->active_array_size.width,
9661 gCamCapability[mCameraId]->active_array_size.top +
9662 gCamCapability[mCameraId]->active_array_size.height,
9663 0};
9664 settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
9665 sizeof(active_region) / sizeof(active_region[0]));
9666 settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
9667 sizeof(active_region) / sizeof(active_region[0]));
9668
9669 /* black level lock */
9670 uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
9671 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
9672
9673 /* lens shading map mode */
9674 uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
9675 if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
9676 shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
9677 }
9678 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
9679
9680 //special defaults for manual template
9681 if (type == CAMERA3_TEMPLATE_MANUAL) {
9682 static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
9683 settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
9684
9685 static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
9686 settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
9687
9688 static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
9689 settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
9690
9691 static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
9692 settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
9693
9694 static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
9695 settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
9696
9697 static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
9698 settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
9699 }
9700
9701
9702 /* TNR
9703 * We'll use this location to determine which modes TNR will be set.
9704 * We will enable TNR to be on if either of the Preview/Video stream requires TNR
9705 * This is not to be confused with linking on a per stream basis that decision
9706 * is still on per-session basis and will be handled as part of config stream
9707 */
9708 uint8_t tnr_enable = 0;
9709
9710 if (m_bTnrPreview || m_bTnrVideo) {
9711
9712 switch (type) {
9713 case CAMERA3_TEMPLATE_VIDEO_RECORD:
9714 tnr_enable = 1;
9715 break;
9716
9717 default:
9718 tnr_enable = 0;
9719 break;
9720 }
9721
9722 int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
9723 settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
9724 settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
9725
9726 LOGD("TNR:%d with process plate %d for template:%d",
9727 tnr_enable, tnr_process_type, type);
9728 }
9729
9730 //Update Link tags to default
9731 int32_t sync_type = CAM_TYPE_STANDALONE;
9732 settings.update(QCAMERA3_DUALCAM_LINK_ENABLE, &sync_type, 1);
9733
9734 int32_t is_main = 0; //this doesn't matter as app should overwrite
9735 settings.update(QCAMERA3_DUALCAM_LINK_IS_MAIN, &is_main, 1);
9736
9737 settings.update(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID, &is_main, 1);
9738
9739 /* CDS default */
9740 char prop[PROPERTY_VALUE_MAX];
9741 memset(prop, 0, sizeof(prop));
9742 property_get("persist.camera.CDS", prop, "Auto");
9743 cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
9744 cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
9745 if (CAM_CDS_MODE_MAX == cds_mode) {
9746 cds_mode = CAM_CDS_MODE_AUTO;
9747 }
9748
9749 /* Disabling CDS in templates which have TNR enabled*/
9750 if (tnr_enable)
9751 cds_mode = CAM_CDS_MODE_OFF;
9752
9753 int32_t mode = cds_mode;
9754 settings.update(QCAMERA3_CDS_MODE, &mode, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -07009755
9756 int32_t hdr_mode = (int32_t)QCAMERA3_VIDEO_HDR_MODE_OFF;
9757 settings.update(QCAMERA3_VIDEO_HDR_MODE, &hdr_mode, 1);
9758
9759 /* IR Mode Default Off */
9760 int32_t ir_mode = (int32_t)QCAMERA3_IR_MODE_OFF;
9761 settings.update(QCAMERA3_IR_MODE, &ir_mode, 1);
9762
Thierry Strudel269c81a2016-10-12 12:13:59 -07009763 /* Manual Convergence AEC Speed is disabled by default*/
9764 float default_aec_speed = 0;
9765 settings.update(QCAMERA3_AEC_CONVERGENCE_SPEED, &default_aec_speed, 1);
9766
9767 /* Manual Convergence AWB Speed is disabled by default*/
9768 float default_awb_speed = 0;
9769 settings.update(QCAMERA3_AWB_CONVERGENCE_SPEED, &default_awb_speed, 1);
9770
Thierry Strudel295a0ca2016-11-03 18:38:47 -07009771 // Set instant AEC to normal convergence by default
9772 int32_t instant_aec_mode = (int32_t)QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE;
9773 settings.update(QCAMERA3_INSTANT_AEC_MODE, &instant_aec_mode, 1);
9774
Shuzhen Wang19463d72016-03-08 11:09:52 -08009775 /* hybrid ae */
9776 settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);
9777
Thierry Strudel3d639192016-09-09 11:52:26 -07009778 mDefaultMetadata[type] = settings.release();
9779
9780 return mDefaultMetadata[type];
9781}
9782
9783/*===========================================================================
9784 * FUNCTION : setFrameParameters
9785 *
9786 * DESCRIPTION: set parameters per frame as requested in the metadata from
9787 * framework
9788 *
9789 * PARAMETERS :
9790 * @request : request that needs to be serviced
Thierry Strudelc2ee3302016-11-17 12:33:12 -08009791 * @streamsArray : Stream ID of all the requested streams
Thierry Strudel3d639192016-09-09 11:52:26 -07009792 * @blob_request: Whether this request is a blob request or not
9793 *
9794 * RETURN : success: NO_ERROR
9795 * failure:
9796 *==========================================================================*/
9797int QCamera3HardwareInterface::setFrameParameters(
9798 camera3_capture_request_t *request,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08009799 cam_stream_ID_t streamsArray,
Thierry Strudel3d639192016-09-09 11:52:26 -07009800 int blob_request,
9801 uint32_t snapshotStreamId)
9802{
9803 /*translate from camera_metadata_t type to parm_type_t*/
9804 int rc = 0;
9805 int32_t hal_version = CAM_HAL_V3;
9806
9807 clear_metadata_buffer(mParameters);
9808 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
9809 LOGE("Failed to set hal version in the parameters");
9810 return BAD_VALUE;
9811 }
9812
9813 /*we need to update the frame number in the parameters*/
9814 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
9815 request->frame_number)) {
9816 LOGE("Failed to set the frame number in the parameters");
9817 return BAD_VALUE;
9818 }
9819
9820 /* Update stream id of all the requested buffers */
Thierry Strudelc2ee3302016-11-17 12:33:12 -08009821 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07009822 LOGE("Failed to set stream type mask in the parameters");
9823 return BAD_VALUE;
9824 }
9825
9826 if (mUpdateDebugLevel) {
9827 uint32_t dummyDebugLevel = 0;
9828 /* The value of dummyDebugLevel is irrelavent. On
9829 * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, read debug property */
9830 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
9831 dummyDebugLevel)) {
9832 LOGE("Failed to set UPDATE_DEBUG_LEVEL");
9833 return BAD_VALUE;
9834 }
9835 mUpdateDebugLevel = false;
9836 }
9837
9838 if(request->settings != NULL){
9839 rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
9840 if (blob_request)
9841 memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
9842 }
9843
9844 return rc;
9845}
9846
9847/*===========================================================================
9848 * FUNCTION : setReprocParameters
9849 *
9850 * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
9851 * return it.
9852 *
9853 * PARAMETERS :
9854 * @request : request that needs to be serviced
9855 *
9856 * RETURN : success: NO_ERROR
9857 * failure:
9858 *==========================================================================*/
9859int32_t QCamera3HardwareInterface::setReprocParameters(
9860 camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
9861 uint32_t snapshotStreamId)
9862{
9863 /*translate from camera_metadata_t type to parm_type_t*/
9864 int rc = 0;
9865
9866 if (NULL == request->settings){
9867 LOGE("Reprocess settings cannot be NULL");
9868 return BAD_VALUE;
9869 }
9870
9871 if (NULL == reprocParam) {
9872 LOGE("Invalid reprocessing metadata buffer");
9873 return BAD_VALUE;
9874 }
9875 clear_metadata_buffer(reprocParam);
9876
9877 /*we need to update the frame number in the parameters*/
9878 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
9879 request->frame_number)) {
9880 LOGE("Failed to set the frame number in the parameters");
9881 return BAD_VALUE;
9882 }
9883
9884 rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
9885 if (rc < 0) {
9886 LOGE("Failed to translate reproc request");
9887 return rc;
9888 }
9889
9890 CameraMetadata frame_settings;
9891 frame_settings = request->settings;
9892 if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
9893 frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
9894 int32_t *crop_count =
9895 frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
9896 int32_t *crop_data =
9897 frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
9898 int32_t *roi_map =
9899 frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
9900 if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
9901 cam_crop_data_t crop_meta;
9902 memset(&crop_meta, 0, sizeof(cam_crop_data_t));
9903 crop_meta.num_of_streams = 1;
9904 crop_meta.crop_info[0].crop.left = crop_data[0];
9905 crop_meta.crop_info[0].crop.top = crop_data[1];
9906 crop_meta.crop_info[0].crop.width = crop_data[2];
9907 crop_meta.crop_info[0].crop.height = crop_data[3];
9908
9909 crop_meta.crop_info[0].roi_map.left =
9910 roi_map[0];
9911 crop_meta.crop_info[0].roi_map.top =
9912 roi_map[1];
9913 crop_meta.crop_info[0].roi_map.width =
9914 roi_map[2];
9915 crop_meta.crop_info[0].roi_map.height =
9916 roi_map[3];
9917
9918 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
9919 rc = BAD_VALUE;
9920 }
9921 LOGD("Found reprocess crop data for stream %p %dx%d, %dx%d",
9922 request->input_buffer->stream,
9923 crop_meta.crop_info[0].crop.left,
9924 crop_meta.crop_info[0].crop.top,
9925 crop_meta.crop_info[0].crop.width,
9926 crop_meta.crop_info[0].crop.height);
9927 LOGD("Found reprocess roi map data for stream %p %dx%d, %dx%d",
9928 request->input_buffer->stream,
9929 crop_meta.crop_info[0].roi_map.left,
9930 crop_meta.crop_info[0].roi_map.top,
9931 crop_meta.crop_info[0].roi_map.width,
9932 crop_meta.crop_info[0].roi_map.height);
9933 } else {
9934 LOGE("Invalid reprocess crop count %d!", *crop_count);
9935 }
9936 } else {
9937 LOGE("No crop data from matching output stream");
9938 }
9939
9940 /* These settings are not needed for regular requests so handle them specially for
9941 reprocess requests; information needed for EXIF tags */
9942 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
9943 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
9944 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
9945 if (NAME_NOT_FOUND != val) {
9946 uint32_t flashMode = (uint32_t)val;
9947 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
9948 rc = BAD_VALUE;
9949 }
9950 } else {
9951 LOGE("Could not map fwk flash mode %d to correct hal flash mode",
9952 frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
9953 }
9954 } else {
9955 LOGH("No flash mode in reprocess settings");
9956 }
9957
9958 if (frame_settings.exists(ANDROID_FLASH_STATE)) {
9959 int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
9960 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
9961 rc = BAD_VALUE;
9962 }
9963 } else {
9964 LOGH("No flash state in reprocess settings");
9965 }
9966
9967 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS)) {
9968 uint8_t *reprocessFlags =
9969 frame_settings.find(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS).data.u8;
9970 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_REPROCESS_FLAGS,
9971 *reprocessFlags)) {
9972 rc = BAD_VALUE;
9973 }
9974 }
9975
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07009976 // Add metadata which reprocess needs
9977 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB)) {
9978 cam_reprocess_info_t *repro_info =
9979 (cam_reprocess_info_t *)frame_settings.find
9980 (QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB).data.u8;
Thierry Strudel3d639192016-09-09 11:52:26 -07009981 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_SENSOR,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07009982 repro_info->sensor_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -07009983 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CAMIF,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07009984 repro_info->camif_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -07009985 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_ISP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07009986 repro_info->isp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -07009987 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CPP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07009988 repro_info->cpp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -07009989 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_FOCAL_LENGTH_RATIO,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07009990 repro_info->af_focal_length_ratio);
Thierry Strudel3d639192016-09-09 11:52:26 -07009991 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_FLIP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07009992 repro_info->pipeline_flip);
9993 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_ROI,
9994 repro_info->af_roi);
9995 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_IMG_DYN_FEAT,
9996 repro_info->dyn_mask);
Thierry Strudel3d639192016-09-09 11:52:26 -07009997 /* If there is ANDROID_JPEG_ORIENTATION in frame setting,
9998 CAM_INTF_PARM_ROTATION metadata then has been added in
9999 translateToHalMetadata. HAL need to keep this new rotation
10000 metadata. Otherwise, the old rotation info saved in the vendor tag
10001 would be used */
10002 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
10003 CAM_INTF_PARM_ROTATION, reprocParam) {
10004 LOGD("CAM_INTF_PARM_ROTATION metadata is added in translateToHalMetadata");
10005 } else {
10006 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_ROTATION,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070010007 repro_info->rotation_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070010008 }
Thierry Strudel3d639192016-09-09 11:52:26 -070010009 }
10010
10011 /* Add additional JPEG cropping information. App add QCAMERA3_JPEG_ENCODE_CROP_RECT
10012 to ask for cropping and use ROI for downscale/upscale during HW JPEG encoding.
10013 roi.width and roi.height would be the final JPEG size.
10014 For now, HAL only checks this for reprocess request */
10015 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ENABLE) &&
10016 frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_RECT)) {
10017 uint8_t *enable =
10018 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ENABLE).data.u8;
10019 if (*enable == TRUE) {
10020 int32_t *crop_data =
10021 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_RECT).data.i32;
10022 cam_stream_crop_info_t crop_meta;
10023 memset(&crop_meta, 0, sizeof(cam_stream_crop_info_t));
10024 crop_meta.stream_id = 0;
10025 crop_meta.crop.left = crop_data[0];
10026 crop_meta.crop.top = crop_data[1];
10027 crop_meta.crop.width = crop_data[2];
10028 crop_meta.crop.height = crop_data[3];
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010029 // The JPEG crop roi should match cpp output size
10030 IF_META_AVAILABLE(cam_stream_crop_info_t, cpp_crop,
10031 CAM_INTF_META_SNAP_CROP_INFO_CPP, reprocParam) {
10032 crop_meta.roi_map.left = 0;
10033 crop_meta.roi_map.top = 0;
10034 crop_meta.roi_map.width = cpp_crop->crop.width;
10035 crop_meta.roi_map.height = cpp_crop->crop.height;
Thierry Strudel3d639192016-09-09 11:52:26 -070010036 }
10037 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_ENCODE_CROP,
10038 crop_meta);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010039 LOGH("Add JPEG encode crop left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070010040 crop_meta.crop.left, crop_meta.crop.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010041 crop_meta.crop.width, crop_meta.crop.height, mCameraId);
10042 LOGH("Add JPEG encode crop ROI left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070010043 crop_meta.roi_map.left, crop_meta.roi_map.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010044 crop_meta.roi_map.width, crop_meta.roi_map.height, mCameraId);
10045
10046 // Add JPEG scale information
10047 cam_dimension_t scale_dim;
10048 memset(&scale_dim, 0, sizeof(cam_dimension_t));
10049 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ROI)) {
10050 int32_t *roi =
10051 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ROI).data.i32;
10052 scale_dim.width = roi[2];
10053 scale_dim.height = roi[3];
10054 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_SCALE_DIMENSION,
10055 scale_dim);
10056 LOGH("Add JPEG encode scale width %d, height %d, mCameraId %d",
10057 scale_dim.width, scale_dim.height, mCameraId);
10058 }
Thierry Strudel3d639192016-09-09 11:52:26 -070010059 }
10060 }
10061
10062 return rc;
10063}
10064
10065/*===========================================================================
10066 * FUNCTION : saveRequestSettings
10067 *
10068 * DESCRIPTION: Add any settings that might have changed to the request settings
10069 * and save the settings to be applied on the frame
10070 *
10071 * PARAMETERS :
10072 * @jpegMetadata : the extracted and/or modified jpeg metadata
10073 * @request : request with initial settings
10074 *
10075 * RETURN :
10076 * camera_metadata_t* : pointer to the saved request settings
10077 *==========================================================================*/
10078camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
10079 const CameraMetadata &jpegMetadata,
10080 camera3_capture_request_t *request)
10081{
10082 camera_metadata_t *resultMetadata;
10083 CameraMetadata camMetadata;
10084 camMetadata = request->settings;
10085
10086 if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
10087 int32_t thumbnail_size[2];
10088 thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
10089 thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
10090 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
10091 jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
10092 }
10093
10094 if (request->input_buffer != NULL) {
10095 uint8_t reprocessFlags = 1;
10096 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
10097 (uint8_t*)&reprocessFlags,
10098 sizeof(reprocessFlags));
10099 }
10100
10101 resultMetadata = camMetadata.release();
10102 return resultMetadata;
10103}
10104
10105/*===========================================================================
10106 * FUNCTION : setHalFpsRange
10107 *
10108 * DESCRIPTION: set FPS range parameter
10109 *
10110 *
10111 * PARAMETERS :
10112 * @settings : Metadata from framework
10113 * @hal_metadata: Metadata buffer
10114 *
10115 *
10116 * RETURN : success: NO_ERROR
10117 * failure:
10118 *==========================================================================*/
10119int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
10120 metadata_buffer_t *hal_metadata)
10121{
10122 int32_t rc = NO_ERROR;
10123 cam_fps_range_t fps_range;
10124 fps_range.min_fps = (float)
10125 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
10126 fps_range.max_fps = (float)
10127 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
10128 fps_range.video_min_fps = fps_range.min_fps;
10129 fps_range.video_max_fps = fps_range.max_fps;
10130
10131 LOGD("aeTargetFpsRange fps: [%f %f]",
10132 fps_range.min_fps, fps_range.max_fps);
10133 /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
10134 * follows:
10135 * ---------------------------------------------------------------|
10136 * Video stream is absent in configure_streams |
10137 * (Camcorder preview before the first video record |
10138 * ---------------------------------------------------------------|
10139 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
10140 * | | | vid_min/max_fps|
10141 * ---------------------------------------------------------------|
10142 * NO | [ 30, 240] | 240 | [240, 240] |
10143 * |-------------|-------------|----------------|
10144 * | [240, 240] | 240 | [240, 240] |
10145 * ---------------------------------------------------------------|
10146 * Video stream is present in configure_streams |
10147 * ---------------------------------------------------------------|
10148 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
10149 * | | | vid_min/max_fps|
10150 * ---------------------------------------------------------------|
10151 * NO | [ 30, 240] | 240 | [240, 240] |
10152 * (camcorder prev |-------------|-------------|----------------|
10153 * after video rec | [240, 240] | 240 | [240, 240] |
10154 * is stopped) | | | |
10155 * ---------------------------------------------------------------|
10156 * YES | [ 30, 240] | 240 | [240, 240] |
10157 * |-------------|-------------|----------------|
10158 * | [240, 240] | 240 | [240, 240] |
10159 * ---------------------------------------------------------------|
10160 * When Video stream is absent in configure_streams,
10161 * preview fps = sensor_fps / batchsize
10162 * Eg: for 240fps at batchSize 4, preview = 60fps
10163 * for 120fps at batchSize 4, preview = 30fps
10164 *
10165 * When video stream is present in configure_streams, preview fps is as per
10166 * the ratio of preview buffers to video buffers requested in process
10167 * capture request
10168 */
10169 mBatchSize = 0;
10170 if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
10171 fps_range.min_fps = fps_range.video_max_fps;
10172 fps_range.video_min_fps = fps_range.video_max_fps;
10173 int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
10174 fps_range.max_fps);
10175 if (NAME_NOT_FOUND != val) {
10176 cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
10177 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
10178 return BAD_VALUE;
10179 }
10180
10181 if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
10182 /* If batchmode is currently in progress and the fps changes,
10183 * set the flag to restart the sensor */
10184 if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
10185 (mHFRVideoFps != fps_range.max_fps)) {
10186 mNeedSensorRestart = true;
10187 }
10188 mHFRVideoFps = fps_range.max_fps;
10189 mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
10190 if (mBatchSize > MAX_HFR_BATCH_SIZE) {
10191 mBatchSize = MAX_HFR_BATCH_SIZE;
10192 }
10193 }
10194 LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);
10195
10196 }
10197 } else {
10198 /* HFR mode is session param in backend/ISP. This should be reset when
10199 * in non-HFR mode */
10200 cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
10201 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
10202 return BAD_VALUE;
10203 }
10204 }
10205 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
10206 return BAD_VALUE;
10207 }
10208 LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
10209 fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
10210 return rc;
10211}
10212
10213/*===========================================================================
10214 * FUNCTION : translateToHalMetadata
10215 *
10216 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
10217 *
10218 *
10219 * PARAMETERS :
10220 * @request : request sent from framework
10221 *
10222 *
10223 * RETURN : success: NO_ERROR
10224 * failure:
10225 *==========================================================================*/
10226int QCamera3HardwareInterface::translateToHalMetadata
10227 (const camera3_capture_request_t *request,
10228 metadata_buffer_t *hal_metadata,
10229 uint32_t snapshotStreamId)
10230{
10231 int rc = 0;
10232 CameraMetadata frame_settings;
10233 frame_settings = request->settings;
10234
10235 /* Do not change the order of the following list unless you know what you are
10236 * doing.
10237 * The order is laid out in such a way that parameters in the front of the table
10238 * may be used to override the parameters later in the table. Examples are:
10239 * 1. META_MODE should precede AEC/AWB/AF MODE
10240 * 2. AEC MODE should preced EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
10241 * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
10242 * 4. Any mode should precede it's corresponding settings
10243 */
10244 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
10245 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
10246 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
10247 rc = BAD_VALUE;
10248 }
10249 rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
10250 if (rc != NO_ERROR) {
10251 LOGE("extractSceneMode failed");
10252 }
10253 }
10254
10255 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
10256 uint8_t fwk_aeMode =
10257 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
10258 uint8_t aeMode;
10259 int32_t redeye;
10260
10261 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
10262 aeMode = CAM_AE_MODE_OFF;
10263 } else {
10264 aeMode = CAM_AE_MODE_ON;
10265 }
10266 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
10267 redeye = 1;
10268 } else {
10269 redeye = 0;
10270 }
10271
10272 int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
10273 fwk_aeMode);
10274 if (NAME_NOT_FOUND != val) {
10275 int32_t flashMode = (int32_t)val;
10276 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
10277 }
10278
10279 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
10280 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
10281 rc = BAD_VALUE;
10282 }
10283 }
10284
10285 if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
10286 uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
10287 int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
10288 fwk_whiteLevel);
10289 if (NAME_NOT_FOUND != val) {
10290 uint8_t whiteLevel = (uint8_t)val;
10291 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
10292 rc = BAD_VALUE;
10293 }
10294 }
10295 }
10296
10297 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
10298 uint8_t fwk_cacMode =
10299 frame_settings.find(
10300 ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
10301 int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
10302 fwk_cacMode);
10303 if (NAME_NOT_FOUND != val) {
10304 cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
10305 bool entryAvailable = FALSE;
10306 // Check whether Frameworks set CAC mode is supported in device or not
10307 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
10308 if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
10309 entryAvailable = TRUE;
10310 break;
10311 }
10312 }
10313 LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
10314 // If entry not found then set the device supported mode instead of frameworks mode i.e,
10315 // Only HW ISP CAC + NO SW CAC : Advertise all 3 with High doing same as fast by ISP
10316 // NO HW ISP CAC + Only SW CAC : Advertise all 3 with Fast doing the same as OFF
10317 if (entryAvailable == FALSE) {
10318 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
10319 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
10320 } else {
10321 if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
10322 // High is not supported and so set the FAST as spec say's underlying
10323 // device implementation can be the same for both modes.
10324 cacMode = CAM_COLOR_CORRECTION_ABERRATION_FAST;
10325 } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
10326 // Fast is not supported and so we cannot set HIGH or FAST but choose OFF
10327 // in order to avoid the fps drop due to high quality
10328 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
10329 } else {
10330 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
10331 }
10332 }
10333 }
10334 LOGD("Final cacMode is %d", cacMode);
10335 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
10336 rc = BAD_VALUE;
10337 }
10338 } else {
10339 LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
10340 }
10341 }
10342
10343 if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
10344 uint8_t fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
10345 int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
10346 fwk_focusMode);
10347 if (NAME_NOT_FOUND != val) {
10348 uint8_t focusMode = (uint8_t)val;
10349 LOGD("set focus mode %d", focusMode);
10350 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
10351 rc = BAD_VALUE;
10352 }
10353 }
10354 }
10355
10356 if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) {
10357 float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
10358 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
10359 focalDistance)) {
10360 rc = BAD_VALUE;
10361 }
10362 }
10363
10364 if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
10365 uint8_t fwk_antibandingMode =
10366 frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
10367 int val = lookupHalName(ANTIBANDING_MODES_MAP,
10368 METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
10369 if (NAME_NOT_FOUND != val) {
10370 uint32_t hal_antibandingMode = (uint32_t)val;
10371 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
10372 hal_antibandingMode)) {
10373 rc = BAD_VALUE;
10374 }
10375 }
10376 }
10377
10378 if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
10379 int32_t expCompensation = frame_settings.find(
10380 ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
10381 if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
10382 expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
10383 if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
10384 expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
Zhijun He426c4d92016-12-16 14:27:50 -080010385 ALOGV("CAM_DEBUG: Setting compensation:%d", expCompensation);
Thierry Strudel3d639192016-09-09 11:52:26 -070010386 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
10387 expCompensation)) {
10388 rc = BAD_VALUE;
10389 }
10390 }
10391
10392 if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
10393 uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
10394 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
10395 rc = BAD_VALUE;
10396 }
10397 }
10398 if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
10399 rc = setHalFpsRange(frame_settings, hal_metadata);
10400 if (rc != NO_ERROR) {
10401 LOGE("setHalFpsRange failed");
10402 }
10403 }
10404
10405 if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
10406 uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
10407 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
10408 rc = BAD_VALUE;
10409 }
10410 }
10411
10412 if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
10413 uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
10414 int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
10415 fwk_effectMode);
10416 if (NAME_NOT_FOUND != val) {
10417 uint8_t effectMode = (uint8_t)val;
10418 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
10419 rc = BAD_VALUE;
10420 }
10421 }
10422 }
10423
10424 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
10425 uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
10426 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
10427 colorCorrectMode)) {
10428 rc = BAD_VALUE;
10429 }
10430 }
10431
10432 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
10433 cam_color_correct_gains_t colorCorrectGains;
10434 for (size_t i = 0; i < CC_GAIN_MAX; i++) {
10435 colorCorrectGains.gains[i] =
10436 frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
10437 }
10438 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
10439 colorCorrectGains)) {
10440 rc = BAD_VALUE;
10441 }
10442 }
10443
10444 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
10445 cam_color_correct_matrix_t colorCorrectTransform;
10446 cam_rational_type_t transform_elem;
10447 size_t num = 0;
10448 for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
10449 for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
10450 transform_elem.numerator =
10451 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
10452 transform_elem.denominator =
10453 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
10454 colorCorrectTransform.transform_matrix[i][j] = transform_elem;
10455 num++;
10456 }
10457 }
10458 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
10459 colorCorrectTransform)) {
10460 rc = BAD_VALUE;
10461 }
10462 }
10463
10464 cam_trigger_t aecTrigger;
10465 aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
10466 aecTrigger.trigger_id = -1;
10467 if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
10468 frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
10469 aecTrigger.trigger =
10470 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
10471 aecTrigger.trigger_id =
10472 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
10473 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
10474 aecTrigger)) {
10475 rc = BAD_VALUE;
10476 }
10477 LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
10478 aecTrigger.trigger, aecTrigger.trigger_id);
10479 }
10480
10481 /*af_trigger must come with a trigger id*/
10482 if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
10483 frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
10484 cam_trigger_t af_trigger;
10485 af_trigger.trigger =
10486 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
10487 af_trigger.trigger_id =
10488 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
10489 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
10490 rc = BAD_VALUE;
10491 }
10492 LOGD("AfTrigger: %d AfTriggerID: %d",
10493 af_trigger.trigger, af_trigger.trigger_id);
10494 }
10495
10496 if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
10497 int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
10498 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
10499 rc = BAD_VALUE;
10500 }
10501 }
10502 if (frame_settings.exists(ANDROID_EDGE_MODE)) {
10503 cam_edge_application_t edge_application;
10504 edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
10505 if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
10506 edge_application.sharpness = 0;
10507 } else {
10508 edge_application.sharpness = gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
10509 }
10510 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
10511 rc = BAD_VALUE;
10512 }
10513 }
10514
10515 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
10516 int32_t respectFlashMode = 1;
10517 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
10518 uint8_t fwk_aeMode =
10519 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
10520 if (fwk_aeMode > ANDROID_CONTROL_AE_MODE_ON) {
10521 respectFlashMode = 0;
10522 LOGH("AE Mode controls flash, ignore android.flash.mode");
10523 }
10524 }
10525 if (respectFlashMode) {
10526 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
10527 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
10528 LOGH("flash mode after mapping %d", val);
10529 // To check: CAM_INTF_META_FLASH_MODE usage
10530 if (NAME_NOT_FOUND != val) {
10531 uint8_t flashMode = (uint8_t)val;
10532 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
10533 rc = BAD_VALUE;
10534 }
10535 }
10536 }
10537 }
10538
10539 if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
10540 uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
10541 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
10542 rc = BAD_VALUE;
10543 }
10544 }
10545
10546 if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
10547 int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
10548 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
10549 flashFiringTime)) {
10550 rc = BAD_VALUE;
10551 }
10552 }
10553
10554 if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
10555 uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
10556 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
10557 hotPixelMode)) {
10558 rc = BAD_VALUE;
10559 }
10560 }
10561
10562 if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
10563 float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
10564 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
10565 lensAperture)) {
10566 rc = BAD_VALUE;
10567 }
10568 }
10569
10570 if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
10571 float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
10572 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
10573 filterDensity)) {
10574 rc = BAD_VALUE;
10575 }
10576 }
10577
10578 if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
10579 float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
10580 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
10581 focalLength)) {
10582 rc = BAD_VALUE;
10583 }
10584 }
10585
10586 if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
10587 uint8_t optStabMode =
10588 frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
10589 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
10590 optStabMode)) {
10591 rc = BAD_VALUE;
10592 }
10593 }
10594
10595 if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
10596 uint8_t videoStabMode =
10597 frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
10598 LOGD("videoStabMode from APP = %d", videoStabMode);
10599 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_VIDEO_STAB_MODE,
10600 videoStabMode)) {
10601 rc = BAD_VALUE;
10602 }
10603 }
10604
10605
10606 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
10607 uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
10608 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
10609 noiseRedMode)) {
10610 rc = BAD_VALUE;
10611 }
10612 }
10613
10614 if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
10615 float reprocessEffectiveExposureFactor =
10616 frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
10617 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
10618 reprocessEffectiveExposureFactor)) {
10619 rc = BAD_VALUE;
10620 }
10621 }
10622
10623 cam_crop_region_t scalerCropRegion;
10624 bool scalerCropSet = false;
10625 if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
10626 scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
10627 scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
10628 scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
10629 scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
10630
10631 // Map coordinate system from active array to sensor output.
10632 mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
10633 scalerCropRegion.width, scalerCropRegion.height);
10634
10635 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
10636 scalerCropRegion)) {
10637 rc = BAD_VALUE;
10638 }
10639 scalerCropSet = true;
10640 }
10641
10642 if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
10643 int64_t sensorExpTime =
10644 frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
10645 LOGD("setting sensorExpTime %lld", sensorExpTime);
10646 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
10647 sensorExpTime)) {
10648 rc = BAD_VALUE;
10649 }
10650 }
10651
10652 if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
10653 int64_t sensorFrameDuration =
10654 frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
10655 int64_t minFrameDuration = getMinFrameDuration(request);
10656 sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
10657 if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
10658 sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
10659 LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
10660 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
10661 sensorFrameDuration)) {
10662 rc = BAD_VALUE;
10663 }
10664 }
10665
10666 if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
10667 int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
10668 if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
10669 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
10670 if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
10671 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
10672 LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
10673 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
10674 sensorSensitivity)) {
10675 rc = BAD_VALUE;
10676 }
10677 }
10678
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010679#ifndef USE_HAL_3_3
10680 if (frame_settings.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
10681 int32_t ispSensitivity =
10682 frame_settings.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST).data.i32[0];
10683 if (ispSensitivity <
10684 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity) {
10685 ispSensitivity =
10686 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
10687 LOGD("clamp ispSensitivity to %d", ispSensitivity);
10688 }
10689 if (ispSensitivity >
10690 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity) {
10691 ispSensitivity =
10692 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity;
10693 LOGD("clamp ispSensitivity to %d", ispSensitivity);
10694 }
10695 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_ISP_SENSITIVITY,
10696 ispSensitivity)) {
10697 rc = BAD_VALUE;
10698 }
10699 }
10700#endif
10701
Thierry Strudel3d639192016-09-09 11:52:26 -070010702 if (frame_settings.exists(ANDROID_SHADING_MODE)) {
10703 uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
10704 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
10705 rc = BAD_VALUE;
10706 }
10707 }
10708
10709 if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
10710 uint8_t fwk_facedetectMode =
10711 frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
10712
10713 int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
10714 fwk_facedetectMode);
10715
10716 if (NAME_NOT_FOUND != val) {
10717 uint8_t facedetectMode = (uint8_t)val;
10718 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
10719 facedetectMode)) {
10720 rc = BAD_VALUE;
10721 }
10722 }
10723 }
10724
10725 if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) {
10726 uint8_t histogramMode =
10727 frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0];
10728 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
10729 histogramMode)) {
10730 rc = BAD_VALUE;
10731 }
10732 }
10733
10734 if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
10735 uint8_t sharpnessMapMode =
10736 frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
10737 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
10738 sharpnessMapMode)) {
10739 rc = BAD_VALUE;
10740 }
10741 }
10742
10743 if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
10744 uint8_t tonemapMode =
10745 frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
10746 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
10747 rc = BAD_VALUE;
10748 }
10749 }
10750 /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
10751 /*All tonemap channels will have the same number of points*/
10752 if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
10753 frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
10754 frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
10755 cam_rgb_tonemap_curves tonemapCurves;
10756 tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
10757 if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
10758 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
10759 tonemapCurves.tonemap_points_cnt,
10760 CAM_MAX_TONEMAP_CURVE_SIZE);
10761 tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
10762 }
10763
10764 /* ch0 = G*/
10765 size_t point = 0;
10766 cam_tonemap_curve_t tonemapCurveGreen;
10767 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
10768 for (size_t j = 0; j < 2; j++) {
10769 tonemapCurveGreen.tonemap_points[i][j] =
10770 frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
10771 point++;
10772 }
10773 }
10774 tonemapCurves.curves[0] = tonemapCurveGreen;
10775
10776 /* ch 1 = B */
10777 point = 0;
10778 cam_tonemap_curve_t tonemapCurveBlue;
10779 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
10780 for (size_t j = 0; j < 2; j++) {
10781 tonemapCurveBlue.tonemap_points[i][j] =
10782 frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
10783 point++;
10784 }
10785 }
10786 tonemapCurves.curves[1] = tonemapCurveBlue;
10787
10788 /* ch 2 = R */
10789 point = 0;
10790 cam_tonemap_curve_t tonemapCurveRed;
10791 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
10792 for (size_t j = 0; j < 2; j++) {
10793 tonemapCurveRed.tonemap_points[i][j] =
10794 frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
10795 point++;
10796 }
10797 }
10798 tonemapCurves.curves[2] = tonemapCurveRed;
10799
10800 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
10801 tonemapCurves)) {
10802 rc = BAD_VALUE;
10803 }
10804 }
10805
10806 if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
10807 uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
10808 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
10809 captureIntent)) {
10810 rc = BAD_VALUE;
10811 }
10812 }
10813
10814 if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
10815 uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
10816 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
10817 blackLevelLock)) {
10818 rc = BAD_VALUE;
10819 }
10820 }
10821
10822 if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
10823 uint8_t lensShadingMapMode =
10824 frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
10825 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
10826 lensShadingMapMode)) {
10827 rc = BAD_VALUE;
10828 }
10829 }
10830
10831 if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
10832 cam_area_t roi;
10833 bool reset = true;
10834 convertFromRegions(roi, request->settings, ANDROID_CONTROL_AE_REGIONS);
10835
10836 // Map coordinate system from active array to sensor output.
10837 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
10838 roi.rect.height);
10839
10840 if (scalerCropSet) {
10841 reset = resetIfNeededROI(&roi, &scalerCropRegion);
10842 }
10843 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
10844 rc = BAD_VALUE;
10845 }
10846 }
10847
10848 if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
10849 cam_area_t roi;
10850 bool reset = true;
10851 convertFromRegions(roi, request->settings, ANDROID_CONTROL_AF_REGIONS);
10852
10853 // Map coordinate system from active array to sensor output.
10854 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
10855 roi.rect.height);
10856
10857 if (scalerCropSet) {
10858 reset = resetIfNeededROI(&roi, &scalerCropRegion);
10859 }
10860 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
10861 rc = BAD_VALUE;
10862 }
10863 }
10864
10865 // CDS for non-HFR non-video mode
10866 if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
10867 !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
10868 int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
10869 if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
10870 LOGE("Invalid CDS mode %d!", *fwk_cds);
10871 } else {
10872 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
10873 CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
10874 rc = BAD_VALUE;
10875 }
10876 }
10877 }
10878
Thierry Strudel04e026f2016-10-10 11:27:36 -070010879 // Video HDR
10880 if (frame_settings.exists(QCAMERA3_VIDEO_HDR_MODE)) {
10881 cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
10882 frame_settings.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
10883 rc = setVideoHdrMode(mParameters, vhdr);
10884 if (rc != NO_ERROR) {
10885 LOGE("setVideoHDR is failed");
10886 }
10887 }
10888
10889 //IR
10890 if(frame_settings.exists(QCAMERA3_IR_MODE)) {
10891 cam_ir_mode_type_t fwk_ir = (cam_ir_mode_type_t)
10892 frame_settings.find(QCAMERA3_IR_MODE).data.i32[0];
10893 if ((CAM_IR_MODE_MAX <= fwk_ir) || (0 > fwk_ir)) {
10894 LOGE("Invalid IR mode %d!", fwk_ir);
10895 } else {
10896 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
10897 CAM_INTF_META_IR_MODE, fwk_ir)) {
10898 rc = BAD_VALUE;
10899 }
10900 }
10901 }
10902
Thierry Strudel269c81a2016-10-12 12:13:59 -070010903 if (frame_settings.exists(QCAMERA3_AEC_CONVERGENCE_SPEED)) {
10904 float aec_speed;
10905 aec_speed = frame_settings.find(QCAMERA3_AEC_CONVERGENCE_SPEED).data.f[0];
10906 LOGD("AEC Speed :%f", aec_speed);
10907 if ( aec_speed < 0 ) {
10908 LOGE("Invalid AEC mode %f!", aec_speed);
10909 } else {
10910 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_CONVERGENCE_SPEED,
10911 aec_speed)) {
10912 rc = BAD_VALUE;
10913 }
10914 }
10915 }
10916
10917 if (frame_settings.exists(QCAMERA3_AWB_CONVERGENCE_SPEED)) {
10918 float awb_speed;
10919 awb_speed = frame_settings.find(QCAMERA3_AWB_CONVERGENCE_SPEED).data.f[0];
10920 LOGD("AWB Speed :%f", awb_speed);
10921 if ( awb_speed < 0 ) {
10922 LOGE("Invalid AWB mode %f!", awb_speed);
10923 } else {
10924 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AWB_CONVERGENCE_SPEED,
10925 awb_speed)) {
10926 rc = BAD_VALUE;
10927 }
10928 }
10929 }
10930
Thierry Strudel3d639192016-09-09 11:52:26 -070010931 // TNR
10932 if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
10933 frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
10934 uint8_t b_TnrRequested = 0;
10935 cam_denoise_param_t tnr;
10936 tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
10937 tnr.process_plates =
10938 (cam_denoise_process_type_t)frame_settings.find(
10939 QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
10940 b_TnrRequested = tnr.denoise_enable;
10941 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
10942 rc = BAD_VALUE;
10943 }
10944 }
10945
Thierry Strudel295a0ca2016-11-03 18:38:47 -070010946 if (frame_settings.exists(QCAMERA3_EXPOSURE_METERING_MODE)) {
10947 int32_t* exposure_metering_mode =
10948 frame_settings.find(QCAMERA3_EXPOSURE_METERING_MODE).data.i32;
10949 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_ALGO_TYPE,
10950 *exposure_metering_mode)) {
10951 rc = BAD_VALUE;
10952 }
10953 }
10954
Thierry Strudel3d639192016-09-09 11:52:26 -070010955 if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
10956 int32_t fwk_testPatternMode =
10957 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
10958 int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
10959 METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
10960
10961 if (NAME_NOT_FOUND != testPatternMode) {
10962 cam_test_pattern_data_t testPatternData;
10963 memset(&testPatternData, 0, sizeof(testPatternData));
10964 testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
10965 if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
10966 frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
10967 int32_t *fwk_testPatternData =
10968 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
10969 testPatternData.r = fwk_testPatternData[0];
10970 testPatternData.b = fwk_testPatternData[3];
10971 switch (gCamCapability[mCameraId]->color_arrangement) {
10972 case CAM_FILTER_ARRANGEMENT_RGGB:
10973 case CAM_FILTER_ARRANGEMENT_GRBG:
10974 testPatternData.gr = fwk_testPatternData[1];
10975 testPatternData.gb = fwk_testPatternData[2];
10976 break;
10977 case CAM_FILTER_ARRANGEMENT_GBRG:
10978 case CAM_FILTER_ARRANGEMENT_BGGR:
10979 testPatternData.gr = fwk_testPatternData[2];
10980 testPatternData.gb = fwk_testPatternData[1];
10981 break;
10982 default:
10983 LOGE("color arrangement %d is not supported",
10984 gCamCapability[mCameraId]->color_arrangement);
10985 break;
10986 }
10987 }
10988 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
10989 testPatternData)) {
10990 rc = BAD_VALUE;
10991 }
10992 } else {
10993 LOGE("Invalid framework sensor test pattern mode %d",
10994 fwk_testPatternMode);
10995 }
10996 }
10997
10998 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
10999 size_t count = 0;
11000 camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
11001 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
11002 gps_coords.data.d, gps_coords.count, count);
11003 if (gps_coords.count != count) {
11004 rc = BAD_VALUE;
11005 }
11006 }
11007
11008 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
11009 char gps_methods[GPS_PROCESSING_METHOD_SIZE];
11010 size_t count = 0;
11011 const char *gps_methods_src = (const char *)
11012 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
11013 memset(gps_methods, '\0', sizeof(gps_methods));
11014 strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
11015 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
11016 gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
11017 if (GPS_PROCESSING_METHOD_SIZE != count) {
11018 rc = BAD_VALUE;
11019 }
11020 }
11021
11022 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
11023 int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
11024 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
11025 gps_timestamp)) {
11026 rc = BAD_VALUE;
11027 }
11028 }
11029
11030 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
11031 int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
11032 cam_rotation_info_t rotation_info;
11033 if (orientation == 0) {
11034 rotation_info.rotation = ROTATE_0;
11035 } else if (orientation == 90) {
11036 rotation_info.rotation = ROTATE_90;
11037 } else if (orientation == 180) {
11038 rotation_info.rotation = ROTATE_180;
11039 } else if (orientation == 270) {
11040 rotation_info.rotation = ROTATE_270;
11041 }
11042 rotation_info.streamId = snapshotStreamId;
11043 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
11044 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
11045 rc = BAD_VALUE;
11046 }
11047 }
11048
11049 if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
11050 uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
11051 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
11052 rc = BAD_VALUE;
11053 }
11054 }
11055
11056 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
11057 uint32_t thumb_quality = (uint32_t)
11058 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
11059 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
11060 thumb_quality)) {
11061 rc = BAD_VALUE;
11062 }
11063 }
11064
11065 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
11066 cam_dimension_t dim;
11067 dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
11068 dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
11069 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
11070 rc = BAD_VALUE;
11071 }
11072 }
11073
11074 // Internal metadata
11075 if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
11076 size_t count = 0;
11077 camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
11078 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
11079 privatedata.data.i32, privatedata.count, count);
11080 if (privatedata.count != count) {
11081 rc = BAD_VALUE;
11082 }
11083 }
11084
Thierry Strudel295a0ca2016-11-03 18:38:47 -070011085 // ISO/Exposure Priority
11086 if (frame_settings.exists(QCAMERA3_USE_ISO_EXP_PRIORITY) &&
11087 frame_settings.exists(QCAMERA3_SELECT_PRIORITY)) {
11088 cam_priority_mode_t mode =
11089 (cam_priority_mode_t)frame_settings.find(QCAMERA3_SELECT_PRIORITY).data.i32[0];
11090 if((CAM_ISO_PRIORITY == mode) || (CAM_EXP_PRIORITY == mode)) {
11091 cam_intf_parm_manual_3a_t use_iso_exp_pty;
11092 use_iso_exp_pty.previewOnly = FALSE;
11093 uint64_t* ptr = (uint64_t*)frame_settings.find(QCAMERA3_USE_ISO_EXP_PRIORITY).data.i64;
11094 use_iso_exp_pty.value = *ptr;
11095
11096 if(CAM_ISO_PRIORITY == mode) {
11097 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ISO,
11098 use_iso_exp_pty)) {
11099 rc = BAD_VALUE;
11100 }
11101 }
11102 else {
11103 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_TIME,
11104 use_iso_exp_pty)) {
11105 rc = BAD_VALUE;
11106 }
11107 }
11108 }
11109 }
11110
11111 // Saturation
11112 if (frame_settings.exists(QCAMERA3_USE_SATURATION)) {
11113 int32_t* use_saturation =
11114 frame_settings.find(QCAMERA3_USE_SATURATION).data.i32;
11115 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_SATURATION, *use_saturation)) {
11116 rc = BAD_VALUE;
11117 }
11118 }
11119
Thierry Strudel3d639192016-09-09 11:52:26 -070011120 // EV step
11121 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
11122 gCamCapability[mCameraId]->exp_compensation_step)) {
11123 rc = BAD_VALUE;
11124 }
11125
11126 // CDS info
11127 if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
11128 cam_cds_data_t *cdsData = (cam_cds_data_t *)
11129 frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
11130
11131 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11132 CAM_INTF_META_CDS_DATA, *cdsData)) {
11133 rc = BAD_VALUE;
11134 }
11135 }
11136
Shuzhen Wang19463d72016-03-08 11:09:52 -080011137 // Hybrid AE
11138 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
11139 uint8_t *hybrid_ae = (uint8_t *)
11140 frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;
11141
11142 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11143 CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
11144 rc = BAD_VALUE;
11145 }
11146 }
11147
Thierry Strudel3d639192016-09-09 11:52:26 -070011148 return rc;
11149}
11150
11151/*===========================================================================
11152 * FUNCTION : captureResultCb
11153 *
11154 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
11155 *
11156 * PARAMETERS :
11157 * @frame : frame information from mm-camera-interface
11158 * @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
11159 * @userdata: userdata
11160 *
11161 * RETURN : NONE
11162 *==========================================================================*/
11163void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
11164 camera3_stream_buffer_t *buffer,
11165 uint32_t frame_number, bool isInputBuffer, void *userdata)
11166{
11167 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
11168 if (hw == NULL) {
11169 LOGE("Invalid hw %p", hw);
11170 return;
11171 }
11172
11173 hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
11174 return;
11175}
11176
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011177/*===========================================================================
11178 * FUNCTION : setBufferErrorStatus
11179 *
11180 * DESCRIPTION: Callback handler for channels to report any buffer errors
11181 *
11182 * PARAMETERS :
11183 * @ch : Channel on which buffer error is reported from
11184 * @frame_number : frame number on which buffer error is reported on
11185 * @buffer_status : buffer error status
11186 * @userdata: userdata
11187 *
11188 * RETURN : NONE
11189 *==========================================================================*/
11190void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
11191 uint32_t frame_number, camera3_buffer_status_t err, void *userdata)
11192{
11193 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
11194 if (hw == NULL) {
11195 LOGE("Invalid hw %p", hw);
11196 return;
11197 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011198
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011199 hw->setBufferErrorStatus(ch, frame_number, err);
11200 return;
11201}
11202
11203void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
11204 uint32_t frameNumber, camera3_buffer_status_t err)
11205{
11206 LOGD("channel: %p, frame# %d, buf err: %d", ch, frameNumber, err);
11207 pthread_mutex_lock(&mMutex);
11208
11209 for (auto& req : mPendingBuffersMap.mPendingBuffersInRequest) {
11210 if (req.frame_number != frameNumber)
11211 continue;
11212 for (auto& k : req.mPendingBufferList) {
11213 if(k.stream->priv == ch) {
11214 k.bufStatus = CAMERA3_BUFFER_STATUS_ERROR;
11215 }
11216 }
11217 }
11218
11219 pthread_mutex_unlock(&mMutex);
11220 return;
11221}
Thierry Strudel3d639192016-09-09 11:52:26 -070011222/*===========================================================================
11223 * FUNCTION : initialize
11224 *
11225 * DESCRIPTION: Pass framework callback pointers to HAL
11226 *
11227 * PARAMETERS :
11228 *
11229 *
11230 * RETURN : Success : 0
11231 * Failure: -ENODEV
11232 *==========================================================================*/
11233
11234int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
11235 const camera3_callback_ops_t *callback_ops)
11236{
11237 LOGD("E");
11238 QCamera3HardwareInterface *hw =
11239 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
11240 if (!hw) {
11241 LOGE("NULL camera device");
11242 return -ENODEV;
11243 }
11244
11245 int rc = hw->initialize(callback_ops);
11246 LOGD("X");
11247 return rc;
11248}
11249
11250/*===========================================================================
11251 * FUNCTION : configure_streams
11252 *
11253 * DESCRIPTION:
11254 *
11255 * PARAMETERS :
11256 *
11257 *
11258 * RETURN : Success: 0
11259 * Failure: -EINVAL (if stream configuration is invalid)
11260 * -ENODEV (fatal error)
11261 *==========================================================================*/
11262
11263int QCamera3HardwareInterface::configure_streams(
11264 const struct camera3_device *device,
11265 camera3_stream_configuration_t *stream_list)
11266{
11267 LOGD("E");
11268 QCamera3HardwareInterface *hw =
11269 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
11270 if (!hw) {
11271 LOGE("NULL camera device");
11272 return -ENODEV;
11273 }
11274 int rc = hw->configureStreams(stream_list);
11275 LOGD("X");
11276 return rc;
11277}
11278
11279/*===========================================================================
11280 * FUNCTION : construct_default_request_settings
11281 *
11282 * DESCRIPTION: Configure a settings buffer to meet the required use case
11283 *
11284 * PARAMETERS :
11285 *
11286 *
11287 * RETURN : Success: Return valid metadata
11288 * Failure: Return NULL
11289 *==========================================================================*/
11290const camera_metadata_t* QCamera3HardwareInterface::
11291 construct_default_request_settings(const struct camera3_device *device,
11292 int type)
11293{
11294
11295 LOGD("E");
11296 camera_metadata_t* fwk_metadata = NULL;
11297 QCamera3HardwareInterface *hw =
11298 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
11299 if (!hw) {
11300 LOGE("NULL camera device");
11301 return NULL;
11302 }
11303
11304 fwk_metadata = hw->translateCapabilityToMetadata(type);
11305
11306 LOGD("X");
11307 return fwk_metadata;
11308}
11309
11310/*===========================================================================
11311 * FUNCTION : process_capture_request
11312 *
11313 * DESCRIPTION:
11314 *
11315 * PARAMETERS :
11316 *
11317 *
11318 * RETURN :
11319 *==========================================================================*/
11320int QCamera3HardwareInterface::process_capture_request(
11321 const struct camera3_device *device,
11322 camera3_capture_request_t *request)
11323{
11324 LOGD("E");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011325 CAMSCOPE_UPDATE_FLAGS(CAMSCOPE_SECTION_HAL, kpi_camscope_flags);
Thierry Strudel3d639192016-09-09 11:52:26 -070011326 QCamera3HardwareInterface *hw =
11327 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
11328 if (!hw) {
11329 LOGE("NULL camera device");
11330 return -EINVAL;
11331 }
11332
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011333 int rc = hw->orchestrateRequest(request);
Thierry Strudel3d639192016-09-09 11:52:26 -070011334 LOGD("X");
11335 return rc;
11336}
11337
11338/*===========================================================================
11339 * FUNCTION : dump
11340 *
11341 * DESCRIPTION:
11342 *
11343 * PARAMETERS :
11344 *
11345 *
11346 * RETURN :
11347 *==========================================================================*/
11348
11349void QCamera3HardwareInterface::dump(
11350 const struct camera3_device *device, int fd)
11351{
11352 /* Log level property is read when "adb shell dumpsys media.camera" is
11353 called so that the log level can be controlled without restarting
11354 the media server */
11355 getLogLevel();
11356
11357 LOGD("E");
11358 QCamera3HardwareInterface *hw =
11359 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
11360 if (!hw) {
11361 LOGE("NULL camera device");
11362 return;
11363 }
11364
11365 hw->dump(fd);
11366 LOGD("X");
11367 return;
11368}
11369
11370/*===========================================================================
11371 * FUNCTION : flush
11372 *
11373 * DESCRIPTION:
11374 *
11375 * PARAMETERS :
11376 *
11377 *
11378 * RETURN :
11379 *==========================================================================*/
11380
11381int QCamera3HardwareInterface::flush(
11382 const struct camera3_device *device)
11383{
11384 int rc;
11385 LOGD("E");
11386 QCamera3HardwareInterface *hw =
11387 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
11388 if (!hw) {
11389 LOGE("NULL camera device");
11390 return -EINVAL;
11391 }
11392
11393 pthread_mutex_lock(&hw->mMutex);
11394 // Validate current state
11395 switch (hw->mState) {
11396 case STARTED:
11397 /* valid state */
11398 break;
11399
11400 case ERROR:
11401 pthread_mutex_unlock(&hw->mMutex);
11402 hw->handleCameraDeviceError();
11403 return -ENODEV;
11404
11405 default:
11406 LOGI("Flush returned during state %d", hw->mState);
11407 pthread_mutex_unlock(&hw->mMutex);
11408 return 0;
11409 }
11410 pthread_mutex_unlock(&hw->mMutex);
11411
11412 rc = hw->flush(true /* restart channels */ );
11413 LOGD("X");
11414 return rc;
11415}
11416
11417/*===========================================================================
11418 * FUNCTION : close_camera_device
11419 *
11420 * DESCRIPTION:
11421 *
11422 * PARAMETERS :
11423 *
11424 *
11425 * RETURN :
11426 *==========================================================================*/
11427int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
11428{
11429 int ret = NO_ERROR;
11430 QCamera3HardwareInterface *hw =
11431 reinterpret_cast<QCamera3HardwareInterface *>(
11432 reinterpret_cast<camera3_device_t *>(device)->priv);
11433 if (!hw) {
11434 LOGE("NULL camera device");
11435 return BAD_VALUE;
11436 }
11437
11438 LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
11439 delete hw;
11440 LOGI("[KPI Perf]: X");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011441 CAMSCOPE_DESTROY(CAMSCOPE_SECTION_HAL);
Thierry Strudel3d639192016-09-09 11:52:26 -070011442 return ret;
11443}
11444
11445/*===========================================================================
11446 * FUNCTION : getWaveletDenoiseProcessPlate
11447 *
11448 * DESCRIPTION: query wavelet denoise process plate
11449 *
11450 * PARAMETERS : None
11451 *
11452 * RETURN : WNR process plate value
11453 *==========================================================================*/
11454cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
11455{
11456 char prop[PROPERTY_VALUE_MAX];
11457 memset(prop, 0, sizeof(prop));
11458 property_get("persist.denoise.process.plates", prop, "0");
11459 int processPlate = atoi(prop);
11460 switch(processPlate) {
11461 case 0:
11462 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
11463 case 1:
11464 return CAM_WAVELET_DENOISE_CBCR_ONLY;
11465 case 2:
11466 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
11467 case 3:
11468 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
11469 default:
11470 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
11471 }
11472}
11473
11474
11475/*===========================================================================
11476 * FUNCTION : getTemporalDenoiseProcessPlate
11477 *
11478 * DESCRIPTION: query temporal denoise process plate
11479 *
11480 * PARAMETERS : None
11481 *
11482 * RETURN : TNR process plate value
11483 *==========================================================================*/
11484cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
11485{
11486 char prop[PROPERTY_VALUE_MAX];
11487 memset(prop, 0, sizeof(prop));
11488 property_get("persist.tnr.process.plates", prop, "0");
11489 int processPlate = atoi(prop);
11490 switch(processPlate) {
11491 case 0:
11492 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
11493 case 1:
11494 return CAM_WAVELET_DENOISE_CBCR_ONLY;
11495 case 2:
11496 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
11497 case 3:
11498 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
11499 default:
11500 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
11501 }
11502}
11503
11504
11505/*===========================================================================
11506 * FUNCTION : extractSceneMode
11507 *
11508 * DESCRIPTION: Extract scene mode from frameworks set metadata
11509 *
11510 * PARAMETERS :
11511 * @frame_settings: CameraMetadata reference
11512 * @metaMode: ANDROID_CONTROL_MODE
11513 * @hal_metadata: hal metadata structure
11514 *
11515 * RETURN : None
11516 *==========================================================================*/
int32_t QCamera3HardwareInterface::extractSceneMode(
        const CameraMetadata &frame_settings, uint8_t metaMode,
        metadata_buffer_t *hal_metadata)
{
    int32_t rc = NO_ERROR;

    if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
        camera_metadata_ro_entry entry =
                frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
        // Scene mode requested but no scene-mode tag present: nothing to set.
        if (0 == entry.count)
            return rc;

        uint8_t fwk_sceneMode = entry.data.u8[0];

        // Translate the framework scene mode to the HAL bestshot value; an
        // unmapped mode is silently skipped (no bestshot entry is batched).
        int val = lookupHalName(SCENE_MODES_MAP,
                sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
                fwk_sceneMode);
        if (NAME_NOT_FOUND != val) {
            uint8_t sceneMode = (uint8_t)val;
            LOGD("sceneMode: %d", sceneMode);
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
                    CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
                rc = BAD_VALUE;
            }
        }

        // HDR scene mode additionally enables multi-frame HAL bracketing
        // (without the extra 1x frame).
        if (fwk_sceneMode == ANDROID_CONTROL_SCENE_MODE_HDR) {
            cam_hdr_param_t hdr_params;
            hdr_params.hdr_enable = 1;
            hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
            hdr_params.hdr_need_1x = false;
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
                    CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
                rc = BAD_VALUE;
            }
        }
    } else if ((ANDROID_CONTROL_MODE_OFF == metaMode) ||
            (ANDROID_CONTROL_MODE_AUTO == metaMode)) {
        // OFF/AUTO control modes explicitly disable any scene mode.
        uint8_t sceneMode = CAM_SCENE_MODE_OFF;
        LOGD("sceneMode: %d", sceneMode);
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
                CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
            rc = BAD_VALUE;
        }
    }

    // Forced HDR snapshot (debug/override flag) batches the same HDR
    // bracketing parameters regardless of the control mode above.
    if (mForceHdrSnapshot) {
        cam_hdr_param_t hdr_params;
        hdr_params.hdr_enable = 1;
        hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
        hdr_params.hdr_need_1x = false;
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
                CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
            rc = BAD_VALUE;
        }
    }

    return rc;
}
11576
11577/*===========================================================================
Thierry Strudel04e026f2016-10-10 11:27:36 -070011578 * FUNCTION : setVideoHdrMode
11579 *
11580 * DESCRIPTION: Set Video HDR mode from frameworks set metadata
11581 *
11582 * PARAMETERS :
11583 * @hal_metadata: hal metadata structure
11584 * @metaMode: QCAMERA3_VIDEO_HDR_MODE
11585 *
11586 * RETURN : None
11587 *==========================================================================*/
int32_t QCamera3HardwareInterface::setVideoHdrMode(
    metadata_buffer_t *hal_metadata, cam_video_hdr_mode_t vhdr)
{
    int32_t rc = NO_ERROR;
    // Reject values outside the valid video-HDR enum range.
    if ((CAM_VIDEO_HDR_MODE_MAX <= (vhdr)) || (0 > (vhdr))) {
        LOGE("%s: Invalid Video HDR mode %d!", __func__, vhdr);
        rc = BAD_VALUE;
    } else {
        cam_sensor_hdr_type_t vhdr_type = CAM_SENSOR_HDR_MAX;
        if(vhdr == QCAMERA3_VIDEO_HDR_MODE_OFF) {
            LOGD("Setting HDR mode Off");
            vhdr_type = CAM_SENSOR_HDR_OFF;
        } else {
            // HDR requested: the concrete sensor-HDR flavor is chosen by a
            // persistent property, gated on the camera's feature mask.
            char video_hdr_prop[PROPERTY_VALUE_MAX];
            memset(video_hdr_prop, 0, sizeof(video_hdr_prop));
            property_get("persist.camera.hdr.video", video_hdr_prop, "3");
            uint8_t use_hdr_video = (uint8_t)atoi(video_hdr_prop);
            if ((gCamCapability[mCameraId]->qcom_supported_feature_mask &
                    CAM_QCOM_FEATURE_SENSOR_HDR) &&
                    (use_hdr_video == CAM_SENSOR_HDR_IN_SENSOR)) {
                LOGD("Setting HDR mode In Sensor");
                vhdr_type = CAM_SENSOR_HDR_IN_SENSOR;
            }
            if ((gCamCapability[mCameraId]->qcom_supported_feature_mask &
                    CAM_QCOM_FEATURE_ZIGZAG_VIDEO_HDR) &&
                    (use_hdr_video == CAM_SENSOR_HDR_ZIGZAG)) {
                LOGD("Setting HDR mode Zigzag");
                vhdr_type = CAM_SENSOR_HDR_ZIGZAG;
            }
            if ((gCamCapability[mCameraId]->qcom_supported_feature_mask &
                    CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) &&
                    (use_hdr_video == CAM_SENSOR_HDR_STAGGERED)) {
                LOGD("Setting HDR mode Staggered");
                vhdr_type = CAM_SENSOR_HDR_STAGGERED;
            }
            // No supported flavor matched the property: report failure.
            if(vhdr_type == CAM_SENSOR_HDR_MAX) {
                LOGD("HDR mode not supported");
                rc = BAD_VALUE;
            }
        }
        // Batch the resolved sensor-HDR type only if resolution succeeded.
        if(rc == NO_ERROR) {
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
                    CAM_INTF_PARM_SENSOR_HDR, vhdr_type)) {
                rc = BAD_VALUE;
            }
        }
    }
    return rc;
}
11637
11638/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070011639 * FUNCTION : needRotationReprocess
11640 *
11641 * DESCRIPTION: if rotation needs to be done by reprocess in pp
11642 *
11643 * PARAMETERS : none
11644 *
11645 * RETURN : true: needed
11646 * false: no need
11647 *==========================================================================*/
11648bool QCamera3HardwareInterface::needRotationReprocess()
11649{
11650 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
11651 // current rotation is not zero, and pp has the capability to process rotation
11652 LOGH("need do reprocess for rotation");
11653 return true;
11654 }
11655
11656 return false;
11657}
11658
11659/*===========================================================================
11660 * FUNCTION : needReprocess
11661 *
11662 * DESCRIPTION: if reprocess is needed
11663 *
11664 * PARAMETERS : none
11665 *
11666 * RETURN : true: needed
11667 * false: no need
11668 *==========================================================================*/
11669bool QCamera3HardwareInterface::needReprocess(cam_feature_mask_t postprocess_mask)
11670{
11671 if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
11672 // TODO: add for ZSL HDR later
11673 // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
11674 if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
11675 LOGH("need do reprocess for ZSL WNR or min PP reprocess");
11676 return true;
11677 } else {
11678 LOGH("already post processed frame");
11679 return false;
11680 }
11681 }
11682 return needRotationReprocess();
11683}
11684
11685/*===========================================================================
11686 * FUNCTION : needJpegExifRotation
11687 *
11688 * DESCRIPTION: if rotation from jpeg is needed
11689 *
11690 * PARAMETERS : none
11691 *
11692 * RETURN : true: needed
11693 * false: no need
11694 *==========================================================================*/
11695bool QCamera3HardwareInterface::needJpegExifRotation()
11696{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011697 /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
Thierry Strudel3d639192016-09-09 11:52:26 -070011698 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
11699 LOGD("Need use Jpeg EXIF Rotation");
11700 return true;
11701 }
11702 return false;
11703}
11704
11705/*===========================================================================
11706 * FUNCTION : addOfflineReprocChannel
11707 *
11708 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
11709 * coming from input channel
11710 *
11711 * PARAMETERS :
11712 * @config : reprocess configuration
11713 * @inputChHandle : pointer to the input (source) channel
11714 *
11715 *
11716 * RETURN : Ptr to the newly created channel obj. NULL if failed.
11717 *==========================================================================*/
QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
        const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
{
    int32_t rc = NO_ERROR;
    QCamera3ReprocessChannel *pChannel = NULL;

    pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
            mChannelHandle, mCameraHandle->ops, captureResultCb, setBufferErrorStatus,
            config.padding, CAM_QCOM_FEATURE_NONE, this, inputChHandle);
    if (NULL == pChannel) {
        LOGE("no mem for reprocess channel");
        return NULL;
    }

    rc = pChannel->initialize(IS_TYPE_NONE);
    if (rc != NO_ERROR) {
        LOGE("init reprocess channel failed, ret = %d", rc);
        delete pChannel;
        return NULL;
    }

    // pp feature config: start from the HAL3 superset, then adjust for the
    // camera's advertised capabilities.
    cam_pp_feature_config_t pp_config;
    memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));

    pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
    if (gCamCapability[mCameraId]->qcom_supported_feature_mask
            & CAM_QCOM_FEATURE_DSDN) {
        //Use CPP CDS incase h/w supports it.
        pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
        pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
    }
    // Drop rotation from the mask when the pp block cannot rotate.
    if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
        pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
    }

    // Propagate HDR bracketing parameters from the reprocess config.
    if (config.hdr_param.hdr_enable) {
        pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
        pp_config.hdr_param = config.hdr_param;
    }

    // Debug/override flag: force multi-frame HDR regardless of config.
    if (mForceHdrSnapshot) {
        pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
        pp_config.hdr_param.hdr_enable = 1;
        pp_config.hdr_param.hdr_need_1x = 0;
        pp_config.hdr_param.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
    }

    rc = pChannel->addReprocStreamsFromSource(pp_config,
            config,
            IS_TYPE_NONE,
            mMetadataChannel);

    if (rc != NO_ERROR) {
        delete pChannel;
        return NULL;
    }
    return pChannel;
}
11777
11778/*===========================================================================
11779 * FUNCTION : getMobicatMask
11780 *
11781 * DESCRIPTION: returns mobicat mask
11782 *
11783 * PARAMETERS : none
11784 *
11785 * RETURN : mobicat mask
11786 *
11787 *==========================================================================*/
uint8_t QCamera3HardwareInterface::getMobicatMask()
{
    // Simple accessor for the mobicat mask assigned in setMobicat().
    return m_MobicatMask;
}
11792
11793/*===========================================================================
11794 * FUNCTION : setMobicat
11795 *
11796 * DESCRIPTION: set Mobicat on/off.
11797 *
11798 * PARAMETERS :
11799 * @params : none
11800 *
11801 * RETURN : int32_t type of status
11802 * NO_ERROR -- success
11803 * none-zero failure code
11804 *==========================================================================*/
11805int32_t QCamera3HardwareInterface::setMobicat()
11806{
11807 char value [PROPERTY_VALUE_MAX];
11808 property_get("persist.camera.mobicat", value, "0");
11809 int32_t ret = NO_ERROR;
11810 uint8_t enableMobi = (uint8_t)atoi(value);
11811
11812 if (enableMobi) {
11813 tune_cmd_t tune_cmd;
11814 tune_cmd.type = SET_RELOAD_CHROMATIX;
11815 tune_cmd.module = MODULE_ALL;
11816 tune_cmd.value = TRUE;
11817 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
11818 CAM_INTF_PARM_SET_VFE_COMMAND,
11819 tune_cmd);
11820
11821 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
11822 CAM_INTF_PARM_SET_PP_COMMAND,
11823 tune_cmd);
11824 }
11825 m_MobicatMask = enableMobi;
11826
11827 return ret;
11828}
11829
11830/*===========================================================================
11831* FUNCTION : getLogLevel
11832*
11833* DESCRIPTION: Reads the log level property into a variable
11834*
11835* PARAMETERS :
11836* None
11837*
11838* RETURN :
11839* None
11840*==========================================================================*/
11841void QCamera3HardwareInterface::getLogLevel()
11842{
11843 char prop[PROPERTY_VALUE_MAX];
11844 uint32_t globalLogLevel = 0;
11845
11846 property_get("persist.camera.hal.debug", prop, "0");
11847 int val = atoi(prop);
11848 if (0 <= val) {
11849 gCamHal3LogLevel = (uint32_t)val;
11850 }
11851
11852 property_get("persist.camera.kpi.debug", prop, "1");
11853 gKpiDebugLevel = atoi(prop);
11854
11855 property_get("persist.camera.global.debug", prop, "0");
11856 val = atoi(prop);
11857 if (0 <= val) {
11858 globalLogLevel = (uint32_t)val;
11859 }
11860
11861 /* Highest log level among hal.logs and global.logs is selected */
11862 if (gCamHal3LogLevel < globalLogLevel)
11863 gCamHal3LogLevel = globalLogLevel;
11864
11865 return;
11866}
11867
11868/*===========================================================================
11869 * FUNCTION : validateStreamRotations
11870 *
11871 * DESCRIPTION: Check if the rotations requested are supported
11872 *
11873 * PARAMETERS :
11874 * @stream_list : streams to be configured
11875 *
11876 * RETURN : NO_ERROR on success
11877 * -EINVAL on failure
11878 *
11879 *==========================================================================*/
11880int QCamera3HardwareInterface::validateStreamRotations(
11881 camera3_stream_configuration_t *streamList)
11882{
11883 int rc = NO_ERROR;
11884
11885 /*
11886 * Loop through all streams requested in configuration
11887 * Check if unsupported rotations have been requested on any of them
11888 */
11889 for (size_t j = 0; j < streamList->num_streams; j++){
11890 camera3_stream_t *newStream = streamList->streams[j];
11891
11892 bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
11893 bool isImplDef = (newStream->format ==
11894 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
11895 bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
11896 isImplDef);
11897
11898 if (isRotated && (!isImplDef || isZsl)) {
11899 LOGE("Error: Unsupported rotation of %d requested for stream"
11900 "type:%d and stream format:%d",
11901 newStream->rotation, newStream->stream_type,
11902 newStream->format);
11903 rc = -EINVAL;
11904 break;
11905 }
11906 }
11907
11908 return rc;
11909}
11910
11911/*===========================================================================
11912* FUNCTION : getFlashInfo
11913*
11914* DESCRIPTION: Retrieve information about whether the device has a flash.
11915*
11916* PARAMETERS :
11917* @cameraId : Camera id to query
11918* @hasFlash : Boolean indicating whether there is a flash device
11919* associated with given camera
11920* @flashNode : If a flash device exists, this will be its device node.
11921*
11922* RETURN :
11923* None
11924*==========================================================================*/
11925void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
11926 bool& hasFlash,
11927 char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
11928{
11929 cam_capability_t* camCapability = gCamCapability[cameraId];
11930 if (NULL == camCapability) {
11931 hasFlash = false;
11932 flashNode[0] = '\0';
11933 } else {
11934 hasFlash = camCapability->flash_available;
11935 strlcpy(flashNode,
11936 (char*)camCapability->flash_dev_name,
11937 QCAMERA_MAX_FILEPATH_LENGTH);
11938 }
11939}
11940
11941/*===========================================================================
11942* FUNCTION : getEepromVersionInfo
11943*
11944* DESCRIPTION: Retrieve version info of the sensor EEPROM data
11945*
11946* PARAMETERS : None
11947*
11948* RETURN : string describing EEPROM version
11949* "\0" if no such info available
11950*==========================================================================*/
11951const char *QCamera3HardwareInterface::getEepromVersionInfo()
11952{
11953 return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
11954}
11955
11956/*===========================================================================
11957* FUNCTION : getLdafCalib
11958*
11959* DESCRIPTION: Retrieve Laser AF calibration data
11960*
11961* PARAMETERS : None
11962*
11963* RETURN : Two uint32_t describing laser AF calibration data
11964* NULL if none is available.
11965*==========================================================================*/
11966const uint32_t *QCamera3HardwareInterface::getLdafCalib()
11967{
11968 if (mLdafCalibExist) {
11969 return &mLdafCalib[0];
11970 } else {
11971 return NULL;
11972 }
11973}
11974
11975/*===========================================================================
11976 * FUNCTION : dynamicUpdateMetaStreamInfo
11977 *
11978 * DESCRIPTION: This function:
11979 * (1) stops all the channels
11980 * (2) returns error on pending requests and buffers
11981 * (3) sends metastream_info in setparams
11982 * (4) starts all channels
11983 * This is useful when sensor has to be restarted to apply any
11984 * settings such as frame rate from a different sensor mode
11985 *
11986 * PARAMETERS : None
11987 *
11988 * RETURN : NO_ERROR on success
11989 * Error codes on failure
11990 *
11991 *==========================================================================*/
11992int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
11993{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011994 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_DYN_UPDATE_META_STRM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070011995 int rc = NO_ERROR;
11996
11997 LOGD("E");
11998
11999 rc = stopAllChannels();
12000 if (rc < 0) {
12001 LOGE("stopAllChannels failed");
12002 return rc;
12003 }
12004
12005 rc = notifyErrorForPendingRequests();
12006 if (rc < 0) {
12007 LOGE("notifyErrorForPendingRequests failed");
12008 return rc;
12009 }
12010
12011 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
12012 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x"
12013 "Format:%d",
12014 mStreamConfigInfo.type[i],
12015 mStreamConfigInfo.stream_sizes[i].width,
12016 mStreamConfigInfo.stream_sizes[i].height,
12017 mStreamConfigInfo.postprocess_mask[i],
12018 mStreamConfigInfo.format[i]);
12019 }
12020
12021 /* Send meta stream info once again so that ISP can start */
12022 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
12023 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
12024 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
12025 mParameters);
12026 if (rc < 0) {
12027 LOGE("set Metastreaminfo failed. Sensor mode does not change");
12028 }
12029
12030 rc = startAllChannels();
12031 if (rc < 0) {
12032 LOGE("startAllChannels failed");
12033 return rc;
12034 }
12035
12036 LOGD("X");
12037 return rc;
12038}
12039
12040/*===========================================================================
12041 * FUNCTION : stopAllChannels
12042 *
12043 * DESCRIPTION: This function stops (equivalent to stream-off) all channels
12044 *
12045 * PARAMETERS : None
12046 *
12047 * RETURN : NO_ERROR on success
12048 * Error codes on failure
12049 *
12050 *==========================================================================*/
12051int32_t QCamera3HardwareInterface::stopAllChannels()
12052{
12053 int32_t rc = NO_ERROR;
12054
12055 LOGD("Stopping all channels");
12056 // Stop the Streams/Channels
12057 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
12058 it != mStreamInfo.end(); it++) {
12059 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
12060 if (channel) {
12061 channel->stop();
12062 }
12063 (*it)->status = INVALID;
12064 }
12065
12066 if (mSupportChannel) {
12067 mSupportChannel->stop();
12068 }
12069 if (mAnalysisChannel) {
12070 mAnalysisChannel->stop();
12071 }
12072 if (mRawDumpChannel) {
12073 mRawDumpChannel->stop();
12074 }
12075 if (mMetadataChannel) {
12076 /* If content of mStreamInfo is not 0, there is metadata stream */
12077 mMetadataChannel->stop();
12078 }
12079
12080 LOGD("All channels stopped");
12081 return rc;
12082}
12083
12084/*===========================================================================
12085 * FUNCTION : startAllChannels
12086 *
12087 * DESCRIPTION: This function starts (equivalent to stream-on) all channels
12088 *
12089 * PARAMETERS : None
12090 *
12091 * RETURN : NO_ERROR on success
12092 * Error codes on failure
12093 *
12094 *==========================================================================*/
12095int32_t QCamera3HardwareInterface::startAllChannels()
12096{
12097 int32_t rc = NO_ERROR;
12098
12099 LOGD("Start all channels ");
12100 // Start the Streams/Channels
12101 if (mMetadataChannel) {
12102 /* If content of mStreamInfo is not 0, there is metadata stream */
12103 rc = mMetadataChannel->start();
12104 if (rc < 0) {
12105 LOGE("META channel start failed");
12106 return rc;
12107 }
12108 }
12109 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
12110 it != mStreamInfo.end(); it++) {
12111 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
12112 if (channel) {
12113 rc = channel->start();
12114 if (rc < 0) {
12115 LOGE("channel start failed");
12116 return rc;
12117 }
12118 }
12119 }
12120 if (mAnalysisChannel) {
12121 mAnalysisChannel->start();
12122 }
12123 if (mSupportChannel) {
12124 rc = mSupportChannel->start();
12125 if (rc < 0) {
12126 LOGE("Support channel start failed");
12127 return rc;
12128 }
12129 }
12130 if (mRawDumpChannel) {
12131 rc = mRawDumpChannel->start();
12132 if (rc < 0) {
12133 LOGE("RAW dump channel start failed");
12134 return rc;
12135 }
12136 }
12137
12138 LOGD("All channels started");
12139 return rc;
12140}
12141
/*===========================================================================
 * FUNCTION   : notifyErrorForPendingRequests
 *
 * DESCRIPTION: This function sends error for all the pending requests/buffers.
 *              Buffers older than the oldest pending request get an
 *              ERROR_BUFFER notify (their metadata already went out); the
 *              rest get a full ERROR_REQUEST and are removed from the
 *              pending-request list as well.
 *
 * PARAMETERS : None
 *
 * RETURN     : Error codes
 *              NO_ERROR on success
 *
 *==========================================================================*/
int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
{
    int32_t rc = NO_ERROR;
    unsigned int frameNum = 0;
    camera3_capture_result_t result;
    camera3_stream_buffer_t *pStream_Buf = NULL;

    memset(&result, 0, sizeof(camera3_capture_result_t));

    // The oldest pending request's frame number is the split point between
    // ERROR_BUFFER and ERROR_REQUEST handling below.
    if (mPendingRequestsList.size() > 0) {
        pendingRequestIterator i = mPendingRequestsList.begin();
        frameNum = i->frame_number;
    } else {
        /* There might still be pending buffers even though there are
         no pending requests. Setting the frameNum to MAX so that
         all the buffers with smaller frame numbers are returned */
        frameNum = UINT_MAX;
    }

    LOGH("Oldest frame num on mPendingRequestsList = %u",
            frameNum);

    // Note: both branches erase 'req' and loop on the returned iterator,
    // so the for-statement itself must not advance it.
    for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
            req != mPendingBuffersMap.mPendingBuffersInRequest.end(); ) {

        if (req->frame_number < frameNum) {
            // Send Error notify to frameworks for each buffer for which
            // metadata buffer is already sent
            LOGH("Sending ERROR BUFFER for frame %d for %d buffer(s)",
                    req->frame_number, req->mPendingBufferList.size());

            // NOTE(review): operator new[] throws on failure rather than
            // returning NULL, so this check is likely dead -- confirm the
            // allocator behavior for this build.
            pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
            if (NULL == pStream_Buf) {
                LOGE("No memory for pending buffers array");
                return NO_MEMORY;
            }
            memset(pStream_Buf, 0,
                    sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());
            result.result = NULL;
            result.frame_number = req->frame_number;
            result.num_output_buffers = req->mPendingBufferList.size();
            result.output_buffers = pStream_Buf;

            size_t index = 0;
            // One ERROR_BUFFER notify per pending buffer; each buffer is
            // returned with status ERROR and released fences.
            for (auto info = req->mPendingBufferList.begin();
                    info != req->mPendingBufferList.end(); ) {

                camera3_notify_msg_t notify_msg;
                memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
                notify_msg.type = CAMERA3_MSG_ERROR;
                notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
                notify_msg.message.error.error_stream = info->stream;
                notify_msg.message.error.frame_number = req->frame_number;
                pStream_Buf[index].acquire_fence = -1;
                pStream_Buf[index].release_fence = -1;
                pStream_Buf[index].buffer = info->buffer;
                pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
                pStream_Buf[index].stream = info->stream;
                orchestrateNotify(&notify_msg);
                index++;
                // Remove buffer from list
                info = req->mPendingBufferList.erase(info);
            }

            // Remove this request from Map
            LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
                    req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
            req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);

            orchestrateResult(&result);

            delete [] pStream_Buf;
        } else {

            // Go through the pending requests info and send error request to framework
            pendingRequestIterator i = mPendingRequestsList.begin(); //make sure i is at the beginning

            LOGH("Sending ERROR REQUEST for frame %d", req->frame_number);

            // Send error notify to frameworks
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
            notify_msg.type = CAMERA3_MSG_ERROR;
            notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
            notify_msg.message.error.error_stream = NULL;
            notify_msg.message.error.frame_number = req->frame_number;
            orchestrateNotify(&notify_msg);

            pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
            if (NULL == pStream_Buf) {
                LOGE("No memory for pending buffers array");
                return NO_MEMORY;
            }
            memset(pStream_Buf, 0, sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());

            result.result = NULL;
            result.frame_number = req->frame_number;
            // Oldest pending request supplies the input buffer (if any).
            result.input_buffer = i->input_buffer;
            result.num_output_buffers = req->mPendingBufferList.size();
            result.output_buffers = pStream_Buf;

            size_t index = 0;
            // No per-buffer notify here: ERROR_REQUEST covers the whole
            // request, buffers are just returned with status ERROR.
            for (auto info = req->mPendingBufferList.begin();
                    info != req->mPendingBufferList.end(); ) {
                pStream_Buf[index].acquire_fence = -1;
                pStream_Buf[index].release_fence = -1;
                pStream_Buf[index].buffer = info->buffer;
                pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
                pStream_Buf[index].stream = info->stream;
                index++;
                // Remove buffer from list
                info = req->mPendingBufferList.erase(info);
            }

            // Remove this request from Map
            LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
                    req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
            req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);

            orchestrateResult(&result);
            delete [] pStream_Buf;
            i = erasePendingRequest(i);
        }
    }

    /* Reset pending frame Drop list and requests list */
    mPendingFrameDropList.clear();

    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        req.mPendingBufferList.clear();
    }
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    mPendingReprocessResultList.clear();
    LOGH("Cleared all the pending buffers ");

    return rc;
}
12290
12291bool QCamera3HardwareInterface::isOnEncoder(
12292 const cam_dimension_t max_viewfinder_size,
12293 uint32_t width, uint32_t height)
12294{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012295 return ((width > (uint32_t)max_viewfinder_size.width) ||
12296 (height > (uint32_t)max_viewfinder_size.height) ||
12297 (width > (uint32_t)VIDEO_4K_WIDTH) ||
12298 (height > (uint32_t)VIDEO_4K_HEIGHT));
Thierry Strudel3d639192016-09-09 11:52:26 -070012299}
12300
12301/*===========================================================================
12302 * FUNCTION : setBundleInfo
12303 *
12304 * DESCRIPTION: Set bundle info for all streams that are bundle.
12305 *
12306 * PARAMETERS : None
12307 *
12308 * RETURN : NO_ERROR on success
12309 * Error codes on failure
12310 *==========================================================================*/
12311int32_t QCamera3HardwareInterface::setBundleInfo()
12312{
12313 int32_t rc = NO_ERROR;
12314
12315 if (mChannelHandle) {
12316 cam_bundle_config_t bundleInfo;
12317 memset(&bundleInfo, 0, sizeof(bundleInfo));
12318 rc = mCameraHandle->ops->get_bundle_info(
12319 mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
12320 if (rc != NO_ERROR) {
12321 LOGE("get_bundle_info failed");
12322 return rc;
12323 }
12324 if (mAnalysisChannel) {
12325 mAnalysisChannel->setBundleInfo(bundleInfo);
12326 }
12327 if (mSupportChannel) {
12328 mSupportChannel->setBundleInfo(bundleInfo);
12329 }
12330 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
12331 it != mStreamInfo.end(); it++) {
12332 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
12333 channel->setBundleInfo(bundleInfo);
12334 }
12335 if (mRawDumpChannel) {
12336 mRawDumpChannel->setBundleInfo(bundleInfo);
12337 }
12338 }
12339
12340 return rc;
12341}
12342
12343/*===========================================================================
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012344 * FUNCTION : setInstantAEC
12345 *
12346 * DESCRIPTION: Set Instant AEC related params.
12347 *
12348 * PARAMETERS :
12349 * @meta: CameraMetadata reference
12350 *
12351 * RETURN : NO_ERROR on success
12352 * Error codes on failure
12353 *==========================================================================*/
12354int32_t QCamera3HardwareInterface::setInstantAEC(const CameraMetadata &meta)
12355{
12356 int32_t rc = NO_ERROR;
12357 uint8_t val = 0;
12358 char prop[PROPERTY_VALUE_MAX];
12359
12360 // First try to configure instant AEC from framework metadata
12361 if (meta.exists(QCAMERA3_INSTANT_AEC_MODE)) {
12362 val = (uint8_t)meta.find(QCAMERA3_INSTANT_AEC_MODE).data.i32[0];
12363 }
12364
12365 // If framework did not set this value, try to read from set prop.
12366 if (val == 0) {
12367 memset(prop, 0, sizeof(prop));
12368 property_get("persist.camera.instant.aec", prop, "0");
12369 val = (uint8_t)atoi(prop);
12370 }
12371
12372 if ((val >= (uint8_t)CAM_AEC_NORMAL_CONVERGENCE) &&
12373 ( val < (uint8_t)CAM_AEC_CONVERGENCE_MAX)) {
12374 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_INSTANT_AEC, val);
12375 mInstantAEC = val;
12376 mInstantAECSettledFrameNumber = 0;
12377 mInstantAecFrameIdxCount = 0;
12378 LOGH("instantAEC value set %d",val);
12379 if (mInstantAEC) {
12380 memset(prop, 0, sizeof(prop));
12381 property_get("persist.camera.ae.instant.bound", prop, "10");
12382 int32_t aec_frame_skip_cnt = atoi(prop);
12383 if (aec_frame_skip_cnt >= 0) {
12384 mAecSkipDisplayFrameBound = (uint8_t)aec_frame_skip_cnt;
12385 } else {
12386 LOGE("Invalid prop for aec frame bound %d", aec_frame_skip_cnt);
12387 rc = BAD_VALUE;
12388 }
12389 }
12390 } else {
12391 LOGE("Bad instant aec value set %d", val);
12392 rc = BAD_VALUE;
12393 }
12394 return rc;
12395}
12396
12397/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070012398 * FUNCTION : get_num_overall_buffers
12399 *
12400 * DESCRIPTION: Estimate number of pending buffers across all requests.
12401 *
12402 * PARAMETERS : None
12403 *
12404 * RETURN : Number of overall pending buffers
12405 *
12406 *==========================================================================*/
12407uint32_t PendingBuffersMap::get_num_overall_buffers()
12408{
12409 uint32_t sum_buffers = 0;
12410 for (auto &req : mPendingBuffersInRequest) {
12411 sum_buffers += req.mPendingBufferList.size();
12412 }
12413 return sum_buffers;
12414}
12415
/*===========================================================================
 * FUNCTION   : removeBuf
 *
 * DESCRIPTION: Remove a matching buffer from tracker. Only the first match
 *              is removed; if it was the last buffer of its request, the
 *              whole request entry is dropped too.
 *
 * PARAMETERS : @buffer: image buffer for the callback
 *
 * RETURN     : None
 *
 *==========================================================================*/
void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
{
    bool buffer_found = false;
    for (auto req = mPendingBuffersInRequest.begin();
            req != mPendingBuffersInRequest.end(); req++) {
        for (auto k = req->mPendingBufferList.begin();
                k != req->mPendingBufferList.end(); k++ ) {
            if (k->buffer == buffer) {
                LOGD("Frame %d: Found Frame buffer %p, take it out from mPendingBufferList",
                        req->frame_number, buffer);
                k = req->mPendingBufferList.erase(k);
                if (req->mPendingBufferList.empty()) {
                    // Remove this request from Map.
                    // Iterator safety: 'req' now points past the erased
                    // element, but buffer_found forces a break out of both
                    // loops before req++ or another dereference happens.
                    req = mPendingBuffersInRequest.erase(req);
                }
                buffer_found = true;
                break;
            }
        }
        // Stop the outer scan as soon as the buffer was removed.
        if (buffer_found) {
            break;
        }
    }
    LOGD("mPendingBuffersMap.num_overall_buffers = %d",
            get_num_overall_buffers());
}
12452
12453/*===========================================================================
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012454 * FUNCTION : getBufErrStatus
12455 *
12456 * DESCRIPTION: get buffer error status
12457 *
12458 * PARAMETERS : @buffer: buffer handle
12459 *
12460 * RETURN : Error status
12461 *
12462 *==========================================================================*/
12463int32_t PendingBuffersMap::getBufErrStatus(buffer_handle_t *buffer)
12464{
12465 for (auto& req : mPendingBuffersInRequest) {
12466 for (auto& k : req.mPendingBufferList) {
12467 if (k.buffer == buffer)
12468 return k.bufStatus;
12469 }
12470 }
12471 return CAMERA3_BUFFER_STATUS_OK;
12472}
12473
12474/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070012475 * FUNCTION : setPAAFSupport
12476 *
12477 * DESCRIPTION: Set the preview-assisted auto focus support bit in
12478 * feature mask according to stream type and filter
12479 * arrangement
12480 *
12481 * PARAMETERS : @feature_mask: current feature mask, which may be modified
12482 * @stream_type: stream type
12483 * @filter_arrangement: filter arrangement
12484 *
12485 * RETURN : None
12486 *==========================================================================*/
12487void QCamera3HardwareInterface::setPAAFSupport(
12488 cam_feature_mask_t& feature_mask,
12489 cam_stream_type_t stream_type,
12490 cam_color_filter_arrangement_t filter_arrangement)
12491{
12492 LOGD("feature_mask=0x%llx; stream_type=%d, filter_arrangement=%d",
12493 feature_mask, stream_type, filter_arrangement);
12494
12495 switch (filter_arrangement) {
12496 case CAM_FILTER_ARRANGEMENT_RGGB:
12497 case CAM_FILTER_ARRANGEMENT_GRBG:
12498 case CAM_FILTER_ARRANGEMENT_GBRG:
12499 case CAM_FILTER_ARRANGEMENT_BGGR:
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012500 if ((stream_type == CAM_STREAM_TYPE_PREVIEW) ||
12501 (stream_type == CAM_STREAM_TYPE_ANALYSIS) ||
Thierry Strudel3d639192016-09-09 11:52:26 -070012502 (stream_type == CAM_STREAM_TYPE_VIDEO)) {
12503 feature_mask |= CAM_QCOM_FEATURE_PAAF;
12504 }
12505 break;
12506 case CAM_FILTER_ARRANGEMENT_Y:
12507 if (stream_type == CAM_STREAM_TYPE_ANALYSIS) {
12508 feature_mask |= CAM_QCOM_FEATURE_PAAF;
12509 }
12510 break;
12511 default:
12512 break;
12513 }
12514}
12515
12516/*===========================================================================
12517* FUNCTION : getSensorMountAngle
12518*
12519* DESCRIPTION: Retrieve sensor mount angle
12520*
12521* PARAMETERS : None
12522*
12523* RETURN : sensor mount angle in uint32_t
12524*==========================================================================*/
12525uint32_t QCamera3HardwareInterface::getSensorMountAngle()
12526{
12527 return gCamCapability[mCameraId]->sensor_mount_angle;
12528}
12529
12530/*===========================================================================
12531* FUNCTION : getRelatedCalibrationData
12532*
12533* DESCRIPTION: Retrieve related system calibration data
12534*
12535* PARAMETERS : None
12536*
12537* RETURN : Pointer of related system calibration data
12538*==========================================================================*/
12539const cam_related_system_calibration_data_t *QCamera3HardwareInterface::getRelatedCalibrationData()
12540{
12541 return (const cam_related_system_calibration_data_t *)
12542 &(gCamCapability[mCameraId]->related_cam_calibration);
12543}
12544}; //end namespace qcamera