blob: 9a3921435ede53677b4878e77a5d562c48fcd097 [file] [log] [blame]
Thierry Strudel3d639192016-09-09 11:52:26 -07001/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
2*
3* Redistribution and use in source and binary forms, with or without
4* modification, are permitted provided that the following conditions are
5* met:
6* * Redistributions of source code must retain the above copyright
7* notice, this list of conditions and the following disclaimer.
8* * Redistributions in binary form must reproduce the above
9* copyright notice, this list of conditions and the following
10* disclaimer in the documentation and/or other materials provided
11* with the distribution.
12* * Neither the name of The Linux Foundation nor the names of its
13* contributors may be used to endorse or promote products derived
14* from this software without specific prior written permission.
15*
16* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27*
28*/
29
30#define LOG_TAG "QCamera3HWI"
31//#define LOG_NDEBUG 0
32
33#define __STDC_LIMIT_MACROS
34
35// To remove
36#include <cutils/properties.h>
37
38// System dependencies
39#include <dlfcn.h>
40#include <fcntl.h>
41#include <stdio.h>
42#include <stdlib.h>
43#include "utils/Timers.h"
44#include "sys/ioctl.h"
Shuzhen Wangf6890e02016-08-12 14:28:54 -070045#include <time.h>
Thierry Strudel3d639192016-09-09 11:52:26 -070046#include <sync/sync.h>
47#include "gralloc_priv.h"
Thierry Strudele80ad7c2016-12-06 10:16:27 -080048#include <map>
Thierry Strudel3d639192016-09-09 11:52:26 -070049
50// Display dependencies
51#include "qdMetaData.h"
52
53// Camera dependencies
54#include "android/QCamera3External.h"
55#include "util/QCameraFlash.h"
56#include "QCamera3HWI.h"
57#include "QCamera3VendorTags.h"
58#include "QCameraTrace.h"
59
60extern "C" {
61#include "mm_camera_dbg.h"
62}
63
64using namespace android;
65
66namespace qcamera {
67
// Convenience accessor for a buffer pointer inside a memory object.
#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )

// Number of frames the pipeline needs before producing the first result.
#define EMPTY_PIPELINE_DELAY 2
// Number of partial metadata results delivered per capture request.
#define PARTIAL_RESULT_COUNT 2
#define FRAME_SKIP_DELAY 0

// Saturation values for the supported raw bit depths.
#define MAX_VALUE_8BIT ((1<<8)-1)
#define MAX_VALUE_10BIT ((1<<10)-1)
#define MAX_VALUE_12BIT ((1<<12)-1)

// UHD (4K) video dimensions used to detect 4K recording sessions.
#define VIDEO_4K_WIDTH 3840
#define VIDEO_4K_HEIGHT 2160

// Largest stream size for which EIS (electronic image stabilization)
// is considered supported.
#define MAX_EIS_WIDTH 1920
#define MAX_EIS_HEIGHT 1080

// Stream-count budgets used during stream configuration validation.
#define MAX_RAW_STREAMS 1
#define MAX_STALLING_STREAMS 1
#define MAX_PROCESSED_STREAMS 3
/* Batch mode is enabled only if FPS set is equal to or greater than this */
#define MIN_FPS_FOR_BATCH_MODE (120)
#define PREVIEW_FPS_FOR_HFR (30)
#define DEFAULT_VIDEO_FPS (30.0)
#define TEMPLATE_MAX_PREVIEW_FPS (30.0)
#define MAX_HFR_BATCH_SIZE (8)
// Number of int32 values per metering region (xmin, ymin, xmax, ymax, weight).
#define REGIONS_TUPLE_COUNT 5
#define HDR_PLUS_PERF_TIME_OUT (7000) // milliseconds
// Set a threshold for detection of missing buffers //seconds
#define MISSING_REQUEST_BUF_TIMEOUT 3
#define FLUSH_TIMEOUT 3
// Element count of a statically-sized mapping table.
#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))

// Default post-processing feature mask applied to HAL3 streams.
#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3 ( CAM_QCOM_FEATURE_DENOISE2D |\
                                            CAM_QCOM_FEATURE_CROP |\
                                            CAM_QCOM_FEATURE_ROTATION |\
                                            CAM_QCOM_FEATURE_SHARPNESS |\
                                            CAM_QCOM_FEATURE_SCALE |\
                                            CAM_QCOM_FEATURE_CAC |\
                                            CAM_QCOM_FEATURE_CDS )
/* Per configuration size for static metadata length*/
#define PER_CONFIGURATION_SIZE_3 (3)

// Sentinel passed to wait helpers meaning "block indefinitely".
#define TIMEOUT_NEVER -1

/* Face landmarks indices */
// Layout of the flat int32 array reported in face-detection metadata.
#define LEFT_EYE_X 0
#define LEFT_EYE_Y 1
#define RIGHT_EYE_X 2
#define RIGHT_EYE_Y 3
#define MOUTH_X 4
#define MOUTH_Y 5
#define TOTAL_LANDMARK_INDICES 6
120
// Per-sensor capability tables, populated once at camera-module load time.
cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
// Cached static metadata (camera characteristics) per sensor.
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
// Module-wide lock guarding the globals above and session bookkeeping
// (defined in another translation unit).
extern pthread_mutex_t gCamLock;
// HAL logging verbosity; volatile because it may be updated at runtime
// from a system property while other threads read it.
volatile uint32_t gCamHal3LogLevel = 1;
// Count of currently open camera sessions (defined elsewhere).
extern uint8_t gNumCameraSessions;
// Translation table: CDS property string -> HAL CDS mode enum.
const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
    {"On", CAM_CDS_MODE_ON},
    {"Off", CAM_CDS_MODE_OFF},
    {"Auto",CAM_CDS_MODE_AUTO}
};

// Translation table: vendor video-HDR metadata enum -> HAL video-HDR mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_video_hdr_mode_t,
        cam_video_hdr_mode_t> QCamera3HardwareInterface::VIDEO_HDR_MODES_MAP[] = {
    { QCAMERA3_VIDEO_HDR_MODE_OFF,  CAM_VIDEO_HDR_MODE_OFF },
    { QCAMERA3_VIDEO_HDR_MODE_ON,   CAM_VIDEO_HDR_MODE_ON }
};


// Translation table: vendor IR metadata enum -> HAL IR mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_ir_mode_t,
        cam_ir_mode_type_t> QCamera3HardwareInterface::IR_MODES_MAP [] = {
    {QCAMERA3_IR_MODE_OFF,  CAM_IR_MODE_OFF},
    {QCAMERA3_IR_MODE_ON, CAM_IR_MODE_ON},
    {QCAMERA3_IR_MODE_AUTO, CAM_IR_MODE_AUTO}
};
Thierry Strudel3d639192016-09-09 11:52:26 -0700147
// The tables below translate between Android camera_metadata enums and the
// Qualcomm HAL (cam_*) enums.  Lookups scan from index 0, so where several
// entries share a key the first match wins — entry order is significant.

// android.control.effectMode <-> HAL effect mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_effect_mode_t,
        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,       CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};

// android.control.awbMode <-> HAL white-balance mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_awb_mode_t,
        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};

// android.control.sceneMode <-> HAL scene mode.  Note STEADYPHOTO maps to
// the HAL's ANTISHAKE scene.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_scene_mode_t,
        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE}
};

// android.control.afMode <-> HAL focus mode.  AF_MODE_OFF intentionally
// appears twice (OFF and FIXED both report as OFF when translating back,
// first match wins).
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_af_mode_t,
        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

// android.colorCorrection.aberrationMode <-> HAL CAC mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_color_correction_aberration_mode_t,
        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};

// android.control.aeAntibandingMode <-> HAL antibanding mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_antibanding_mode_t,
        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

// android.control.aeMode -> HAL flash mode implied by the AE mode
// (e.g. ON_ALWAYS_FLASH forces the flash on).
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};

// android.flash.mode <-> HAL flash mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_flash_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF  },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};

// android.statistics.faceDetectMode <-> HAL face-detect mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_statistics_face_detect_mode_t,
        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF    },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL   }
};

// android.lens.info.focusDistanceCalibration <-> HAL calibration level.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
            CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
            CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
            CAM_FOCUS_CALIBRATED }
};

// android.lens.state <-> HAL lens state.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_state_t,
        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
    { ANDROID_LENS_STATE_STATIONARY,    CAM_AF_LENS_STATE_STATIONARY},
    { ANDROID_LENS_STATE_MOVING,        CAM_AF_LENS_STATE_MOVING}
};
272
// Supported JPEG thumbnail sizes as (width, height) pairs; the leading
// (0, 0) entry means "no thumbnail".
const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             240, 144,
                                             256, 144,
                                             240, 160,
                                             256, 154,
                                             240, 240,
                                             320, 240};

// android.sensor.testPatternMode <-> HAL test pattern.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_test_pattern_mode_t,
        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,          CAM_TEST_PATTERN_OFF   },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR,  CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,   CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,          CAM_TEST_PATTERN_PN9 },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1,      CAM_TEST_PATTERN_CUSTOM1},
};

/* Since there is no mapping for all the options some Android enum are not listed.
 * Also, the order in this list is important because while mapping from HAL to Android it will
 * traverse from lower to higher index which means that for HAL values that are map to different
 * Android values, the traverse logic will select the first one found.
 */
// android.sensor.referenceIlluminant1 <-> HAL AWB illuminant.  Several
// Android illuminants share a HAL value, hence the first-match rule above.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_reference_illuminant1_t,
        cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT, CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55, CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN, CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO},
};

// Requested HFR frame rate (fps) -> HAL HFR mode.
const QCamera3HardwareInterface::QCameraMap<
        int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
    { 60, CAM_HFR_MODE_60FPS},
    { 90, CAM_HFR_MODE_90FPS},
    { 120, CAM_HFR_MODE_120FPS},
    { 150, CAM_HFR_MODE_150FPS},
    { 180, CAM_HFR_MODE_180FPS},
    { 210, CAM_HFR_MODE_210FPS},
    { 240, CAM_HFR_MODE_240FPS},
    { 480, CAM_HFR_MODE_480FPS},
};
330
// Vendor instant-AEC metadata enum <-> HAL AEC convergence type.
const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_instant_aec_mode_t,
        cam_aec_convergence_type> QCamera3HardwareInterface::INSTANT_AEC_MODES_MAP[] = {
    { QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE, CAM_AEC_NORMAL_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_AGGRESSIVE_CONVERGENCE, CAM_AEC_AGGRESSIVE_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_FAST_CONVERGENCE, CAM_AEC_FAST_CONVERGENCE},
};

// camera3_device_ops vtable handed to the framework.
// register_stream_buffers and get_metadata_vendor_tag_ops are deprecated
// in HAL3.2+ and therefore left NULL.
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    .initialize                         = QCamera3HardwareInterface::initialize,
    .configure_streams                  = QCamera3HardwareInterface::configure_streams,
    .register_stream_buffers            = NULL,
    .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
    .process_capture_request            = QCamera3HardwareInterface::process_capture_request,
    .get_metadata_vendor_tag_ops        = NULL,
    .dump                               = QCamera3HardwareInterface::dump,
    .flush                              = QCamera3HardwareInterface::flush,
    .reserved                           = {0},
};

// initialise to some default value
// Backend session ids per camera, indexed by camera id; 0xDEADBEEF marks
// "no session" and is used by the dual-camera link/unlink code.
uint32_t QCamera3HardwareInterface::sessionId[] = {0xDEADBEEF, 0xDEADBEEF, 0xDEADBEEF};
352
353/*===========================================================================
354 * FUNCTION : QCamera3HardwareInterface
355 *
356 * DESCRIPTION: constructor of QCamera3HardwareInterface
357 *
358 * PARAMETERS :
359 * @cameraId : camera ID
360 *
361 * RETURN : none
362 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
        const camera_module_callbacks_t *callbacks)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mSupportChannel(NULL),
      mAnalysisChannel(NULL),
      mRawDumpChannel(NULL),
      mDummyBatchChannel(NULL),
      mPerfLockMgr(),
      mCommon(),
      mChannelHandle(0),
      mFirstConfiguration(true),
      mFlush(false),
      mFlushPerf(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mPrevParameters(NULL),
      m_bIsVideo(false),
      m_bIs4KVideo(false),
      m_bEisSupportedSize(false),
      m_bEisEnable(false),
      m_MobicatMask(0),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      mMetaFrameCount(0U),
      mUpdateDebugLevel(false),
      mCallbacks(callbacks),
      mCaptureIntent(0),
      mCacMode(0),
      /* DevCamDebug metadata internal m control*/
      mDevCamDebugMetaEnable(0),
      /* DevCamDebug metadata end */
      mBatchSize(0),
      mToBeQueuedVidBufs(0),
      mHFRVideoFps(DEFAULT_VIDEO_FPS),
      mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
      mFirstFrameNumberInBatch(0),
      mNeedSensorRestart(false),
      mPreviewStarted(false),
      mMinInFlightRequests(MIN_INFLIGHT_REQUESTS),
      mMaxInFlightRequests(MAX_INFLIGHT_REQUESTS),
      mInstantAEC(false),
      mResetInstantAEC(false),
      mInstantAECSettledFrameNumber(0),
      mAecSkipDisplayFrameBound(0),
      mInstantAecFrameIdxCount(0),
      mLdafCalibExist(false),
      mLastCustIntentFrmNum(-1),
      mState(CLOSED),
      mIsDeviceLinked(false),
      mIsMainCamera(true),
      mLinkedCameraId(0),
      m_pDualCamCmdHeap(NULL),
      m_pDualCamCmdPtr(NULL)
{
    getLogLevel();
    mCommon.init(gCamCapability[cameraId]);
    // Fill in the hw_device_t header the framework uses to talk to us.
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
#ifndef USE_HAL_3_3
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_4;
#else
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
#endif
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    pthread_cond_init(&mBuffersCond, NULL);

    pthread_cond_init(&mRequestCond, NULL);
    mPendingLiveRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    // Default request templates are built lazily; start with all slots empty.
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

    // Getting system props of different kinds
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.raw.dump", prop, "0");
    mEnableRawDump = atoi(prop);
    property_get("persist.camera.hal3.force.hdr", prop, "0");
    mForceHdrSnapshot = atoi(prop);

    if (mEnableRawDump)
        LOGD("Raw dump from Camera HAL enabled");

    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
    memset(mLdafCalib, 0, sizeof(mLdafCalib));

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.preview", prop, "0");
    m_bTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    // Note: software TNR for preview defaults to enabled ("1"), unlike the
    // other TNR switches above/below which default to disabled.
    property_get("persist.camera.swtnr.preview", prop, "1");
    m_bSwTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.video", prop, "0");
    m_bTnrVideo = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.avtimer.debug", prop, "0");
    m_debug_avtimer = (uint8_t)atoi(prop);
    LOGI("AV timer enabled: %d", m_debug_avtimer);

    //Load and read GPU library.
    // Query the GPU's pixel alignment so buffer strides match what the
    // display/GPU expects; fall back to 32-pixel padding if unavailable.
    lib_surface_utils = NULL;
    LINK_get_surface_pixel_alignment = NULL;
    mSurfaceStridePadding = CAM_PAD_TO_32;
    lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
    if (lib_surface_utils) {
        *(void **)&LINK_get_surface_pixel_alignment =
                dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
        if (LINK_get_surface_pixel_alignment) {
            mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
        }
        dlclose(lib_surface_utils);
    }

    m60HzZone = is60HzZone();
}
497
498/*===========================================================================
499 * FUNCTION : ~QCamera3HardwareInterface
500 *
501 * DESCRIPTION: destructor of QCamera3HardwareInterface
502 *
503 * PARAMETERS : none
504 *
505 * RETURN : none
506 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    LOGD("E");

    int32_t rc = 0;

    // Disable power hint and enable the perf lock for close camera
    mPerfLockMgr.releasePerfLock(PERF_LOCK_POWERHINT_ENCODE);
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_CLOSE_CAMERA);

    // unlink of dualcam during close camera
    if (mIsDeviceLinked) {
        cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
                &m_pDualCamCmdPtr->bundle_info;
        m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
        m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
        // gCamLock guards the shared sessionId[] table read below.
        pthread_mutex_lock(&gCamLock);

        if (mIsMainCamera == 1) {
            m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            // related session id should be session id of linked session
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        } else {
            m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        }
        pthread_mutex_unlock(&gCamLock);

        rc = mCameraHandle->ops->set_dual_cam_cmd(
                mCameraHandle->camera_handle);
        if (rc < 0) {
            // Best-effort: continue tearing down even if the unlink fails.
            LOGE("Dualcam: Unlink failed, but still proceed to close");
        }
    }

    /* We need to stop all streams before deleting any stream */
    if (mRawDumpChannel) {
        mRawDumpChannel->stop();
    }

    // NOTE: 'camera3_stream_t *' objects are already freed at
    //       this stage by the framework
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel) {
            channel->stop();
        }
    }
    if (mSupportChannel)
        mSupportChannel->stop();

    if (mAnalysisChannel) {
        mAnalysisChannel->stop();
    }
    if (mMetadataChannel) {
        mMetadataChannel->stop();
    }
    if (mChannelHandle) {
        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGD("stopping channel %d", mChannelHandle);
    }

    // Second pass: all channels stopped, now safe to delete them.
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel)
            delete channel;
        free (*it);
    }
    if (mSupportChannel) {
        delete mSupportChannel;
        mSupportChannel = NULL;
    }

    if (mAnalysisChannel) {
        delete mAnalysisChannel;
        mAnalysisChannel = NULL;
    }
    if (mRawDumpChannel) {
        delete mRawDumpChannel;
        mRawDumpChannel = NULL;
    }
    if (mDummyBatchChannel) {
        delete mDummyBatchChannel;
        mDummyBatchChannel = NULL;
    }

    // mPictureChannel is owned via mStreamInfo (deleted above); just clear
    // the alias here.
    mPictureChannel = NULL;

    if (mMetadataChannel) {
        delete mMetadataChannel;
        mMetadataChannel = NULL;
    }

    /* Clean up all channels */
    if (mCameraInitialized) {
        if(!mFirstConfiguration){
            //send the last unconfigure
            cam_stream_size_info_t stream_config_info;
            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
            stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
            stream_config_info.buffer_info.max_buffers =
                    m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
            clear_metadata_buffer(mParameters);
            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
                    stream_config_info);
            int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
            if (rc < 0) {
                LOGE("set_parms failed for unconfigure");
            }
        }
        deinitParameters();
    }

    if (mChannelHandle) {
        mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGH("deleting channel %d", mChannelHandle);
        mChannelHandle = 0;
    }

    if (mState != CLOSED)
        closeCamera();

    // Drop any bookkeeping for requests that never completed.
    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        req.mPendingBufferList.clear();
    }
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    mPendingReprocessResultList.clear();
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end();) {
        i = erasePendingRequest(i);
    }
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    mPerfLockMgr.releasePerfLock(PERF_LOCK_CLOSE_CAMERA);

    pthread_cond_destroy(&mRequestCond);

    pthread_cond_destroy(&mBuffersCond);

    pthread_mutex_destroy(&mMutex);
    LOGD("X");
}
659
660/*===========================================================================
661 * FUNCTION : erasePendingRequest
662 *
663 * DESCRIPTION: function to erase a desired pending request after freeing any
664 * allocated memory
665 *
666 * PARAMETERS :
667 * @i : iterator pointing to pending request to be erased
668 *
669 * RETURN : iterator pointing to the next request
670 *==========================================================================*/
671QCamera3HardwareInterface::pendingRequestIterator
672 QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
673{
674 if (i->input_buffer != NULL) {
675 free(i->input_buffer);
676 i->input_buffer = NULL;
677 }
678 if (i->settings != NULL)
679 free_camera_metadata((camera_metadata_t*)i->settings);
680 return mPendingRequestsList.erase(i);
681}
682
683/*===========================================================================
684 * FUNCTION : camEvtHandle
685 *
686 * DESCRIPTION: Function registered to mm-camera-interface to handle events
687 *
688 * PARAMETERS :
689 * @camera_handle : interface layer camera handle
690 * @evt : ptr to event
691 * @user_data : user data ptr
692 *
693 * RETURN : none
694 *==========================================================================*/
695void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
696 mm_camera_event_t *evt,
697 void *user_data)
698{
699 QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
700 if (obj && evt) {
701 switch(evt->server_event_type) {
702 case CAM_EVENT_TYPE_DAEMON_DIED:
703 pthread_mutex_lock(&obj->mMutex);
704 obj->mState = ERROR;
705 pthread_mutex_unlock(&obj->mMutex);
706 LOGE("Fatal, camera daemon died");
707 break;
708
709 case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
710 LOGD("HAL got request pull from Daemon");
711 pthread_mutex_lock(&obj->mMutex);
712 obj->mWokenUpByDaemon = true;
713 obj->unblockRequestIfNecessary();
714 pthread_mutex_unlock(&obj->mMutex);
715 break;
716
717 default:
718 LOGW("Warning: Unhandled event %d",
719 evt->server_event_type);
720 break;
721 }
722 } else {
723 LOGE("NULL user_data/evt");
724 }
725}
726
727/*===========================================================================
728 * FUNCTION : openCamera
729 *
730 * DESCRIPTION: open camera
731 *
732 * PARAMETERS :
733 * @hw_device : double ptr for camera device struct
734 *
735 * RETURN : int32_t type of status
736 * NO_ERROR -- success
737 * none-zero failure code
738 *==========================================================================*/
739int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
740{
741 int rc = 0;
742 if (mState != CLOSED) {
743 *hw_device = NULL;
744 return PERMISSION_DENIED;
745 }
746
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800747 mPerfLockMgr.acquirePerfLock(PERF_LOCK_OPEN_CAMERA);
Thierry Strudel3d639192016-09-09 11:52:26 -0700748 LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
749 mCameraId);
750
751 rc = openCamera();
752 if (rc == 0) {
753 *hw_device = &mCameraDevice.common;
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800754 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -0700755 *hw_device = NULL;
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800756 }
Thierry Strudel3d639192016-09-09 11:52:26 -0700757
Thierry Strudel3d639192016-09-09 11:52:26 -0700758 LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
759 mCameraId, rc);
760
761 if (rc == NO_ERROR) {
762 mState = OPENED;
763 }
764 return rc;
765}
766
767/*===========================================================================
768 * FUNCTION : openCamera
769 *
770 * DESCRIPTION: open camera
771 *
772 * PARAMETERS : none
773 *
774 * RETURN : int32_t type of status
775 * NO_ERROR -- success
776 * none-zero failure code
777 *==========================================================================*/
778int QCamera3HardwareInterface::openCamera()
779{
780 int rc = 0;
781 char value[PROPERTY_VALUE_MAX];
782
Thierry Strudele80ad7c2016-12-06 10:16:27 -0800783 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_OPENCAMERA);
Thierry Strudel3d639192016-09-09 11:52:26 -0700784 if (mCameraHandle) {
785 LOGE("Failure: Camera already opened");
786 return ALREADY_EXISTS;
787 }
788
789 rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
790 if (rc < 0) {
791 LOGE("Failed to reserve flash for camera id: %d",
792 mCameraId);
793 return UNKNOWN_ERROR;
794 }
795
796 rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
797 if (rc) {
798 LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
799 return rc;
800 }
801
802 if (!mCameraHandle) {
803 LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
804 return -ENODEV;
805 }
806
807 rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
808 camEvtHandle, (void *)this);
809
810 if (rc < 0) {
811 LOGE("Error, failed to register event callback");
812 /* Not closing camera here since it is already handled in destructor */
813 return FAILED_TRANSACTION;
814 }
815
816 mExifParams.debug_params =
817 (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
818 if (mExifParams.debug_params) {
819 memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
820 } else {
821 LOGE("Out of Memory. Allocation failed for 3A debug exif params");
822 return NO_MEMORY;
823 }
824 mFirstConfiguration = true;
825
826 //Notify display HAL that a camera session is active.
827 //But avoid calling the same during bootup because camera service might open/close
828 //cameras at boot time during its initialization and display service will also internally
829 //wait for camera service to initialize first while calling this display API, resulting in a
830 //deadlock situation. Since boot time camera open/close calls are made only to fetch
831 //capabilities, no need of this display bw optimization.
832 //Use "service.bootanim.exit" property to know boot status.
833 property_get("service.bootanim.exit", value, "0");
834 if (atoi(value) == 1) {
835 pthread_mutex_lock(&gCamLock);
836 if (gNumCameraSessions++ == 0) {
837 setCameraLaunchStatus(true);
838 }
839 pthread_mutex_unlock(&gCamLock);
840 }
841
842 //fill the session id needed while linking dual cam
843 pthread_mutex_lock(&gCamLock);
844 rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
845 &sessionId[mCameraId]);
846 pthread_mutex_unlock(&gCamLock);
847
848 if (rc < 0) {
849 LOGE("Error, failed to get sessiion id");
850 return UNKNOWN_ERROR;
851 } else {
852 //Allocate related cam sync buffer
853 //this is needed for the payload that goes along with bundling cmd for related
854 //camera use cases
Thierry Strudel295a0ca2016-11-03 18:38:47 -0700855 m_pDualCamCmdHeap = new QCamera3HeapMemory(1);
856 rc = m_pDualCamCmdHeap->allocate(sizeof(cam_dual_camera_cmd_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -0700857 if(rc != OK) {
858 rc = NO_MEMORY;
859 LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
860 return NO_MEMORY;
861 }
862
863 //Map memory for related cam sync buffer
864 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
Thierry Strudel295a0ca2016-11-03 18:38:47 -0700865 CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF,
866 m_pDualCamCmdHeap->getFd(0),
867 sizeof(cam_dual_camera_cmd_info_t),
868 m_pDualCamCmdHeap->getPtr(0));
Thierry Strudel3d639192016-09-09 11:52:26 -0700869 if(rc < 0) {
870 LOGE("Dualcam: failed to map Related cam sync buffer");
871 rc = FAILED_TRANSACTION;
872 return NO_MEMORY;
873 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -0700874 m_pDualCamCmdPtr =
875 (cam_dual_camera_cmd_info_t*) DATA_PTR(m_pDualCamCmdHeap,0);
Thierry Strudel3d639192016-09-09 11:52:26 -0700876 }
877
878 LOGH("mCameraId=%d",mCameraId);
879
880 return NO_ERROR;
881}
882
/*===========================================================================
 * FUNCTION   : closeCamera
 *
 * DESCRIPTION: close camera. Tears down the session in reverse order of
 *              openCamera(): unmap/free the dual-cam sync buffer, close the
 *              backend handle, invalidate the session id, notify display HAL,
 *              free EXIF debug storage, and release the flash reservation.
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              none-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::closeCamera()
{
    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CLOSECAMERA);
    int rc = NO_ERROR;
    char value[PROPERTY_VALUE_MAX];

    LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
             mCameraId);

    // unmap memory for related cam sync buffer
    // NOTE(review): mCameraHandle is dereferenced without a NULL check;
    // callers are expected to invoke this only after a successful openCamera().
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF);
    if (NULL != m_pDualCamCmdHeap) {
        m_pDualCamCmdHeap->deallocate();
        delete m_pDualCamCmdHeap;
        m_pDualCamCmdHeap = NULL;
        m_pDualCamCmdPtr = NULL;
    }

    // Close the backend session; its status is this function's return value.
    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
    mCameraHandle = NULL;

    //reset session id to some invalid id
    pthread_mutex_lock(&gCamLock);
    sessionId[mCameraId] = 0xDEADBEEF;
    pthread_mutex_unlock(&gCamLock);

    //Notify display HAL that there is no active camera session
    //but avoid calling the same during bootup. Refer to openCamera
    //for more details.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (--gNumCameraSessions == 0) {
            setCameraLaunchStatus(false);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    // Free the 3A debug EXIF params allocated in openCamera().
    if (mExifParams.debug_params) {
        free(mExifParams.debug_params);
        mExifParams.debug_params = NULL;
    }
    // Flash release failure is non-fatal: log a warning and continue teardown.
    if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
        LOGW("Failed to release flash for camera id: %d",
                mCameraId);
    }
    mState = CLOSED;
    LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
         mCameraId, rc);
    return rc;
}
946
947/*===========================================================================
948 * FUNCTION : initialize
949 *
950 * DESCRIPTION: Initialize frameworks callback functions
951 *
952 * PARAMETERS :
953 * @callback_ops : callback function to frameworks
954 *
955 * RETURN :
956 *
957 *==========================================================================*/
958int QCamera3HardwareInterface::initialize(
959 const struct camera3_callback_ops *callback_ops)
960{
Thierry Strudele80ad7c2016-12-06 10:16:27 -0800961 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_INIT);
Thierry Strudel3d639192016-09-09 11:52:26 -0700962 int rc;
963
964 LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
965 pthread_mutex_lock(&mMutex);
966
967 // Validate current state
968 switch (mState) {
969 case OPENED:
970 /* valid state */
971 break;
972 default:
973 LOGE("Invalid state %d", mState);
974 rc = -ENODEV;
975 goto err1;
976 }
977
978 rc = initParameters();
979 if (rc < 0) {
980 LOGE("initParamters failed %d", rc);
981 goto err1;
982 }
983 mCallbackOps = callback_ops;
984
985 mChannelHandle = mCameraHandle->ops->add_channel(
986 mCameraHandle->camera_handle, NULL, NULL, this);
987 if (mChannelHandle == 0) {
988 LOGE("add_channel failed");
989 rc = -ENOMEM;
990 pthread_mutex_unlock(&mMutex);
991 return rc;
992 }
993
994 pthread_mutex_unlock(&mMutex);
995 mCameraInitialized = true;
996 mState = INITIALIZED;
997 LOGI("X");
998 return 0;
999
1000err1:
1001 pthread_mutex_unlock(&mMutex);
1002 return rc;
1003}
1004
/*===========================================================================
 * FUNCTION   : validateStreamDimensions
 *
 * DESCRIPTION: Check if the configuration requested are those advertised.
 *              Rejects configurations with more than one input stream or with
 *              any stream whose (rotation-adjusted) size is absent from the
 *              capability tables.
 *
 * PARAMETERS :
 *   @streamList : streams to be configured
 *
 * RETURN     : NO_ERROR on success, -EINVAL on the first invalid stream
 *
 *==========================================================================*/
int QCamera3HardwareInterface::validateStreamDimensions(
        camera3_stream_configuration_t *streamList)
{
    int rc = NO_ERROR;
    size_t count = 0;

    camera3_stream_t *inputStream = NULL;
    /*
    * Loop through all streams to find input stream if it exists*
    */
    for (size_t i = 0; i< streamList->num_streams; i++) {
        if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
            if (inputStream != NULL) {
                // Camera3 allows at most one input stream per configuration.
                LOGE("Error, Multiple input streams requested");
                return -EINVAL;
            }
            inputStream = streamList->streams[i];
        }
    }
    /*
    * Loop through all streams requested in configuration
    * Check if unsupported sizes have been requested on any of them
    */
    for (size_t j = 0; j < streamList->num_streams; j++) {
        bool sizeFound = false;
        camera3_stream_t *newStream = streamList->streams[j];

        // For 90/270-degree rotation the buffer is produced transposed, so
        // validate the swapped dimensions against the capability tables.
        uint32_t rotatedHeight = newStream->height;
        uint32_t rotatedWidth = newStream->width;
        if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
            (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
            rotatedHeight = newStream->width;
            rotatedWidth = newStream->height;
        }

        /*
        * Sizes are different for each type of stream format check against
        * appropriate table.
        */
        switch (newStream->format) {
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
        case HAL_PIXEL_FORMAT_RAW10:
            // RAW streams must match one of the advertised sensor RAW dims.
            count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
                        (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_BLOB:
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
            /* Verify set size against generated sizes table */
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_YCbCr_420_888:
        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
        default:
            // ZSL/bidirectional/input streams matching the full active array
            // size are accepted directly; this break exits the switch, and
            // otherwise validation falls through to the picture-size table.
            if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
                    || newStream->stream_type == CAMERA3_STREAM_INPUT
                    || IS_USAGE_ZSL(newStream->usage)) {
                if (((int32_t)rotatedWidth ==
                                gCamCapability[mCameraId]->active_array_size.width) &&
                                ((int32_t)rotatedHeight ==
                                gCamCapability[mCameraId]->active_array_size.height)) {
                    sizeFound = true;
                    break;
                }
                /* We could potentially break here to enforce ZSL stream
                 * set from frameworks always is full active array size
                 * but it is not clear from the spc if framework will always
                 * follow that, also we have logic to override to full array
                 * size, so keeping the logic lenient at the moment
                 */
            }
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
                    MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                            gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                            ((int32_t)rotatedHeight ==
                            gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        } /* End of switch(newStream->format) */

        /* We error out even if a single stream has unsupported size set */
        if (!sizeFound) {
            LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
                    rotatedWidth, rotatedHeight, newStream->format,
                    gCamCapability[mCameraId]->active_array_size.width,
                    gCamCapability[mCameraId]->active_array_size.height);
            rc = -EINVAL;
            break;
        }
    } /* End of for each stream */
    return rc;
}
1127
1128/*==============================================================================
1129 * FUNCTION : isSupportChannelNeeded
1130 *
1131 * DESCRIPTION: Simple heuristic func to determine if support channels is needed
1132 *
1133 * PARAMETERS :
1134 * @stream_list : streams to be configured
1135 * @stream_config_info : the config info for streams to be configured
1136 *
1137 * RETURN : Boolen true/false decision
1138 *
1139 *==========================================================================*/
1140bool QCamera3HardwareInterface::isSupportChannelNeeded(
1141 camera3_stream_configuration_t *streamList,
1142 cam_stream_size_info_t stream_config_info)
1143{
1144 uint32_t i;
1145 bool pprocRequested = false;
1146 /* Check for conditions where PProc pipeline does not have any streams*/
1147 for (i = 0; i < stream_config_info.num_streams; i++) {
1148 if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
1149 stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
1150 pprocRequested = true;
1151 break;
1152 }
1153 }
1154
1155 if (pprocRequested == false )
1156 return true;
1157
1158 /* Dummy stream needed if only raw or jpeg streams present */
1159 for (i = 0; i < streamList->num_streams; i++) {
1160 switch(streamList->streams[i]->format) {
1161 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1162 case HAL_PIXEL_FORMAT_RAW10:
1163 case HAL_PIXEL_FORMAT_RAW16:
1164 case HAL_PIXEL_FORMAT_BLOB:
1165 break;
1166 default:
1167 return false;
1168 }
1169 }
1170 return true;
1171}
1172
/*==============================================================================
 * FUNCTION   : getSensorOutputSize
 *
 * DESCRIPTION: Get sensor output size based on current stream configuratoin.
 *              Pushes the maximum requested stream dimension to the backend
 *              and reads back the RAW dimension the sensor will produce.
 *
 * PARAMETERS :
 *   @sensor_dim : sensor output dimension (output)
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              none-zero failure code
 *
 *==========================================================================*/
int32_t QCamera3HardwareInterface::getSensorOutputSize(cam_dimension_t &sensor_dim)
{
    int32_t rc = NO_ERROR;

    // The required sensor output must cover the largest width and largest
    // height over all configured streams (independently maximized).
    cam_dimension_t max_dim = {0, 0};
    for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
        if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
            max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
        if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
            max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
    }

    // mParameters is reused as the batch buffer for both the set and the get;
    // it must be cleared between transactions.
    clear_metadata_buffer(mParameters);

    rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
            max_dim);
    if (rc != NO_ERROR) {
        LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
        return rc;
    }

    rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
    if (rc != NO_ERROR) {
        LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
        return rc;
    }

    // Query the RAW dimension the backend selected for the dimension just set.
    clear_metadata_buffer(mParameters);
    ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_RAW_DIMENSION);

    rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
            mParameters);
    if (rc != NO_ERROR) {
        LOGE("Failed to get CAM_INTF_PARM_RAW_DIMENSION");
        return rc;
    }

    READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_RAW_DIMENSION, sensor_dim);
    LOGH("sensor output dimension = %d x %d", sensor_dim.width, sensor_dim.height);

    return rc;
}
1228
/*==============================================================================
 * FUNCTION   : addToPPFeatureMask
 *
 * DESCRIPTION: add additional features to pp feature mask based on
 *              stream type and usecase. The candidate features are read from
 *              the "persist.camera.hal3.feature" property (hex "0x..." or
 *              decimal), then gated by stream format and video hint.
 *
 * PARAMETERS :
 *   @stream_format : stream type for feature mask
 *   @stream_idx    : stream idx within postprocess_mask list to change
 *
 * RETURN     : NULL
 *
 *==========================================================================*/
void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
        uint32_t stream_idx)
{
    char feature_mask_value[PROPERTY_VALUE_MAX];
    cam_feature_mask_t feature_mask;
    int args_converted;
    int property_len;

    /* Get feature mask from property */
#ifdef _LE_CAMERA_
    // LE builds default the property to SW TNR instead of 0.
    char swtnr_feature_mask_value[PROPERTY_VALUE_MAX];
    snprintf(swtnr_feature_mask_value, PROPERTY_VALUE_MAX, "%lld", CAM_QTI_FEATURE_SW_TNR);
    property_len = property_get("persist.camera.hal3.feature",
            feature_mask_value, swtnr_feature_mask_value);
#else
    property_len = property_get("persist.camera.hal3.feature",
            feature_mask_value, "0");
#endif
    // Accept "0x<hex>" or plain decimal.
    // NOTE(review): "%llx"/"%lld" assume cam_feature_mask_t is exactly
    // (unsigned) long long wide — confirm against the interface header.
    if ((property_len > 2) && (feature_mask_value[0] == '0') &&
            (feature_mask_value[1] == 'x')) {
        args_converted = sscanf(feature_mask_value, "0x%llx", &feature_mask);
    } else {
        args_converted = sscanf(feature_mask_value, "%lld", &feature_mask);
    }
    if (1 != args_converted) {
        // Unparseable property: leave the stream's mask untouched.
        feature_mask = 0;
        LOGE("Wrong feature mask %s", feature_mask_value);
        return;
    }

    switch (stream_format) {
    case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
        /* Add LLVD to pp feature mask only if video hint is enabled */
        // SW TNR takes precedence over LLVD when both are requested.
        if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
            mStreamConfigInfo.postprocess_mask[stream_idx]
                    |= CAM_QTI_FEATURE_SW_TNR;
            LOGH("Added SW TNR to pp feature mask");
        } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
            mStreamConfigInfo.postprocess_mask[stream_idx]
                    |= CAM_QCOM_FEATURE_LLVD;
            LOGH("Added LLVD SeeMore to pp feature mask");
        }
        // Staggered video HDR is added unconditionally when the capability
        // advertises it, independent of the property value.
        if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
                CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
            mStreamConfigInfo.postprocess_mask[stream_idx] |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
        }
        break;
    }
    default:
        break;
    }
    LOGD("PP feature mask %llx",
            mStreamConfigInfo.postprocess_mask[stream_idx]);
}
1296
1297/*==============================================================================
1298 * FUNCTION : updateFpsInPreviewBuffer
1299 *
1300 * DESCRIPTION: update FPS information in preview buffer.
1301 *
1302 * PARAMETERS :
1303 * @metadata : pointer to metadata buffer
1304 * @frame_number: frame_number to look for in pending buffer list
1305 *
1306 * RETURN : None
1307 *
1308 *==========================================================================*/
1309void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
1310 uint32_t frame_number)
1311{
1312 // Mark all pending buffers for this particular request
1313 // with corresponding framerate information
1314 for (List<PendingBuffersInRequest>::iterator req =
1315 mPendingBuffersMap.mPendingBuffersInRequest.begin();
1316 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1317 for(List<PendingBufferInfo>::iterator j =
1318 req->mPendingBufferList.begin();
1319 j != req->mPendingBufferList.end(); j++) {
1320 QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
1321 if ((req->frame_number == frame_number) &&
1322 (channel->getStreamTypeMask() &
1323 (1U << CAM_STREAM_TYPE_PREVIEW))) {
1324 IF_META_AVAILABLE(cam_fps_range_t, float_range,
1325 CAM_INTF_PARM_FPS_RANGE, metadata) {
1326 typeof (MetaData_t::refreshrate) cameraFps = float_range->max_fps;
1327 struct private_handle_t *priv_handle =
1328 (struct private_handle_t *)(*(j->buffer));
1329 setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
1330 }
1331 }
1332 }
1333 }
1334}
1335
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001336/*==============================================================================
1337 * FUNCTION : updateTimeStampInPendingBuffers
1338 *
1339 * DESCRIPTION: update timestamp in display metadata for all pending buffers
1340 * of a frame number
1341 *
1342 * PARAMETERS :
1343 * @frame_number: frame_number. Timestamp will be set on pending buffers of this frame number
1344 * @timestamp : timestamp to be set
1345 *
1346 * RETURN : None
1347 *
1348 *==========================================================================*/
1349void QCamera3HardwareInterface::updateTimeStampInPendingBuffers(
1350 uint32_t frameNumber, nsecs_t timestamp)
1351{
1352 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
1353 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1354 if (req->frame_number != frameNumber)
1355 continue;
1356
1357 for (auto k = req->mPendingBufferList.begin();
1358 k != req->mPendingBufferList.end(); k++ ) {
1359 struct private_handle_t *priv_handle =
1360 (struct private_handle_t *) (*(k->buffer));
1361 setMetaData(priv_handle, SET_VT_TIMESTAMP, &timestamp);
1362 }
1363 }
1364 return;
1365}
1366
Thierry Strudel3d639192016-09-09 11:52:26 -07001367/*===========================================================================
1368 * FUNCTION : configureStreams
1369 *
1370 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1371 * and output streams.
1372 *
1373 * PARAMETERS :
1374 * @stream_list : streams to be configured
1375 *
1376 * RETURN :
1377 *
1378 *==========================================================================*/
1379int QCamera3HardwareInterface::configureStreams(
1380 camera3_stream_configuration_t *streamList)
1381{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001382 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS);
Thierry Strudel3d639192016-09-09 11:52:26 -07001383 int rc = 0;
1384
1385 // Acquire perfLock before configure streams
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001386 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001387 rc = configureStreamsPerfLocked(streamList);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001388 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001389
1390 return rc;
1391}
1392
1393/*===========================================================================
1394 * FUNCTION : configureStreamsPerfLocked
1395 *
1396 * DESCRIPTION: configureStreams while perfLock is held.
1397 *
1398 * PARAMETERS :
1399 * @stream_list : streams to be configured
1400 *
1401 * RETURN : int32_t type of status
1402 * NO_ERROR -- success
1403 * none-zero failure code
1404 *==========================================================================*/
1405int QCamera3HardwareInterface::configureStreamsPerfLocked(
1406 camera3_stream_configuration_t *streamList)
1407{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001408 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS_PERF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07001409 int rc = 0;
1410
1411 // Sanity check stream_list
1412 if (streamList == NULL) {
1413 LOGE("NULL stream configuration");
1414 return BAD_VALUE;
1415 }
1416 if (streamList->streams == NULL) {
1417 LOGE("NULL stream list");
1418 return BAD_VALUE;
1419 }
1420
1421 if (streamList->num_streams < 1) {
1422 LOGE("Bad number of streams requested: %d",
1423 streamList->num_streams);
1424 return BAD_VALUE;
1425 }
1426
1427 if (streamList->num_streams >= MAX_NUM_STREAMS) {
1428 LOGE("Maximum number of streams %d exceeded: %d",
1429 MAX_NUM_STREAMS, streamList->num_streams);
1430 return BAD_VALUE;
1431 }
1432
1433 mOpMode = streamList->operation_mode;
1434 LOGD("mOpMode: %d", mOpMode);
1435
1436 /* first invalidate all the steams in the mStreamList
1437 * if they appear again, they will be validated */
1438 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1439 it != mStreamInfo.end(); it++) {
1440 QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1441 if (channel) {
1442 channel->stop();
1443 }
1444 (*it)->status = INVALID;
1445 }
1446
1447 if (mRawDumpChannel) {
1448 mRawDumpChannel->stop();
1449 delete mRawDumpChannel;
1450 mRawDumpChannel = NULL;
1451 }
1452
1453 if (mSupportChannel)
1454 mSupportChannel->stop();
1455
1456 if (mAnalysisChannel) {
1457 mAnalysisChannel->stop();
1458 }
1459 if (mMetadataChannel) {
1460 /* If content of mStreamInfo is not 0, there is metadata stream */
1461 mMetadataChannel->stop();
1462 }
1463 if (mChannelHandle) {
1464 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
1465 mChannelHandle);
1466 LOGD("stopping channel %d", mChannelHandle);
1467 }
1468
1469 pthread_mutex_lock(&mMutex);
1470
1471 // Check state
1472 switch (mState) {
1473 case INITIALIZED:
1474 case CONFIGURED:
1475 case STARTED:
1476 /* valid state */
1477 break;
1478 default:
1479 LOGE("Invalid state %d", mState);
1480 pthread_mutex_unlock(&mMutex);
1481 return -ENODEV;
1482 }
1483
1484 /* Check whether we have video stream */
1485 m_bIs4KVideo = false;
1486 m_bIsVideo = false;
1487 m_bEisSupportedSize = false;
1488 m_bTnrEnabled = false;
1489 bool isZsl = false;
1490 uint32_t videoWidth = 0U;
1491 uint32_t videoHeight = 0U;
1492 size_t rawStreamCnt = 0;
1493 size_t stallStreamCnt = 0;
1494 size_t processedStreamCnt = 0;
1495 // Number of streams on ISP encoder path
1496 size_t numStreamsOnEncoder = 0;
1497 size_t numYuv888OnEncoder = 0;
1498 bool bYuv888OverrideJpeg = false;
1499 cam_dimension_t largeYuv888Size = {0, 0};
1500 cam_dimension_t maxViewfinderSize = {0, 0};
1501 bool bJpegExceeds4K = false;
1502 bool bJpegOnEncoder = false;
1503 bool bUseCommonFeatureMask = false;
1504 cam_feature_mask_t commonFeatureMask = 0;
1505 bool bSmallJpegSize = false;
1506 uint32_t width_ratio;
1507 uint32_t height_ratio;
1508 maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1509 camera3_stream_t *inputStream = NULL;
1510 bool isJpeg = false;
1511 cam_dimension_t jpegSize = {0, 0};
Thierry Strudel9ec39c62016-12-28 11:30:05 -08001512 cam_dimension_t previewSize = {0, 0};
Thierry Strudel3d639192016-09-09 11:52:26 -07001513
1514 cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
1515
1516 /*EIS configuration*/
Thierry Strudel3d639192016-09-09 11:52:26 -07001517 bool oisSupported = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001518 uint8_t eis_prop_set;
1519 uint32_t maxEisWidth = 0;
1520 uint32_t maxEisHeight = 0;
1521
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001522 // Initialize all instant AEC related variables
1523 mInstantAEC = false;
1524 mResetInstantAEC = false;
1525 mInstantAECSettledFrameNumber = 0;
1526 mAecSkipDisplayFrameBound = 0;
1527 mInstantAecFrameIdxCount = 0;
1528
Thierry Strudel3d639192016-09-09 11:52:26 -07001529 memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1530
1531 size_t count = IS_TYPE_MAX;
1532 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1533 for (size_t i = 0; i < count; i++) {
1534 if ((gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001535 (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
1536 m_bEisSupported = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07001537 break;
1538 }
1539 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001540 count = CAM_OPT_STAB_MAX;
1541 count = MIN(gCamCapability[mCameraId]->optical_stab_modes_count, count);
1542 for (size_t i = 0; i < count; i++) {
1543 if (gCamCapability[mCameraId]->optical_stab_modes[i] == CAM_OPT_STAB_ON) {
1544 oisSupported = true;
1545 break;
1546 }
1547 }
1548
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001549 if (m_bEisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001550 maxEisWidth = MAX_EIS_WIDTH;
1551 maxEisHeight = MAX_EIS_HEIGHT;
1552 }
1553
1554 /* EIS setprop control */
1555 char eis_prop[PROPERTY_VALUE_MAX];
1556 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001557 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07001558 eis_prop_set = (uint8_t)atoi(eis_prop);
1559
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001560 m_bEisEnable = eis_prop_set && (!oisSupported && m_bEisSupported) &&
Thierry Strudel3d639192016-09-09 11:52:26 -07001561 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
1562
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001563 LOGD("m_bEisEnable: %d, eis_prop_set: %d, m_bEisSupported: %d, oisSupported:%d ",
1564 m_bEisEnable, eis_prop_set, m_bEisSupported, oisSupported);
1565
Thierry Strudel3d639192016-09-09 11:52:26 -07001566 /* stream configurations */
1567 for (size_t i = 0; i < streamList->num_streams; i++) {
1568 camera3_stream_t *newStream = streamList->streams[i];
1569 LOGI("stream[%d] type = %d, format = %d, width = %d, "
1570 "height = %d, rotation = %d, usage = 0x%x",
1571 i, newStream->stream_type, newStream->format,
1572 newStream->width, newStream->height, newStream->rotation,
1573 newStream->usage);
1574 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1575 newStream->stream_type == CAMERA3_STREAM_INPUT){
1576 isZsl = true;
1577 }
1578 if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1579 inputStream = newStream;
1580 }
1581
1582 if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
1583 isJpeg = true;
1584 jpegSize.width = newStream->width;
1585 jpegSize.height = newStream->height;
1586 if (newStream->width > VIDEO_4K_WIDTH ||
1587 newStream->height > VIDEO_4K_HEIGHT)
1588 bJpegExceeds4K = true;
1589 }
1590
1591 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1592 (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1593 m_bIsVideo = true;
1594 videoWidth = newStream->width;
1595 videoHeight = newStream->height;
1596 if ((VIDEO_4K_WIDTH <= newStream->width) &&
1597 (VIDEO_4K_HEIGHT <= newStream->height)) {
1598 m_bIs4KVideo = true;
1599 }
1600 m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
1601 (newStream->height <= maxEisHeight);
1602 }
1603 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1604 newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
1605 switch (newStream->format) {
1606 case HAL_PIXEL_FORMAT_BLOB:
1607 stallStreamCnt++;
1608 if (isOnEncoder(maxViewfinderSize, newStream->width,
1609 newStream->height)) {
1610 numStreamsOnEncoder++;
1611 bJpegOnEncoder = true;
1612 }
1613 width_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.width,
1614 newStream->width);
1615 height_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.height,
1616 newStream->height);;
1617 FATAL_IF(gCamCapability[mCameraId]->max_downscale_factor == 0,
1618 "FATAL: max_downscale_factor cannot be zero and so assert");
1619 if ( (width_ratio > gCamCapability[mCameraId]->max_downscale_factor) ||
1620 (height_ratio > gCamCapability[mCameraId]->max_downscale_factor)) {
1621 LOGH("Setting small jpeg size flag to true");
1622 bSmallJpegSize = true;
1623 }
1624 break;
1625 case HAL_PIXEL_FORMAT_RAW10:
1626 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1627 case HAL_PIXEL_FORMAT_RAW16:
1628 rawStreamCnt++;
1629 break;
1630 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1631 processedStreamCnt++;
1632 if (isOnEncoder(maxViewfinderSize, newStream->width,
1633 newStream->height)) {
1634 if (newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL &&
1635 !IS_USAGE_ZSL(newStream->usage)) {
1636 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1637 }
1638 numStreamsOnEncoder++;
1639 }
1640 break;
1641 case HAL_PIXEL_FORMAT_YCbCr_420_888:
1642 processedStreamCnt++;
1643 if (isOnEncoder(maxViewfinderSize, newStream->width,
1644 newStream->height)) {
1645 // If Yuv888 size is not greater than 4K, set feature mask
1646 // to SUPERSET so that it support concurrent request on
1647 // YUV and JPEG.
1648 if (newStream->width <= VIDEO_4K_WIDTH &&
1649 newStream->height <= VIDEO_4K_HEIGHT) {
1650 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1651 }
1652 numStreamsOnEncoder++;
1653 numYuv888OnEncoder++;
1654 largeYuv888Size.width = newStream->width;
1655 largeYuv888Size.height = newStream->height;
1656 }
1657 break;
1658 default:
1659 processedStreamCnt++;
1660 if (isOnEncoder(maxViewfinderSize, newStream->width,
1661 newStream->height)) {
1662 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1663 numStreamsOnEncoder++;
1664 }
1665 break;
1666 }
1667
1668 }
1669 }
1670
1671 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
1672 gCamCapability[mCameraId]->position == CAM_POSITION_FRONT_AUX ||
1673 !m_bIsVideo) {
1674 m_bEisEnable = false;
1675 }
1676
1677 /* Logic to enable/disable TNR based on specific config size/etc.*/
1678 if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
1679 ((videoWidth == 1920 && videoHeight == 1080) ||
1680 (videoWidth == 1280 && videoHeight == 720)) &&
1681 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
1682 m_bTnrEnabled = true;
1683
1684 /* Check if num_streams is sane */
1685 if (stallStreamCnt > MAX_STALLING_STREAMS ||
1686 rawStreamCnt > MAX_RAW_STREAMS ||
1687 processedStreamCnt > MAX_PROCESSED_STREAMS) {
1688 LOGE("Invalid stream configu: stall: %d, raw: %d, processed %d",
1689 stallStreamCnt, rawStreamCnt, processedStreamCnt);
1690 pthread_mutex_unlock(&mMutex);
1691 return -EINVAL;
1692 }
1693 /* Check whether we have zsl stream or 4k video case */
Thierry Strudel9ec39c62016-12-28 11:30:05 -08001694 if (isZsl && m_bIs4KVideo) {
1695 LOGE("Currently invalid configuration ZSL & 4K Video!");
Thierry Strudel3d639192016-09-09 11:52:26 -07001696 pthread_mutex_unlock(&mMutex);
1697 return -EINVAL;
1698 }
1699 /* Check if stream sizes are sane */
1700 if (numStreamsOnEncoder > 2) {
1701 LOGE("Number of streams on ISP encoder path exceeds limits of 2");
1702 pthread_mutex_unlock(&mMutex);
1703 return -EINVAL;
1704 } else if (1 < numStreamsOnEncoder){
1705 bUseCommonFeatureMask = true;
1706 LOGH("Multiple streams above max viewfinder size, common mask needed");
1707 }
1708
1709 /* Check if BLOB size is greater than 4k in 4k recording case */
1710 if (m_bIs4KVideo && bJpegExceeds4K) {
1711 LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
1712 pthread_mutex_unlock(&mMutex);
1713 return -EINVAL;
1714 }
1715
1716 // When JPEG and preview streams share VFE output, CPP will not apply CAC2
1717 // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
1718 // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
1719 // is not true. Otherwise testMandatoryOutputCombinations will fail with following
1720 // configurations:
1721 // {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
1722 // {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
1723 // (These two configurations will not have CAC2 enabled even in HQ modes.)
1724 if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
1725 ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
1726 __func__);
1727 pthread_mutex_unlock(&mMutex);
1728 return -EINVAL;
1729 }
1730
1731 // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
1732 // the YUV stream's size is greater or equal to the JPEG size, set common
1733 // postprocess mask to NONE, so that we can take advantage of postproc bypass.
1734 if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
1735 jpegSize.width, jpegSize.height) &&
1736 largeYuv888Size.width > jpegSize.width &&
1737 largeYuv888Size.height > jpegSize.height) {
1738 bYuv888OverrideJpeg = true;
1739 } else if (!isJpeg && numStreamsOnEncoder > 1) {
1740 commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1741 }
1742
1743 LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %llx",
1744 maxViewfinderSize.width, maxViewfinderSize.height, isZsl, bUseCommonFeatureMask,
1745 commonFeatureMask);
1746 LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
1747 numStreamsOnEncoder, processedStreamCnt, stallStreamCnt, bSmallJpegSize);
1748
1749 rc = validateStreamDimensions(streamList);
1750 if (rc == NO_ERROR) {
1751 rc = validateStreamRotations(streamList);
1752 }
1753 if (rc != NO_ERROR) {
1754 LOGE("Invalid stream configuration requested!");
1755 pthread_mutex_unlock(&mMutex);
1756 return rc;
1757 }
1758
1759 camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
1760 for (size_t i = 0; i < streamList->num_streams; i++) {
1761 camera3_stream_t *newStream = streamList->streams[i];
1762 LOGH("newStream type = %d, stream format = %d "
1763 "stream size : %d x %d, stream rotation = %d",
1764 newStream->stream_type, newStream->format,
1765 newStream->width, newStream->height, newStream->rotation);
1766 //if the stream is in the mStreamList validate it
1767 bool stream_exists = false;
1768 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
1769 it != mStreamInfo.end(); it++) {
1770 if ((*it)->stream == newStream) {
1771 QCamera3ProcessingChannel *channel =
1772 (QCamera3ProcessingChannel*)(*it)->stream->priv;
1773 stream_exists = true;
1774 if (channel)
1775 delete channel;
1776 (*it)->status = VALID;
1777 (*it)->stream->priv = NULL;
1778 (*it)->channel = NULL;
1779 }
1780 }
1781 if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
1782 //new stream
1783 stream_info_t* stream_info;
1784 stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
1785 if (!stream_info) {
1786 LOGE("Could not allocate stream info");
1787 rc = -ENOMEM;
1788 pthread_mutex_unlock(&mMutex);
1789 return rc;
1790 }
1791 stream_info->stream = newStream;
1792 stream_info->status = VALID;
1793 stream_info->channel = NULL;
1794 mStreamInfo.push_back(stream_info);
1795 }
1796 /* Covers Opaque ZSL and API1 F/W ZSL */
1797 if (IS_USAGE_ZSL(newStream->usage)
1798 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
1799 if (zslStream != NULL) {
1800 LOGE("Multiple input/reprocess streams requested!");
1801 pthread_mutex_unlock(&mMutex);
1802 return BAD_VALUE;
1803 }
1804 zslStream = newStream;
1805 }
1806 /* Covers YUV reprocess */
1807 if (inputStream != NULL) {
1808 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
1809 && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
1810 && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
1811 && inputStream->width == newStream->width
1812 && inputStream->height == newStream->height) {
1813 if (zslStream != NULL) {
1814 /* This scenario indicates multiple YUV streams with same size
1815 * as input stream have been requested, since zsl stream handle
1816 * is solely use for the purpose of overriding the size of streams
1817 * which share h/w streams we will just make a guess here as to
1818 * which of the stream is a ZSL stream, this will be refactored
1819 * once we make generic logic for streams sharing encoder output
1820 */
1821 LOGH("Warning, Multiple ip/reprocess streams requested!");
1822 }
1823 zslStream = newStream;
1824 }
1825 }
1826 }
1827
1828 /* If a zsl stream is set, we know that we have configured at least one input or
1829 bidirectional stream */
1830 if (NULL != zslStream) {
1831 mInputStreamInfo.dim.width = (int32_t)zslStream->width;
1832 mInputStreamInfo.dim.height = (int32_t)zslStream->height;
1833 mInputStreamInfo.format = zslStream->format;
1834 mInputStreamInfo.usage = zslStream->usage;
1835 LOGD("Input stream configured! %d x %d, format %d, usage %d",
1836 mInputStreamInfo.dim.width,
1837 mInputStreamInfo.dim.height,
1838 mInputStreamInfo.format, mInputStreamInfo.usage);
1839 }
1840
1841 cleanAndSortStreamInfo();
1842 if (mMetadataChannel) {
1843 delete mMetadataChannel;
1844 mMetadataChannel = NULL;
1845 }
1846 if (mSupportChannel) {
1847 delete mSupportChannel;
1848 mSupportChannel = NULL;
1849 }
1850
1851 if (mAnalysisChannel) {
1852 delete mAnalysisChannel;
1853 mAnalysisChannel = NULL;
1854 }
1855
1856 if (mDummyBatchChannel) {
1857 delete mDummyBatchChannel;
1858 mDummyBatchChannel = NULL;
1859 }
1860
1861 //Create metadata channel and initialize it
1862 cam_feature_mask_t metadataFeatureMask = CAM_QCOM_FEATURE_NONE;
1863 setPAAFSupport(metadataFeatureMask, CAM_STREAM_TYPE_METADATA,
1864 gCamCapability[mCameraId]->color_arrangement);
1865 mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
1866 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001867 setBufferErrorStatus, &padding_info, metadataFeatureMask, this);
Thierry Strudel3d639192016-09-09 11:52:26 -07001868 if (mMetadataChannel == NULL) {
1869 LOGE("failed to allocate metadata channel");
1870 rc = -ENOMEM;
1871 pthread_mutex_unlock(&mMutex);
1872 return rc;
1873 }
1874 rc = mMetadataChannel->initialize(IS_TYPE_NONE);
1875 if (rc < 0) {
1876 LOGE("metadata channel initialization failed");
1877 delete mMetadataChannel;
1878 mMetadataChannel = NULL;
1879 pthread_mutex_unlock(&mMutex);
1880 return rc;
1881 }
1882
Thierry Strudel3d639192016-09-09 11:52:26 -07001883 bool isRawStreamRequested = false;
1884 memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
1885 /* Allocate channel objects for the requested streams */
1886 for (size_t i = 0; i < streamList->num_streams; i++) {
1887 camera3_stream_t *newStream = streamList->streams[i];
1888 uint32_t stream_usage = newStream->usage;
1889 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
1890 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
1891 struct camera_info *p_info = NULL;
1892 pthread_mutex_lock(&gCamLock);
1893 p_info = get_cam_info(mCameraId, &mStreamConfigInfo.sync_type);
1894 pthread_mutex_unlock(&gCamLock);
1895 if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
1896 || IS_USAGE_ZSL(newStream->usage)) &&
1897 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
1898 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
1899 if (bUseCommonFeatureMask) {
1900 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1901 commonFeatureMask;
1902 } else {
1903 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1904 CAM_QCOM_FEATURE_NONE;
1905 }
1906
1907 } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
1908 LOGH("Input stream configured, reprocess config");
1909 } else {
1910 //for non zsl streams find out the format
1911 switch (newStream->format) {
1912 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
1913 {
1914 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1915 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1916 /* add additional features to pp feature mask */
1917 addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
1918 mStreamConfigInfo.num_streams);
1919
1920 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
1921 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
1922 CAM_STREAM_TYPE_VIDEO;
1923 if (m_bTnrEnabled && m_bTnrVideo) {
1924 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
1925 CAM_QCOM_FEATURE_CPP_TNR;
1926 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
1927 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
1928 ~CAM_QCOM_FEATURE_CDS;
1929 }
1930 } else {
1931 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
1932 CAM_STREAM_TYPE_PREVIEW;
1933 if (m_bTnrEnabled && m_bTnrPreview) {
1934 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
1935 CAM_QCOM_FEATURE_CPP_TNR;
1936 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
1937 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
1938 ~CAM_QCOM_FEATURE_CDS;
1939 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001940 if(!m_bSwTnrPreview) {
1941 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
1942 ~CAM_QTI_FEATURE_SW_TNR;
1943 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001944 padding_info.width_padding = mSurfaceStridePadding;
1945 padding_info.height_padding = CAM_PAD_TO_2;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08001946 previewSize.width = (int32_t)newStream->width;
1947 previewSize.height = (int32_t)newStream->height;
Thierry Strudel3d639192016-09-09 11:52:26 -07001948 }
1949 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
1950 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
1951 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
1952 newStream->height;
1953 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
1954 newStream->width;
1955 }
1956 }
1957 break;
1958 case HAL_PIXEL_FORMAT_YCbCr_420_888:
1959 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
1960 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
1961 if (bUseCommonFeatureMask)
1962 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1963 commonFeatureMask;
1964 else
1965 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1966 CAM_QCOM_FEATURE_NONE;
1967 } else {
1968 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1969 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1970 }
1971 break;
1972 case HAL_PIXEL_FORMAT_BLOB:
1973 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
1974 // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
1975 if ((m_bIs4KVideo && !isZsl) || (bSmallJpegSize && !isZsl)) {
1976 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1977 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1978 } else {
1979 if (bUseCommonFeatureMask &&
1980 isOnEncoder(maxViewfinderSize, newStream->width,
1981 newStream->height)) {
1982 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
1983 } else {
1984 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
1985 }
1986 }
1987 if (isZsl) {
1988 if (zslStream) {
1989 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
1990 (int32_t)zslStream->width;
1991 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
1992 (int32_t)zslStream->height;
1993 } else {
1994 LOGE("Error, No ZSL stream identified");
1995 pthread_mutex_unlock(&mMutex);
1996 return -EINVAL;
1997 }
1998 } else if (m_bIs4KVideo) {
1999 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)videoWidth;
2000 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)videoHeight;
2001 } else if (bYuv888OverrideJpeg) {
2002 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2003 (int32_t)largeYuv888Size.width;
2004 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2005 (int32_t)largeYuv888Size.height;
2006 }
2007 break;
2008 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2009 case HAL_PIXEL_FORMAT_RAW16:
2010 case HAL_PIXEL_FORMAT_RAW10:
2011 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2012 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2013 isRawStreamRequested = true;
2014 break;
2015 default:
2016 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
2017 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2018 break;
2019 }
2020 }
2021
2022 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2023 (cam_stream_type_t) mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2024 gCamCapability[mCameraId]->color_arrangement);
2025
2026 if (newStream->priv == NULL) {
2027 //New stream, construct channel
2028 switch (newStream->stream_type) {
2029 case CAMERA3_STREAM_INPUT:
2030 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
2031 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
2032 break;
2033 case CAMERA3_STREAM_BIDIRECTIONAL:
2034 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
2035 GRALLOC_USAGE_HW_CAMERA_WRITE;
2036 break;
2037 case CAMERA3_STREAM_OUTPUT:
2038 /* For video encoding stream, set read/write rarely
2039 * flag so that they may be set to un-cached */
2040 if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
2041 newStream->usage |=
2042 (GRALLOC_USAGE_SW_READ_RARELY |
2043 GRALLOC_USAGE_SW_WRITE_RARELY |
2044 GRALLOC_USAGE_HW_CAMERA_WRITE);
2045 else if (IS_USAGE_ZSL(newStream->usage))
2046 {
2047 LOGD("ZSL usage flag skipping");
2048 }
2049 else if (newStream == zslStream
2050 || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
2051 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
2052 } else
2053 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
2054 break;
2055 default:
2056 LOGE("Invalid stream_type %d", newStream->stream_type);
2057 break;
2058 }
2059
2060 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
2061 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
2062 QCamera3ProcessingChannel *channel = NULL;
2063 switch (newStream->format) {
2064 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2065 if ((newStream->usage &
2066 private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
2067 (streamList->operation_mode ==
2068 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2069 ) {
2070 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2071 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002072 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002073 this,
2074 newStream,
2075 (cam_stream_type_t)
2076 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2077 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2078 mMetadataChannel,
2079 0); //heap buffers are not required for HFR video channel
2080 if (channel == NULL) {
2081 LOGE("allocation of channel failed");
2082 pthread_mutex_unlock(&mMutex);
2083 return -ENOMEM;
2084 }
2085 //channel->getNumBuffers() will return 0 here so use
2086 //MAX_INFLIGH_HFR_REQUESTS
2087 newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
2088 newStream->priv = channel;
2089 LOGI("num video buffers in HFR mode: %d",
2090 MAX_INFLIGHT_HFR_REQUESTS);
2091 } else {
2092 /* Copy stream contents in HFR preview only case to create
2093 * dummy batch channel so that sensor streaming is in
2094 * HFR mode */
2095 if (!m_bIsVideo && (streamList->operation_mode ==
2096 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
2097 mDummyBatchStream = *newStream;
2098 }
2099 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2100 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002101 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002102 this,
2103 newStream,
2104 (cam_stream_type_t)
2105 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2106 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2107 mMetadataChannel,
2108 MAX_INFLIGHT_REQUESTS);
2109 if (channel == NULL) {
2110 LOGE("allocation of channel failed");
2111 pthread_mutex_unlock(&mMutex);
2112 return -ENOMEM;
2113 }
2114 newStream->max_buffers = channel->getNumBuffers();
2115 newStream->priv = channel;
2116 }
2117 break;
2118 case HAL_PIXEL_FORMAT_YCbCr_420_888: {
2119 channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
2120 mChannelHandle,
2121 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002122 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002123 this,
2124 newStream,
2125 (cam_stream_type_t)
2126 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2127 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2128 mMetadataChannel);
2129 if (channel == NULL) {
2130 LOGE("allocation of YUV channel failed");
2131 pthread_mutex_unlock(&mMutex);
2132 return -ENOMEM;
2133 }
2134 newStream->max_buffers = channel->getNumBuffers();
2135 newStream->priv = channel;
2136 break;
2137 }
2138 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2139 case HAL_PIXEL_FORMAT_RAW16:
2140 case HAL_PIXEL_FORMAT_RAW10:
2141 mRawChannel = new QCamera3RawChannel(
2142 mCameraHandle->camera_handle, mChannelHandle,
2143 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002144 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002145 this, newStream,
2146 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2147 mMetadataChannel,
2148 (newStream->format == HAL_PIXEL_FORMAT_RAW16));
2149 if (mRawChannel == NULL) {
2150 LOGE("allocation of raw channel failed");
2151 pthread_mutex_unlock(&mMutex);
2152 return -ENOMEM;
2153 }
2154 newStream->max_buffers = mRawChannel->getNumBuffers();
2155 newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
2156 break;
2157 case HAL_PIXEL_FORMAT_BLOB:
2158 // Max live snapshot inflight buffer is 1. This is to mitigate
2159 // frame drop issues for video snapshot. The more buffers being
2160 // allocated, the more frame drops there are.
2161 mPictureChannel = new QCamera3PicChannel(
2162 mCameraHandle->camera_handle, mChannelHandle,
2163 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002164 setBufferErrorStatus, &padding_info, this, newStream,
Thierry Strudel3d639192016-09-09 11:52:26 -07002165 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2166 m_bIs4KVideo, isZsl, mMetadataChannel,
2167 (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
2168 if (mPictureChannel == NULL) {
2169 LOGE("allocation of channel failed");
2170 pthread_mutex_unlock(&mMutex);
2171 return -ENOMEM;
2172 }
2173 newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
2174 newStream->max_buffers = mPictureChannel->getNumBuffers();
2175 mPictureChannel->overrideYuvSize(
2176 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
2177 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
2178 break;
2179
2180 default:
2181 LOGE("not a supported format 0x%x", newStream->format);
2182 break;
2183 }
2184 } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
2185 newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
2186 } else {
2187 LOGE("Error, Unknown stream type");
2188 pthread_mutex_unlock(&mMutex);
2189 return -EINVAL;
2190 }
2191
2192 QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
2193 if (channel != NULL && channel->isUBWCEnabled()) {
2194 cam_format_t fmt = channel->getStreamDefaultFormat(
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07002195 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2196 newStream->width, newStream->height);
Thierry Strudel3d639192016-09-09 11:52:26 -07002197 if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
2198 newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
2199 }
2200 }
2201
2202 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2203 it != mStreamInfo.end(); it++) {
2204 if ((*it)->stream == newStream) {
2205 (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
2206 break;
2207 }
2208 }
2209 } else {
2210 // Channel already exists for this stream
2211 // Do nothing for now
2212 }
2213 padding_info = gCamCapability[mCameraId]->padding_info;
2214
2215 /* Do not add entries for input stream in metastream info
2216 * since there is no real stream associated with it
2217 */
2218 if (newStream->stream_type != CAMERA3_STREAM_INPUT)
2219 mStreamConfigInfo.num_streams++;
2220 }
2221
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002222 // Create analysis stream all the time, even when h/w support is not available
2223 {
2224 cam_feature_mask_t analysisFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2225 setPAAFSupport(analysisFeatureMask, CAM_STREAM_TYPE_ANALYSIS,
2226 gCamCapability[mCameraId]->color_arrangement);
2227 cam_analysis_info_t analysisInfo;
2228 int32_t ret = NO_ERROR;
2229 ret = mCommon.getAnalysisInfo(
2230 FALSE,
2231 analysisFeatureMask,
2232 &analysisInfo);
2233 if (ret == NO_ERROR) {
2234 cam_dimension_t analysisDim;
2235 analysisDim = mCommon.getMatchingDimension(previewSize,
2236 analysisInfo.analysis_recommended_res);
2237
2238 mAnalysisChannel = new QCamera3SupportChannel(
2239 mCameraHandle->camera_handle,
2240 mChannelHandle,
2241 mCameraHandle->ops,
2242 &analysisInfo.analysis_padding_info,
2243 analysisFeatureMask,
2244 CAM_STREAM_TYPE_ANALYSIS,
2245 &analysisDim,
2246 (analysisInfo.analysis_format
2247 == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
2248 : CAM_FORMAT_YUV_420_NV21),
2249 analysisInfo.hw_analysis_supported,
2250 gCamCapability[mCameraId]->color_arrangement,
2251 this,
2252 0); // force buffer count to 0
2253 } else {
2254 LOGW("getAnalysisInfo failed, ret = %d", ret);
2255 }
2256 if (!mAnalysisChannel) {
2257 LOGW("Analysis channel cannot be created");
2258 }
2259 }
2260
Thierry Strudel3d639192016-09-09 11:52:26 -07002261 //RAW DUMP channel
2262 if (mEnableRawDump && isRawStreamRequested == false){
2263 cam_dimension_t rawDumpSize;
2264 rawDumpSize = getMaxRawSize(mCameraId);
2265 cam_feature_mask_t rawDumpFeatureMask = CAM_QCOM_FEATURE_NONE;
2266 setPAAFSupport(rawDumpFeatureMask,
2267 CAM_STREAM_TYPE_RAW,
2268 gCamCapability[mCameraId]->color_arrangement);
2269 mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
2270 mChannelHandle,
2271 mCameraHandle->ops,
2272 rawDumpSize,
2273 &padding_info,
2274 this, rawDumpFeatureMask);
2275 if (!mRawDumpChannel) {
2276 LOGE("Raw Dump channel cannot be created");
2277 pthread_mutex_unlock(&mMutex);
2278 return -ENOMEM;
2279 }
2280 }
2281
2282
2283 if (mAnalysisChannel) {
2284 cam_analysis_info_t analysisInfo;
2285 memset(&analysisInfo, 0, sizeof(cam_analysis_info_t));
2286 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2287 CAM_STREAM_TYPE_ANALYSIS;
2288 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2289 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2290 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2291 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2292 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002293 rc = mCommon.getAnalysisInfo(FALSE,
Thierry Strudel3d639192016-09-09 11:52:26 -07002294 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2295 &analysisInfo);
2296 if (rc != NO_ERROR) {
2297 LOGE("getAnalysisInfo failed, ret = %d", rc);
2298 pthread_mutex_unlock(&mMutex);
2299 return rc;
2300 }
2301 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002302 mCommon.getMatchingDimension(previewSize,
2303 analysisInfo.analysis_recommended_res);
Thierry Strudel3d639192016-09-09 11:52:26 -07002304 mStreamConfigInfo.num_streams++;
2305 }
2306
2307 if (isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
2308 cam_analysis_info_t supportInfo;
2309 memset(&supportInfo, 0, sizeof(cam_analysis_info_t));
2310 cam_feature_mask_t callbackFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2311 setPAAFSupport(callbackFeatureMask,
2312 CAM_STREAM_TYPE_CALLBACK,
2313 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002314 int32_t ret = NO_ERROR;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002315 ret = mCommon.getAnalysisInfo(FALSE, callbackFeatureMask, &supportInfo);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002316 if (ret != NO_ERROR) {
2317 /* Ignore the error for Mono camera
2318 * because the PAAF bit mask is only set
2319 * for CAM_STREAM_TYPE_ANALYSIS stream type
2320 */
2321 if (gCamCapability[mCameraId]->color_arrangement != CAM_FILTER_ARRANGEMENT_Y) {
2322 LOGW("getAnalysisInfo failed, ret = %d", ret);
2323 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002324 }
2325 mSupportChannel = new QCamera3SupportChannel(
2326 mCameraHandle->camera_handle,
2327 mChannelHandle,
2328 mCameraHandle->ops,
2329 &gCamCapability[mCameraId]->padding_info,
2330 callbackFeatureMask,
2331 CAM_STREAM_TYPE_CALLBACK,
2332 &QCamera3SupportChannel::kDim,
2333 CAM_FORMAT_YUV_420_NV21,
2334 supportInfo.hw_analysis_supported,
2335 gCamCapability[mCameraId]->color_arrangement,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002336 this, 0);
Thierry Strudel3d639192016-09-09 11:52:26 -07002337 if (!mSupportChannel) {
2338 LOGE("dummy channel cannot be created");
2339 pthread_mutex_unlock(&mMutex);
2340 return -ENOMEM;
2341 }
2342 }
2343
2344 if (mSupportChannel) {
2345 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2346 QCamera3SupportChannel::kDim;
2347 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2348 CAM_STREAM_TYPE_CALLBACK;
2349 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2350 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2351 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2352 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2353 gCamCapability[mCameraId]->color_arrangement);
2354 mStreamConfigInfo.num_streams++;
2355 }
2356
2357 if (mRawDumpChannel) {
2358 cam_dimension_t rawSize;
2359 rawSize = getMaxRawSize(mCameraId);
2360 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2361 rawSize;
2362 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2363 CAM_STREAM_TYPE_RAW;
2364 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2365 CAM_QCOM_FEATURE_NONE;
2366 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2367 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2368 gCamCapability[mCameraId]->color_arrangement);
2369 mStreamConfigInfo.num_streams++;
2370 }
2371 /* In HFR mode, if video stream is not added, create a dummy channel so that
2372 * ISP can create a batch mode even for preview only case. This channel is
2373 * never 'start'ed (no stream-on), it is only 'initialized' */
2374 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2375 !m_bIsVideo) {
2376 cam_feature_mask_t dummyFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2377 setPAAFSupport(dummyFeatureMask,
2378 CAM_STREAM_TYPE_VIDEO,
2379 gCamCapability[mCameraId]->color_arrangement);
2380 mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2381 mChannelHandle,
2382 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002383 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002384 this,
2385 &mDummyBatchStream,
2386 CAM_STREAM_TYPE_VIDEO,
2387 dummyFeatureMask,
2388 mMetadataChannel);
2389 if (NULL == mDummyBatchChannel) {
2390 LOGE("creation of mDummyBatchChannel failed."
2391 "Preview will use non-hfr sensor mode ");
2392 }
2393 }
2394 if (mDummyBatchChannel) {
2395 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2396 mDummyBatchStream.width;
2397 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2398 mDummyBatchStream.height;
2399 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2400 CAM_STREAM_TYPE_VIDEO;
2401 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2402 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2403 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2404 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2405 gCamCapability[mCameraId]->color_arrangement);
2406 mStreamConfigInfo.num_streams++;
2407 }
2408
2409 mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
2410 mStreamConfigInfo.buffer_info.max_buffers =
2411 m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
2412
2413 /* Initialize mPendingRequestInfo and mPendingBuffersMap */
2414 for (pendingRequestIterator i = mPendingRequestsList.begin();
2415 i != mPendingRequestsList.end();) {
2416 i = erasePendingRequest(i);
2417 }
2418 mPendingFrameDropList.clear();
2419 // Initialize/Reset the pending buffers list
2420 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
2421 req.mPendingBufferList.clear();
2422 }
2423 mPendingBuffersMap.mPendingBuffersInRequest.clear();
2424
2425 mPendingReprocessResultList.clear();
2426
2427 mCurJpegMeta.clear();
2428 //Get min frame duration for this streams configuration
2429 deriveMinFrameDuration();
2430
2431 // Update state
2432 mState = CONFIGURED;
2433
2434 pthread_mutex_unlock(&mMutex);
2435
2436 return rc;
2437}
2438
2439/*===========================================================================
2440 * FUNCTION : validateCaptureRequest
2441 *
2442 * DESCRIPTION: validate a capture request from camera service
2443 *
2444 * PARAMETERS :
2445 * @request : request from framework to process
2446 *
2447 * RETURN :
2448 *
2449 *==========================================================================*/
2450int QCamera3HardwareInterface::validateCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002451 camera3_capture_request_t *request,
2452 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07002453{
2454 ssize_t idx = 0;
2455 const camera3_stream_buffer_t *b;
2456 CameraMetadata meta;
2457
2458 /* Sanity check the request */
2459 if (request == NULL) {
2460 LOGE("NULL capture request");
2461 return BAD_VALUE;
2462 }
2463
2464 if ((request->settings == NULL) && (mState == CONFIGURED)) {
2465 /*settings cannot be null for the first request*/
2466 return BAD_VALUE;
2467 }
2468
2469 uint32_t frameNumber = request->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002470 if ((request->num_output_buffers < 1 || request->output_buffers == NULL)
2471 && (internallyRequestedStreams.size() == 0)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002472 LOGE("Request %d: No output buffers provided!",
2473 __FUNCTION__, frameNumber);
2474 return BAD_VALUE;
2475 }
2476 if (request->num_output_buffers >= MAX_NUM_STREAMS) {
2477 LOGE("Number of buffers %d equals or is greater than maximum number of streams!",
2478 request->num_output_buffers, MAX_NUM_STREAMS);
2479 return BAD_VALUE;
2480 }
2481 if (request->input_buffer != NULL) {
2482 b = request->input_buffer;
2483 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2484 LOGE("Request %d: Buffer %ld: Status not OK!",
2485 frameNumber, (long)idx);
2486 return BAD_VALUE;
2487 }
2488 if (b->release_fence != -1) {
2489 LOGE("Request %d: Buffer %ld: Has a release fence!",
2490 frameNumber, (long)idx);
2491 return BAD_VALUE;
2492 }
2493 if (b->buffer == NULL) {
2494 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
2495 frameNumber, (long)idx);
2496 return BAD_VALUE;
2497 }
2498 }
2499
2500 // Validate all buffers
2501 b = request->output_buffers;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002502 while (idx < (ssize_t)request->num_output_buffers) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002503 QCamera3ProcessingChannel *channel =
2504 static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
2505 if (channel == NULL) {
2506 LOGE("Request %d: Buffer %ld: Unconfigured stream!",
2507 frameNumber, (long)idx);
2508 return BAD_VALUE;
2509 }
2510 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2511 LOGE("Request %d: Buffer %ld: Status not OK!",
2512 frameNumber, (long)idx);
2513 return BAD_VALUE;
2514 }
2515 if (b->release_fence != -1) {
2516 LOGE("Request %d: Buffer %ld: Has a release fence!",
2517 frameNumber, (long)idx);
2518 return BAD_VALUE;
2519 }
2520 if (b->buffer == NULL) {
2521 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
2522 frameNumber, (long)idx);
2523 return BAD_VALUE;
2524 }
2525 if (*(b->buffer) == NULL) {
2526 LOGE("Request %d: Buffer %ld: NULL private handle!",
2527 frameNumber, (long)idx);
2528 return BAD_VALUE;
2529 }
2530 idx++;
2531 b = request->output_buffers + idx;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002532 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002533 return NO_ERROR;
2534}
2535
2536/*===========================================================================
2537 * FUNCTION : deriveMinFrameDuration
2538 *
 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
2540 * on currently configured streams.
2541 *
2542 * PARAMETERS : NONE
2543 *
2544 * RETURN : NONE
2545 *
2546 *==========================================================================*/
2547void QCamera3HardwareInterface::deriveMinFrameDuration()
2548{
2549 int32_t maxJpegDim, maxProcessedDim, maxRawDim;
2550
2551 maxJpegDim = 0;
2552 maxProcessedDim = 0;
2553 maxRawDim = 0;
2554
2555 // Figure out maximum jpeg, processed, and raw dimensions
2556 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
2557 it != mStreamInfo.end(); it++) {
2558
2559 // Input stream doesn't have valid stream_type
2560 if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
2561 continue;
2562
2563 int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
2564 if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
2565 if (dimension > maxJpegDim)
2566 maxJpegDim = dimension;
2567 } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
2568 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
2569 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
2570 if (dimension > maxRawDim)
2571 maxRawDim = dimension;
2572 } else {
2573 if (dimension > maxProcessedDim)
2574 maxProcessedDim = dimension;
2575 }
2576 }
2577
2578 size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
2579 MAX_SIZES_CNT);
2580
2581 //Assume all jpeg dimensions are in processed dimensions.
2582 if (maxJpegDim > maxProcessedDim)
2583 maxProcessedDim = maxJpegDim;
2584 //Find the smallest raw dimension that is greater or equal to jpeg dimension
2585 if (maxProcessedDim > maxRawDim) {
2586 maxRawDim = INT32_MAX;
2587
2588 for (size_t i = 0; i < count; i++) {
2589 int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
2590 gCamCapability[mCameraId]->raw_dim[i].height;
2591 if (dimension >= maxProcessedDim && dimension < maxRawDim)
2592 maxRawDim = dimension;
2593 }
2594 }
2595
2596 //Find minimum durations for processed, jpeg, and raw
2597 for (size_t i = 0; i < count; i++) {
2598 if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
2599 gCamCapability[mCameraId]->raw_dim[i].height) {
2600 mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
2601 break;
2602 }
2603 }
2604 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
2605 for (size_t i = 0; i < count; i++) {
2606 if (maxProcessedDim ==
2607 gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
2608 gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
2609 mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
2610 mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
2611 break;
2612 }
2613 }
2614}
2615
2616/*===========================================================================
2617 * FUNCTION : getMinFrameDuration
2618 *
 * DESCRIPTION: get minimum frame duration based on the current maximum frame
 *              durations and current request configuration.
 *
 * PARAMETERS : @request: request sent by the frameworks
 *
 * RETURN : min frame duration for a particular request
2625 *
2626 *==========================================================================*/
2627int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
2628{
2629 bool hasJpegStream = false;
2630 bool hasRawStream = false;
2631 for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
2632 const camera3_stream_t *stream = request->output_buffers[i].stream;
2633 if (stream->format == HAL_PIXEL_FORMAT_BLOB)
2634 hasJpegStream = true;
2635 else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
2636 stream->format == HAL_PIXEL_FORMAT_RAW10 ||
2637 stream->format == HAL_PIXEL_FORMAT_RAW16)
2638 hasRawStream = true;
2639 }
2640
2641 if (!hasJpegStream)
2642 return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
2643 else
2644 return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
2645}
2646
2647/*===========================================================================
2648 * FUNCTION : handleBuffersDuringFlushLock
2649 *
2650 * DESCRIPTION: Account for buffers returned from back-end during flush
2651 * This function is executed while mMutex is held by the caller.
2652 *
2653 * PARAMETERS :
2654 * @buffer: image buffer for the callback
2655 *
2656 * RETURN :
2657 *==========================================================================*/
2658void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
2659{
2660 bool buffer_found = false;
2661 for (List<PendingBuffersInRequest>::iterator req =
2662 mPendingBuffersMap.mPendingBuffersInRequest.begin();
2663 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
2664 for (List<PendingBufferInfo>::iterator i =
2665 req->mPendingBufferList.begin();
2666 i != req->mPendingBufferList.end(); i++) {
2667 if (i->buffer == buffer->buffer) {
2668 mPendingBuffersMap.numPendingBufsAtFlush--;
2669 LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
2670 buffer->buffer, req->frame_number,
2671 mPendingBuffersMap.numPendingBufsAtFlush);
2672 buffer_found = true;
2673 break;
2674 }
2675 }
2676 if (buffer_found) {
2677 break;
2678 }
2679 }
2680 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
2681 //signal the flush()
2682 LOGD("All buffers returned to HAL. Continue flush");
2683 pthread_cond_signal(&mBuffersCond);
2684 }
2685}
2686
2687
2688/*===========================================================================
2689 * FUNCTION : handlePendingReprocResults
2690 *
2691 * DESCRIPTION: check and notify on any pending reprocess results
2692 *
2693 * PARAMETERS :
2694 * @frame_number : Pending request frame number
2695 *
2696 * RETURN : int32_t type of status
2697 * NO_ERROR -- success
2698 * none-zero failure code
2699 *==========================================================================*/
int32_t QCamera3HardwareInterface::handlePendingReprocResults(uint32_t frame_number)
{
    // Scan the delayed reprocess results for an entry matching this frame.
    for (List<PendingReprocessResult>::iterator j = mPendingReprocessResultList.begin();
            j != mPendingReprocessResultList.end(); j++) {
        if (j->frame_number == frame_number) {
            // Deliver the notify message that was deferred until now; the
            // notify must go out before the corresponding capture result.
            orchestrateNotify(&j->notify_msg);

            LOGD("Delayed reprocess notify %d",
                    frame_number);

            // Find the matching pending request so its input buffer and
            // settings can be attached to the capture result.
            for (pendingRequestIterator k = mPendingRequestsList.begin();
                    k != mPendingRequestsList.end(); k++) {

                if (k->frame_number == j->frame_number) {
                    LOGD("Found reprocess frame number %d in pending reprocess List "
                            "Take it out!!",
                            k->frame_number);

                    // Build the capture result from the stored reprocess
                    // buffer (j) and the pending request's metadata (k).
                    camera3_capture_result result;
                    memset(&result, 0, sizeof(camera3_capture_result));
                    result.frame_number = frame_number;
                    result.num_output_buffers = 1;
                    result.output_buffers = &j->buffer;
                    result.input_buffer = k->input_buffer;
                    result.result = k->settings;
                    result.partial_result = PARTIAL_RESULT_COUNT;
                    orchestrateResult(&result);

                    // Request fully serviced; remove it from the pending list.
                    erasePendingRequest(k);
                    break;
                }
            }
            // Remove the consumed reprocess entry. Iterator j is not used
            // after erase(), so the invalidated iterator is never touched.
            mPendingReprocessResultList.erase(j);
            break;
        }
    }
    return NO_ERROR;
}
2738
2739/*===========================================================================
2740 * FUNCTION : handleBatchMetadata
2741 *
2742 * DESCRIPTION: Handles metadata buffer callback in batch mode
2743 *
2744 * PARAMETERS : @metadata_buf: metadata buffer
2745 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
2746 * the meta buf in this method
2747 *
2748 * RETURN :
2749 *
2750 *==========================================================================*/
void QCamera3HardwareInterface::handleBatchMetadata(
        mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BATCH_METADATA);

    if (NULL == metadata_buf) {
        LOGE("metadata_buf is NULL");
        return;
    }
    /* In batch mode, the metadata will contain the frame number and timestamp of
     * the last frame in the batch. Eg: a batch containing buffers from request
     * 5,6,7 and 8 will have frame number and timestamp corresponding to 8.
     * multiple process_capture_requests => 1 set_param => 1 handleBatchMetadata =>
     * multiple process_capture_results */
    metadata_buffer_t *metadata =
            (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
    int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
    uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
    uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
    uint32_t frame_number = 0, urgent_frame_number = 0;
    int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
    bool invalid_metadata = false;
    size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
    size_t loopCount = 1;

    // Pull the batch's frame-number/timestamp entries out of the metadata.
    int32_t *p_frame_number_valid =
            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
    uint32_t *p_frame_number =
            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
    int64_t *p_capture_time =
            POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
    int32_t *p_urgent_frame_number_valid =
            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
    uint32_t *p_urgent_frame_number =
            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);

    if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
            (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
            (NULL == p_urgent_frame_number)) {
        LOGE("Invalid metadata");
        // Still fall through: handleMetadataWithLock must run once even for
        // invalid metadata (see comment inside the loop below).
        invalid_metadata = true;
    } else {
        frame_number_valid = *p_frame_number_valid;
        last_frame_number = *p_frame_number;
        last_frame_capture_time = *p_capture_time;
        urgent_frame_number_valid = *p_urgent_frame_number_valid;
        last_urgent_frame_number = *p_urgent_frame_number;
    }

    /* In batch mode, when no video buffers are requested, set_parms are sent
     * for every capture_request. The difference between consecutive urgent
     * frame numbers and frame numbers should be used to interpolate the
     * corresponding frame numbers and time stamps */
    // mMutex guards mPendingBatchMap and mState in this section.
    pthread_mutex_lock(&mMutex);
    if (urgent_frame_number_valid) {
        ssize_t idx = mPendingBatchMap.indexOfKey(last_urgent_frame_number);
        if(idx < 0) {
            LOGE("Invalid urgent frame number received: %d. Irrecoverable error",
                last_urgent_frame_number);
            mState = ERROR;
            pthread_mutex_unlock(&mMutex);
            return;
        }
        first_urgent_frame_number = mPendingBatchMap.valueAt(idx);
        // +1 because both the first and last frame numbers are inclusive.
        urgentFrameNumDiff = last_urgent_frame_number + 1 -
                first_urgent_frame_number;

        LOGD("urgent_frm: valid: %d frm_num: %d - %d",
                 urgent_frame_number_valid,
                first_urgent_frame_number, last_urgent_frame_number);
    }

    if (frame_number_valid) {
        ssize_t idx = mPendingBatchMap.indexOfKey(last_frame_number);
        if(idx < 0) {
            LOGE("Invalid frame number received: %d. Irrecoverable error",
                last_frame_number);
            mState = ERROR;
            pthread_mutex_unlock(&mMutex);
            return;
        }
        first_frame_number = mPendingBatchMap.valueAt(idx);
        frameNumDiff = last_frame_number + 1 -
                first_frame_number;
        // The batch entry is fully consumed once its regular (non-urgent)
        // frame numbers have been resolved.
        mPendingBatchMap.removeItem(last_frame_number);

        LOGD("frm: valid: %d frm_num: %d - %d",
                 frame_number_valid,
                first_frame_number, last_frame_number);

    }
    pthread_mutex_unlock(&mMutex);

    if (urgent_frame_number_valid || frame_number_valid) {
        // Emit one metadata callback per frame covered by the batch; urgent
        // and regular spans may differ, so take the larger of the two.
        loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
        if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
            LOGE("urgentFrameNumDiff: %d urgentFrameNum: %d",
                    urgentFrameNumDiff, last_urgent_frame_number);
        if (frameNumDiff > MAX_HFR_BATCH_SIZE)
            LOGE("frameNumDiff: %d frameNum: %d",
                    frameNumDiff, last_frame_number);
    }

    for (size_t i = 0; i < loopCount; i++) {
        /* handleMetadataWithLock is called even for invalid_metadata for
         * pipeline depth calculation */
        if (!invalid_metadata) {
            /* Infer frame number. Batch metadata contains frame number of the
             * last frame */
            if (urgent_frame_number_valid) {
                if (i < urgentFrameNumDiff) {
                    // Patch the shared metadata buffer in place with the
                    // interpolated urgent frame number for this iteration.
                    urgent_frame_number =
                            first_urgent_frame_number + i;
                    LOGD("inferred urgent frame_number: %d",
                            urgent_frame_number);
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                            CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
                } else {
                    /* This is to handle when urgentFrameNumDiff < frameNumDiff */
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                            CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
                }
            }

            /* Infer frame number. Batch metadata contains frame number of the
             * last frame */
            if (frame_number_valid) {
                if (i < frameNumDiff) {
                    frame_number = first_frame_number + i;
                    LOGD("inferred frame_number: %d", frame_number);
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                            CAM_INTF_META_FRAME_NUMBER, frame_number);
                } else {
                    /* This is to handle when urgentFrameNumDiff > frameNumDiff */
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                             CAM_INTF_META_FRAME_NUMBER_VALID, 0);
                }
            }

            if (last_frame_capture_time) {
                //Infer timestamp: walk back from the last frame's capture time
                //by (loopCount - 1) frame periods, then step forward i periods.
                first_frame_capture_time = last_frame_capture_time -
                        (((loopCount - 1) * NSEC_PER_SEC) / (double) mHFRVideoFps);
                capture_time =
                        first_frame_capture_time + (i * NSEC_PER_SEC / (double) mHFRVideoFps);
                ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                        CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
                LOGD("batch capture_time: %lld, capture_time: %lld",
                        last_frame_capture_time, capture_time);
            }
        }
        // Pass false for free_and_bufdone_meta_buf: the buffer is reused for
        // every iteration and released once, after the loop, below.
        pthread_mutex_lock(&mMutex);
        handleMetadataWithLock(metadata_buf,
                false /* free_and_bufdone_meta_buf */,
                (i == 0) /* first metadata in the batch metadata */);
        pthread_mutex_unlock(&mMutex);
    }

    /* BufDone metadata buffer */
    if (free_and_bufdone_meta_buf) {
        mMetadataChannel->bufDone(metadata_buf);
        free(metadata_buf);
    }
}
2915
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002916void QCamera3HardwareInterface::notifyError(uint32_t frameNumber,
2917 camera3_error_msg_code_t errorCode)
2918{
2919 camera3_notify_msg_t notify_msg;
2920 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
2921 notify_msg.type = CAMERA3_MSG_ERROR;
2922 notify_msg.message.error.error_code = errorCode;
2923 notify_msg.message.error.error_stream = NULL;
2924 notify_msg.message.error.frame_number = frameNumber;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002925 orchestrateNotify(&notify_msg);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002926
2927 return;
2928}
Thierry Strudel3d639192016-09-09 11:52:26 -07002929/*===========================================================================
2930 * FUNCTION : handleMetadataWithLock
2931 *
2932 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
2933 *
2934 * PARAMETERS : @metadata_buf: metadata buffer
2935 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
2936 * the meta buf in this method
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002937 * @firstMetadataInBatch: Boolean to indicate whether this is the
2938 * first metadata in a batch. Valid only for batch mode
Thierry Strudel3d639192016-09-09 11:52:26 -07002939 *
2940 * RETURN :
2941 *
2942 *==========================================================================*/
2943void QCamera3HardwareInterface::handleMetadataWithLock(
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002944 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
2945 bool firstMetadataInBatch)
Thierry Strudel3d639192016-09-09 11:52:26 -07002946{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002947 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_METADATA_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07002948 if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
2949 //during flush do not send metadata from this thread
2950 LOGD("not sending metadata during flush or when mState is error");
2951 if (free_and_bufdone_meta_buf) {
2952 mMetadataChannel->bufDone(metadata_buf);
2953 free(metadata_buf);
2954 }
2955 return;
2956 }
2957
2958 //not in flush
2959 metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
2960 int32_t frame_number_valid, urgent_frame_number_valid;
2961 uint32_t frame_number, urgent_frame_number;
2962 int64_t capture_time;
2963 nsecs_t currentSysTime;
2964
2965 int32_t *p_frame_number_valid =
2966 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
2967 uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
2968 int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
2969 int32_t *p_urgent_frame_number_valid =
2970 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
2971 uint32_t *p_urgent_frame_number =
2972 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
2973 IF_META_AVAILABLE(cam_stream_ID_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
2974 metadata) {
2975 LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
2976 *p_frame_number_valid, *p_frame_number);
2977 }
2978
2979 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
2980 (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
2981 LOGE("Invalid metadata");
2982 if (free_and_bufdone_meta_buf) {
2983 mMetadataChannel->bufDone(metadata_buf);
2984 free(metadata_buf);
2985 }
2986 goto done_metadata;
2987 }
2988 frame_number_valid = *p_frame_number_valid;
2989 frame_number = *p_frame_number;
2990 capture_time = *p_capture_time;
2991 urgent_frame_number_valid = *p_urgent_frame_number_valid;
2992 urgent_frame_number = *p_urgent_frame_number;
2993 currentSysTime = systemTime(CLOCK_MONOTONIC);
2994
2995 // Detect if buffers from any requests are overdue
2996 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
2997 if ( (currentSysTime - req.timestamp) >
2998 s2ns(MISSING_REQUEST_BUF_TIMEOUT) ) {
2999 for (auto &missed : req.mPendingBufferList) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003000 assert(missed.stream->priv);
3001 if (missed.stream->priv) {
3002 QCamera3Channel *ch = (QCamera3Channel *)(missed.stream->priv);
3003 assert(ch->mStreams[0]);
3004 if (ch->mStreams[0]) {
3005 LOGE("Cancel missing frame = %d, buffer = %p,"
3006 "stream type = %d, stream format = %d",
3007 req.frame_number, missed.buffer,
3008 ch->mStreams[0]->getMyType(), missed.stream->format);
3009 ch->timeoutFrame(req.frame_number);
3010 }
3011 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003012 }
3013 }
3014 }
3015 //Partial result on process_capture_result for timestamp
3016 if (urgent_frame_number_valid) {
3017 LOGD("valid urgent frame_number = %u, capture_time = %lld",
3018 urgent_frame_number, capture_time);
3019
3020 //Recieved an urgent Frame Number, handle it
3021 //using partial results
3022 for (pendingRequestIterator i =
3023 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3024 LOGD("Iterator Frame = %d urgent frame = %d",
3025 i->frame_number, urgent_frame_number);
3026
3027 if ((!i->input_buffer) && (i->frame_number < urgent_frame_number) &&
3028 (i->partial_result_cnt == 0)) {
3029 LOGE("Error: HAL missed urgent metadata for frame number %d",
3030 i->frame_number);
3031 }
3032
3033 if (i->frame_number == urgent_frame_number &&
3034 i->bUrgentReceived == 0) {
3035
3036 camera3_capture_result_t result;
3037 memset(&result, 0, sizeof(camera3_capture_result_t));
3038
3039 i->partial_result_cnt++;
3040 i->bUrgentReceived = 1;
3041 // Extract 3A metadata
3042 result.result =
3043 translateCbUrgentMetadataToResultMetadata(metadata);
3044 // Populate metadata result
3045 result.frame_number = urgent_frame_number;
3046 result.num_output_buffers = 0;
3047 result.output_buffers = NULL;
3048 result.partial_result = i->partial_result_cnt;
3049
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003050 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07003051 LOGD("urgent frame_number = %u, capture_time = %lld",
3052 result.frame_number, capture_time);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003053 if (mResetInstantAEC && mInstantAECSettledFrameNumber == 0) {
3054 // Instant AEC settled for this frame.
3055 LOGH("instant AEC settled for frame number %d", urgent_frame_number);
3056 mInstantAECSettledFrameNumber = urgent_frame_number;
3057 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003058 free_camera_metadata((camera_metadata_t *)result.result);
3059 break;
3060 }
3061 }
3062 }
3063
3064 if (!frame_number_valid) {
3065 LOGD("Not a valid normal frame number, used as SOF only");
3066 if (free_and_bufdone_meta_buf) {
3067 mMetadataChannel->bufDone(metadata_buf);
3068 free(metadata_buf);
3069 }
3070 goto done_metadata;
3071 }
3072 LOGH("valid frame_number = %u, capture_time = %lld",
3073 frame_number, capture_time);
3074
3075 for (pendingRequestIterator i = mPendingRequestsList.begin();
3076 i != mPendingRequestsList.end() && i->frame_number <= frame_number;) {
3077 // Flush out all entries with less or equal frame numbers.
3078
3079 camera3_capture_result_t result;
3080 memset(&result, 0, sizeof(camera3_capture_result_t));
3081
3082 LOGD("frame_number in the list is %u", i->frame_number);
3083 i->partial_result_cnt++;
3084 result.partial_result = i->partial_result_cnt;
3085
3086 // Check whether any stream buffer corresponding to this is dropped or not
3087 // If dropped, then send the ERROR_BUFFER for the corresponding stream
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003088 // OR check if instant AEC is enabled, then need to drop frames untill AEC is settled.
3089 if (p_cam_frame_drop ||
3090 (mInstantAEC || i->frame_number < mInstantAECSettledFrameNumber)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003091 /* Clear notify_msg structure */
3092 camera3_notify_msg_t notify_msg;
3093 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3094 for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
3095 j != i->buffers.end(); j++) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003096 bool dropFrame = false;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003097 QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel *)j->stream->priv;
3098 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003099 if (p_cam_frame_drop) {
3100 for (uint32_t k = 0; k < p_cam_frame_drop->num_streams; k++) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003101 if (streamID == p_cam_frame_drop->stream_request[k].streamID) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003102 // Got the stream ID for drop frame.
3103 dropFrame = true;
3104 break;
3105 }
3106 }
3107 } else {
3108 // This is instant AEC case.
3109 // For instant AEC drop the stream untill AEC is settled.
3110 dropFrame = true;
3111 }
3112 if (dropFrame) {
3113 // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
3114 if (p_cam_frame_drop) {
3115 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003116 LOGE("Start of reporting error frame#=%u, streamID=%u",
3117 i->frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003118 } else {
3119 // For instant AEC, inform frame drop and frame number
3120 LOGH("Start of reporting error frame#=%u for instant AEC, streamID=%u, "
3121 "AEC settled frame number = %u",
3122 i->frame_number, streamID, mInstantAECSettledFrameNumber);
3123 }
3124 notify_msg.type = CAMERA3_MSG_ERROR;
3125 notify_msg.message.error.frame_number = i->frame_number;
3126 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
3127 notify_msg.message.error.error_stream = j->stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003128 orchestrateNotify(&notify_msg);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003129 if (p_cam_frame_drop) {
3130 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003131 LOGE("End of reporting error frame#=%u, streamID=%u",
3132 i->frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003133 } else {
3134 // For instant AEC, inform frame drop and frame number
3135 LOGH("End of reporting error frame#=%u for instant AEC, streamID=%u, "
3136 "AEC settled frame number = %u",
3137 i->frame_number, streamID, mInstantAECSettledFrameNumber);
3138 }
3139 PendingFrameDropInfo PendingFrameDrop;
3140 PendingFrameDrop.frame_number=i->frame_number;
3141 PendingFrameDrop.stream_ID = streamID;
3142 // Add the Frame drop info to mPendingFrameDropList
3143 mPendingFrameDropList.push_back(PendingFrameDrop);
3144 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003145 }
3146 }
3147
3148 // Send empty metadata with already filled buffers for dropped metadata
3149 // and send valid metadata with already filled buffers for current metadata
3150 /* we could hit this case when we either
3151 * 1. have a pending reprocess request or
3152 * 2. miss a metadata buffer callback */
3153 if (i->frame_number < frame_number) {
3154 if (i->input_buffer) {
3155 /* this will be handled in handleInputBufferWithLock */
3156 i++;
3157 continue;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003158 } else {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003159
3160 mPendingLiveRequest--;
3161
3162 CameraMetadata dummyMetadata;
3163 dummyMetadata.update(ANDROID_REQUEST_ID, &(i->request_id), 1);
3164 result.result = dummyMetadata.release();
3165
3166 notifyError(i->frame_number, CAMERA3_MSG_ERROR_RESULT);
Thierry Strudel3d639192016-09-09 11:52:26 -07003167 }
3168 } else {
3169 mPendingLiveRequest--;
3170 /* Clear notify_msg structure */
3171 camera3_notify_msg_t notify_msg;
3172 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3173
3174 // Send shutter notify to frameworks
3175 notify_msg.type = CAMERA3_MSG_SHUTTER;
3176 notify_msg.message.shutter.frame_number = i->frame_number;
3177 notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003178 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07003179
3180 i->timestamp = capture_time;
3181
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003182 /* Set the timestamp in display metadata so that clients aware of
3183 private_handle such as VT can use this un-modified timestamps.
3184 Camera framework is unaware of this timestamp and cannot change this */
3185 updateTimeStampInPendingBuffers(i->frame_number, i->timestamp);
3186
Thierry Strudel3d639192016-09-09 11:52:26 -07003187 // Find channel requiring metadata, meaning internal offline postprocess
3188 // is needed.
3189 //TODO: for now, we don't support two streams requiring metadata at the same time.
3190 // (because we are not making copies, and metadata buffer is not reference counted.
3191 bool internalPproc = false;
3192 for (pendingBufferIterator iter = i->buffers.begin();
3193 iter != i->buffers.end(); iter++) {
3194 if (iter->need_metadata) {
3195 internalPproc = true;
3196 QCamera3ProcessingChannel *channel =
3197 (QCamera3ProcessingChannel *)iter->stream->priv;
3198 channel->queueReprocMetadata(metadata_buf);
3199 break;
3200 }
3201 }
3202
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003203 for (auto itr = i->internalRequestList.begin();
3204 itr != i->internalRequestList.end(); itr++) {
3205 if (itr->need_metadata) {
3206 internalPproc = true;
3207 QCamera3ProcessingChannel *channel =
3208 (QCamera3ProcessingChannel *)itr->stream->priv;
3209 channel->queueReprocMetadata(metadata_buf);
3210 break;
3211 }
3212 }
3213
3214
Thierry Strudel3d639192016-09-09 11:52:26 -07003215 result.result = translateFromHalMetadata(metadata,
3216 i->timestamp, i->request_id, i->jpegMetadata, i->pipeline_depth,
Samuel Ha68ba5172016-12-15 18:41:12 -08003217 i->capture_intent,
3218 /* DevCamDebug metadata translateFromHalMetadata function call*/
3219 i->DevCamDebug_meta_enable,
3220 /* DevCamDebug metadata end */
3221 internalPproc, i->fwkCacMode,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003222 firstMetadataInBatch);
Thierry Strudel3d639192016-09-09 11:52:26 -07003223
3224 saveExifParams(metadata);
3225
3226 if (i->blob_request) {
3227 {
3228 //Dump tuning metadata if enabled and available
3229 char prop[PROPERTY_VALUE_MAX];
3230 memset(prop, 0, sizeof(prop));
3231 property_get("persist.camera.dumpmetadata", prop, "0");
3232 int32_t enabled = atoi(prop);
3233 if (enabled && metadata->is_tuning_params_valid) {
3234 dumpMetadataToFile(metadata->tuning_params,
3235 mMetaFrameCount,
3236 enabled,
3237 "Snapshot",
3238 frame_number);
3239 }
3240 }
3241 }
3242
3243 if (!internalPproc) {
3244 LOGD("couldn't find need_metadata for this metadata");
3245 // Return metadata buffer
3246 if (free_and_bufdone_meta_buf) {
3247 mMetadataChannel->bufDone(metadata_buf);
3248 free(metadata_buf);
3249 }
3250 }
3251 }
3252 if (!result.result) {
3253 LOGE("metadata is NULL");
3254 }
3255 result.frame_number = i->frame_number;
3256 result.input_buffer = i->input_buffer;
3257 result.num_output_buffers = 0;
3258 result.output_buffers = NULL;
3259 for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
3260 j != i->buffers.end(); j++) {
3261 if (j->buffer) {
3262 result.num_output_buffers++;
3263 }
3264 }
3265
3266 updateFpsInPreviewBuffer(metadata, i->frame_number);
3267
3268 if (result.num_output_buffers > 0) {
3269 camera3_stream_buffer_t *result_buffers =
3270 new camera3_stream_buffer_t[result.num_output_buffers];
3271 if (result_buffers != NULL) {
3272 size_t result_buffers_idx = 0;
3273 for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
3274 j != i->buffers.end(); j++) {
3275 if (j->buffer) {
3276 for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
3277 m != mPendingFrameDropList.end(); m++) {
3278 QCamera3Channel *channel = (QCamera3Channel *)j->buffer->stream->priv;
3279 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
3280 if((m->stream_ID == streamID) && (m->frame_number==frame_number)) {
3281 j->buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
3282 LOGE("Stream STATUS_ERROR frame_number=%u, streamID=%u",
3283 frame_number, streamID);
3284 m = mPendingFrameDropList.erase(m);
3285 break;
3286 }
3287 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003288 j->buffer->status |= mPendingBuffersMap.getBufErrStatus(j->buffer->buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -07003289 mPendingBuffersMap.removeBuf(j->buffer->buffer);
3290 result_buffers[result_buffers_idx++] = *(j->buffer);
3291 free(j->buffer);
3292 j->buffer = NULL;
3293 }
3294 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003295
Thierry Strudel3d639192016-09-09 11:52:26 -07003296 result.output_buffers = result_buffers;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003297 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07003298 LOGD("meta frame_number = %u, capture_time = %lld",
3299 result.frame_number, i->timestamp);
3300 free_camera_metadata((camera_metadata_t *)result.result);
3301 delete[] result_buffers;
3302 }else {
3303 LOGE("Fatal error: out of memory");
3304 }
3305 } else {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003306 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07003307 LOGD("meta frame_number = %u, capture_time = %lld",
3308 result.frame_number, i->timestamp);
3309 free_camera_metadata((camera_metadata_t *)result.result);
3310 }
3311
3312 i = erasePendingRequest(i);
3313
3314 if (!mPendingReprocessResultList.empty()) {
3315 handlePendingReprocResults(frame_number + 1);
3316 }
3317 }
3318
3319done_metadata:
3320 for (pendingRequestIterator i = mPendingRequestsList.begin();
3321 i != mPendingRequestsList.end() ;i++) {
3322 i->pipeline_depth++;
3323 }
3324 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
3325 unblockRequestIfNecessary();
3326}
3327
3328/*===========================================================================
3329 * FUNCTION : hdrPlusPerfLock
3330 *
3331 * DESCRIPTION: perf lock for HDR+ using custom intent
3332 *
3333 * PARAMETERS : @metadata_buf: Metadata super_buf pointer
3334 *
3335 * RETURN : None
3336 *
3337 *==========================================================================*/
3338void QCamera3HardwareInterface::hdrPlusPerfLock(
3339 mm_camera_super_buf_t *metadata_buf)
3340{
3341 if (NULL == metadata_buf) {
3342 LOGE("metadata_buf is NULL");
3343 return;
3344 }
3345 metadata_buffer_t *metadata =
3346 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3347 int32_t *p_frame_number_valid =
3348 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3349 uint32_t *p_frame_number =
3350 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3351
3352 if (p_frame_number_valid == NULL || p_frame_number == NULL) {
3353 LOGE("%s: Invalid metadata", __func__);
3354 return;
3355 }
3356
3357 //acquire perf lock for 5 sec after the last HDR frame is captured
3358 if ((p_frame_number_valid != NULL) && *p_frame_number_valid) {
3359 if ((p_frame_number != NULL) &&
3360 (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003361 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT, HDR_PLUS_PERF_TIME_OUT);
Thierry Strudel3d639192016-09-09 11:52:26 -07003362 }
3363 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003364}
3365
3366/*===========================================================================
3367 * FUNCTION : handleInputBufferWithLock
3368 *
3369 * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
3370 *
3371 * PARAMETERS : @frame_number: frame number of the input buffer
3372 *
3373 * RETURN :
3374 *
3375 *==========================================================================*/
void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_IN_BUF_LKD);
    // Locate the pending request carrying an input buffer for this frame.
    pendingRequestIterator i = mPendingRequestsList.begin();
    while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
        i++;
    }
    if (i != mPendingRequestsList.end() && i->input_buffer) {
        //found the right request
        if (!i->shutter_notified) {
            CameraMetadata settings;
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
            // Default to the current time; prefer the sensor timestamp
            // supplied in the input request settings when present.
            nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
            if(i->settings) {
                settings = i->settings;
                if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
                    capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
                } else {
                    LOGE("No timestamp in input settings! Using current one.");
                }
            } else {
                LOGE("Input settings missing!");
            }

            // Shutter notification is sent at most once per request
            // (guarded by shutter_notified) and before the capture result.
            notify_msg.type = CAMERA3_MSG_SHUTTER;
            notify_msg.message.shutter.frame_number = frame_number;
            notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
            orchestrateNotify(&notify_msg);
            i->shutter_notified = true;
            LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
                       i->frame_number, notify_msg.message.shutter.timestamp);
        }

        // Wait on, then close, the input buffer's release fence before
        // returning the buffer to the framework.
        if (i->input_buffer->release_fence != -1) {
           int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
           close(i->input_buffer->release_fence);
           if (rc != OK) {
               LOGE("input buffer sync wait failed %d", rc);
           }
        }

        // Build and dispatch the final result: the request's settings are
        // echoed back and no output buffers are attached on this path.
        camera3_capture_result result;
        memset(&result, 0, sizeof(camera3_capture_result));
        result.frame_number = frame_number;
        result.result = i->settings;
        result.input_buffer = i->input_buffer;
        result.partial_result = PARTIAL_RESULT_COUNT;

        orchestrateResult(&result);
        LOGD("Input request metadata and input buffer frame_number = %u",
                        i->frame_number);
        // Request is complete; remove it from the pending list.
        i = erasePendingRequest(i);
    } else {
        LOGE("Could not find input request for frame number %d", frame_number);
    }
}
3433
3434/*===========================================================================
3435 * FUNCTION : handleBufferWithLock
3436 *
3437 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
3438 *
3439 * PARAMETERS : @buffer: image buffer for the callback
3440 * @frame_number: frame number of the image buffer
3441 *
3442 * RETURN :
3443 *
3444 *==========================================================================*/
void QCamera3HardwareInterface::handleBufferWithLock(
    camera3_stream_buffer_t *buffer, uint32_t frame_number)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BUF_LKD);

    // A returned BLOB buffer marks snapshot completion; release the
    // snapshot perf lock.
    if (buffer->stream->format == HAL_PIXEL_FORMAT_BLOB) {
        mPerfLockMgr.releasePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
    }

    /* Nothing to be done during error state */
    if ((ERROR == mState) || (DEINIT == mState)) {
        return;
    }
    if (mFlushPerf) {
        handleBuffersDuringFlushLock(buffer);
        return;
    }
    //not in flush
    // If the frame number doesn't exist in the pending request list,
    // directly send the buffer to the frameworks, and update pending buffers map
    // Otherwise, book-keep the buffer.
    pendingRequestIterator i = mPendingRequestsList.begin();
    while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
        i++;
    }
    if (i == mPendingRequestsList.end()) {
        // Verify all pending requests frame_numbers are greater
        for (pendingRequestIterator j = mPendingRequestsList.begin();
                j != mPendingRequestsList.end(); j++) {
            if ((j->frame_number < frame_number) && !(j->input_buffer)) {
                LOGW("Error: pending live frame number %d is smaller than %d",
                         j->frame_number, frame_number);
            }
        }
        // Buffer-only result: no metadata, partial_result stays 0.
        camera3_capture_result_t result;
        memset(&result, 0, sizeof(camera3_capture_result_t));
        result.result = NULL;
        result.frame_number = frame_number;
        result.num_output_buffers = 1;
        result.partial_result = 0;
        // If this (stream, frame) pair was marked for drop, flag the buffer
        // as error and remove the drop-list entry.
        for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
                m != mPendingFrameDropList.end(); m++) {
            QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
            uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
            if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
                buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
                LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
                         frame_number, streamID);
                m = mPendingFrameDropList.erase(m);
                break;
            }
        }
        // Merge any error status recorded for this buffer, then drop it
        // from the pending-buffers book-keeping before sending upstream.
        buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
        result.output_buffers = buffer;
        LOGH("result frame_number = %d, buffer = %p",
                 frame_number, buffer->buffer);

        mPendingBuffersMap.removeBuf(buffer->buffer);

        orchestrateResult(&result);
    } else {
        if (i->input_buffer) {
            // Reprocess request: send shutter + complete result (settings,
            // input buffer, and this single output buffer) in one shot.
            CameraMetadata settings;
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
            // Default to current time; prefer the sensor timestamp carried
            // in the input request settings when present.
            nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
            if(i->settings) {
                settings = i->settings;
                if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
                    capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
                } else {
                    LOGW("No timestamp in input settings! Using current one.");
                }
            } else {
                LOGE("Input settings missing!");
            }

            notify_msg.type = CAMERA3_MSG_SHUTTER;
            notify_msg.message.shutter.frame_number = frame_number;
            notify_msg.message.shutter.timestamp = (uint64_t)capture_time;

            // Wait on, then close, the input buffer's release fence.
            if (i->input_buffer->release_fence != -1) {
               int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
               close(i->input_buffer->release_fence);
               if (rc != OK) {
                   LOGE("input buffer sync wait failed %d", rc);
               }
            }
            buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
            mPendingBuffersMap.removeBuf(buffer->buffer);

            camera3_capture_result result;
            memset(&result, 0, sizeof(camera3_capture_result));
            result.frame_number = frame_number;
            result.result = i->settings;
            result.input_buffer = i->input_buffer;
            result.num_output_buffers = 1;
            result.output_buffers = buffer;
            result.partial_result = PARTIAL_RESULT_COUNT;

            // Notify (shutter) must precede the result callback.
            orchestrateNotify(&notify_msg);
            orchestrateResult(&result);
            LOGD("Notify reprocess now %d!", frame_number);
            i = erasePendingRequest(i);
        } else {
            // Metadata for this frame hasn't arrived yet: cache a copy of
            // the buffer on the matching stream slot until it does.
            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                    j != i->buffers.end(); j++) {
                if (j->stream == buffer->stream) {
                    if (j->buffer != NULL) {
                        LOGE("Error: buffer is already set");
                    } else {
                        j->buffer = (camera3_stream_buffer_t *)malloc(
                            sizeof(camera3_stream_buffer_t));
                        *(j->buffer) = *buffer;
                        LOGH("cache buffer %p at result frame_number %u",
                             buffer->buffer, frame_number);
                    }
                }
            }
        }
    }

    // First preview buffer out: drop startup perf locks and switch to the
    // steady-state encode power hint.
    if (mPreviewStarted == false) {
        QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
        if ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask()) {
            mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
            mPerfLockMgr.releasePerfLock(PERF_LOCK_OPEN_CAMERA);
            mPreviewStarted = true;

            // Set power hint for preview
            mPerfLockMgr.acquirePerfLock(PERF_LOCK_POWERHINT_ENCODE, 0);
        }
    }
}
3579
3580/*===========================================================================
3581 * FUNCTION : unblockRequestIfNecessary
3582 *
3583 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
3584 * that mMutex is held when this function is called.
3585 *
3586 * PARAMETERS :
3587 *
3588 * RETURN :
3589 *
3590 *==========================================================================*/
void QCamera3HardwareInterface::unblockRequestIfNecessary()
{
    // Unblock process_capture_request
    // Caller holds mMutex (see function header); a bare signal is safe here
    // because the waiter re-checks its predicate under the same mutex.
    pthread_cond_signal(&mRequestCond);
}
3596
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003597/*===========================================================================
3598 * FUNCTION : isHdrSnapshotRequest
3599 *
3600 * DESCRIPTION: Function to determine if the request is for a HDR snapshot
3601 *
3602 * PARAMETERS : camera3 request structure
3603 *
3604 * RETURN : boolean decision variable
3605 *
3606 *==========================================================================*/
3607bool QCamera3HardwareInterface::isHdrSnapshotRequest(camera3_capture_request *request)
3608{
3609 if (request == NULL) {
3610 LOGE("Invalid request handle");
3611 assert(0);
3612 return false;
3613 }
3614
3615 if (!mForceHdrSnapshot) {
3616 CameraMetadata frame_settings;
3617 frame_settings = request->settings;
3618
3619 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
3620 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
3621 if (metaMode != ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
3622 return false;
3623 }
3624 } else {
3625 return false;
3626 }
3627
3628 if (frame_settings.exists(ANDROID_CONTROL_SCENE_MODE)) {
3629 uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
3630 if (fwk_sceneMode != ANDROID_CONTROL_SCENE_MODE_HDR) {
3631 return false;
3632 }
3633 } else {
3634 return false;
3635 }
3636 }
3637
3638 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
3639 if (request->output_buffers[i].stream->format
3640 == HAL_PIXEL_FORMAT_BLOB) {
3641 return true;
3642 }
3643 }
3644
3645 return false;
3646}
3647/*===========================================================================
3648 * FUNCTION : orchestrateRequest
3649 *
3650 * DESCRIPTION: Orchestrates a capture request from camera service
3651 *
3652 * PARAMETERS :
3653 * @request : request from framework to process
3654 *
3655 * RETURN : Error status codes
3656 *
3657 *==========================================================================*/
int32_t QCamera3HardwareInterface::orchestrateRequest(
        camera3_capture_request_t *request)
{
    // Save framework-visible state so it can be restored/reused while this
    // function rewrites the request for internally generated captures.
    uint32_t originalFrameNumber = request->frame_number;
    uint32_t originalOutputCount = request->num_output_buffers;
    const camera_metadata_t *original_settings = request->settings;
    List<InternalRequest> internallyRequestedStreams;
    List<InternalRequest> emptyInternalList;

    if (isHdrSnapshotRequest(request) && request->input_buffer == NULL) {
        // HDR snapshot: expand the single framework request into a bracketed
        // burst (-2x / 0x / +2x EV). Each internal capture gets its own
        // internal frame number; only the one mapped to originalFrameNumber
        // produces a framework-visible result.
        LOGD("Framework requested:%d buffers in HDR snapshot", request->num_output_buffers);
        uint32_t internalFrameNumber;
        CameraMetadata modified_meta;


        /* Add Blob channel to list of internally requested streams */
        for (uint32_t i = 0; i < request->num_output_buffers; i++) {
            if (request->output_buffers[i].stream->format
                    == HAL_PIXEL_FORMAT_BLOB) {
                InternalRequest streamRequested;
                streamRequested.meteringOnly = 1;
                streamRequested.need_metadata = 0;
                streamRequested.stream = request->output_buffers[i].stream;
                internallyRequestedStreams.push_back(streamRequested);
            }
        }
        // Framework buffers are withheld from the internal captures.
        request->num_output_buffers = 0;
        auto itr =  internallyRequestedStreams.begin();

        /* Modify setting to set compensation */
        // NOTE(review): assignment from a const camera_metadata_t* appears
        // to clone the buffer, and the raw pointer obtained from release()
        // below is repointed without an explicit free before this function
        // restores original_settings -- possible leak; confirm against
        // CameraMetadata ownership semantics.
        modified_meta = request->settings;
        int32_t expCompensation = GB_HDR_HALF_STEP_EV;
        uint8_t aeLock = 1;
        modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
        modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
        camera_metadata_t *modified_settings = modified_meta.release();
        request->settings = modified_settings;

        /* Capture Settling & -2x frame */
        _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
        request->frame_number = internalFrameNumber;
        processCaptureRequest(request, internallyRequestedStreams);

        // The capture mapped to the framework's frame number: restore the
        // original output buffers for this one submission only.
        request->num_output_buffers = originalOutputCount;
        _orchestrationDb.allocStoreInternalFrameNumber(originalFrameNumber, internalFrameNumber);
        request->frame_number = internalFrameNumber;
        processCaptureRequest(request, emptyInternalList);
        request->num_output_buffers = 0;

        // Rebuild settings with 0 EV compensation (AE still locked).
        modified_meta = modified_settings;
        expCompensation = 0;
        aeLock = 1;
        modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
        modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
        modified_settings = modified_meta.release();
        request->settings = modified_settings;

        /* Capture Settling & 0X frame */

        // Settling frame: metering only, no metadata needed.
        itr = internallyRequestedStreams.begin();
        if (itr == internallyRequestedStreams.end()) {
            LOGE("Error Internally Requested Stream list is empty");
            assert(0);
        } else {
            itr->need_metadata = 0;
            itr->meteringOnly = 1;
        }

        _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
        request->frame_number = internalFrameNumber;
        processCaptureRequest(request, internallyRequestedStreams);

        // Actual 0x capture: request metadata for offline postprocessing.
        // NOTE(review): ALOGE here vs LOGE above for the same condition --
        // inconsistent logging macros in the original.
        itr = internallyRequestedStreams.begin();
        if (itr == internallyRequestedStreams.end()) {
            ALOGE("Error Internally Requested Stream list is empty");
            assert(0);
        } else {
            itr->need_metadata = 1;
            itr->meteringOnly = 0;
        }

        _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
        request->frame_number = internalFrameNumber;
        processCaptureRequest(request, internallyRequestedStreams);

        /* Capture 2X frame*/
        modified_meta = modified_settings;
        expCompensation = GB_HDR_2X_STEP_EV;
        aeLock = 1;
        modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
        modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
        modified_settings = modified_meta.release();
        request->settings = modified_settings;

        // Settling frame at +2x EV.
        itr = internallyRequestedStreams.begin();
        if (itr == internallyRequestedStreams.end()) {
            ALOGE("Error Internally Requested Stream list is empty");
            assert(0);
        } else {
            itr->need_metadata = 0;
            itr->meteringOnly = 1;
        }
        _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
        request->frame_number = internalFrameNumber;
        processCaptureRequest(request, internallyRequestedStreams);

        // Actual +2x capture with metadata.
        itr = internallyRequestedStreams.begin();
        if (itr == internallyRequestedStreams.end()) {
            ALOGE("Error Internally Requested Stream list is empty");
            assert(0);
        } else {
            itr->need_metadata = 1;
            itr->meteringOnly = 0;
        }

        _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
        request->frame_number = internalFrameNumber;
        processCaptureRequest(request, internallyRequestedStreams);


        /* Capture 2X on original streaming config*/
        internallyRequestedStreams.clear();

        /* Restore original settings pointer */
        request->settings = original_settings;
    } else {
        // Normal request: map the framework frame number to a fresh internal
        // one and submit as-is.
        uint32_t internalFrameNumber;
        _orchestrationDb.allocStoreInternalFrameNumber(request->frame_number, internalFrameNumber);
        request->frame_number = internalFrameNumber;
        return processCaptureRequest(request, internallyRequestedStreams);
    }

    return NO_ERROR;
}
3793
3794/*===========================================================================
3795 * FUNCTION : orchestrateResult
3796 *
3797 * DESCRIPTION: Orchestrates a capture result to camera service
3798 *
3799 * PARAMETERS :
3800 * @request : request from framework to process
3801 *
3802 * RETURN :
3803 *
3804 *==========================================================================*/
3805void QCamera3HardwareInterface::orchestrateResult(
3806 camera3_capture_result_t *result)
3807{
3808 uint32_t frameworkFrameNumber;
3809 int32_t rc = _orchestrationDb.getFrameworkFrameNumber(result->frame_number,
3810 frameworkFrameNumber);
3811 if (rc != NO_ERROR) {
3812 LOGE("Cannot find translated frameworkFrameNumber");
3813 assert(0);
3814 } else {
3815 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
3816 LOGD("CAM_DEBUG Internal Request drop the result");
3817 } else {
3818 result->frame_number = frameworkFrameNumber;
3819 mCallbackOps->process_capture_result(mCallbackOps, result);
3820 }
3821 }
3822}
3823
3824/*===========================================================================
3825 * FUNCTION : orchestrateNotify
3826 *
3827 * DESCRIPTION: Orchestrates a notify to camera service
3828 *
3829 * PARAMETERS :
3830 * @request : request from framework to process
3831 *
3832 * RETURN :
3833 *
3834 *==========================================================================*/
3835void QCamera3HardwareInterface::orchestrateNotify(camera3_notify_msg_t *notify_msg)
3836{
3837 uint32_t frameworkFrameNumber;
3838 uint32_t internalFrameNumber = notify_msg->message.shutter.frame_number;
3839 int32_t rc = _orchestrationDb.getFrameworkFrameNumber(internalFrameNumber,
3840 frameworkFrameNumber);
3841 if (rc != NO_ERROR) {
3842 LOGE("Cannot find translated frameworkFrameNumber");
3843 assert(0);
3844 } else {
3845 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
3846 LOGE("CAM_DEBUG Internal Request drop the notifyCb");
3847 } else {
3848 notify_msg->message.shutter.frame_number = frameworkFrameNumber;
3849 mCallbackOps->notify(mCallbackOps, notify_msg);
3850 }
3851 }
3852}
3853
3854/*===========================================================================
3855 * FUNCTION : FrameNumberRegistry
3856 *
3857 * DESCRIPTION: Constructor
3858 *
3859 * PARAMETERS :
3860 *
3861 * RETURN :
3862 *
3863 *==========================================================================*/
3864FrameNumberRegistry::FrameNumberRegistry()
3865{
3866 _nextFreeInternalNumber = INTERNAL_FRAME_STARTING_NUMBER;
3867}
3868
3869/*===========================================================================
3870 * FUNCTION : ~FrameNumberRegistry
3871 *
3872 * DESCRIPTION: Destructor
3873 *
3874 * PARAMETERS :
3875 *
3876 * RETURN :
3877 *
3878 *==========================================================================*/
FrameNumberRegistry::~FrameNumberRegistry()
{
    // No explicit cleanup: members release their own resources.
}
3882
3883/*===========================================================================
3884 * FUNCTION : PurgeOldEntriesLocked
3885 *
3886 * DESCRIPTION: Maintainance function to trigger LRU cleanup mechanism
3887 *
3888 * PARAMETERS :
3889 *
3890 * RETURN : NONE
3891 *
3892 *==========================================================================*/
3893void FrameNumberRegistry::purgeOldEntriesLocked()
3894{
3895 while (_register.begin() != _register.end()) {
3896 auto itr = _register.begin();
3897 if (itr->first < (_nextFreeInternalNumber - FRAME_REGISTER_LRU_SIZE)) {
3898 _register.erase(itr);
3899 } else {
3900 return;
3901 }
3902 }
3903}
3904
3905/*===========================================================================
3906 * FUNCTION : allocStoreInternalFrameNumber
3907 *
3908 * DESCRIPTION: Method to note down a framework request and associate a new
3909 * internal request number against it
3910 *
3911 * PARAMETERS :
3912 * @fFrameNumber: Identifier given by framework
3913 * @internalFN : Output parameter which will have the newly generated internal
3914 * entry
3915 *
3916 * RETURN : Error code
3917 *
3918 *==========================================================================*/
3919int32_t FrameNumberRegistry::allocStoreInternalFrameNumber(uint32_t frameworkFrameNumber,
3920 uint32_t &internalFrameNumber)
3921{
3922 Mutex::Autolock lock(mRegistryLock);
3923 internalFrameNumber = _nextFreeInternalNumber++;
3924 LOGD("Storing ff#:%d, with internal:%d", frameworkFrameNumber, internalFrameNumber);
3925 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, frameworkFrameNumber));
3926 purgeOldEntriesLocked();
3927 return NO_ERROR;
3928}
3929
3930/*===========================================================================
3931 * FUNCTION : generateStoreInternalFrameNumber
3932 *
3933 * DESCRIPTION: Method to associate a new internal request number independent
3934 * of any associate with framework requests
3935 *
3936 * PARAMETERS :
3937 * @internalFrame#: Output parameter which will have the newly generated internal
3938 *
3939 *
3940 * RETURN : Error code
3941 *
3942 *==========================================================================*/
3943int32_t FrameNumberRegistry::generateStoreInternalFrameNumber(uint32_t &internalFrameNumber)
3944{
3945 Mutex::Autolock lock(mRegistryLock);
3946 internalFrameNumber = _nextFreeInternalNumber++;
3947 LOGD("Generated internal framenumber:%d", internalFrameNumber);
3948 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, EMPTY_FRAMEWORK_FRAME_NUMBER));
3949 purgeOldEntriesLocked();
3950 return NO_ERROR;
3951}
3952
3953/*===========================================================================
3954 * FUNCTION : getFrameworkFrameNumber
3955 *
3956 * DESCRIPTION: Method to query the framework framenumber given an internal #
3957 *
3958 * PARAMETERS :
3959 * @internalFrame#: Internal reference
3960 * @frameworkframenumber: Output parameter holding framework frame entry
3961 *
3962 * RETURN : Error code
3963 *
3964 *==========================================================================*/
3965int32_t FrameNumberRegistry::getFrameworkFrameNumber(uint32_t internalFrameNumber,
3966 uint32_t &frameworkFrameNumber)
3967{
3968 Mutex::Autolock lock(mRegistryLock);
3969 auto itr = _register.find(internalFrameNumber);
3970 if (itr == _register.end()) {
3971 LOGE("CAM_DEBUG: Cannot find internal#: %d", internalFrameNumber);
3972 return -ENOENT;
3973 }
3974
3975 frameworkFrameNumber = itr->second;
3976 purgeOldEntriesLocked();
3977 return NO_ERROR;
3978}
Thierry Strudel3d639192016-09-09 11:52:26 -07003979
3980/*===========================================================================
3981 * FUNCTION : processCaptureRequest
3982 *
3983 * DESCRIPTION: process a capture request from camera service
3984 *
3985 * PARAMETERS :
3986 * @request : request from framework to process
3987 *
3988 * RETURN :
3989 *
3990 *==========================================================================*/
3991int QCamera3HardwareInterface::processCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003992 camera3_capture_request_t *request,
3993 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07003994{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003995 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PROC_CAP_REQ);
Thierry Strudel3d639192016-09-09 11:52:26 -07003996 int rc = NO_ERROR;
3997 int32_t request_id;
3998 CameraMetadata meta;
Thierry Strudel3d639192016-09-09 11:52:26 -07003999 bool isVidBufRequested = false;
4000 camera3_stream_buffer_t *pInputBuffer = NULL;
4001
4002 pthread_mutex_lock(&mMutex);
4003
4004 // Validate current state
4005 switch (mState) {
4006 case CONFIGURED:
4007 case STARTED:
4008 /* valid state */
4009 break;
4010
4011 case ERROR:
4012 pthread_mutex_unlock(&mMutex);
4013 handleCameraDeviceError();
4014 return -ENODEV;
4015
4016 default:
4017 LOGE("Invalid state %d", mState);
4018 pthread_mutex_unlock(&mMutex);
4019 return -ENODEV;
4020 }
4021
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004022 rc = validateCaptureRequest(request, internallyRequestedStreams);
Thierry Strudel3d639192016-09-09 11:52:26 -07004023 if (rc != NO_ERROR) {
4024 LOGE("incoming request is not valid");
4025 pthread_mutex_unlock(&mMutex);
4026 return rc;
4027 }
4028
4029 meta = request->settings;
4030
4031 // For first capture request, send capture intent, and
4032 // stream on all streams
4033 if (mState == CONFIGURED) {
4034 // send an unconfigure to the backend so that the isp
4035 // resources are deallocated
4036 if (!mFirstConfiguration) {
4037 cam_stream_size_info_t stream_config_info;
4038 int32_t hal_version = CAM_HAL_V3;
4039 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
4040 stream_config_info.buffer_info.min_buffers =
4041 MIN_INFLIGHT_REQUESTS;
4042 stream_config_info.buffer_info.max_buffers =
4043 m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
4044 clear_metadata_buffer(mParameters);
4045 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4046 CAM_INTF_PARM_HAL_VERSION, hal_version);
4047 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4048 CAM_INTF_META_STREAM_INFO, stream_config_info);
4049 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4050 mParameters);
4051 if (rc < 0) {
4052 LOGE("set_parms for unconfigure failed");
4053 pthread_mutex_unlock(&mMutex);
4054 return rc;
4055 }
4056 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004057 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07004058 /* get eis information for stream configuration */
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004059 cam_is_type_t isTypeVideo, isTypePreview, is_type=IS_TYPE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07004060 char is_type_value[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004061 property_get("persist.camera.is_type", is_type_value, "4");
4062 isTypeVideo = static_cast<cam_is_type_t>(atoi(is_type_value));
4063 // Make default value for preview IS_TYPE as IS_TYPE_EIS_2_0
4064 property_get("persist.camera.is_type_preview", is_type_value, "4");
4065 isTypePreview = static_cast<cam_is_type_t>(atoi(is_type_value));
4066 LOGD("isTypeVideo: %d isTypePreview: %d", isTypeVideo, isTypePreview);
Thierry Strudel3d639192016-09-09 11:52:26 -07004067
4068 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4069 int32_t hal_version = CAM_HAL_V3;
4070 uint8_t captureIntent =
4071 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4072 mCaptureIntent = captureIntent;
4073 clear_metadata_buffer(mParameters);
4074 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
4075 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
4076 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004077 if (mFirstConfiguration) {
4078 // configure instant AEC
4079 // Instant AEC is a session based parameter and it is needed only
4080 // once per complete session after open camera.
4081 // i.e. This is set only once for the first capture request, after open camera.
4082 setInstantAEC(meta);
4083 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004084 uint8_t fwkVideoStabMode=0;
4085 if (meta.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
4086 fwkVideoStabMode = meta.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
4087 }
4088
4089 // If EIS setprop is enabled & if first capture setting has EIS enabled then only
4090 // turn it on for video/preview
4091 bool setEis = m_bEisEnable && fwkVideoStabMode && m_bEisSupportedSize &&
4092 (isTypeVideo >= IS_TYPE_EIS_2_0);
Thierry Strudel3d639192016-09-09 11:52:26 -07004093 int32_t vsMode;
4094 vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
4095 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
4096 rc = BAD_VALUE;
4097 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004098 LOGD("setEis %d", setEis);
4099 bool eis3Supported = false;
4100 size_t count = IS_TYPE_MAX;
4101 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
4102 for (size_t i = 0; i < count; i++) {
4103 if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0) {
4104 eis3Supported = true;
4105 break;
4106 }
4107 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004108
4109 //IS type will be 0 unless EIS is supported. If EIS is supported
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004110 //it could either be 4 or 5 depending on the stream and video size
Thierry Strudel3d639192016-09-09 11:52:26 -07004111 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4112 if (setEis) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004113 if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_PREVIEW) {
4114 is_type = isTypePreview;
4115 } else if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_VIDEO ) {
4116 if ( (isTypeVideo == IS_TYPE_EIS_3_0) && (eis3Supported == FALSE) ) {
4117 LOGW(" EIS_3.0 is not supported and so setting EIS_2.0");
Thierry Strudel3d639192016-09-09 11:52:26 -07004118 is_type = IS_TYPE_EIS_2_0;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004119 } else {
4120 is_type = isTypeVideo;
Thierry Strudel3d639192016-09-09 11:52:26 -07004121 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004122 } else {
4123 is_type = IS_TYPE_NONE;
4124 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004125 mStreamConfigInfo.is_type[i] = is_type;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004126 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004127 mStreamConfigInfo.is_type[i] = IS_TYPE_NONE;
4128 }
4129 }
4130
4131 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4132 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
4133
4134 int32_t tintless_value = 1;
4135 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4136 CAM_INTF_PARM_TINTLESS, tintless_value);
4137 //Disable CDS for HFR mode or if DIS/EIS is on.
4138 //CDS is a session parameter in the backend/ISP, so need to be set/reset
4139 //after every configure_stream
4140 if ((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
4141 (m_bIsVideo)) {
4142 int32_t cds = CAM_CDS_MODE_OFF;
4143 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4144 CAM_INTF_PARM_CDS_MODE, cds))
4145 LOGE("Failed to disable CDS for HFR mode");
4146
4147 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004148
4149 if (m_debug_avtimer || meta.exists(QCAMERA3_USE_AV_TIMER)) {
4150 uint8_t* use_av_timer = NULL;
4151
4152 if (m_debug_avtimer){
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004153 LOGI(" Enabling AV timer through setprop");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004154 use_av_timer = &m_debug_avtimer;
4155 }
4156 else{
4157 use_av_timer =
4158 meta.find(QCAMERA3_USE_AV_TIMER).data.u8;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004159 if (use_av_timer) {
4160 LOGI("Enabling AV timer through Metadata: use_av_timer: %d", *use_av_timer);
4161 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004162 }
4163
4164 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
4165 rc = BAD_VALUE;
4166 }
4167 }
4168
Thierry Strudel3d639192016-09-09 11:52:26 -07004169 setMobicat();
4170
4171 /* Set fps and hfr mode while sending meta stream info so that sensor
4172 * can configure appropriate streaming mode */
4173 mHFRVideoFps = DEFAULT_VIDEO_FPS;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004174 mMinInFlightRequests = MIN_INFLIGHT_REQUESTS;
4175 mMaxInFlightRequests = MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004176 if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
4177 rc = setHalFpsRange(meta, mParameters);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004178 if (rc == NO_ERROR) {
4179 int32_t max_fps =
4180 (int32_t) meta.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
Zhijun He21b864a2016-06-24 13:41:19 -07004181 if (max_fps == 60 || mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004182 mMinInFlightRequests = MIN_INFLIGHT_60FPS_REQUESTS;
4183 }
4184 /* For HFR, more buffers are dequeued upfront to improve the performance */
4185 if (mBatchSize) {
4186 mMinInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
4187 mMaxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
4188 }
4189 }
4190 else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004191 LOGE("setHalFpsRange failed");
4192 }
4193 }
4194 if (meta.exists(ANDROID_CONTROL_MODE)) {
4195 uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
4196 rc = extractSceneMode(meta, metaMode, mParameters);
4197 if (rc != NO_ERROR) {
4198 LOGE("extractSceneMode failed");
4199 }
4200 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004201 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07004202
Thierry Strudel04e026f2016-10-10 11:27:36 -07004203 if (meta.exists(QCAMERA3_VIDEO_HDR_MODE)) {
4204 cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
4205 meta.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
4206 rc = setVideoHdrMode(mParameters, vhdr);
4207 if (rc != NO_ERROR) {
4208 LOGE("setVideoHDR is failed");
4209 }
4210 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004211
Thierry Strudel3d639192016-09-09 11:52:26 -07004212 //TODO: validate the arguments, HSV scenemode should have only the
4213 //advertised fps ranges
4214
4215 /*set the capture intent, hal version, tintless, stream info,
4216 *and disenable parameters to the backend*/
4217 LOGD("set_parms META_STREAM_INFO " );
4218 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4219 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x "
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004220 "Format:%d is_type: %d",
Thierry Strudel3d639192016-09-09 11:52:26 -07004221 mStreamConfigInfo.type[i],
4222 mStreamConfigInfo.stream_sizes[i].width,
4223 mStreamConfigInfo.stream_sizes[i].height,
4224 mStreamConfigInfo.postprocess_mask[i],
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004225 mStreamConfigInfo.format[i],
4226 mStreamConfigInfo.is_type[i]);
Thierry Strudel3d639192016-09-09 11:52:26 -07004227 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004228
Thierry Strudel3d639192016-09-09 11:52:26 -07004229 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4230 mParameters);
4231 if (rc < 0) {
4232 LOGE("set_parms failed for hal version, stream info");
4233 }
4234
4235 cam_dimension_t sensor_dim;
4236 memset(&sensor_dim, 0, sizeof(sensor_dim));
4237 rc = getSensorOutputSize(sensor_dim);
4238 if (rc != NO_ERROR) {
4239 LOGE("Failed to get sensor output size");
4240 pthread_mutex_unlock(&mMutex);
4241 goto error_exit;
4242 }
4243
4244 mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
4245 gCamCapability[mCameraId]->active_array_size.height,
4246 sensor_dim.width, sensor_dim.height);
4247
4248 /* Set batchmode before initializing channel. Since registerBuffer
4249 * internally initializes some of the channels, better set batchmode
4250 * even before first register buffer */
4251 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4252 it != mStreamInfo.end(); it++) {
4253 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
4254 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
4255 && mBatchSize) {
4256 rc = channel->setBatchSize(mBatchSize);
4257 //Disable per frame map unmap for HFR/batchmode case
4258 rc |= channel->setPerFrameMapUnmap(false);
4259 if (NO_ERROR != rc) {
4260 LOGE("Channel init failed %d", rc);
4261 pthread_mutex_unlock(&mMutex);
4262 goto error_exit;
4263 }
4264 }
4265 }
4266
4267 //First initialize all streams
4268 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4269 it != mStreamInfo.end(); it++) {
4270 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
4271 if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
4272 ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004273 setEis) {
4274 for (size_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4275 if ( (1U << mStreamConfigInfo.type[i]) == channel->getStreamTypeMask() ) {
4276 is_type = mStreamConfigInfo.is_type[i];
4277 break;
4278 }
4279 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004280 rc = channel->initialize(is_type);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004281 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004282 rc = channel->initialize(IS_TYPE_NONE);
4283 }
4284 if (NO_ERROR != rc) {
4285 LOGE("Channel initialization failed %d", rc);
4286 pthread_mutex_unlock(&mMutex);
4287 goto error_exit;
4288 }
4289 }
4290
4291 if (mRawDumpChannel) {
4292 rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
4293 if (rc != NO_ERROR) {
4294 LOGE("Error: Raw Dump Channel init failed");
4295 pthread_mutex_unlock(&mMutex);
4296 goto error_exit;
4297 }
4298 }
4299 if (mSupportChannel) {
4300 rc = mSupportChannel->initialize(IS_TYPE_NONE);
4301 if (rc < 0) {
4302 LOGE("Support channel initialization failed");
4303 pthread_mutex_unlock(&mMutex);
4304 goto error_exit;
4305 }
4306 }
4307 if (mAnalysisChannel) {
4308 rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
4309 if (rc < 0) {
4310 LOGE("Analysis channel initialization failed");
4311 pthread_mutex_unlock(&mMutex);
4312 goto error_exit;
4313 }
4314 }
4315 if (mDummyBatchChannel) {
4316 rc = mDummyBatchChannel->setBatchSize(mBatchSize);
4317 if (rc < 0) {
4318 LOGE("mDummyBatchChannel setBatchSize failed");
4319 pthread_mutex_unlock(&mMutex);
4320 goto error_exit;
4321 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004322 rc = mDummyBatchChannel->initialize(IS_TYPE_NONE);
Thierry Strudel3d639192016-09-09 11:52:26 -07004323 if (rc < 0) {
4324 LOGE("mDummyBatchChannel initialization failed");
4325 pthread_mutex_unlock(&mMutex);
4326 goto error_exit;
4327 }
4328 }
4329
4330 // Set bundle info
4331 rc = setBundleInfo();
4332 if (rc < 0) {
4333 LOGE("setBundleInfo failed %d", rc);
4334 pthread_mutex_unlock(&mMutex);
4335 goto error_exit;
4336 }
4337
4338 //update settings from app here
4339 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
4340 mIsDeviceLinked = meta.find(QCAMERA3_DUALCAM_LINK_ENABLE).data.u8[0];
4341 LOGH("Dualcam: setting On=%d id =%d", mIsDeviceLinked, mCameraId);
4342 }
4343 if (meta.exists(QCAMERA3_DUALCAM_LINK_IS_MAIN)) {
4344 mIsMainCamera = meta.find(QCAMERA3_DUALCAM_LINK_IS_MAIN).data.u8[0];
4345 LOGH("Dualcam: Is this main camera = %d id =%d", mIsMainCamera, mCameraId);
4346 }
4347 if (meta.exists(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID)) {
4348 mLinkedCameraId = meta.find(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID).data.u8[0];
4349 LOGH("Dualcam: Linked camera Id %d id =%d", mLinkedCameraId, mCameraId);
4350
4351 if ( (mLinkedCameraId >= MM_CAMERA_MAX_NUM_SENSORS) &&
4352 (mLinkedCameraId != mCameraId) ) {
4353 LOGE("Dualcam: mLinkedCameraId %d is invalid, current cam id = %d",
4354 mLinkedCameraId, mCameraId);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004355 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07004356 goto error_exit;
4357 }
4358 }
4359
4360 // add bundle related cameras
4361 LOGH("%s: Dualcam: id =%d, mIsDeviceLinked=%d", __func__,mCameraId, mIsDeviceLinked);
4362 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004363 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
4364 &m_pDualCamCmdPtr->bundle_info;
4365 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07004366 if (mIsDeviceLinked)
4367 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_ON;
4368 else
4369 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
4370
4371 pthread_mutex_lock(&gCamLock);
4372
4373 if (sessionId[mLinkedCameraId] == 0xDEADBEEF) {
4374 LOGE("Dualcam: Invalid Session Id ");
4375 pthread_mutex_unlock(&gCamLock);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004376 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07004377 goto error_exit;
4378 }
4379
4380 if (mIsMainCamera == 1) {
4381 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
4382 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07004383 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004384 m_pRelCamSyncBuf->cam_role = CAM_ROLE_BAYER;
Thierry Strudel3d639192016-09-09 11:52:26 -07004385 // related session id should be session id of linked session
4386 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
4387 } else {
4388 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
4389 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07004390 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004391 m_pRelCamSyncBuf->cam_role = CAM_ROLE_MONO;
Thierry Strudel3d639192016-09-09 11:52:26 -07004392 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
4393 }
4394 pthread_mutex_unlock(&gCamLock);
4395
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004396 rc = mCameraHandle->ops->set_dual_cam_cmd(
4397 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07004398 if (rc < 0) {
4399 LOGE("Dualcam: link failed");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004400 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07004401 goto error_exit;
4402 }
4403 }
4404
4405 //Then start them.
4406 LOGH("Start META Channel");
4407 rc = mMetadataChannel->start();
4408 if (rc < 0) {
4409 LOGE("META channel start failed");
4410 pthread_mutex_unlock(&mMutex);
4411 goto error_exit;
4412 }
4413
4414 if (mAnalysisChannel) {
4415 rc = mAnalysisChannel->start();
4416 if (rc < 0) {
4417 LOGE("Analysis channel start failed");
4418 mMetadataChannel->stop();
4419 pthread_mutex_unlock(&mMutex);
4420 goto error_exit;
4421 }
4422 }
4423
4424 if (mSupportChannel) {
4425 rc = mSupportChannel->start();
4426 if (rc < 0) {
4427 LOGE("Support channel start failed");
4428 mMetadataChannel->stop();
4429 /* Although support and analysis are mutually exclusive today
4430 adding it in anycase for future proofing */
4431 if (mAnalysisChannel) {
4432 mAnalysisChannel->stop();
4433 }
4434 pthread_mutex_unlock(&mMutex);
4435 goto error_exit;
4436 }
4437 }
4438 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4439 it != mStreamInfo.end(); it++) {
4440 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
4441 LOGH("Start Processing Channel mask=%d",
4442 channel->getStreamTypeMask());
4443 rc = channel->start();
4444 if (rc < 0) {
4445 LOGE("channel start failed");
4446 pthread_mutex_unlock(&mMutex);
4447 goto error_exit;
4448 }
4449 }
4450
4451 if (mRawDumpChannel) {
4452 LOGD("Starting raw dump stream");
4453 rc = mRawDumpChannel->start();
4454 if (rc != NO_ERROR) {
4455 LOGE("Error Starting Raw Dump Channel");
4456 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4457 it != mStreamInfo.end(); it++) {
4458 QCamera3Channel *channel =
4459 (QCamera3Channel *)(*it)->stream->priv;
4460 LOGH("Stopping Processing Channel mask=%d",
4461 channel->getStreamTypeMask());
4462 channel->stop();
4463 }
4464 if (mSupportChannel)
4465 mSupportChannel->stop();
4466 if (mAnalysisChannel) {
4467 mAnalysisChannel->stop();
4468 }
4469 mMetadataChannel->stop();
4470 pthread_mutex_unlock(&mMutex);
4471 goto error_exit;
4472 }
4473 }
4474
4475 if (mChannelHandle) {
4476
4477 rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
4478 mChannelHandle);
4479 if (rc != NO_ERROR) {
4480 LOGE("start_channel failed %d", rc);
4481 pthread_mutex_unlock(&mMutex);
4482 goto error_exit;
4483 }
4484 }
4485
4486 goto no_error;
4487error_exit:
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004488 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07004489 return rc;
4490no_error:
Thierry Strudel3d639192016-09-09 11:52:26 -07004491 mWokenUpByDaemon = false;
4492 mPendingLiveRequest = 0;
4493 mFirstConfiguration = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07004494 }
4495
4496 uint32_t frameNumber = request->frame_number;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004497 cam_stream_ID_t streamsArray;
Thierry Strudel3d639192016-09-09 11:52:26 -07004498
4499 if (mFlushPerf) {
4500 //we cannot accept any requests during flush
4501 LOGE("process_capture_request cannot proceed during flush");
4502 pthread_mutex_unlock(&mMutex);
4503 return NO_ERROR; //should return an error
4504 }
4505
4506 if (meta.exists(ANDROID_REQUEST_ID)) {
4507 request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
4508 mCurrentRequestId = request_id;
4509 LOGD("Received request with id: %d", request_id);
4510 } else if (mState == CONFIGURED || mCurrentRequestId == -1){
4511 LOGE("Unable to find request id field, \
4512 & no previous id available");
4513 pthread_mutex_unlock(&mMutex);
4514 return NAME_NOT_FOUND;
4515 } else {
4516 LOGD("Re-using old request id");
4517 request_id = mCurrentRequestId;
4518 }
4519
4520 LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
4521 request->num_output_buffers,
4522 request->input_buffer,
4523 frameNumber);
4524 // Acquire all request buffers first
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004525 streamsArray.num_streams = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07004526 int blob_request = 0;
4527 uint32_t snapshotStreamId = 0;
4528 for (size_t i = 0; i < request->num_output_buffers; i++) {
4529 const camera3_stream_buffer_t& output = request->output_buffers[i];
4530 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
4531
4532 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004533 //FIXME??:Call function to store local copy of jpeg data for encode params.
Thierry Strudel3d639192016-09-09 11:52:26 -07004534 blob_request = 1;
4535 snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
4536 }
4537
4538 if (output.acquire_fence != -1) {
4539 rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
4540 close(output.acquire_fence);
4541 if (rc != OK) {
4542 LOGE("sync wait failed %d", rc);
4543 pthread_mutex_unlock(&mMutex);
4544 return rc;
4545 }
4546 }
4547
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004548 streamsArray.stream_request[streamsArray.num_streams++].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07004549 channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel3d639192016-09-09 11:52:26 -07004550
4551 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
4552 isVidBufRequested = true;
4553 }
4554 }
4555
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004556 //FIXME: Add checks to ensure to dups in validateCaptureRequest
4557 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
4558 itr++) {
4559 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
4560 streamsArray.stream_request[streamsArray.num_streams++].streamID =
4561 channel->getStreamID(channel->getStreamTypeMask());
4562
4563 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
4564 isVidBufRequested = true;
4565 }
4566 }
4567
Thierry Strudel3d639192016-09-09 11:52:26 -07004568 if (blob_request) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004569 KPI_ATRACE_CAMSCOPE_INT("SNAPSHOT", CAMSCOPE_HAL3_SNAPSHOT, 1);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004570 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
Thierry Strudel3d639192016-09-09 11:52:26 -07004571 }
4572 if (blob_request && mRawDumpChannel) {
4573 LOGD("Trigger Raw based on blob request if Raw dump is enabled");
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004574 streamsArray.stream_request[streamsArray.num_streams].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07004575 mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004576 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
Thierry Strudel3d639192016-09-09 11:52:26 -07004577 }
4578
4579 if(request->input_buffer == NULL) {
4580 /* Parse the settings:
4581 * - For every request in NORMAL MODE
4582 * - For every request in HFR mode during preview only case
4583 * - For first request of every batch in HFR mode during video
4584 * recording. In batchmode the same settings except frame number is
4585 * repeated in each request of the batch.
4586 */
4587 if (!mBatchSize ||
4588 (mBatchSize && !isVidBufRequested) ||
4589 (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004590 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
Thierry Strudel3d639192016-09-09 11:52:26 -07004591 if (rc < 0) {
4592 LOGE("fail to set frame parameters");
4593 pthread_mutex_unlock(&mMutex);
4594 return rc;
4595 }
4596 }
4597 /* For batchMode HFR, setFrameParameters is not called for every
4598 * request. But only frame number of the latest request is parsed.
4599 * Keep track of first and last frame numbers in a batch so that
4600 * metadata for the frame numbers of batch can be duplicated in
4601 * handleBatchMetadta */
4602 if (mBatchSize) {
4603 if (!mToBeQueuedVidBufs) {
4604 //start of the batch
4605 mFirstFrameNumberInBatch = request->frame_number;
4606 }
4607 if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4608 CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
4609 LOGE("Failed to set the frame number in the parameters");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004610 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07004611 return BAD_VALUE;
4612 }
4613 }
4614 if (mNeedSensorRestart) {
4615 /* Unlock the mutex as restartSensor waits on the channels to be
4616 * stopped, which in turn calls stream callback functions -
4617 * handleBufferWithLock and handleMetadataWithLock */
4618 pthread_mutex_unlock(&mMutex);
4619 rc = dynamicUpdateMetaStreamInfo();
4620 if (rc != NO_ERROR) {
4621 LOGE("Restarting the sensor failed");
4622 return BAD_VALUE;
4623 }
4624 mNeedSensorRestart = false;
4625 pthread_mutex_lock(&mMutex);
4626 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004627 if(mResetInstantAEC) {
4628 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4629 CAM_INTF_PARM_INSTANT_AEC, (uint8_t)CAM_AEC_NORMAL_CONVERGENCE);
4630 mResetInstantAEC = false;
4631 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004632 } else {
4633
4634 if (request->input_buffer->acquire_fence != -1) {
4635 rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
4636 close(request->input_buffer->acquire_fence);
4637 if (rc != OK) {
4638 LOGE("input buffer sync wait failed %d", rc);
4639 pthread_mutex_unlock(&mMutex);
4640 return rc;
4641 }
4642 }
4643 }
4644
4645 if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
4646 mLastCustIntentFrmNum = frameNumber;
4647 }
4648 /* Update pending request list and pending buffers map */
4649 PendingRequestInfo pendingRequest;
4650 pendingRequestIterator latestRequest;
4651 pendingRequest.frame_number = frameNumber;
4652 pendingRequest.num_buffers = request->num_output_buffers;
4653 pendingRequest.request_id = request_id;
4654 pendingRequest.blob_request = blob_request;
4655 pendingRequest.timestamp = 0;
4656 pendingRequest.bUrgentReceived = 0;
4657 if (request->input_buffer) {
4658 pendingRequest.input_buffer =
4659 (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
4660 *(pendingRequest.input_buffer) = *(request->input_buffer);
4661 pInputBuffer = pendingRequest.input_buffer;
4662 } else {
4663 pendingRequest.input_buffer = NULL;
4664 pInputBuffer = NULL;
4665 }
4666
4667 pendingRequest.pipeline_depth = 0;
4668 pendingRequest.partial_result_cnt = 0;
4669 extractJpegMetadata(mCurJpegMeta, request);
4670 pendingRequest.jpegMetadata = mCurJpegMeta;
4671 pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
4672 pendingRequest.shutter_notified = false;
4673
4674 //extract capture intent
4675 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4676 mCaptureIntent =
4677 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4678 }
4679 pendingRequest.capture_intent = mCaptureIntent;
Samuel Ha68ba5172016-12-15 18:41:12 -08004680 /* DevCamDebug metadata processCaptureRequest */
4681 if (meta.exists(DEVCAMDEBUG_META_ENABLE)) {
4682 mDevCamDebugMetaEnable =
4683 meta.find(DEVCAMDEBUG_META_ENABLE).data.u8[0];
4684 }
4685 pendingRequest.DevCamDebug_meta_enable = mDevCamDebugMetaEnable;
4686 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07004687
4688 //extract CAC info
4689 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
4690 mCacMode =
4691 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
4692 }
4693 pendingRequest.fwkCacMode = mCacMode;
4694
4695 PendingBuffersInRequest bufsForCurRequest;
4696 bufsForCurRequest.frame_number = frameNumber;
4697 // Mark current timestamp for the new request
4698 bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
4699
4700 for (size_t i = 0; i < request->num_output_buffers; i++) {
4701 RequestedBufferInfo requestedBuf;
4702 memset(&requestedBuf, 0, sizeof(requestedBuf));
4703 requestedBuf.stream = request->output_buffers[i].stream;
4704 requestedBuf.buffer = NULL;
4705 pendingRequest.buffers.push_back(requestedBuf);
4706
4707 // Add to buffer handle the pending buffers list
4708 PendingBufferInfo bufferInfo;
4709 bufferInfo.buffer = request->output_buffers[i].buffer;
4710 bufferInfo.stream = request->output_buffers[i].stream;
4711 bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
4712 QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
4713 LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
4714 frameNumber, bufferInfo.buffer,
4715 channel->getStreamTypeMask(), bufferInfo.stream->format);
4716 }
4717 // Add this request packet into mPendingBuffersMap
4718 mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
4719 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
4720 mPendingBuffersMap.get_num_overall_buffers());
4721
4722 latestRequest = mPendingRequestsList.insert(
4723 mPendingRequestsList.end(), pendingRequest);
4724 if(mFlush) {
4725 LOGI("mFlush is true");
4726 pthread_mutex_unlock(&mMutex);
4727 return NO_ERROR;
4728 }
4729
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004730 int indexUsed;
Thierry Strudel3d639192016-09-09 11:52:26 -07004731 // Notify metadata channel we receive a request
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004732 mMetadataChannel->request(NULL, frameNumber, indexUsed);
Thierry Strudel3d639192016-09-09 11:52:26 -07004733
4734 if(request->input_buffer != NULL){
4735 LOGD("Input request, frame_number %d", frameNumber);
4736 rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
4737 if (NO_ERROR != rc) {
4738 LOGE("fail to set reproc parameters");
4739 pthread_mutex_unlock(&mMutex);
4740 return rc;
4741 }
4742 }
4743
4744 // Call request on other streams
4745 uint32_t streams_need_metadata = 0;
4746 pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
4747 for (size_t i = 0; i < request->num_output_buffers; i++) {
4748 const camera3_stream_buffer_t& output = request->output_buffers[i];
4749 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
4750
4751 if (channel == NULL) {
4752 LOGW("invalid channel pointer for stream");
4753 continue;
4754 }
4755
4756 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
4757 LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
4758 output.buffer, request->input_buffer, frameNumber);
4759 if(request->input_buffer != NULL){
4760 rc = channel->request(output.buffer, frameNumber,
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004761 pInputBuffer, &mReprocMeta, indexUsed, false, false);
Thierry Strudel3d639192016-09-09 11:52:26 -07004762 if (rc < 0) {
4763 LOGE("Fail to request on picture channel");
4764 pthread_mutex_unlock(&mMutex);
4765 return rc;
4766 }
4767 } else {
4768 LOGD("snapshot request with buffer %p, frame_number %d",
4769 output.buffer, frameNumber);
4770 if (!request->settings) {
4771 rc = channel->request(output.buffer, frameNumber,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004772 NULL, mPrevParameters, indexUsed);
Thierry Strudel3d639192016-09-09 11:52:26 -07004773 } else {
4774 rc = channel->request(output.buffer, frameNumber,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004775 NULL, mParameters, indexUsed);
Thierry Strudel3d639192016-09-09 11:52:26 -07004776 }
4777 if (rc < 0) {
4778 LOGE("Fail to request on picture channel");
4779 pthread_mutex_unlock(&mMutex);
4780 return rc;
4781 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004782
4783 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
4784 uint32_t j = 0;
4785 for (j = 0; j < streamsArray.num_streams; j++) {
4786 if (streamsArray.stream_request[j].streamID == streamId) {
4787 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
4788 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
4789 else
4790 streamsArray.stream_request[j].buf_index = indexUsed;
4791 break;
4792 }
4793 }
4794 if (j == streamsArray.num_streams) {
4795 LOGE("Did not find matching stream to update index");
4796 assert(0);
4797 }
4798
Thierry Strudel3d639192016-09-09 11:52:26 -07004799 pendingBufferIter->need_metadata = true;
4800 streams_need_metadata++;
4801 }
4802 } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
4803 bool needMetadata = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07004804 QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
4805 rc = yuvChannel->request(output.buffer, frameNumber,
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004806 pInputBuffer, (pInputBuffer ? &mReprocMeta : mParameters),
4807 needMetadata, indexUsed, false, false);
Thierry Strudel3d639192016-09-09 11:52:26 -07004808 if (rc < 0) {
4809 LOGE("Fail to request on YUV channel");
4810 pthread_mutex_unlock(&mMutex);
4811 return rc;
4812 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004813
4814 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
4815 uint32_t j = 0;
4816 for (j = 0; j < streamsArray.num_streams; j++) {
4817 if (streamsArray.stream_request[j].streamID == streamId) {
4818 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
4819 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
4820 else
4821 streamsArray.stream_request[j].buf_index = indexUsed;
4822 break;
4823 }
4824 }
4825 if (j == streamsArray.num_streams) {
4826 LOGE("Did not find matching stream to update index");
4827 assert(0);
4828 }
4829
Thierry Strudel3d639192016-09-09 11:52:26 -07004830 pendingBufferIter->need_metadata = needMetadata;
4831 if (needMetadata)
4832 streams_need_metadata += 1;
4833 LOGD("calling YUV channel request, need_metadata is %d",
4834 needMetadata);
4835 } else {
4836 LOGD("request with buffer %p, frame_number %d",
4837 output.buffer, frameNumber);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004838
4839 rc = channel->request(output.buffer, frameNumber, indexUsed);
4840
4841 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
4842 uint32_t j = 0;
4843 for (j = 0; j < streamsArray.num_streams; j++) {
4844 if (streamsArray.stream_request[j].streamID == streamId) {
4845 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
4846 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
4847 else
4848 streamsArray.stream_request[j].buf_index = indexUsed;
4849 break;
4850 }
4851 }
4852 if (j == streamsArray.num_streams) {
4853 LOGE("Did not find matching stream to update index");
4854 assert(0);
4855 }
4856
Thierry Strudel3d639192016-09-09 11:52:26 -07004857 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
4858 && mBatchSize) {
4859 mToBeQueuedVidBufs++;
4860 if (mToBeQueuedVidBufs == mBatchSize) {
4861 channel->queueBatchBuf();
4862 }
4863 }
4864 if (rc < 0) {
4865 LOGE("request failed");
4866 pthread_mutex_unlock(&mMutex);
4867 return rc;
4868 }
4869 }
4870 pendingBufferIter++;
4871 }
4872
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004873 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
4874 itr++) {
4875 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
4876
4877 if (channel == NULL) {
4878 LOGE("invalid channel pointer for stream");
4879 assert(0);
4880 return BAD_VALUE;
4881 }
4882
4883 InternalRequest requestedStream;
4884 requestedStream = (*itr);
4885
4886
4887 if ((*itr).stream->format == HAL_PIXEL_FORMAT_BLOB) {
4888 LOGD("snapshot request internally input buffer %p, frame_number %d",
4889 request->input_buffer, frameNumber);
4890 if(request->input_buffer != NULL){
4891 rc = channel->request(NULL, frameNumber,
4892 pInputBuffer, &mReprocMeta, indexUsed, true, requestedStream.meteringOnly);
4893 if (rc < 0) {
4894 LOGE("Fail to request on picture channel");
4895 pthread_mutex_unlock(&mMutex);
4896 return rc;
4897 }
4898 } else {
4899 LOGD("snapshot request with frame_number %d", frameNumber);
4900 if (!request->settings) {
4901 rc = channel->request(NULL, frameNumber,
4902 NULL, mPrevParameters, indexUsed, true, requestedStream.meteringOnly);
4903 } else {
4904 rc = channel->request(NULL, frameNumber,
4905 NULL, mParameters, indexUsed, true, requestedStream.meteringOnly);
4906 }
4907 if (rc < 0) {
4908 LOGE("Fail to request on picture channel");
4909 pthread_mutex_unlock(&mMutex);
4910 return rc;
4911 }
4912
4913 if ((*itr).meteringOnly != 1) {
4914 requestedStream.need_metadata = 1;
4915 streams_need_metadata++;
4916 }
4917 }
4918
4919 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
4920 uint32_t j = 0;
4921 for (j = 0; j < streamsArray.num_streams; j++) {
4922 if (streamsArray.stream_request[j].streamID == streamId) {
4923 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
4924 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
4925 else
4926 streamsArray.stream_request[j].buf_index = indexUsed;
4927 break;
4928 }
4929 }
4930 if (j == streamsArray.num_streams) {
4931 LOGE("Did not find matching stream to update index");
4932 assert(0);
4933 }
4934
4935 } else {
4936 LOGE("Internal requests not supported on this stream type");
4937 assert(0);
4938 return INVALID_OPERATION;
4939 }
4940 latestRequest->internalRequestList.push_back(requestedStream);
4941 }
4942
Thierry Strudel3d639192016-09-09 11:52:26 -07004943 //If 2 streams have need_metadata set to true, fail the request, unless
4944 //we copy/reference count the metadata buffer
4945 if (streams_need_metadata > 1) {
4946 LOGE("not supporting request in which two streams requires"
4947 " 2 HAL metadata for reprocessing");
4948 pthread_mutex_unlock(&mMutex);
4949 return -EINVAL;
4950 }
4951
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004952 if (request->input_buffer == NULL) {
Thierry Strudel3d639192016-09-09 11:52:26 -07004953 /* Set the parameters to backend:
4954 * - For every request in NORMAL MODE
4955 * - For every request in HFR mode during preview only case
4956 * - Once every batch in HFR mode during video recording
4957 */
4958 if (!mBatchSize ||
4959 (mBatchSize && !isVidBufRequested) ||
4960 (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
4961 LOGD("set_parms batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
4962 mBatchSize, isVidBufRequested,
4963 mToBeQueuedVidBufs);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004964
4965 if(mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize)) {
4966 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
4967 uint32_t m = 0;
4968 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
4969 if (streamsArray.stream_request[k].streamID ==
4970 mBatchedStreamsArray.stream_request[m].streamID)
4971 break;
4972 }
4973 if (m == mBatchedStreamsArray.num_streams) {
4974 mBatchedStreamsArray.stream_request\
4975 [mBatchedStreamsArray.num_streams].streamID =
4976 streamsArray.stream_request[k].streamID;
4977 mBatchedStreamsArray.stream_request\
4978 [mBatchedStreamsArray.num_streams].buf_index =
4979 streamsArray.stream_request[k].buf_index;
4980 mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
4981 }
4982 }
4983 streamsArray = mBatchedStreamsArray;
4984 }
4985 /* Update stream id of all the requested buffers */
4986 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
4987 LOGE("Failed to set stream type mask in the parameters");
4988 return BAD_VALUE;
4989 }
4990
Thierry Strudel3d639192016-09-09 11:52:26 -07004991 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4992 mParameters);
4993 if (rc < 0) {
4994 LOGE("set_parms failed");
4995 }
4996 /* reset to zero coz, the batch is queued */
4997 mToBeQueuedVidBufs = 0;
4998 mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004999 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
5000 } else if (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs != mBatchSize)) {
5001 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5002 uint32_t m = 0;
5003 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5004 if (streamsArray.stream_request[k].streamID ==
5005 mBatchedStreamsArray.stream_request[m].streamID)
5006 break;
5007 }
5008 if (m == mBatchedStreamsArray.num_streams) {
5009 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].streamID =
5010 streamsArray.stream_request[k].streamID;
5011 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].buf_index =
5012 streamsArray.stream_request[k].buf_index;
5013 mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
5014 }
5015 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005016 }
5017 mPendingLiveRequest++;
5018 }
5019
5020 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
5021
5022 mState = STARTED;
5023 // Added a timed condition wait
5024 struct timespec ts;
5025 uint8_t isValidTimeout = 1;
5026 rc = clock_gettime(CLOCK_REALTIME, &ts);
5027 if (rc < 0) {
5028 isValidTimeout = 0;
5029 LOGE("Error reading the real time clock!!");
5030 }
5031 else {
5032 // Make timeout as 5 sec for request to be honored
5033 ts.tv_sec += 5;
5034 }
5035 //Block on conditional variable
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005036 while ((mPendingLiveRequest >= mMinInFlightRequests) && !pInputBuffer &&
Thierry Strudel3d639192016-09-09 11:52:26 -07005037 (mState != ERROR) && (mState != DEINIT)) {
5038 if (!isValidTimeout) {
5039 LOGD("Blocking on conditional wait");
5040 pthread_cond_wait(&mRequestCond, &mMutex);
5041 }
5042 else {
5043 LOGD("Blocking on timed conditional wait");
5044 rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
5045 if (rc == ETIMEDOUT) {
5046 rc = -ENODEV;
5047 LOGE("Unblocked on timeout!!!!");
5048 break;
5049 }
5050 }
5051 LOGD("Unblocked");
5052 if (mWokenUpByDaemon) {
5053 mWokenUpByDaemon = false;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005054 if (mPendingLiveRequest < mMaxInFlightRequests)
Thierry Strudel3d639192016-09-09 11:52:26 -07005055 break;
5056 }
5057 }
5058 pthread_mutex_unlock(&mMutex);
5059
5060 return rc;
5061}
5062
5063/*===========================================================================
5064 * FUNCTION : dump
5065 *
5066 * DESCRIPTION:
5067 *
5068 * PARAMETERS :
5069 *
5070 *
5071 * RETURN :
5072 *==========================================================================*/
5073void QCamera3HardwareInterface::dump(int fd)
5074{
5075 pthread_mutex_lock(&mMutex);
5076 dprintf(fd, "\n Camera HAL3 information Begin \n");
5077
5078 dprintf(fd, "\nNumber of pending requests: %zu \n",
5079 mPendingRequestsList.size());
5080 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
5081 dprintf(fd, " Frame | Number of Buffers | Req Id: | Blob Req | Input buffer present\n");
5082 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
5083 for(pendingRequestIterator i = mPendingRequestsList.begin();
5084 i != mPendingRequestsList.end(); i++) {
5085 dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
5086 i->frame_number, i->num_buffers, i->request_id, i->blob_request,
5087 i->input_buffer);
5088 }
5089 dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
5090 mPendingBuffersMap.get_num_overall_buffers());
5091 dprintf(fd, "-------+------------------\n");
5092 dprintf(fd, " Frame | Stream type mask \n");
5093 dprintf(fd, "-------+------------------\n");
5094 for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
5095 for(auto &j : req.mPendingBufferList) {
5096 QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
5097 dprintf(fd, " %5d | %11d \n",
5098 req.frame_number, channel->getStreamTypeMask());
5099 }
5100 }
5101 dprintf(fd, "-------+------------------\n");
5102
5103 dprintf(fd, "\nPending frame drop list: %zu\n",
5104 mPendingFrameDropList.size());
5105 dprintf(fd, "-------+-----------\n");
5106 dprintf(fd, " Frame | Stream ID \n");
5107 dprintf(fd, "-------+-----------\n");
5108 for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
5109 i != mPendingFrameDropList.end(); i++) {
5110 dprintf(fd, " %5d | %9d \n",
5111 i->frame_number, i->stream_ID);
5112 }
5113 dprintf(fd, "-------+-----------\n");
5114
5115 dprintf(fd, "\n Camera HAL3 information End \n");
5116
5117 /* use dumpsys media.camera as trigger to send update debug level event */
5118 mUpdateDebugLevel = true;
5119 pthread_mutex_unlock(&mMutex);
5120 return;
5121}
5122
5123/*===========================================================================
5124 * FUNCTION : flush
5125 *
5126 * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
5127 * conditionally restarts channels
5128 *
5129 * PARAMETERS :
5130 * @ restartChannels: re-start all channels
5131 *
5132 *
5133 * RETURN :
5134 * 0 on success
5135 * Error code on failure
5136 *==========================================================================*/
5137int QCamera3HardwareInterface::flush(bool restartChannels)
5138{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08005139 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07005140 int32_t rc = NO_ERROR;
5141
5142 LOGD("Unblocking Process Capture Request");
5143 pthread_mutex_lock(&mMutex);
5144 mFlush = true;
5145 pthread_mutex_unlock(&mMutex);
5146
5147 rc = stopAllChannels();
5148 // unlink of dualcam
5149 if (mIsDeviceLinked) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005150 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
5151 &m_pDualCamCmdPtr->bundle_info;
5152 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005153 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
5154 pthread_mutex_lock(&gCamLock);
5155
5156 if (mIsMainCamera == 1) {
5157 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
5158 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005159 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07005160 // related session id should be session id of linked session
5161 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5162 } else {
5163 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
5164 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005165 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07005166 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5167 }
5168 pthread_mutex_unlock(&gCamLock);
5169
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005170 rc = mCameraHandle->ops->set_dual_cam_cmd(
5171 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07005172 if (rc < 0) {
5173 LOGE("Dualcam: Unlink failed, but still proceed to close");
5174 }
5175 }
5176
5177 if (rc < 0) {
5178 LOGE("stopAllChannels failed");
5179 return rc;
5180 }
5181 if (mChannelHandle) {
5182 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
5183 mChannelHandle);
5184 }
5185
5186 // Reset bundle info
5187 rc = setBundleInfo();
5188 if (rc < 0) {
5189 LOGE("setBundleInfo failed %d", rc);
5190 return rc;
5191 }
5192
5193 // Mutex Lock
5194 pthread_mutex_lock(&mMutex);
5195
5196 // Unblock process_capture_request
5197 mPendingLiveRequest = 0;
5198 pthread_cond_signal(&mRequestCond);
5199
5200 rc = notifyErrorForPendingRequests();
5201 if (rc < 0) {
5202 LOGE("notifyErrorForPendingRequests failed");
5203 pthread_mutex_unlock(&mMutex);
5204 return rc;
5205 }
5206
5207 mFlush = false;
5208
5209 // Start the Streams/Channels
5210 if (restartChannels) {
5211 rc = startAllChannels();
5212 if (rc < 0) {
5213 LOGE("startAllChannels failed");
5214 pthread_mutex_unlock(&mMutex);
5215 return rc;
5216 }
5217 }
5218
5219 if (mChannelHandle) {
5220 mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
5221 mChannelHandle);
5222 if (rc < 0) {
5223 LOGE("start_channel failed");
5224 pthread_mutex_unlock(&mMutex);
5225 return rc;
5226 }
5227 }
5228
5229 pthread_mutex_unlock(&mMutex);
5230
5231 return 0;
5232}
5233
5234/*===========================================================================
5235 * FUNCTION : flushPerf
5236 *
5237 * DESCRIPTION: This is the performance optimization version of flush that does
5238 * not use stream off, rather flushes the system
5239 *
5240 * PARAMETERS :
5241 *
5242 *
5243 * RETURN : 0 : success
5244 * -EINVAL: input is malformed (device is not valid)
5245 * -ENODEV: if the device has encountered a serious error
5246 *==========================================================================*/
int QCamera3HardwareInterface::flushPerf()
{
    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
    int32_t rc = 0;
    struct timespec timeout;
    bool timed_wait = false;

    // Everything below runs under mMutex except the condvar waits, which
    // atomically release/reacquire it. All early exits unlock before return.
    pthread_mutex_lock(&mMutex);
    mFlushPerf = true;
    // Snapshot how many buffers are outstanding; the buffer-return path is
    // expected to decrement this and signal mBuffersCond.
    mPendingBuffersMap.numPendingBufsAtFlush =
            mPendingBuffersMap.get_num_overall_buffers();
    LOGD("Calling flush. Wait for %d buffers to return",
        mPendingBuffersMap.numPendingBufsAtFlush);

    /* send the flush event to the backend */
    rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
    if (rc < 0) {
        LOGE("Error in flush: IOCTL failure");
        mFlushPerf = false;
        pthread_mutex_unlock(&mMutex);
        return -ENODEV;
    }

    // Nothing outstanding: flush is trivially complete.
    if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
        LOGD("No pending buffers in HAL, return flush");
        mFlushPerf = false;
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    /* wait on a signal that buffers were received */
    // Timed wait needs an absolute CLOCK_REALTIME deadline; if the clock
    // read fails, fall back to an untimed (potentially indefinite) wait.
    rc = clock_gettime(CLOCK_REALTIME, &timeout);
    if (rc < 0) {
        LOGE("Error reading the real time clock, cannot use timed wait");
    } else {
        timeout.tv_sec += FLUSH_TIMEOUT;
        timed_wait = true;
    }

    //Block on conditional variable
    // Loop guards against spurious wakeups: keep waiting until the count
    // actually reaches zero or the wait itself fails/times out.
    while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
        LOGD("Waiting on mBuffersCond");
        if (!timed_wait) {
            rc = pthread_cond_wait(&mBuffersCond, &mMutex);
            if (rc != 0) {
                LOGE("pthread_cond_wait failed due to rc = %s",
                        strerror(rc));
                break;
            }
        } else {
            rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
            if (rc != 0) {
                // Includes ETIMEDOUT; treated as a fatal device error below.
                LOGE("pthread_cond_timedwait failed due to rc = %s",
                        strerror(rc));
                break;
            }
        }
    }
    if (rc != 0) {
        mFlushPerf = false;
        pthread_mutex_unlock(&mMutex);
        return -ENODEV;
    }

    LOGD("Received buffers, now safe to return them");

    //make sure the channels handle flush
    //currently only required for the picture channel to release snapshot resources
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3Channel *channel = (*it)->channel;
        if (channel) {
            rc = channel->flush();
            if (rc) {
                LOGE("Flushing the channels failed with error %d", rc);
                // even though the channel flush failed we need to continue and
                // return the buffers we have to the framework, however the return
                // value will be an error
                rc = -ENODEV;
            }
        }
    }

    /* notify the frameworks and send errored results */
    rc = notifyErrorForPendingRequests();
    if (rc < 0) {
        LOGE("notifyErrorForPendingRequests failed");
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    //unblock process_capture_request
    mPendingLiveRequest = 0;
    unblockRequestIfNecessary();

    mFlushPerf = false;
    pthread_mutex_unlock(&mMutex);
    LOGD ("Flush Operation complete. rc = %d", rc);
    return rc;
}
5347
5348/*===========================================================================
5349 * FUNCTION : handleCameraDeviceError
5350 *
5351 * DESCRIPTION: This function calls internal flush and notifies the error to
5352 * framework and updates the state variable.
5353 *
5354 * PARAMETERS : None
5355 *
5356 * RETURN : NO_ERROR on Success
5357 * Error code on failure
5358 *==========================================================================*/
5359int32_t QCamera3HardwareInterface::handleCameraDeviceError()
5360{
5361 int32_t rc = NO_ERROR;
5362
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005363 {
5364 Mutex::Autolock lock(mFlushLock);
5365 pthread_mutex_lock(&mMutex);
5366 if (mState != ERROR) {
5367 //if mState != ERROR, nothing to be done
5368 pthread_mutex_unlock(&mMutex);
5369 return NO_ERROR;
5370 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005371 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005372
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005373 rc = flush(false /* restart channels */);
5374 if (NO_ERROR != rc) {
5375 LOGE("internal flush to handle mState = ERROR failed");
5376 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005377
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005378 pthread_mutex_lock(&mMutex);
5379 mState = DEINIT;
5380 pthread_mutex_unlock(&mMutex);
5381 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005382
5383 camera3_notify_msg_t notify_msg;
5384 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
5385 notify_msg.type = CAMERA3_MSG_ERROR;
5386 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
5387 notify_msg.message.error.error_stream = NULL;
5388 notify_msg.message.error.frame_number = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005389 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07005390
5391 return rc;
5392}
5393
5394/*===========================================================================
5395 * FUNCTION : captureResultCb
5396 *
5397 * DESCRIPTION: Callback handler for all capture result
5398 * (streams, as well as metadata)
5399 *
5400 * PARAMETERS :
5401 * @metadata : metadata information
5402 * @buffer : actual gralloc buffer to be returned to frameworks.
5403 * NULL if metadata.
5404 *
5405 * RETURN : NONE
5406 *==========================================================================*/
5407void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
5408 camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
5409{
5410 if (metadata_buf) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005411 pthread_mutex_lock(&mMutex);
5412 uint8_t batchSize = mBatchSize;
5413 pthread_mutex_unlock(&mMutex);
5414 if (batchSize) {
Thierry Strudel3d639192016-09-09 11:52:26 -07005415 handleBatchMetadata(metadata_buf,
5416 true /* free_and_bufdone_meta_buf */);
5417 } else { /* mBatchSize = 0 */
5418 hdrPlusPerfLock(metadata_buf);
5419 pthread_mutex_lock(&mMutex);
5420 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005421 true /* free_and_bufdone_meta_buf */,
5422 false /* first frame of batch metadata */ );
Thierry Strudel3d639192016-09-09 11:52:26 -07005423 pthread_mutex_unlock(&mMutex);
5424 }
5425 } else if (isInputBuffer) {
5426 pthread_mutex_lock(&mMutex);
5427 handleInputBufferWithLock(frame_number);
5428 pthread_mutex_unlock(&mMutex);
5429 } else {
5430 pthread_mutex_lock(&mMutex);
5431 handleBufferWithLock(buffer, frame_number);
5432 pthread_mutex_unlock(&mMutex);
5433 }
5434 return;
5435}
5436
5437/*===========================================================================
5438 * FUNCTION : getReprocessibleOutputStreamId
5439 *
5440 * DESCRIPTION: Get source output stream id for the input reprocess stream
5441 * based on size and format, which would be the largest
5442 * output stream if an input stream exists.
5443 *
5444 * PARAMETERS :
5445 * @id : return the stream id if found
5446 *
5447 * RETURN : int32_t type of status
5448 * NO_ERROR -- success
5449 * none-zero failure code
5450 *==========================================================================*/
5451int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
5452{
5453 /* check if any output or bidirectional stream with the same size and format
5454 and return that stream */
5455 if ((mInputStreamInfo.dim.width > 0) &&
5456 (mInputStreamInfo.dim.height > 0)) {
5457 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5458 it != mStreamInfo.end(); it++) {
5459
5460 camera3_stream_t *stream = (*it)->stream;
5461 if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
5462 (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
5463 (stream->format == mInputStreamInfo.format)) {
5464 // Usage flag for an input stream and the source output stream
5465 // may be different.
5466 LOGD("Found reprocessible output stream! %p", *it);
5467 LOGD("input stream usage 0x%x, current stream usage 0x%x",
5468 stream->usage, mInputStreamInfo.usage);
5469
5470 QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
5471 if (channel != NULL && channel->mStreams[0]) {
5472 id = channel->mStreams[0]->getMyServerID();
5473 return NO_ERROR;
5474 }
5475 }
5476 }
5477 } else {
5478 LOGD("No input stream, so no reprocessible output stream");
5479 }
5480 return NAME_NOT_FOUND;
5481}
5482
5483/*===========================================================================
5484 * FUNCTION : lookupFwkName
5485 *
5486 * DESCRIPTION: In case the enum is not same in fwk and backend
5487 * make sure the parameter is correctly propogated
5488 *
5489 * PARAMETERS :
5490 * @arr : map between the two enums
5491 * @len : len of the map
5492 * @hal_name : name of the hal_parm to map
5493 *
5494 * RETURN : int type of status
5495 * fwk_name -- success
5496 * none-zero failure code
5497 *==========================================================================*/
5498template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
5499 size_t len, halType hal_name)
5500{
5501
5502 for (size_t i = 0; i < len; i++) {
5503 if (arr[i].hal_name == hal_name) {
5504 return arr[i].fwk_name;
5505 }
5506 }
5507
5508 /* Not able to find matching framework type is not necessarily
5509 * an error case. This happens when mm-camera supports more attributes
5510 * than the frameworks do */
5511 LOGH("Cannot find matching framework type");
5512 return NAME_NOT_FOUND;
5513}
5514
5515/*===========================================================================
5516 * FUNCTION : lookupHalName
5517 *
5518 * DESCRIPTION: In case the enum is not same in fwk and backend
5519 * make sure the parameter is correctly propogated
5520 *
5521 * PARAMETERS :
5522 * @arr : map between the two enums
5523 * @len : len of the map
5524 * @fwk_name : name of the hal_parm to map
5525 *
5526 * RETURN : int32_t type of status
5527 * hal_name -- success
5528 * none-zero failure code
5529 *==========================================================================*/
5530template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
5531 size_t len, fwkType fwk_name)
5532{
5533 for (size_t i = 0; i < len; i++) {
5534 if (arr[i].fwk_name == fwk_name) {
5535 return arr[i].hal_name;
5536 }
5537 }
5538
5539 LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
5540 return NAME_NOT_FOUND;
5541}
5542
5543/*===========================================================================
5544 * FUNCTION : lookupProp
5545 *
5546 * DESCRIPTION: lookup a value by its name
5547 *
5548 * PARAMETERS :
5549 * @arr : map between the two enums
5550 * @len : size of the map
5551 * @name : name to be looked up
5552 *
5553 * RETURN : Value if found
5554 * CAM_CDS_MODE_MAX if not found
5555 *==========================================================================*/
5556template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
5557 size_t len, const char *name)
5558{
5559 if (name) {
5560 for (size_t i = 0; i < len; i++) {
5561 if (!strcmp(arr[i].desc, name)) {
5562 return arr[i].val;
5563 }
5564 }
5565 }
5566 return CAM_CDS_MODE_MAX;
5567}
5568
5569/*===========================================================================
5570 *
5571 * DESCRIPTION:
5572 *
5573 * PARAMETERS :
5574 * @metadata : metadata information from callback
5575 * @timestamp: metadata buffer timestamp
5576 * @request_id: request id
5577 * @jpegMetadata: additional jpeg metadata
Samuel Ha68ba5172016-12-15 18:41:12 -08005578 * @DevCamDebug_meta_enable: enable DevCamDebug meta
5579 * // DevCamDebug metadata end
Thierry Strudel3d639192016-09-09 11:52:26 -07005580 * @pprocDone: whether internal offline postprocsesing is done
5581 *
5582 * RETURN : camera_metadata_t*
5583 * metadata in a format specified by fwk
5584 *==========================================================================*/
5585camera_metadata_t*
5586QCamera3HardwareInterface::translateFromHalMetadata(
5587 metadata_buffer_t *metadata,
5588 nsecs_t timestamp,
5589 int32_t request_id,
5590 const CameraMetadata& jpegMetadata,
5591 uint8_t pipeline_depth,
5592 uint8_t capture_intent,
Samuel Ha68ba5172016-12-15 18:41:12 -08005593 /* DevCamDebug metadata translateFromHalMetadata argument */
5594 uint8_t DevCamDebug_meta_enable,
5595 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07005596 bool pprocDone,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005597 uint8_t fwk_cacMode,
5598 bool firstMetadataInBatch)
Thierry Strudel3d639192016-09-09 11:52:26 -07005599{
5600 CameraMetadata camMetadata;
5601 camera_metadata_t *resultMetadata;
5602
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005603 if (mBatchSize && !firstMetadataInBatch) {
5604 /* In batch mode, use cached metadata from the first metadata
5605 in the batch */
5606 camMetadata.clear();
5607 camMetadata = mCachedMetadata;
5608 }
5609
Thierry Strudel3d639192016-09-09 11:52:26 -07005610 if (jpegMetadata.entryCount())
5611 camMetadata.append(jpegMetadata);
5612
5613 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
5614 camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
5615 camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pipeline_depth, 1);
5616 camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &capture_intent, 1);
Samuel Ha68ba5172016-12-15 18:41:12 -08005617 if (mBatchSize == 0) {
5618 // DevCamDebug metadata translateFromHalMetadata. Only update this one for non-HFR mode
5619 camMetadata.update(DEVCAMDEBUG_META_ENABLE, &DevCamDebug_meta_enable, 1);
5620 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005621
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005622 if (mBatchSize && !firstMetadataInBatch) {
5623 /* In batch mode, use cached metadata instead of parsing metadata buffer again */
5624 resultMetadata = camMetadata.release();
5625 return resultMetadata;
5626 }
5627
Samuel Ha68ba5172016-12-15 18:41:12 -08005628 // atrace_begin(ATRACE_TAG_ALWAYS, "DevCamDebugInfo");
5629 // Only update DevCameraDebug metadta conditionally: non-HFR mode and it is enabled.
5630 if (mBatchSize == 0 && DevCamDebug_meta_enable != 0) {
5631 // DevCamDebug metadata translateFromHalMetadata AF
5632 IF_META_AVAILABLE(int32_t, DevCamDebug_af_lens_position,
5633 CAM_INTF_META_DEV_CAM_AF_LENS_POSITION, metadata) {
5634 int32_t fwk_DevCamDebug_af_lens_position = *DevCamDebug_af_lens_position;
5635 camMetadata.update(DEVCAMDEBUG_AF_LENS_POSITION, &fwk_DevCamDebug_af_lens_position, 1);
5636 }
5637 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_confidence,
5638 CAM_INTF_META_DEV_CAM_AF_TOF_CONFIDENCE, metadata) {
5639 int32_t fwk_DevCamDebug_af_tof_confidence = *DevCamDebug_af_tof_confidence;
5640 camMetadata.update(DEVCAMDEBUG_AF_TOF_CONFIDENCE, &fwk_DevCamDebug_af_tof_confidence, 1);
5641 }
5642 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_distance,
5643 CAM_INTF_META_DEV_CAM_AF_TOF_DISTANCE, metadata) {
5644 int32_t fwk_DevCamDebug_af_tof_distance = *DevCamDebug_af_tof_distance;
5645 camMetadata.update(DEVCAMDEBUG_AF_TOF_DISTANCE, &fwk_DevCamDebug_af_tof_distance, 1);
5646 }
5647 IF_META_AVAILABLE(int32_t, DevCamDebug_af_luma,
5648 CAM_INTF_META_DEV_CAM_AF_LUMA, metadata) {
5649 int32_t fwk_DevCamDebug_af_luma = *DevCamDebug_af_luma;
5650 camMetadata.update(DEVCAMDEBUG_AF_LUMA, &fwk_DevCamDebug_af_luma, 1);
5651 }
5652 IF_META_AVAILABLE(int32_t, DevCamDebug_af_haf_state,
5653 CAM_INTF_META_DEV_CAM_AF_HAF_STATE, metadata) {
5654 int32_t fwk_DevCamDebug_af_haf_state = *DevCamDebug_af_haf_state;
5655 camMetadata.update(DEVCAMDEBUG_AF_HAF_STATE, &fwk_DevCamDebug_af_haf_state, 1);
5656 }
5657 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_target_pos,
5658 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_TARGET_POS, metadata) {
5659 int32_t fwk_DevCamDebug_af_monitor_pdaf_target_pos =
5660 *DevCamDebug_af_monitor_pdaf_target_pos;
5661 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
5662 &fwk_DevCamDebug_af_monitor_pdaf_target_pos, 1);
5663 }
5664 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_confidence,
5665 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_CONFIDENCE, metadata) {
5666 int32_t fwk_DevCamDebug_af_monitor_pdaf_confidence =
5667 *DevCamDebug_af_monitor_pdaf_confidence;
5668 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
5669 &fwk_DevCamDebug_af_monitor_pdaf_confidence, 1);
5670 }
5671 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_refocus,
5672 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_REFOCUS, metadata) {
5673 int32_t fwk_DevCamDebug_af_monitor_pdaf_refocus = *DevCamDebug_af_monitor_pdaf_refocus;
5674 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
5675 &fwk_DevCamDebug_af_monitor_pdaf_refocus, 1);
5676 }
5677 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_target_pos,
5678 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_TARGET_POS, metadata) {
5679 int32_t fwk_DevCamDebug_af_monitor_tof_target_pos =
5680 *DevCamDebug_af_monitor_tof_target_pos;
5681 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
5682 &fwk_DevCamDebug_af_monitor_tof_target_pos, 1);
5683 }
5684 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_confidence,
5685 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_CONFIDENCE, metadata) {
5686 int32_t fwk_DevCamDebug_af_monitor_tof_confidence =
5687 *DevCamDebug_af_monitor_tof_confidence;
5688 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
5689 &fwk_DevCamDebug_af_monitor_tof_confidence, 1);
5690 }
5691 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_refocus,
5692 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_REFOCUS, metadata) {
5693 int32_t fwk_DevCamDebug_af_monitor_tof_refocus = *DevCamDebug_af_monitor_tof_refocus;
5694 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
5695 &fwk_DevCamDebug_af_monitor_tof_refocus, 1);
5696 }
5697 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_type_select,
5698 CAM_INTF_META_DEV_CAM_AF_MONITOR_TYPE_SELECT, metadata) {
5699 int32_t fwk_DevCamDebug_af_monitor_type_select = *DevCamDebug_af_monitor_type_select;
5700 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
5701 &fwk_DevCamDebug_af_monitor_type_select, 1);
5702 }
5703 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_refocus,
5704 CAM_INTF_META_DEV_CAM_AF_MONITOR_REFOCUS, metadata) {
5705 int32_t fwk_DevCamDebug_af_monitor_refocus = *DevCamDebug_af_monitor_refocus;
5706 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_REFOCUS,
5707 &fwk_DevCamDebug_af_monitor_refocus, 1);
5708 }
5709 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_target_pos,
5710 CAM_INTF_META_DEV_CAM_AF_MONITOR_TARGET_POS, metadata) {
5711 int32_t fwk_DevCamDebug_af_monitor_target_pos = *DevCamDebug_af_monitor_target_pos;
5712 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
5713 &fwk_DevCamDebug_af_monitor_target_pos, 1);
5714 }
5715 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_target_pos,
5716 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_TARGET_POS, metadata) {
5717 int32_t fwk_DevCamDebug_af_search_pdaf_target_pos =
5718 *DevCamDebug_af_search_pdaf_target_pos;
5719 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
5720 &fwk_DevCamDebug_af_search_pdaf_target_pos, 1);
5721 }
5722 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_next_pos,
5723 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEXT_POS, metadata) {
5724 int32_t fwk_DevCamDebug_af_search_pdaf_next_pos = *DevCamDebug_af_search_pdaf_next_pos;
5725 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
5726 &fwk_DevCamDebug_af_search_pdaf_next_pos, 1);
5727 }
5728 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_near_pos,
5729 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEAR_POS, metadata) {
5730 int32_t fwk_DevCamDebug_af_search_pdaf_near_pos = *DevCamDebug_af_search_pdaf_near_pos;
5731 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
5732 &fwk_DevCamDebug_af_search_pdaf_near_pos, 1);
5733 }
5734 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_far_pos,
5735 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_FAR_POS, metadata) {
5736 int32_t fwk_DevCamDebug_af_search_pdaf_far_pos = *DevCamDebug_af_search_pdaf_far_pos;
5737 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
5738 &fwk_DevCamDebug_af_search_pdaf_far_pos, 1);
5739 }
5740 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_confidence,
5741 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_CONFIDENCE, metadata) {
5742 int32_t fwk_DevCamDebug_af_search_pdaf_confidence = *DevCamDebug_af_search_pdaf_confidence;
5743 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
5744 &fwk_DevCamDebug_af_search_pdaf_confidence, 1);
5745 }
5746 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_target_pos,
5747 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_TARGET_POS, metadata) {
5748 int32_t fwk_DevCamDebug_af_search_tof_target_pos =
5749 *DevCamDebug_af_search_tof_target_pos;
5750 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
5751 &fwk_DevCamDebug_af_search_tof_target_pos, 1);
5752 }
5753 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_next_pos,
5754 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEXT_POS, metadata) {
5755 int32_t fwk_DevCamDebug_af_search_tof_next_pos = *DevCamDebug_af_search_tof_next_pos;
5756 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
5757 &fwk_DevCamDebug_af_search_tof_next_pos, 1);
5758 }
5759 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_near_pos,
5760 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEAR_POS, metadata) {
5761 int32_t fwk_DevCamDebug_af_search_tof_near_pos = *DevCamDebug_af_search_tof_near_pos;
5762 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
5763 &fwk_DevCamDebug_af_search_tof_near_pos, 1);
5764 }
5765 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_far_pos,
5766 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_FAR_POS, metadata) {
5767 int32_t fwk_DevCamDebug_af_search_tof_far_pos = *DevCamDebug_af_search_tof_far_pos;
5768 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
5769 &fwk_DevCamDebug_af_search_tof_far_pos, 1);
5770 }
5771 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_confidence,
5772 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_CONFIDENCE, metadata) {
5773 int32_t fwk_DevCamDebug_af_search_tof_confidence = *DevCamDebug_af_search_tof_confidence;
5774 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
5775 &fwk_DevCamDebug_af_search_tof_confidence, 1);
5776 }
5777 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_type_select,
5778 CAM_INTF_META_DEV_CAM_AF_SEARCH_TYPE_SELECT, metadata) {
5779 int32_t fwk_DevCamDebug_af_search_type_select = *DevCamDebug_af_search_type_select;
5780 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
5781 &fwk_DevCamDebug_af_search_type_select, 1);
5782 }
5783 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_next_pos,
5784 CAM_INTF_META_DEV_CAM_AF_SEARCH_NEXT_POS, metadata) {
5785 int32_t fwk_DevCamDebug_af_search_next_pos = *DevCamDebug_af_search_next_pos;
5786 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
5787 &fwk_DevCamDebug_af_search_next_pos, 1);
5788 }
5789 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_target_pos,
5790 CAM_INTF_META_DEV_CAM_AF_SEARCH_TARGET_POS, metadata) {
5791 int32_t fwk_DevCamDebug_af_search_target_pos = *DevCamDebug_af_search_target_pos;
5792 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
5793 &fwk_DevCamDebug_af_search_target_pos, 1);
5794 }
5795 // DevCamDebug metadata translateFromHalMetadata AEC
5796 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_target_luma,
5797 CAM_INTF_META_DEV_CAM_AEC_TARGET_LUMA, metadata) {
5798 int32_t fwk_DevCamDebug_aec_target_luma = *DevCamDebug_aec_target_luma;
5799 camMetadata.update(DEVCAMDEBUG_AEC_TARGET_LUMA, &fwk_DevCamDebug_aec_target_luma, 1);
5800 }
5801 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_comp_luma,
5802 CAM_INTF_META_DEV_CAM_AEC_COMP_LUMA, metadata) {
5803 int32_t fwk_DevCamDebug_aec_comp_luma = *DevCamDebug_aec_comp_luma;
5804 camMetadata.update(DEVCAMDEBUG_AEC_COMP_LUMA, &fwk_DevCamDebug_aec_comp_luma, 1);
5805 }
5806 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_avg_luma,
5807 CAM_INTF_META_DEV_CAM_AEC_AVG_LUMA, metadata) {
5808 int32_t fwk_DevCamDebug_aec_avg_luma = *DevCamDebug_aec_avg_luma;
5809 camMetadata.update(DEVCAMDEBUG_AEC_AVG_LUMA, &fwk_DevCamDebug_aec_avg_luma, 1);
5810 }
5811 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_cur_luma,
5812 CAM_INTF_META_DEV_CAM_AEC_CUR_LUMA, metadata) {
5813 int32_t fwk_DevCamDebug_aec_cur_luma = *DevCamDebug_aec_cur_luma;
5814 camMetadata.update(DEVCAMDEBUG_AEC_CUR_LUMA, &fwk_DevCamDebug_aec_cur_luma, 1);
5815 }
5816 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_linecount,
5817 CAM_INTF_META_DEV_CAM_AEC_LINECOUNT, metadata) {
5818 int32_t fwk_DevCamDebug_aec_linecount = *DevCamDebug_aec_linecount;
5819 camMetadata.update(DEVCAMDEBUG_AEC_LINECOUNT, &fwk_DevCamDebug_aec_linecount, 1);
5820 }
5821 IF_META_AVAILABLE(float, DevCamDebug_aec_real_gain,
5822 CAM_INTF_META_DEV_CAM_AEC_REAL_GAIN, metadata) {
5823 float fwk_DevCamDebug_aec_real_gain = *DevCamDebug_aec_real_gain;
5824 camMetadata.update(DEVCAMDEBUG_AEC_REAL_GAIN, &fwk_DevCamDebug_aec_real_gain, 1);
5825 }
5826 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_exp_index,
5827 CAM_INTF_META_DEV_CAM_AEC_EXP_INDEX, metadata) {
5828 int32_t fwk_DevCamDebug_aec_exp_index = *DevCamDebug_aec_exp_index;
5829 camMetadata.update(DEVCAMDEBUG_AEC_EXP_INDEX, &fwk_DevCamDebug_aec_exp_index, 1);
5830 }
5831 IF_META_AVAILABLE(float, DevCamDebug_aec_lux_idx,
5832 CAM_INTF_META_DEV_CAM_AEC_LUX_IDX, metadata) {
5833 float fwk_DevCamDebug_aec_lux_idx = *DevCamDebug_aec_lux_idx;
5834 camMetadata.update(DEVCAMDEBUG_AEC_LUX_IDX, &fwk_DevCamDebug_aec_lux_idx, 1);
5835 }
5836 // DevCamDebug metadata translateFromHalMetadata AWB
5837 IF_META_AVAILABLE(float, DevCamDebug_awb_r_gain,
5838 CAM_INTF_META_DEV_CAM_AWB_R_GAIN, metadata) {
5839 float fwk_DevCamDebug_awb_r_gain = *DevCamDebug_awb_r_gain;
5840 camMetadata.update(DEVCAMDEBUG_AWB_R_GAIN, &fwk_DevCamDebug_awb_r_gain, 1);
5841 }
5842 IF_META_AVAILABLE(float, DevCamDebug_awb_g_gain,
5843 CAM_INTF_META_DEV_CAM_AWB_G_GAIN, metadata) {
5844 float fwk_DevCamDebug_awb_g_gain = *DevCamDebug_awb_g_gain;
5845 camMetadata.update(DEVCAMDEBUG_AWB_G_GAIN, &fwk_DevCamDebug_awb_g_gain, 1);
5846 }
5847 IF_META_AVAILABLE(float, DevCamDebug_awb_b_gain,
5848 CAM_INTF_META_DEV_CAM_AWB_B_GAIN, metadata) {
5849 float fwk_DevCamDebug_awb_b_gain = *DevCamDebug_awb_b_gain;
5850 camMetadata.update(DEVCAMDEBUG_AWB_B_GAIN, &fwk_DevCamDebug_awb_b_gain, 1);
5851 }
5852 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_cct,
5853 CAM_INTF_META_DEV_CAM_AWB_CCT, metadata) {
5854 int32_t fwk_DevCamDebug_awb_cct = *DevCamDebug_awb_cct;
5855 camMetadata.update(DEVCAMDEBUG_AWB_CCT, &fwk_DevCamDebug_awb_cct, 1);
5856 }
5857 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_decision,
5858 CAM_INTF_META_DEV_CAM_AWB_DECISION, metadata) {
5859 int32_t fwk_DevCamDebug_awb_decision = *DevCamDebug_awb_decision;
5860 camMetadata.update(DEVCAMDEBUG_AWB_DECISION, &fwk_DevCamDebug_awb_decision, 1);
5861 }
5862 }
5863 // atrace_end(ATRACE_TAG_ALWAYS);
5864
Thierry Strudel3d639192016-09-09 11:52:26 -07005865 IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
5866 int64_t fwk_frame_number = *frame_number;
5867 camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
5868 }
5869
5870 IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
5871 int32_t fps_range[2];
5872 fps_range[0] = (int32_t)float_range->min_fps;
5873 fps_range[1] = (int32_t)float_range->max_fps;
5874 camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
5875 fps_range, 2);
5876 LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
5877 fps_range[0], fps_range[1]);
5878 }
5879
5880 IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
5881 camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
5882 }
5883
5884 IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
5885 int val = (uint8_t)lookupFwkName(SCENE_MODES_MAP,
5886 METADATA_MAP_SIZE(SCENE_MODES_MAP),
5887 *sceneMode);
5888 if (NAME_NOT_FOUND != val) {
5889 uint8_t fwkSceneMode = (uint8_t)val;
5890 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
5891 LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
5892 fwkSceneMode);
5893 }
5894 }
5895
5896 IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
5897 uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
5898 camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
5899 }
5900
5901 IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
5902 uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
5903 camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
5904 }
5905
5906 IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
5907 uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
5908 camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
5909 }
5910
5911 IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
5912 CAM_INTF_META_EDGE_MODE, metadata) {
5913 camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
5914 }
5915
5916 IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
5917 uint8_t fwk_flashPower = (uint8_t) *flashPower;
5918 camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
5919 }
5920
5921 IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
5922 camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
5923 }
5924
5925 IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
5926 if (0 <= *flashState) {
5927 uint8_t fwk_flashState = (uint8_t) *flashState;
5928 if (!gCamCapability[mCameraId]->flash_available) {
5929 fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
5930 }
5931 camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
5932 }
5933 }
5934
5935 IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
5936 int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
5937 if (NAME_NOT_FOUND != val) {
5938 uint8_t fwk_flashMode = (uint8_t)val;
5939 camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
5940 }
5941 }
5942
5943 IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
5944 uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
5945 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
5946 }
5947
5948 IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
5949 camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
5950 }
5951
5952 IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
5953 camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
5954 }
5955
5956 IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
5957 camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
5958 }
5959
5960 IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
5961 uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
5962 camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
5963 }
5964
5965 IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
5966 uint8_t fwk_videoStab = (uint8_t) *videoStab;
5967 LOGD("fwk_videoStab = %d", fwk_videoStab);
5968 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
5969 } else {
5970 // Regardless of Video stab supports or not, CTS is expecting the EIS result to be non NULL
5971 // and so hardcoding the Video Stab result to OFF mode.
5972 uint8_t fwkVideoStabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
5973 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005974 LOGD("EIS result default to OFF mode");
Thierry Strudel3d639192016-09-09 11:52:26 -07005975 }
5976
5977 IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
5978 uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
5979 camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
5980 }
5981
5982 IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
5983 camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
5984 }
5985
5986 IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelSourcePattern,
5987 CAM_INTF_META_BLACK_LEVEL_SOURCE_PATTERN, metadata) {
5988
5989 LOGD("dynamicblackLevel = %f %f %f %f",
5990 blackLevelSourcePattern->cam_black_level[0],
5991 blackLevelSourcePattern->cam_black_level[1],
5992 blackLevelSourcePattern->cam_black_level[2],
5993 blackLevelSourcePattern->cam_black_level[3]);
5994 }
5995
5996 IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
5997 CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
5998 float fwk_blackLevelInd[4];
5999
6000 fwk_blackLevelInd[0] = blackLevelAppliedPattern->cam_black_level[0];
6001 fwk_blackLevelInd[1] = blackLevelAppliedPattern->cam_black_level[1];
6002 fwk_blackLevelInd[2] = blackLevelAppliedPattern->cam_black_level[2];
6003 fwk_blackLevelInd[3] = blackLevelAppliedPattern->cam_black_level[3];
6004
6005 LOGD("applied dynamicblackLevel = %f %f %f %f",
6006 blackLevelAppliedPattern->cam_black_level[0],
6007 blackLevelAppliedPattern->cam_black_level[1],
6008 blackLevelAppliedPattern->cam_black_level[2],
6009 blackLevelAppliedPattern->cam_black_level[3]);
6010 camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd, 4);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006011
6012#ifndef USE_HAL_3_3
6013 // Update the ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
Zhijun Heb753c672016-06-15 14:50:48 -07006014 // Need convert the internal 12 bit depth to sensor 10 bit sensor raw
6015 // depth space.
6016 fwk_blackLevelInd[0] /= 4.0;
6017 fwk_blackLevelInd[1] /= 4.0;
6018 fwk_blackLevelInd[2] /= 4.0;
6019 fwk_blackLevelInd[3] /= 4.0;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006020 camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd, 4);
6021#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07006022 }
6023
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006024#ifndef USE_HAL_3_3
6025 // Fixed whitelevel is used by ISP/Sensor
6026 camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
6027 &gCamCapability[mCameraId]->white_level, 1);
6028#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07006029
6030 IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
6031 CAM_INTF_META_SCALER_CROP_REGION, metadata) {
6032 int32_t scalerCropRegion[4];
6033 scalerCropRegion[0] = hScalerCropRegion->left;
6034 scalerCropRegion[1] = hScalerCropRegion->top;
6035 scalerCropRegion[2] = hScalerCropRegion->width;
6036 scalerCropRegion[3] = hScalerCropRegion->height;
6037
6038 // Adjust crop region from sensor output coordinate system to active
6039 // array coordinate system.
6040 mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
6041 scalerCropRegion[2], scalerCropRegion[3]);
6042
6043 camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
6044 }
6045
6046 IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
6047 LOGD("sensorExpTime = %lld", *sensorExpTime);
6048 camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
6049 }
6050
6051 IF_META_AVAILABLE(int64_t, sensorFameDuration,
6052 CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
6053 LOGD("sensorFameDuration = %lld", *sensorFameDuration);
6054 camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
6055 }
6056
6057 IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
6058 CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
6059 LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
6060 camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
6061 sensorRollingShutterSkew, 1);
6062 }
6063
    IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
        LOGD("sensorSensitivity = %d", *sensorSensitivity);
        camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);

        //calculate the noise profile based on sensitivity
        // The noise model is linear in sensitivity; every color channel gets
        // the same (S, O) pair, laid out as [S0, O0, S1, O1, ...].
        double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
        double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
        // NOTE(review): variable-length array — a GCC/Clang extension, not
        // standard C++. Fine for this HAL's toolchain but not portable.
        double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
        for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
            noise_profile[i] = noise_profile_S;
            noise_profile[i+1] = noise_profile_O;
        }
        LOGD("noise model entry (S, O) is (%f, %f)",
                noise_profile_S, noise_profile_O);
        camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
                (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
    }
6081
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006082#ifndef USE_HAL_3_3
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07006083 int32_t fwk_ispSensitivity = 100;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006084 IF_META_AVAILABLE(int32_t, ispSensitivity, CAM_INTF_META_ISP_SENSITIVITY, metadata) {
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07006085 fwk_ispSensitivity = (int32_t) *ispSensitivity;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006086 }
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07006087 IF_META_AVAILABLE(float, postStatsSensitivity, CAM_INTF_META_ISP_POST_STATS_SENSITIVITY, metadata) {
6088 fwk_ispSensitivity = (int32_t) (*postStatsSensitivity * fwk_ispSensitivity);
6089 }
6090 camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &fwk_ispSensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006091#endif
6092
Thierry Strudel3d639192016-09-09 11:52:26 -07006093 IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
6094 uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
6095 camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
6096 }
6097
6098 IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
6099 int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
6100 *faceDetectMode);
6101 if (NAME_NOT_FOUND != val) {
6102 uint8_t fwk_faceDetectMode = (uint8_t)val;
6103 camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
6104
6105 if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
6106 IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
6107 CAM_INTF_META_FACE_DETECTION, metadata) {
6108 uint8_t numFaces = MIN(
6109 faceDetectionInfo->num_faces_detected, MAX_ROI);
6110 int32_t faceIds[MAX_ROI];
6111 uint8_t faceScores[MAX_ROI];
6112 int32_t faceRectangles[MAX_ROI * 4];
6113 int32_t faceLandmarks[MAX_ROI * 6];
6114 size_t j = 0, k = 0;
6115
6116 for (size_t i = 0; i < numFaces; i++) {
6117 faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
6118 // Adjust crop region from sensor output coordinate system to active
6119 // array coordinate system.
6120 cam_rect_t& rect = faceDetectionInfo->faces[i].face_boundary;
6121 mCropRegionMapper.toActiveArray(rect.left, rect.top,
6122 rect.width, rect.height);
6123
6124 convertToRegions(faceDetectionInfo->faces[i].face_boundary,
6125 faceRectangles+j, -1);
6126
6127 j+= 4;
6128 }
6129 if (numFaces <= 0) {
6130 memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
6131 memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
6132 memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
6133 memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
6134 }
6135
6136 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
6137 numFaces);
6138 camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
6139 faceRectangles, numFaces * 4U);
6140 if (fwk_faceDetectMode ==
6141 ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
6142 IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
6143 CAM_INTF_META_FACE_LANDMARK, metadata) {
6144
6145 for (size_t i = 0; i < numFaces; i++) {
6146 // Map the co-ordinate sensor output coordinate system to active
6147 // array coordinate system.
6148 mCropRegionMapper.toActiveArray(
6149 landmarks->face_landmarks[i].left_eye_center.x,
6150 landmarks->face_landmarks[i].left_eye_center.y);
6151 mCropRegionMapper.toActiveArray(
6152 landmarks->face_landmarks[i].right_eye_center.x,
6153 landmarks->face_landmarks[i].right_eye_center.y);
6154 mCropRegionMapper.toActiveArray(
6155 landmarks->face_landmarks[i].mouth_center.x,
6156 landmarks->face_landmarks[i].mouth_center.y);
6157
6158 convertLandmarks(landmarks->face_landmarks[i], faceLandmarks+k);
Thierry Strudel04e026f2016-10-10 11:27:36 -07006159 k+= TOTAL_LANDMARK_INDICES;
6160 }
6161 } else {
6162 for (size_t i = 0; i < numFaces; i++) {
6163 setInvalidLandmarks(faceLandmarks+k);
6164 k+= TOTAL_LANDMARK_INDICES;
Thierry Strudel3d639192016-09-09 11:52:26 -07006165 }
6166 }
6167
6168 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
6169 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
6170 faceLandmarks, numFaces * 6U);
6171 }
6172 }
6173 }
6174 }
6175 }
6176
    IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
        uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
        camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &fwk_histogramMode, 1);

        if (fwk_histogramMode == ANDROID_STATISTICS_HISTOGRAM_MODE_ON) {
            IF_META_AVAILABLE(cam_hist_stats_t, stats_data, CAM_INTF_META_HISTOGRAM, metadata) {
                // process histogram statistics info
                // The framework wants three channels (R, G, B); the HAL may
                // deliver a single bayer channel, all bayer channels, or a
                // YUV histogram. Replicate single-channel data across all
                // three rows of hist_buf.
                uint32_t hist_buf[3][CAM_HISTOGRAM_STATS_SIZE];
                // NOTE(review): hist_size is a sizeof() result, i.e. a BYTE
                // count. It is correct for the memcpy()s below, but it is
                // also passed as the element count to camMetadata.update()
                // at the end — verify update()'s count semantics; if it
                // expects elements this over-reads hist_buf.
                uint32_t hist_size = sizeof(cam_histogram_data_t::hist_buf);
                cam_histogram_data_t rHistData, gHistData, bHistData;
                memset(&rHistData, 0, sizeof(rHistData));
                memset(&gHistData, 0, sizeof(gHistData));
                memset(&bHistData, 0, sizeof(bHistData));

                switch (stats_data->type) {
                case CAM_HISTOGRAM_TYPE_BAYER:
                    switch (stats_data->bayer_stats.data_type) {
                    case CAM_STATS_CHANNEL_GR:
                        rHistData = gHistData = bHistData = stats_data->bayer_stats.gr_stats;
                        break;
                    case CAM_STATS_CHANNEL_GB:
                        rHistData = gHistData = bHistData = stats_data->bayer_stats.gb_stats;
                        break;
                    case CAM_STATS_CHANNEL_B:
                        rHistData = gHistData = bHistData = stats_data->bayer_stats.b_stats;
                        break;
                    case CAM_STATS_CHANNEL_ALL:
                        rHistData = stats_data->bayer_stats.r_stats;
                        //Framework expects only 3 channels. So, for now,
                        //use gb stats for G channel.
                        gHistData = stats_data->bayer_stats.gb_stats;
                        bHistData = stats_data->bayer_stats.b_stats;
                        break;
                    case CAM_STATS_CHANNEL_Y:
                    case CAM_STATS_CHANNEL_R:
                    default:
                        rHistData = gHistData = bHistData = stats_data->bayer_stats.r_stats;
                        break;
                    }
                    break;
                case CAM_HISTOGRAM_TYPE_YUV:
                    rHistData = gHistData = bHistData = stats_data->yuv_stats;
                    break;
                }

                // Pack the three channels contiguously: row 0 = R, 1 = G, 2 = B.
                memcpy(hist_buf, rHistData.hist_buf, hist_size);
                memcpy(hist_buf[1], gHistData.hist_buf, hist_size);
                memcpy(hist_buf[2], bHistData.hist_buf, hist_size);

                camMetadata.update(ANDROID_STATISTICS_HISTOGRAM, (int32_t*)hist_buf, hist_size*3);
            }
        }
    }
6230
6231 IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
6232 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
6233 uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
6234 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
6235 }
6236
6237 IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
6238 CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
6239 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
6240 CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
6241 }
6242
6243 IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
6244 CAM_INTF_META_LENS_SHADING_MAP, metadata) {
6245 size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
6246 CAM_MAX_SHADING_MAP_HEIGHT);
6247 size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
6248 CAM_MAX_SHADING_MAP_WIDTH);
6249 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
6250 lensShadingMap->lens_shading, 4U * map_width * map_height);
6251 }
6252
6253 IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
6254 uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
6255 camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
6256 }
6257
6258 IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
6259 //Populate CAM_INTF_META_TONEMAP_CURVES
6260 /* ch0 = G, ch 1 = B, ch 2 = R*/
6261 if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
6262 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
6263 tonemap->tonemap_points_cnt,
6264 CAM_MAX_TONEMAP_CURVE_SIZE);
6265 tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
6266 }
6267
6268 camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
6269 &tonemap->curves[0].tonemap_points[0][0],
6270 tonemap->tonemap_points_cnt * 2);
6271
6272 camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
6273 &tonemap->curves[1].tonemap_points[0][0],
6274 tonemap->tonemap_points_cnt * 2);
6275
6276 camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
6277 &tonemap->curves[2].tonemap_points[0][0],
6278 tonemap->tonemap_points_cnt * 2);
6279 }
6280
6281 IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
6282 CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
6283 camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
6284 CC_GAIN_MAX);
6285 }
6286
6287 IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
6288 CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
6289 camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
6290 (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
6291 CC_MATRIX_COLS * CC_MATRIX_ROWS);
6292 }
6293
6294 IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
6295 CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
6296 if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
6297 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
6298 toneCurve->tonemap_points_cnt,
6299 CAM_MAX_TONEMAP_CURVE_SIZE);
6300 toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
6301 }
6302 camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
6303 (float*)toneCurve->curve.tonemap_points,
6304 toneCurve->tonemap_points_cnt * 2);
6305 }
6306
6307 IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
6308 CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
6309 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
6310 predColorCorrectionGains->gains, 4);
6311 }
6312
6313 IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
6314 CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
6315 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
6316 (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
6317 CC_MATRIX_ROWS * CC_MATRIX_COLS);
6318 }
6319
6320 IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
6321 camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
6322 }
6323
6324 IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
6325 uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
6326 camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
6327 }
6328
6329 IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
6330 uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
6331 camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
6332 }
6333
6334 IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
6335 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
6336 *effectMode);
6337 if (NAME_NOT_FOUND != val) {
6338 uint8_t fwk_effectMode = (uint8_t)val;
6339 camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
6340 }
6341 }
6342
6343 IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
6344 CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
6345 int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
6346 METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
6347 if (NAME_NOT_FOUND != fwk_testPatternMode) {
6348 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
6349 }
6350 int32_t fwk_testPatternData[4];
6351 fwk_testPatternData[0] = testPatternData->r;
6352 fwk_testPatternData[3] = testPatternData->b;
6353 switch (gCamCapability[mCameraId]->color_arrangement) {
6354 case CAM_FILTER_ARRANGEMENT_RGGB:
6355 case CAM_FILTER_ARRANGEMENT_GRBG:
6356 fwk_testPatternData[1] = testPatternData->gr;
6357 fwk_testPatternData[2] = testPatternData->gb;
6358 break;
6359 case CAM_FILTER_ARRANGEMENT_GBRG:
6360 case CAM_FILTER_ARRANGEMENT_BGGR:
6361 fwk_testPatternData[2] = testPatternData->gr;
6362 fwk_testPatternData[1] = testPatternData->gb;
6363 break;
6364 default:
6365 LOGE("color arrangement %d is not supported",
6366 gCamCapability[mCameraId]->color_arrangement);
6367 break;
6368 }
6369 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
6370 }
6371
6372 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
6373 camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
6374 }
6375
6376 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
6377 String8 str((const char *)gps_methods);
6378 camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
6379 }
6380
6381 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
6382 camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
6383 }
6384
6385 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
6386 camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
6387 }
6388
6389 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
6390 uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
6391 camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
6392 }
6393
6394 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
6395 uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
6396 camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
6397 }
6398
6399 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
6400 int32_t fwk_thumb_size[2];
6401 fwk_thumb_size[0] = thumb_size->width;
6402 fwk_thumb_size[1] = thumb_size->height;
6403 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
6404 }
6405
6406 IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
6407 camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
6408 privateData,
6409 MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
6410 }
6411
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006412 IF_META_AVAILABLE(int32_t, meteringMode, CAM_INTF_PARM_AEC_ALGO_TYPE, metadata) {
6413 camMetadata.update(QCAMERA3_EXPOSURE_METERING_MODE,
6414 meteringMode, 1);
6415 }
6416
Thierry Strudel3d639192016-09-09 11:52:26 -07006417 if (metadata->is_tuning_params_valid) {
6418 uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
6419 uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
6420 metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
6421
6422
6423 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
6424 sizeof(uint32_t));
6425 data += sizeof(uint32_t);
6426
6427 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
6428 sizeof(uint32_t));
6429 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
6430 data += sizeof(uint32_t);
6431
6432 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
6433 sizeof(uint32_t));
6434 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
6435 data += sizeof(uint32_t);
6436
6437 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
6438 sizeof(uint32_t));
6439 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
6440 data += sizeof(uint32_t);
6441
6442 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
6443 sizeof(uint32_t));
6444 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
6445 data += sizeof(uint32_t);
6446
6447 metadata->tuning_params.tuning_mod3_data_size = 0;
6448 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
6449 sizeof(uint32_t));
6450 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
6451 data += sizeof(uint32_t);
6452
6453 size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
6454 TUNING_SENSOR_DATA_MAX);
6455 memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
6456 count);
6457 data += count;
6458
6459 count = MIN(metadata->tuning_params.tuning_vfe_data_size,
6460 TUNING_VFE_DATA_MAX);
6461 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
6462 count);
6463 data += count;
6464
6465 count = MIN(metadata->tuning_params.tuning_cpp_data_size,
6466 TUNING_CPP_DATA_MAX);
6467 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
6468 count);
6469 data += count;
6470
6471 count = MIN(metadata->tuning_params.tuning_cac_data_size,
6472 TUNING_CAC_DATA_MAX);
6473 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
6474 count);
6475 data += count;
6476
6477 camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
6478 (int32_t *)(void *)tuning_meta_data_blob,
6479 (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
6480 }
6481
6482 IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
6483 CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
6484 camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
6485 (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
6486 NEUTRAL_COL_POINTS);
6487 }
6488
6489 IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
6490 uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
6491 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
6492 }
6493
6494 IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
6495 int32_t aeRegions[REGIONS_TUPLE_COUNT];
6496 // Adjust crop region from sensor output coordinate system to active
6497 // array coordinate system.
6498 mCropRegionMapper.toActiveArray(hAeRegions->rect.left, hAeRegions->rect.top,
6499 hAeRegions->rect.width, hAeRegions->rect.height);
6500
6501 convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
6502 camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
6503 REGIONS_TUPLE_COUNT);
6504 LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
6505 aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
6506 hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width,
6507 hAeRegions->rect.height);
6508 }
6509
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07006510 IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
6511 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
6512 if (NAME_NOT_FOUND != val) {
6513 uint8_t fwkAfMode = (uint8_t)val;
6514 camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
6515 LOGD("Metadata : ANDROID_CONTROL_AF_MODE %d", val);
6516 } else {
6517 LOGH("Metadata not found : ANDROID_CONTROL_AF_MODE %d",
6518 val);
6519 }
6520 }
6521
Thierry Strudel3d639192016-09-09 11:52:26 -07006522 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
6523 uint8_t fwk_afState = (uint8_t) *afState;
6524 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07006525 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
Thierry Strudel3d639192016-09-09 11:52:26 -07006526 }
6527
6528 IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
6529 camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
6530 }
6531
6532 IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
6533 camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
6534 }
6535
6536 IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
6537 uint8_t fwk_lensState = *lensState;
6538 camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
6539 }
6540
6541 IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
6542 /*af regions*/
6543 int32_t afRegions[REGIONS_TUPLE_COUNT];
6544 // Adjust crop region from sensor output coordinate system to active
6545 // array coordinate system.
6546 mCropRegionMapper.toActiveArray(hAfRegions->rect.left, hAfRegions->rect.top,
6547 hAfRegions->rect.width, hAfRegions->rect.height);
6548
6549 convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
6550 camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
6551 REGIONS_TUPLE_COUNT);
6552 LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
6553 afRegions[0], afRegions[1], afRegions[2], afRegions[3],
6554 hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width,
6555 hAfRegions->rect.height);
6556 }
6557
6558 IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
Shuzhen Wangf6890e02016-08-12 14:28:54 -07006559 uint32_t ab_mode = *hal_ab_mode;
6560 if (ab_mode == CAM_ANTIBANDING_MODE_AUTO_60HZ ||
6561 ab_mode == CAM_ANTIBANDING_MODE_AUTO_50HZ) {
6562 ab_mode = CAM_ANTIBANDING_MODE_AUTO;
6563 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006564 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
Shuzhen Wangf6890e02016-08-12 14:28:54 -07006565 ab_mode);
Thierry Strudel3d639192016-09-09 11:52:26 -07006566 if (NAME_NOT_FOUND != val) {
6567 uint8_t fwk_ab_mode = (uint8_t)val;
6568 camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
6569 }
6570 }
6571
6572 IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
6573 int val = lookupFwkName(SCENE_MODES_MAP,
6574 METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
6575 if (NAME_NOT_FOUND != val) {
6576 uint8_t fwkBestshotMode = (uint8_t)val;
6577 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
6578 LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
6579 } else {
6580 LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
6581 }
6582 }
6583
6584 IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
6585 uint8_t fwk_mode = (uint8_t) *mode;
6586 camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
6587 }
6588
6589 /* Constant metadata values to be update*/
6590 uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
6591 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
6592
6593 uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
6594 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
6595
6596 int32_t hotPixelMap[2];
6597 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
6598
6599 // CDS
6600 IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
6601 camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
6602 }
6603
Thierry Strudel04e026f2016-10-10 11:27:36 -07006604 IF_META_AVAILABLE(cam_sensor_hdr_type_t, vhdr, CAM_INTF_PARM_SENSOR_HDR, metadata) {
6605 int32_t fwk_hdr;
6606 if(*vhdr == CAM_SENSOR_HDR_OFF) {
6607 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_OFF;
6608 } else {
6609 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_ON;
6610 }
6611 camMetadata.update(QCAMERA3_VIDEO_HDR_MODE, &fwk_hdr, 1);
6612 }
6613
6614 IF_META_AVAILABLE(cam_ir_mode_type_t, ir, CAM_INTF_META_IR_MODE, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07006615 int32_t fwk_ir = (int32_t) *ir;
6616 camMetadata.update(QCAMERA3_IR_MODE, &fwk_ir, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -07006617 }
6618
Thierry Strudel269c81a2016-10-12 12:13:59 -07006619 // AEC SPEED
6620 IF_META_AVAILABLE(float, aec, CAM_INTF_META_AEC_CONVERGENCE_SPEED, metadata) {
6621 camMetadata.update(QCAMERA3_AEC_CONVERGENCE_SPEED, aec, 1);
6622 }
6623
6624 // AWB SPEED
6625 IF_META_AVAILABLE(float, awb, CAM_INTF_META_AWB_CONVERGENCE_SPEED, metadata) {
6626 camMetadata.update(QCAMERA3_AWB_CONVERGENCE_SPEED, awb, 1);
6627 }
6628
Thierry Strudel3d639192016-09-09 11:52:26 -07006629 // TNR
6630 IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
6631 uint8_t tnr_enable = tnr->denoise_enable;
6632 int32_t tnr_process_type = (int32_t)tnr->process_plates;
6633
6634 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
6635 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
6636 }
6637
6638 // Reprocess crop data
6639 IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
6640 uint8_t cnt = crop_data->num_of_streams;
6641 if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
6642 // mm-qcamera-daemon only posts crop_data for streams
6643 // not linked to pproc. So no valid crop metadata is not
6644 // necessarily an error case.
6645 LOGD("No valid crop metadata entries");
6646 } else {
6647 uint32_t reproc_stream_id;
6648 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
6649 LOGD("No reprocessible stream found, ignore crop data");
6650 } else {
6651 int rc = NO_ERROR;
6652 Vector<int32_t> roi_map;
6653 int32_t *crop = new int32_t[cnt*4];
6654 if (NULL == crop) {
6655 rc = NO_MEMORY;
6656 }
6657 if (NO_ERROR == rc) {
6658 int32_t streams_found = 0;
6659 for (size_t i = 0; i < cnt; i++) {
6660 if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
6661 if (pprocDone) {
6662 // HAL already does internal reprocessing,
6663 // either via reprocessing before JPEG encoding,
6664 // or offline postprocessing for pproc bypass case.
6665 crop[0] = 0;
6666 crop[1] = 0;
6667 crop[2] = mInputStreamInfo.dim.width;
6668 crop[3] = mInputStreamInfo.dim.height;
6669 } else {
6670 crop[0] = crop_data->crop_info[i].crop.left;
6671 crop[1] = crop_data->crop_info[i].crop.top;
6672 crop[2] = crop_data->crop_info[i].crop.width;
6673 crop[3] = crop_data->crop_info[i].crop.height;
6674 }
6675 roi_map.add(crop_data->crop_info[i].roi_map.left);
6676 roi_map.add(crop_data->crop_info[i].roi_map.top);
6677 roi_map.add(crop_data->crop_info[i].roi_map.width);
6678 roi_map.add(crop_data->crop_info[i].roi_map.height);
6679 streams_found++;
6680 LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
6681 crop[0], crop[1], crop[2], crop[3]);
6682 LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
6683 crop_data->crop_info[i].roi_map.left,
6684 crop_data->crop_info[i].roi_map.top,
6685 crop_data->crop_info[i].roi_map.width,
6686 crop_data->crop_info[i].roi_map.height);
6687 break;
6688
6689 }
6690 }
6691 camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
6692 &streams_found, 1);
6693 camMetadata.update(QCAMERA3_CROP_REPROCESS,
6694 crop, (size_t)(streams_found * 4));
6695 if (roi_map.array()) {
6696 camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
6697 roi_map.array(), roi_map.size());
6698 }
6699 }
6700 if (crop) {
6701 delete [] crop;
6702 }
6703 }
6704 }
6705 }
6706
6707 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
6708 // Regardless of CAC supports or not, CTS is expecting the CAC result to be non NULL and
6709 // so hardcoding the CAC result to OFF mode.
6710 uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
6711 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
6712 } else {
6713 IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
6714 int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
6715 *cacMode);
6716 if (NAME_NOT_FOUND != val) {
6717 uint8_t resultCacMode = (uint8_t)val;
6718 // check whether CAC result from CB is equal to Framework set CAC mode
6719 // If not equal then set the CAC mode came in corresponding request
6720 if (fwk_cacMode != resultCacMode) {
6721 resultCacMode = fwk_cacMode;
6722 }
6723 LOGD("fwk_cacMode=%d resultCacMode=%d", fwk_cacMode, resultCacMode);
6724 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
6725 } else {
6726 LOGE("Invalid CAC camera parameter: %d", *cacMode);
6727 }
6728 }
6729 }
6730
6731 // Post blob of cam_cds_data through vendor tag.
6732 IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
6733 uint8_t cnt = cdsInfo->num_of_streams;
6734 cam_cds_data_t cdsDataOverride;
6735 memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
6736 cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
6737 cdsDataOverride.num_of_streams = 1;
6738 if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
6739 uint32_t reproc_stream_id;
6740 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
6741 LOGD("No reprocessible stream found, ignore cds data");
6742 } else {
6743 for (size_t i = 0; i < cnt; i++) {
6744 if (cdsInfo->cds_info[i].stream_id ==
6745 reproc_stream_id) {
6746 cdsDataOverride.cds_info[0].cds_enable =
6747 cdsInfo->cds_info[i].cds_enable;
6748 break;
6749 }
6750 }
6751 }
6752 } else {
6753 LOGD("Invalid stream count %d in CDS_DATA", cnt);
6754 }
6755 camMetadata.update(QCAMERA3_CDS_INFO,
6756 (uint8_t *)&cdsDataOverride,
6757 sizeof(cam_cds_data_t));
6758 }
6759
6760 // Ldaf calibration data
6761 if (!mLdafCalibExist) {
6762 IF_META_AVAILABLE(uint32_t, ldafCalib,
6763 CAM_INTF_META_LDAF_EXIF, metadata) {
6764 mLdafCalibExist = true;
6765 mLdafCalib[0] = ldafCalib[0];
6766 mLdafCalib[1] = ldafCalib[1];
6767 LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
6768 ldafCalib[0], ldafCalib[1]);
6769 }
6770 }
6771
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07006772 // Reprocess and DDM debug data through vendor tag
6773 cam_reprocess_info_t repro_info;
6774 memset(&repro_info, 0, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07006775 IF_META_AVAILABLE(cam_stream_crop_info_t, sensorCropInfo,
6776 CAM_INTF_META_SNAP_CROP_INFO_SENSOR, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07006777 memcpy(&(repro_info.sensor_crop_info), sensorCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07006778 }
6779 IF_META_AVAILABLE(cam_stream_crop_info_t, camifCropInfo,
6780 CAM_INTF_META_SNAP_CROP_INFO_CAMIF, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07006781 memcpy(&(repro_info.camif_crop_info), camifCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07006782 }
6783 IF_META_AVAILABLE(cam_stream_crop_info_t, ispCropInfo,
6784 CAM_INTF_META_SNAP_CROP_INFO_ISP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07006785 memcpy(&(repro_info.isp_crop_info), ispCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07006786 }
6787 IF_META_AVAILABLE(cam_stream_crop_info_t, cppCropInfo,
6788 CAM_INTF_META_SNAP_CROP_INFO_CPP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07006789 memcpy(&(repro_info.cpp_crop_info), cppCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07006790 }
6791 IF_META_AVAILABLE(cam_focal_length_ratio_t, ratio,
6792 CAM_INTF_META_AF_FOCAL_LENGTH_RATIO, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07006793 memcpy(&(repro_info.af_focal_length_ratio), ratio, sizeof(cam_focal_length_ratio_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07006794 }
6795 IF_META_AVAILABLE(int32_t, flip, CAM_INTF_PARM_FLIP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07006796 memcpy(&(repro_info.pipeline_flip), flip, sizeof(int32_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07006797 }
6798 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
6799 CAM_INTF_PARM_ROTATION, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07006800 memcpy(&(repro_info.rotation_info), rotationInfo, sizeof(cam_rotation_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07006801 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07006802 IF_META_AVAILABLE(cam_area_t, afRoi, CAM_INTF_META_AF_ROI, metadata) {
6803 memcpy(&(repro_info.af_roi), afRoi, sizeof(cam_area_t));
6804 }
6805 IF_META_AVAILABLE(cam_dyn_img_data_t, dynMask, CAM_INTF_META_IMG_DYN_FEAT, metadata) {
6806 memcpy(&(repro_info.dyn_mask), dynMask, sizeof(cam_dyn_img_data_t));
6807 }
6808 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB,
6809 (uint8_t *)&repro_info, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07006810
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006811 // INSTANT AEC MODE
6812 IF_META_AVAILABLE(uint8_t, instant_aec_mode,
6813 CAM_INTF_PARM_INSTANT_AEC, metadata) {
6814 camMetadata.update(QCAMERA3_INSTANT_AEC_MODE, instant_aec_mode, 1);
6815 }
6816
Shuzhen Wange763e802016-03-31 10:24:29 -07006817 // AF scene change
6818 IF_META_AVAILABLE(uint8_t, afSceneChange, CAM_INTF_META_AF_SCENE_CHANGE, metadata) {
6819 camMetadata.update(NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE, afSceneChange, 1);
6820 }
6821
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006822 /* In batch mode, cache the first metadata in the batch */
6823 if (mBatchSize && firstMetadataInBatch) {
6824 mCachedMetadata.clear();
6825 mCachedMetadata = camMetadata;
6826 }
6827
Thierry Strudel3d639192016-09-09 11:52:26 -07006828 resultMetadata = camMetadata.release();
6829 return resultMetadata;
6830}
6831
6832/*===========================================================================
6833 * FUNCTION : saveExifParams
6834 *
6835 * DESCRIPTION:
6836 *
6837 * PARAMETERS :
6838 * @metadata : metadata information from callback
6839 *
6840 * RETURN : none
6841 *
6842 *==========================================================================*/
6843void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
6844{
6845 IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
6846 CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
6847 if (mExifParams.debug_params) {
6848 mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
6849 mExifParams.debug_params->ae_debug_params_valid = TRUE;
6850 }
6851 }
6852 IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
6853 CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
6854 if (mExifParams.debug_params) {
6855 mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
6856 mExifParams.debug_params->awb_debug_params_valid = TRUE;
6857 }
6858 }
6859 IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
6860 CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
6861 if (mExifParams.debug_params) {
6862 mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
6863 mExifParams.debug_params->af_debug_params_valid = TRUE;
6864 }
6865 }
6866 IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
6867 CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
6868 if (mExifParams.debug_params) {
6869 mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
6870 mExifParams.debug_params->asd_debug_params_valid = TRUE;
6871 }
6872 }
6873 IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
6874 CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
6875 if (mExifParams.debug_params) {
6876 mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
6877 mExifParams.debug_params->stats_debug_params_valid = TRUE;
6878 }
6879 }
6880 IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t,bestats_exif_debug_params,
6881 CAM_INTF_META_EXIF_DEBUG_BESTATS, metadata) {
6882 if (mExifParams.debug_params) {
6883 mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
6884 mExifParams.debug_params->bestats_debug_params_valid = TRUE;
6885 }
6886 }
6887 IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
6888 CAM_INTF_META_EXIF_DEBUG_BHIST, metadata) {
6889 if (mExifParams.debug_params) {
6890 mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
6891 mExifParams.debug_params->bhist_debug_params_valid = TRUE;
6892 }
6893 }
6894 IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
6895 CAM_INTF_META_EXIF_DEBUG_3A_TUNING, metadata) {
6896 if (mExifParams.debug_params) {
6897 mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
6898 mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
6899 }
6900 }
6901}
6902
/*===========================================================================
 * FUNCTION   : get3AExifParams
 *
 * DESCRIPTION: Accessor for the cached 3A EXIF parameters (including the
 *              debug params populated by saveExifParams). Returned by value;
 *              note the embedded debug_params pointer is shared with this
 *              object, not deep-copied.
 *
 * PARAMETERS : none
 *
 *
 * RETURN     : mm_jpeg_exif_params_t
 *
 *==========================================================================*/
mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
{
    return mExifParams;
}
6918
/*===========================================================================
 * FUNCTION   : translateCbUrgentMetadataToResultMetadata
 *
 * DESCRIPTION: Translate the "urgent" (partial) HAL metadata posted by the
 *              backend into framework result metadata. Covers the 3A tags
 *              the framework wants ahead of the full result: AWB state/mode,
 *              AE state and precapture trigger, AF trigger, and the deduced
 *              AE mode. Also advances the instant-AEC settle tracking state.
 *
 * PARAMETERS :
 *   @metadata : metadata information from callback
 *
 * RETURN     : camera_metadata_t*
 *                  metadata in a format specified by fwk
 *==========================================================================*/
camera_metadata_t*
QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
                                (metadata_buffer_t *metadata)
{
    CameraMetadata camMetadata;
    camera_metadata_t *resultMetadata;


    // AWB state: HAL enum values map one-to-one onto the framework's
    // uint8_t ANDROID_CONTROL_AWB_STATE values, so a cast suffices.
    IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
        uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
        camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
        LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
    }

    // AE precapture trigger and its id are echoed back to the framework.
    IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
        camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
                &aecTrigger->trigger, 1);
        camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
                &aecTrigger->trigger_id, 1);
        LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
                 aecTrigger->trigger);
        LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
                aecTrigger->trigger_id);
    }

    IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
        uint8_t fwk_ae_state = (uint8_t) *ae_state;
        camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
        LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
    }

    // AF trigger and trigger id, echoed back to the framework.
    IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
        camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
                &af_trigger->trigger, 1);
        LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
                 af_trigger->trigger);
        camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &af_trigger->trigger_id, 1);
        LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
                af_trigger->trigger_id);
    }

    // AWB mode needs a table lookup (HAL and framework enums differ);
    // skip the tag if the HAL value has no framework equivalent.
    IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
        int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
                METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
        if (NAME_NOT_FOUND != val) {
            uint8_t fwkWhiteBalanceMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
            LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
        } else {
            LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
        }
    }

    // Deduce ANDROID_CONTROL_AE_MODE from three separate HAL tags.
    // Sentinel defaults (CAM_*_MAX / -1) mark each tag as "not present".
    uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
    uint32_t aeMode = CAM_AE_MODE_MAX;
    int32_t flashMode = CAM_FLASH_MODE_MAX;
    int32_t redeye = -1;
    IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
        aeMode = *pAeMode;
    }
    IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
        flashMode = *pFlashMode;
    }
    IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
        redeye = *pRedeye;
    }

    // Precedence: redeye reduction > flash mode > plain AE on/off.
    // If none of the tags resolves, the AE_MODE tag is left unset.
    if (1 == redeye) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
        int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
                flashMode);
        if (NAME_NOT_FOUND != val) {
            fwk_aeMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
        } else {
            LOGE("Unsupported flash mode %d", flashMode);
        }
    } else if (aeMode == CAM_AE_MODE_ON) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else if (aeMode == CAM_AE_MODE_OFF) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else {
        LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
              "flashMode:%d, aeMode:%u!!!",
                 redeye, flashMode, aeMode);
    }
    if (mInstantAEC) {
        // Increment frame Idx count until a bound reached for instant AEC.
        mInstantAecFrameIdxCount++;
        IF_META_AVAILABLE(cam_3a_params_t, ae_params,
                CAM_INTF_META_AEC_INFO, metadata) {
            LOGH("ae_params->settled = %d",ae_params->settled);
            // If AEC settled, or if number of frames reached bound value,
            // should reset instant AEC.
            // mResetInstantAEC signals other paths to undo instant-AEC
            // skip-frame handling on the next opportunity.
            if (ae_params->settled ||
                    (mInstantAecFrameIdxCount > mAecSkipDisplayFrameBound)) {
                LOGH("AEC settled or Frames reached instantAEC bound, resetting instantAEC");
                mInstantAEC = false;
                mResetInstantAEC = true;
                mInstantAecFrameIdxCount = 0;
            }
        }
    }
    // Release ownership of the buffer to the caller.
    resultMetadata = camMetadata.release();
    return resultMetadata;
}
7040
7041/*===========================================================================
7042 * FUNCTION : dumpMetadataToFile
7043 *
7044 * DESCRIPTION: Dumps tuning metadata to file system
7045 *
7046 * PARAMETERS :
7047 * @meta : tuning metadata
7048 * @dumpFrameCount : current dump frame count
7049 * @enabled : Enable mask
7050 *
7051 *==========================================================================*/
7052void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
7053 uint32_t &dumpFrameCount,
7054 bool enabled,
7055 const char *type,
7056 uint32_t frameNumber)
7057{
7058 //Some sanity checks
7059 if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
7060 LOGE("Tuning sensor data size bigger than expected %d: %d",
7061 meta.tuning_sensor_data_size,
7062 TUNING_SENSOR_DATA_MAX);
7063 return;
7064 }
7065
7066 if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
7067 LOGE("Tuning VFE data size bigger than expected %d: %d",
7068 meta.tuning_vfe_data_size,
7069 TUNING_VFE_DATA_MAX);
7070 return;
7071 }
7072
7073 if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
7074 LOGE("Tuning CPP data size bigger than expected %d: %d",
7075 meta.tuning_cpp_data_size,
7076 TUNING_CPP_DATA_MAX);
7077 return;
7078 }
7079
7080 if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
7081 LOGE("Tuning CAC data size bigger than expected %d: %d",
7082 meta.tuning_cac_data_size,
7083 TUNING_CAC_DATA_MAX);
7084 return;
7085 }
7086 //
7087
7088 if(enabled){
7089 char timeBuf[FILENAME_MAX];
7090 char buf[FILENAME_MAX];
7091 memset(buf, 0, sizeof(buf));
7092 memset(timeBuf, 0, sizeof(timeBuf));
7093 time_t current_time;
7094 struct tm * timeinfo;
7095 time (&current_time);
7096 timeinfo = localtime (&current_time);
7097 if (timeinfo != NULL) {
7098 strftime (timeBuf, sizeof(timeBuf),
7099 QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
7100 }
7101 String8 filePath(timeBuf);
7102 snprintf(buf,
7103 sizeof(buf),
7104 "%dm_%s_%d.bin",
7105 dumpFrameCount,
7106 type,
7107 frameNumber);
7108 filePath.append(buf);
7109 int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
7110 if (file_fd >= 0) {
7111 ssize_t written_len = 0;
7112 meta.tuning_data_version = TUNING_DATA_VERSION;
7113 void *data = (void *)((uint8_t *)&meta.tuning_data_version);
7114 written_len += write(file_fd, data, sizeof(uint32_t));
7115 data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
7116 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
7117 written_len += write(file_fd, data, sizeof(uint32_t));
7118 data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
7119 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
7120 written_len += write(file_fd, data, sizeof(uint32_t));
7121 data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
7122 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
7123 written_len += write(file_fd, data, sizeof(uint32_t));
7124 data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
7125 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
7126 written_len += write(file_fd, data, sizeof(uint32_t));
7127 meta.tuning_mod3_data_size = 0;
7128 data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
7129 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
7130 written_len += write(file_fd, data, sizeof(uint32_t));
7131 size_t total_size = meta.tuning_sensor_data_size;
7132 data = (void *)((uint8_t *)&meta.data);
7133 written_len += write(file_fd, data, total_size);
7134 total_size = meta.tuning_vfe_data_size;
7135 data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
7136 written_len += write(file_fd, data, total_size);
7137 total_size = meta.tuning_cpp_data_size;
7138 data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
7139 written_len += write(file_fd, data, total_size);
7140 total_size = meta.tuning_cac_data_size;
7141 data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
7142 written_len += write(file_fd, data, total_size);
7143 close(file_fd);
7144 }else {
7145 LOGE("fail to open file for metadata dumping");
7146 }
7147 }
7148}
7149
7150/*===========================================================================
7151 * FUNCTION : cleanAndSortStreamInfo
7152 *
7153 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
7154 * and sort them such that raw stream is at the end of the list
7155 * This is a workaround for camera daemon constraint.
7156 *
7157 * PARAMETERS : None
7158 *
7159 *==========================================================================*/
7160void QCamera3HardwareInterface::cleanAndSortStreamInfo()
7161{
7162 List<stream_info_t *> newStreamInfo;
7163
7164 /*clean up invalid streams*/
7165 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
7166 it != mStreamInfo.end();) {
7167 if(((*it)->status) == INVALID){
7168 QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
7169 delete channel;
7170 free(*it);
7171 it = mStreamInfo.erase(it);
7172 } else {
7173 it++;
7174 }
7175 }
7176
7177 // Move preview/video/callback/snapshot streams into newList
7178 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
7179 it != mStreamInfo.end();) {
7180 if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
7181 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
7182 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
7183 newStreamInfo.push_back(*it);
7184 it = mStreamInfo.erase(it);
7185 } else
7186 it++;
7187 }
7188 // Move raw streams into newList
7189 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
7190 it != mStreamInfo.end();) {
7191 newStreamInfo.push_back(*it);
7192 it = mStreamInfo.erase(it);
7193 }
7194
7195 mStreamInfo = newStreamInfo;
7196}
7197
/*===========================================================================
 * FUNCTION   : extractJpegMetadata
 *
 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
 *              JPEG metadata is cached in HAL, and return as part of capture
 *              result when metadata is returned from camera daemon.
 *
 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
 *              @request: capture request
 *
 *==========================================================================*/
void QCamera3HardwareInterface::extractJpegMetadata(
        CameraMetadata& jpegMetadata,
        const camera3_capture_request_t *request)
{
    CameraMetadata frame_settings;
    frame_settings = request->settings;

    // Copy each JPEG-related tag from the request settings into the cached
    // jpegMetadata — but only when the tag is actually present in the
    // request. The data-union member used (d / u8 / i64 / i32) must match
    // each tag's declared metadata type.
    if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
        jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
                frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
                frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);

    if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
        jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
                frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
                frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);

    if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
        jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
                frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
                frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);

    if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
        jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
                frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
                frame_settings.find(ANDROID_JPEG_ORIENTATION).count);

    if (frame_settings.exists(ANDROID_JPEG_QUALITY))
        jpegMetadata.update(ANDROID_JPEG_QUALITY,
                frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
                frame_settings.find(ANDROID_JPEG_QUALITY).count);

    if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
        jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
                frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
                frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);

    if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
        int32_t thumbnail_size[2];
        thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
        thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
        // When the HAL itself performs the JPEG rotation (no EXIF-only
        // rotation), a 90/270-degree orientation means the encoded image is
        // transposed, so the cached thumbnail size must be transposed too.
        if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
            int32_t orientation =
                  frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
            if ((!needJpegExifRotation()) && ((orientation == 90) || (orientation == 270))) {
               //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
               int32_t temp;
               temp = thumbnail_size[0];
               thumbnail_size[0] = thumbnail_size[1];
               thumbnail_size[1] = temp;
            }
        }
        jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
                thumbnail_size,
                frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
    }

}
7267
7268/*===========================================================================
7269 * FUNCTION : convertToRegions
7270 *
7271 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
7272 *
7273 * PARAMETERS :
7274 * @rect : cam_rect_t struct to convert
7275 * @region : int32_t destination array
7276 * @weight : if we are converting from cam_area_t, weight is valid
7277 * else weight = -1
7278 *
7279 *==========================================================================*/
7280void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
7281 int32_t *region, int weight)
7282{
7283 region[0] = rect.left;
7284 region[1] = rect.top;
7285 region[2] = rect.left + rect.width;
7286 region[3] = rect.top + rect.height;
7287 if (weight > -1) {
7288 region[4] = weight;
7289 }
7290}
7291
7292/*===========================================================================
7293 * FUNCTION : convertFromRegions
7294 *
7295 * DESCRIPTION: helper method to convert from array to cam_rect_t
7296 *
7297 * PARAMETERS :
7298 * @rect : cam_rect_t struct to convert
7299 * @region : int32_t destination array
7300 * @weight : if we are converting from cam_area_t, weight is valid
7301 * else weight = -1
7302 *
7303 *==========================================================================*/
7304void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
7305 const camera_metadata_t *settings, uint32_t tag)
7306{
7307 CameraMetadata frame_settings;
7308 frame_settings = settings;
7309 int32_t x_min = frame_settings.find(tag).data.i32[0];
7310 int32_t y_min = frame_settings.find(tag).data.i32[1];
7311 int32_t x_max = frame_settings.find(tag).data.i32[2];
7312 int32_t y_max = frame_settings.find(tag).data.i32[3];
7313 roi.weight = frame_settings.find(tag).data.i32[4];
7314 roi.rect.left = x_min;
7315 roi.rect.top = y_min;
7316 roi.rect.width = x_max - x_min;
7317 roi.rect.height = y_max - y_min;
7318}
7319
/*===========================================================================
 * FUNCTION   : resetIfNeededROI
 *
 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
 *              crop region
 *
 * PARAMETERS :
 *   @roi              : cam_area_t struct to resize
 *   @scalerCropRegion : cam_crop_region_t region to compare against
 *
 * RETURN     : true  if the roi is usable (disabled, or clamped to lie
 *                    within the scaler crop region)
 *              false if the roi lies entirely outside the crop region
 *==========================================================================*/
bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
    const cam_crop_region_t* scalerCropRegion)
{
    // Bottom-right corners of the roi and of the crop region.
    int32_t roi_x_max = roi->rect.width + roi->rect.left;
    int32_t roi_y_max = roi->rect.height + roi->rect.top;
    int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
    int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;

    /* According to spec weight = 0 is used to indicate roi needs to be disabled
     * without having this check the calculations below to validate if the roi
     * is inside scalar crop region will fail resulting in the roi not being
     * reset causing algorithm to continue to use stale roi window
     */
    if (roi->weight == 0) {
        return true;
    }

    // Reject the roi if it does not intersect the crop region at all.
    if ((roi_x_max < scalerCropRegion->left) ||
        // right edge of roi window is left of scalar crop's left edge
        (roi_y_max < scalerCropRegion->top)  ||
        // bottom edge of roi window is above scalar crop's top edge
        (roi->rect.left > crop_x_max) ||
        // left edge of roi window is beyond(right) of scalar crop's right edge
        (roi->rect.top > crop_y_max)){
        // top edge of roi window is below scalar crop's bottom edge
        return false;
    }
    // Partial overlap: clamp each roi edge to the crop region.
    if (roi->rect.left < scalerCropRegion->left) {
        roi->rect.left = scalerCropRegion->left;
    }
    if (roi->rect.top < scalerCropRegion->top) {
        roi->rect.top = scalerCropRegion->top;
    }
    if (roi_x_max > crop_x_max) {
        roi_x_max = crop_x_max;
    }
    if (roi_y_max > crop_y_max) {
        roi_y_max = crop_y_max;
    }
    // Recompute width/height from the (possibly clamped) corners.
    roi->rect.width = roi_x_max - roi->rect.left;
    roi->rect.height = roi_y_max - roi->rect.top;
    return true;
}
7375
7376/*===========================================================================
7377 * FUNCTION : convertLandmarks
7378 *
7379 * DESCRIPTION: helper method to extract the landmarks from face detection info
7380 *
7381 * PARAMETERS :
7382 * @landmark_data : input landmark data to be converted
7383 * @landmarks : int32_t destination array
7384 *
7385 *
7386 *==========================================================================*/
7387void QCamera3HardwareInterface::convertLandmarks(
7388 cam_face_landmarks_info_t landmark_data,
7389 int32_t *landmarks)
7390{
Thierry Strudel04e026f2016-10-10 11:27:36 -07007391 if (landmark_data.is_left_eye_valid) {
7392 landmarks[LEFT_EYE_X] = (int32_t)landmark_data.left_eye_center.x;
7393 landmarks[LEFT_EYE_Y] = (int32_t)landmark_data.left_eye_center.y;
7394 } else {
7395 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
7396 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
7397 }
7398
7399 if (landmark_data.is_right_eye_valid) {
7400 landmarks[RIGHT_EYE_X] = (int32_t)landmark_data.right_eye_center.x;
7401 landmarks[RIGHT_EYE_Y] = (int32_t)landmark_data.right_eye_center.y;
7402 } else {
7403 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
7404 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
7405 }
7406
7407 if (landmark_data.is_mouth_valid) {
7408 landmarks[MOUTH_X] = (int32_t)landmark_data.mouth_center.x;
7409 landmarks[MOUTH_Y] = (int32_t)landmark_data.mouth_center.y;
7410 } else {
7411 landmarks[MOUTH_X] = FACE_INVALID_POINT;
7412 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
7413 }
7414}
7415
7416/*===========================================================================
7417 * FUNCTION : setInvalidLandmarks
7418 *
7419 * DESCRIPTION: helper method to set invalid landmarks
7420 *
7421 * PARAMETERS :
7422 * @landmarks : int32_t destination array
7423 *
7424 *
7425 *==========================================================================*/
7426void QCamera3HardwareInterface::setInvalidLandmarks(
7427 int32_t *landmarks)
7428{
7429 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
7430 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
7431 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
7432 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
7433 landmarks[MOUTH_X] = FACE_INVALID_POINT;
7434 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
Thierry Strudel3d639192016-09-09 11:52:26 -07007435}
7436
7437#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007438
/*===========================================================================
 * FUNCTION   : getCapabilities
 *
 * DESCRIPTION: query camera capability from back-end
 *
 * PARAMETERS :
 *   @ops        : mm-interface ops structure
 *   @cam_handle : camera handle for which we need capability
 *
 * RETURN     : ptr type of capability structure
 *              capability for success
 *              NULL for failure
 *==========================================================================*/
cam_capability_t *QCamera3HardwareInterface::getCapabilities(mm_camera_ops_t *ops,
        uint32_t cam_handle)
{
    int rc = NO_ERROR;
    QCamera3HeapMemory *capabilityHeap = NULL;
    // Heap-allocated copy handed to the caller; caller owns and must free().
    cam_capability_t *cap_ptr = NULL;

    if (ops == NULL) {
        LOGE("Invalid arguments");
        return NULL;
    }

    capabilityHeap = new QCamera3HeapMemory(1);
    // NOTE(review): this NULL check only fires if 'new' is non-throwing in
    // this build configuration; kept as defensive coding.
    if (capabilityHeap == NULL) {
        LOGE("creation of capabilityHeap failed");
        return NULL;
    }

    /* Allocate memory for capability buffer */
    rc = capabilityHeap->allocate(sizeof(cam_capability_t));
    if(rc != OK) {
        LOGE("No memory for cappability");
        goto allocate_failed;
    }

    /* Map memory for capability buffer */
    memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));

    // Share the heap buffer with the camera backend so query_capability()
    // can fill the capability structure in place.
    rc = ops->map_buf(cam_handle,
            CAM_MAPPING_BUF_TYPE_CAPABILITY, capabilityHeap->getFd(0),
            sizeof(cam_capability_t), capabilityHeap->getPtr(0));
    if(rc < 0) {
        LOGE("failed to map capability buffer");
        rc = FAILED_TRANSACTION;
        goto map_failed;
    }

    /* Query Capability */
    rc = ops->query_capability(cam_handle);
    if(rc < 0) {
        LOGE("failed to query capability");
        rc = FAILED_TRANSACTION;
        goto query_failed;
    }

    // Copy the backend-filled capability out of the shared heap buffer into
    // an allocation that outlives the heap (which is torn down below).
    cap_ptr = (cam_capability_t *)malloc(sizeof(cam_capability_t));
    if (cap_ptr == NULL) {
        LOGE("out of memory");
        rc = NO_MEMORY;
        goto query_failed;
    }

    memset(cap_ptr, 0, sizeof(cam_capability_t));
    memcpy(cap_ptr, DATA_PTR(capabilityHeap, 0), sizeof(cam_capability_t));

    int index;
    // Zero the analysis-stream padding offsets for every analysis type.
    for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
        cam_analysis_info_t *p_analysis_info = &cap_ptr->analysis_info[index];
        p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
        p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
    }

    // Cleanup labels run in reverse order of setup (unmap, then free the
    // heap buffer, then delete the heap object); the success path falls
    // through them too, since the copy in cap_ptr is already made.
query_failed:
    ops->unmap_buf(cam_handle, CAM_MAPPING_BUF_TYPE_CAPABILITY);
map_failed:
    capabilityHeap->deallocate();
allocate_failed:
    delete capabilityHeap;

    if (rc != NO_ERROR) {
        return NULL;
    } else {
        return cap_ptr;
    }
}
7527
Thierry Strudel3d639192016-09-09 11:52:26 -07007528/*===========================================================================
7529 * FUNCTION : initCapabilities
7530 *
7531 * DESCRIPTION: initialize camera capabilities in static data struct
7532 *
7533 * PARAMETERS :
7534 * @cameraId : camera Id
7535 *
7536 * RETURN : int32_t type of status
7537 * NO_ERROR -- success
7538 * none-zero failure code
7539 *==========================================================================*/
7540int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
7541{
7542 int rc = 0;
7543 mm_camera_vtbl_t *cameraHandle = NULL;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007544 uint32_t handle = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07007545
7546 rc = camera_open((uint8_t)cameraId, &cameraHandle);
7547 if (rc) {
7548 LOGE("camera_open failed. rc = %d", rc);
7549 goto open_failed;
7550 }
7551 if (!cameraHandle) {
7552 LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
7553 goto open_failed;
7554 }
7555
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007556 handle = get_main_camera_handle(cameraHandle->camera_handle);
7557 gCamCapability[cameraId] = getCapabilities(cameraHandle->ops, handle);
7558 if (gCamCapability[cameraId] == NULL) {
7559 rc = FAILED_TRANSACTION;
7560 goto failed_op;
Thierry Strudel3d639192016-09-09 11:52:26 -07007561 }
7562
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007563 gCamCapability[cameraId]->camera_index = cameraId;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007564 if (is_dual_camera_by_idx(cameraId)) {
7565 handle = get_aux_camera_handle(cameraHandle->camera_handle);
7566 gCamCapability[cameraId]->aux_cam_cap =
7567 getCapabilities(cameraHandle->ops, handle);
7568 if (gCamCapability[cameraId]->aux_cam_cap == NULL) {
7569 rc = FAILED_TRANSACTION;
7570 free(gCamCapability[cameraId]);
7571 goto failed_op;
7572 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08007573
7574 // Copy the main camera capability to main_cam_cap struct
7575 gCamCapability[cameraId]->main_cam_cap =
7576 (cam_capability_t *)malloc(sizeof(cam_capability_t));
7577 if (gCamCapability[cameraId]->main_cam_cap == NULL) {
7578 LOGE("out of memory");
7579 rc = NO_MEMORY;
7580 goto failed_op;
7581 }
7582 memcpy(gCamCapability[cameraId]->main_cam_cap, gCamCapability[cameraId],
7583 sizeof(cam_capability_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007584 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007585failed_op:
Thierry Strudel3d639192016-09-09 11:52:26 -07007586 cameraHandle->ops->close_camera(cameraHandle->camera_handle);
7587 cameraHandle = NULL;
7588open_failed:
7589 return rc;
7590}
7591
7592/*==========================================================================
7593 * FUNCTION : get3Aversion
7594 *
7595 * DESCRIPTION: get the Q3A S/W version
7596 *
7597 * PARAMETERS :
7598 * @sw_version: Reference of Q3A structure which will hold version info upon
7599 * return
7600 *
7601 * RETURN : None
7602 *
7603 *==========================================================================*/
7604void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
7605{
7606 if(gCamCapability[mCameraId])
7607 sw_version = gCamCapability[mCameraId]->q3a_version;
7608 else
7609 LOGE("Capability structure NULL!");
7610}
7611
7612
7613/*===========================================================================
7614 * FUNCTION : initParameters
7615 *
7616 * DESCRIPTION: initialize camera parameters
7617 *
7618 * PARAMETERS :
7619 *
7620 * RETURN : int32_t type of status
7621 * NO_ERROR -- success
7622 * none-zero failure code
7623 *==========================================================================*/
7624int QCamera3HardwareInterface::initParameters()
7625{
7626 int rc = 0;
7627
7628 //Allocate Set Param Buffer
7629 mParamHeap = new QCamera3HeapMemory(1);
7630 rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
7631 if(rc != OK) {
7632 rc = NO_MEMORY;
7633 LOGE("Failed to allocate SETPARM Heap memory");
7634 delete mParamHeap;
7635 mParamHeap = NULL;
7636 return rc;
7637 }
7638
7639 //Map memory for parameters buffer
7640 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
7641 CAM_MAPPING_BUF_TYPE_PARM_BUF,
7642 mParamHeap->getFd(0),
7643 sizeof(metadata_buffer_t),
7644 (metadata_buffer_t *) DATA_PTR(mParamHeap,0));
7645 if(rc < 0) {
7646 LOGE("failed to map SETPARM buffer");
7647 rc = FAILED_TRANSACTION;
7648 mParamHeap->deallocate();
7649 delete mParamHeap;
7650 mParamHeap = NULL;
7651 return rc;
7652 }
7653
7654 mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
7655
7656 mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
7657 return rc;
7658}
7659
/*===========================================================================
 * FUNCTION   : deinitParameters
 *
 * DESCRIPTION: de-initialize camera parameters; reverses initParameters()
 *
 * PARAMETERS :
 *
 * RETURN     : NONE
 *==========================================================================*/
void QCamera3HardwareInterface::deinitParameters()
{
    // Unmap the parameter buffer from the camera backend first, before the
    // backing heap memory is released below.
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_PARM_BUF);

    mParamHeap->deallocate();
    delete mParamHeap;
    mParamHeap = NULL;

    // mParameters pointed into mParamHeap's buffer (freed above), so only
    // the pointer itself needs clearing.
    mParameters = NULL;

    // mPrevParameters was a separate malloc'd copy; free it.
    free(mPrevParameters);
    mPrevParameters = NULL;
}
7683
7684/*===========================================================================
7685 * FUNCTION : calcMaxJpegSize
7686 *
7687 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
7688 *
7689 * PARAMETERS :
7690 *
7691 * RETURN : max_jpeg_size
7692 *==========================================================================*/
7693size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
7694{
7695 size_t max_jpeg_size = 0;
7696 size_t temp_width, temp_height;
7697 size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
7698 MAX_SIZES_CNT);
7699 for (size_t i = 0; i < count; i++) {
7700 temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
7701 temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
7702 if (temp_width * temp_height > max_jpeg_size ) {
7703 max_jpeg_size = temp_width * temp_height;
7704 }
7705 }
7706 max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
7707 return max_jpeg_size;
7708}
7709
7710/*===========================================================================
7711 * FUNCTION : getMaxRawSize
7712 *
7713 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
7714 *
7715 * PARAMETERS :
7716 *
7717 * RETURN : Largest supported Raw Dimension
7718 *==========================================================================*/
7719cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
7720{
7721 int max_width = 0;
7722 cam_dimension_t maxRawSize;
7723
7724 memset(&maxRawSize, 0, sizeof(cam_dimension_t));
7725 for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
7726 if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
7727 max_width = gCamCapability[camera_id]->raw_dim[i].width;
7728 maxRawSize = gCamCapability[camera_id]->raw_dim[i];
7729 }
7730 }
7731 return maxRawSize;
7732}
7733
7734
7735/*===========================================================================
7736 * FUNCTION : calcMaxJpegDim
7737 *
7738 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
7739 *
7740 * PARAMETERS :
7741 *
7742 * RETURN : max_jpeg_dim
7743 *==========================================================================*/
7744cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
7745{
7746 cam_dimension_t max_jpeg_dim;
7747 cam_dimension_t curr_jpeg_dim;
7748 max_jpeg_dim.width = 0;
7749 max_jpeg_dim.height = 0;
7750 curr_jpeg_dim.width = 0;
7751 curr_jpeg_dim.height = 0;
7752 for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
7753 curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
7754 curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
7755 if (curr_jpeg_dim.width * curr_jpeg_dim.height >
7756 max_jpeg_dim.width * max_jpeg_dim.height ) {
7757 max_jpeg_dim.width = curr_jpeg_dim.width;
7758 max_jpeg_dim.height = curr_jpeg_dim.height;
7759 }
7760 }
7761 return max_jpeg_dim;
7762}
7763
7764/*===========================================================================
7765 * FUNCTION : addStreamConfig
7766 *
7767 * DESCRIPTION: adds the stream configuration to the array
7768 *
7769 * PARAMETERS :
7770 * @available_stream_configs : pointer to stream configuration array
7771 * @scalar_format : scalar format
7772 * @dim : configuration dimension
7773 * @config_type : input or output configuration type
7774 *
7775 * RETURN : NONE
7776 *==========================================================================*/
7777void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
7778 int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
7779{
7780 available_stream_configs.add(scalar_format);
7781 available_stream_configs.add(dim.width);
7782 available_stream_configs.add(dim.height);
7783 available_stream_configs.add(config_type);
7784}
7785
7786/*===========================================================================
7787 * FUNCTION : suppportBurstCapture
7788 *
7789 * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
7790 *
7791 * PARAMETERS :
7792 * @cameraId : camera Id
7793 *
7794 * RETURN : true if camera supports BURST_CAPTURE
7795 * false otherwise
7796 *==========================================================================*/
7797bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
7798{
7799 const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
7800 const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
7801 const int32_t highResWidth = 3264;
7802 const int32_t highResHeight = 2448;
7803
7804 if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
7805 // Maximum resolution images cannot be captured at >= 10fps
7806 // -> not supporting BURST_CAPTURE
7807 return false;
7808 }
7809
7810 if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
7811 // Maximum resolution images can be captured at >= 20fps
7812 // --> supporting BURST_CAPTURE
7813 return true;
7814 }
7815
7816 // Find the smallest highRes resolution, or largest resolution if there is none
7817 size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
7818 MAX_SIZES_CNT);
7819 size_t highRes = 0;
7820 while ((highRes + 1 < totalCnt) &&
7821 (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
7822 gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
7823 highResWidth * highResHeight)) {
7824 highRes++;
7825 }
7826 if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
7827 return true;
7828 } else {
7829 return false;
7830 }
7831}
7832
7833/*===========================================================================
7834 * FUNCTION : initStaticMetadata
7835 *
7836 * DESCRIPTION: initialize the static metadata
7837 *
7838 * PARAMETERS :
7839 * @cameraId : camera Id
7840 *
7841 * RETURN : int32_t type of status
7842 * 0 -- success
7843 * non-zero failure code
7844 *==========================================================================*/
7845int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
7846{
7847 int rc = 0;
7848 CameraMetadata staticInfo;
7849 size_t count = 0;
7850 bool limitedDevice = false;
7851 char prop[PROPERTY_VALUE_MAX];
7852 bool supportBurst = false;
7853
7854 supportBurst = supportBurstCapture(cameraId);
7855
7856 /* If sensor is YUV sensor (no raw support) or if per-frame control is not
7857 * guaranteed or if min fps of max resolution is less than 20 fps, its
7858 * advertised as limited device*/
7859 limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
7860 (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
7861 (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
7862 !supportBurst;
7863
7864 uint8_t supportedHwLvl = limitedDevice ?
7865 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007866#ifndef USE_HAL_3_3
7867 // LEVEL_3 - This device will support level 3.
7868 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
7869#else
Thierry Strudel3d639192016-09-09 11:52:26 -07007870 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007871#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007872
7873 staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
7874 &supportedHwLvl, 1);
7875
7876 bool facingBack = false;
7877 if ((gCamCapability[cameraId]->position == CAM_POSITION_BACK) ||
7878 (gCamCapability[cameraId]->position == CAM_POSITION_BACK_AUX)) {
7879 facingBack = true;
7880 }
7881 /*HAL 3 only*/
7882 staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
7883 &gCamCapability[cameraId]->min_focus_distance, 1);
7884
7885 staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
7886 &gCamCapability[cameraId]->hyper_focal_distance, 1);
7887
7888 /*should be using focal lengths but sensor doesn't provide that info now*/
7889 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
7890 &gCamCapability[cameraId]->focal_length,
7891 1);
7892
7893 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
7894 gCamCapability[cameraId]->apertures,
7895 MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
7896
7897 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
7898 gCamCapability[cameraId]->filter_densities,
7899 MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
7900
7901
7902 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
7903 (uint8_t *)gCamCapability[cameraId]->optical_stab_modes,
7904 MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count));
7905
7906 int32_t lens_shading_map_size[] = {
7907 MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
7908 MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
7909 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
7910 lens_shading_map_size,
7911 sizeof(lens_shading_map_size)/sizeof(int32_t));
7912
7913 staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
7914 gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
7915
7916 staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
7917 gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
7918
7919 staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
7920 &gCamCapability[cameraId]->max_frame_duration, 1);
7921
7922 camera_metadata_rational baseGainFactor = {
7923 gCamCapability[cameraId]->base_gain_factor.numerator,
7924 gCamCapability[cameraId]->base_gain_factor.denominator};
7925 staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
7926 &baseGainFactor, 1);
7927
7928 staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
7929 (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
7930
7931 int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
7932 gCamCapability[cameraId]->pixel_array_size.height};
7933 staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
7934 pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
7935
7936 int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
7937 gCamCapability[cameraId]->active_array_size.top,
7938 gCamCapability[cameraId]->active_array_size.width,
7939 gCamCapability[cameraId]->active_array_size.height};
7940 staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
7941 active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
7942
7943 staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
7944 &gCamCapability[cameraId]->white_level, 1);
7945
7946 staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
7947 gCamCapability[cameraId]->black_level_pattern, BLACK_LEVEL_PATTERN_CNT);
7948
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007949#ifndef USE_HAL_3_3
7950 bool hasBlackRegions = false;
7951 if (gCamCapability[cameraId]->optical_black_region_count > MAX_OPTICAL_BLACK_REGIONS) {
7952 LOGW("black_region_count: %d is bounded to %d",
7953 gCamCapability[cameraId]->optical_black_region_count, MAX_OPTICAL_BLACK_REGIONS);
7954 gCamCapability[cameraId]->optical_black_region_count = MAX_OPTICAL_BLACK_REGIONS;
7955 }
7956 if (gCamCapability[cameraId]->optical_black_region_count != 0) {
7957 int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
7958 for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i++) {
7959 opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
7960 }
7961 staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
7962 opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
7963 hasBlackRegions = true;
7964 }
7965#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007966 staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
7967 &gCamCapability[cameraId]->flash_charge_duration, 1);
7968
7969 staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
7970 &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
7971
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007972 // SOF timestamp is based on monotonic_boottime. So advertize REALTIME timesource
7973 // REALTIME defined in HAL3 API is same as linux's CLOCK_BOOTTIME
7974 // Ref: kernel/...../msm_isp_util.c: msm_isp_get_timestamp: get_monotonic_boottime
7975 uint8_t timestampSource = ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME;
Thierry Strudel3d639192016-09-09 11:52:26 -07007976 staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
7977 &timestampSource, 1);
7978
7979 staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
7980 &gCamCapability[cameraId]->histogram_size, 1);
7981
7982 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
7983 &gCamCapability[cameraId]->max_histogram_count, 1);
7984
7985 int32_t sharpness_map_size[] = {
7986 gCamCapability[cameraId]->sharpness_map_size.width,
7987 gCamCapability[cameraId]->sharpness_map_size.height};
7988
7989 staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
7990 sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
7991
7992 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
7993 &gCamCapability[cameraId]->max_sharpness_map_value, 1);
7994
7995 int32_t scalar_formats[] = {
7996 ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
7997 ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
7998 ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
7999 ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
8000 HAL_PIXEL_FORMAT_RAW10,
8001 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
8002 size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(int32_t);
8003 staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
8004 scalar_formats,
8005 scalar_formats_count);
8006
8007 int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
8008 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
8009 makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
8010 count, MAX_SIZES_CNT, available_processed_sizes);
8011 staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
8012 available_processed_sizes, count * 2);
8013
8014 int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
8015 count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
8016 makeTable(gCamCapability[cameraId]->raw_dim,
8017 count, MAX_SIZES_CNT, available_raw_sizes);
8018 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
8019 available_raw_sizes, count * 2);
8020
8021 int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
8022 count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
8023 makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
8024 count, MAX_SIZES_CNT, available_fps_ranges);
8025 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
8026 available_fps_ranges, count * 2);
8027
8028 camera_metadata_rational exposureCompensationStep = {
8029 gCamCapability[cameraId]->exp_compensation_step.numerator,
8030 gCamCapability[cameraId]->exp_compensation_step.denominator};
8031 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
8032 &exposureCompensationStep, 1);
8033
8034 Vector<uint8_t> availableVstabModes;
8035 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
8036 char eis_prop[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008037 bool eisSupported = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07008038 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008039 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07008040 uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008041 count = IS_TYPE_MAX;
8042 count = MIN(gCamCapability[cameraId]->supported_is_types_cnt, count);
8043 for (size_t i = 0; i < count; i++) {
8044 if ((gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
8045 (gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
8046 eisSupported = true;
8047 break;
8048 }
8049 }
8050 if (facingBack && eis_prop_set && eisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07008051 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
8052 }
8053 staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
8054 availableVstabModes.array(), availableVstabModes.size());
8055
8056 /*HAL 1 and HAL 3 common*/
8057 uint32_t zoomSteps = gCamCapability[cameraId]->zoom_ratio_tbl_cnt;
8058 uint32_t maxZoomStep = gCamCapability[cameraId]->zoom_ratio_tbl[zoomSteps - 1];
8059 uint32_t minZoomStep = 100; //as per HAL1/API1 spec
8060 float maxZoom = maxZoomStep/minZoomStep;
8061 staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
8062 &maxZoom, 1);
8063
8064 uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
8065 staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
8066
8067 int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
8068 if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
8069 max3aRegions[2] = 0; /* AF not supported */
8070 staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
8071 max3aRegions, 3);
8072
8073 /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
8074 memset(prop, 0, sizeof(prop));
8075 property_get("persist.camera.facedetect", prop, "1");
8076 uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
8077 LOGD("Support face detection mode: %d",
8078 supportedFaceDetectMode);
8079
8080 int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
Thierry Strudel04e026f2016-10-10 11:27:36 -07008081 /* support mode should be OFF if max number of face is 0 */
8082 if (maxFaces <= 0) {
8083 supportedFaceDetectMode = 0;
8084 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008085 Vector<uint8_t> availableFaceDetectModes;
8086 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
8087 if (supportedFaceDetectMode == 1) {
8088 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
8089 } else if (supportedFaceDetectMode == 2) {
8090 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
8091 } else if (supportedFaceDetectMode == 3) {
8092 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
8093 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
8094 } else {
8095 maxFaces = 0;
8096 }
8097 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
8098 availableFaceDetectModes.array(),
8099 availableFaceDetectModes.size());
8100 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
8101 (int32_t *)&maxFaces, 1);
8102
8103 int32_t exposureCompensationRange[] = {
8104 gCamCapability[cameraId]->exposure_compensation_min,
8105 gCamCapability[cameraId]->exposure_compensation_max};
8106 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
8107 exposureCompensationRange,
8108 sizeof(exposureCompensationRange)/sizeof(int32_t));
8109
8110 uint8_t lensFacing = (facingBack) ?
8111 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
8112 staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
8113
8114 staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
8115 available_thumbnail_sizes,
8116 sizeof(available_thumbnail_sizes)/sizeof(int32_t));
8117
8118 /*all sizes will be clubbed into this tag*/
8119 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
8120 /*android.scaler.availableStreamConfigurations*/
8121 Vector<int32_t> available_stream_configs;
8122 cam_dimension_t active_array_dim;
8123 active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
8124 active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
8125 /* Add input/output stream configurations for each scalar formats*/
8126 for (size_t j = 0; j < scalar_formats_count; j++) {
8127 switch (scalar_formats[j]) {
8128 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
8129 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
8130 case HAL_PIXEL_FORMAT_RAW10:
8131 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
8132 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
8133 addStreamConfig(available_stream_configs, scalar_formats[j],
8134 gCamCapability[cameraId]->raw_dim[i],
8135 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
8136 }
8137 break;
8138 case HAL_PIXEL_FORMAT_BLOB:
8139 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
8140 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
8141 addStreamConfig(available_stream_configs, scalar_formats[j],
8142 gCamCapability[cameraId]->picture_sizes_tbl[i],
8143 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
8144 }
8145 break;
8146 case HAL_PIXEL_FORMAT_YCbCr_420_888:
8147 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
8148 default:
8149 cam_dimension_t largest_picture_size;
8150 memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
8151 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
8152 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
8153 addStreamConfig(available_stream_configs, scalar_formats[j],
8154 gCamCapability[cameraId]->picture_sizes_tbl[i],
8155 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
8156 /* Book keep largest */
8157 if (gCamCapability[cameraId]->picture_sizes_tbl[i].width
8158 >= largest_picture_size.width &&
8159 gCamCapability[cameraId]->picture_sizes_tbl[i].height
8160 >= largest_picture_size.height)
8161 largest_picture_size = gCamCapability[cameraId]->picture_sizes_tbl[i];
8162 }
8163 /*For below 2 formats we also support i/p streams for reprocessing advertise those*/
8164 if (scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
8165 scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) {
8166 addStreamConfig(available_stream_configs, scalar_formats[j],
8167 largest_picture_size,
8168 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
8169 }
8170 break;
8171 }
8172 }
8173
8174 staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
8175 available_stream_configs.array(), available_stream_configs.size());
8176 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
8177 staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
8178
8179 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
8180 staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
8181
8182 /* android.scaler.availableMinFrameDurations */
8183 Vector<int64_t> available_min_durations;
8184 for (size_t j = 0; j < scalar_formats_count; j++) {
8185 switch (scalar_formats[j]) {
8186 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
8187 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
8188 case HAL_PIXEL_FORMAT_RAW10:
8189 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
8190 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
8191 available_min_durations.add(scalar_formats[j]);
8192 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
8193 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
8194 available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
8195 }
8196 break;
8197 default:
8198 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
8199 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
8200 available_min_durations.add(scalar_formats[j]);
8201 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
8202 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
8203 available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
8204 }
8205 break;
8206 }
8207 }
8208 staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
8209 available_min_durations.array(), available_min_durations.size());
8210
8211 Vector<int32_t> available_hfr_configs;
8212 for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
8213 int32_t fps = 0;
8214 switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
8215 case CAM_HFR_MODE_60FPS:
8216 fps = 60;
8217 break;
8218 case CAM_HFR_MODE_90FPS:
8219 fps = 90;
8220 break;
8221 case CAM_HFR_MODE_120FPS:
8222 fps = 120;
8223 break;
8224 case CAM_HFR_MODE_150FPS:
8225 fps = 150;
8226 break;
8227 case CAM_HFR_MODE_180FPS:
8228 fps = 180;
8229 break;
8230 case CAM_HFR_MODE_210FPS:
8231 fps = 210;
8232 break;
8233 case CAM_HFR_MODE_240FPS:
8234 fps = 240;
8235 break;
8236 case CAM_HFR_MODE_480FPS:
8237 fps = 480;
8238 break;
8239 case CAM_HFR_MODE_OFF:
8240 case CAM_HFR_MODE_MAX:
8241 default:
8242 break;
8243 }
8244
8245 /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
8246 if (fps >= MIN_FPS_FOR_BATCH_MODE) {
8247 /* For each HFR frame rate, need to advertise one variable fps range
8248 * and one fixed fps range per dimension. Eg: for 120 FPS, advertise [30, 120]
8249 * and [120, 120]. While camcorder preview alone is running [30, 120] is
8250 * set by the app. When video recording is started, [120, 120] is
8251 * set. This way sensor configuration does not change when recording
8252 * is started */
8253
8254 /* (width, height, fps_min, fps_max, batch_size_max) */
8255 for (size_t j = 0; j < gCamCapability[cameraId]->hfr_tbl[i].dim_cnt &&
8256 j < MAX_SIZES_CNT; j++) {
8257 available_hfr_configs.add(
8258 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
8259 available_hfr_configs.add(
8260 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
8261 available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
8262 available_hfr_configs.add(fps);
8263 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
8264
8265 /* (width, height, fps_min, fps_max, batch_size_max) */
8266 available_hfr_configs.add(
8267 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
8268 available_hfr_configs.add(
8269 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
8270 available_hfr_configs.add(fps);
8271 available_hfr_configs.add(fps);
8272 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
8273 }
8274 }
8275 }
8276 //Advertise HFR capability only if the property is set
8277 memset(prop, 0, sizeof(prop));
8278 property_get("persist.camera.hal3hfr.enable", prop, "1");
8279 uint8_t hfrEnable = (uint8_t)atoi(prop);
8280
8281 if(hfrEnable && available_hfr_configs.array()) {
8282 staticInfo.update(
8283 ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
8284 available_hfr_configs.array(), available_hfr_configs.size());
8285 }
8286
8287 int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
8288 staticInfo.update(ANDROID_JPEG_MAX_SIZE,
8289 &max_jpeg_size, 1);
8290
8291 uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
8292 size_t size = 0;
8293 count = CAM_EFFECT_MODE_MAX;
8294 count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
8295 for (size_t i = 0; i < count; i++) {
8296 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
8297 gCamCapability[cameraId]->supported_effects[i]);
8298 if (NAME_NOT_FOUND != val) {
8299 avail_effects[size] = (uint8_t)val;
8300 size++;
8301 }
8302 }
8303 staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
8304 avail_effects,
8305 size);
8306
8307 uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
8308 uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
8309 size_t supported_scene_modes_cnt = 0;
8310 count = CAM_SCENE_MODE_MAX;
8311 count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
8312 for (size_t i = 0; i < count; i++) {
8313 if (gCamCapability[cameraId]->supported_scene_modes[i] !=
8314 CAM_SCENE_MODE_OFF) {
8315 int val = lookupFwkName(SCENE_MODES_MAP,
8316 METADATA_MAP_SIZE(SCENE_MODES_MAP),
8317 gCamCapability[cameraId]->supported_scene_modes[i]);
8318 if (NAME_NOT_FOUND != val) {
8319 avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
8320 supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
8321 supported_scene_modes_cnt++;
8322 }
8323 }
8324 }
8325 staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
8326 avail_scene_modes,
8327 supported_scene_modes_cnt);
8328
8329 uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
8330 makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
8331 supported_scene_modes_cnt,
8332 CAM_SCENE_MODE_MAX,
8333 scene_mode_overrides,
8334 supported_indexes,
8335 cameraId);
8336
8337 if (supported_scene_modes_cnt == 0) {
8338 supported_scene_modes_cnt = 1;
8339 avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
8340 }
8341
8342 staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
8343 scene_mode_overrides, supported_scene_modes_cnt * 3);
8344
8345 uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
8346 ANDROID_CONTROL_MODE_AUTO,
8347 ANDROID_CONTROL_MODE_USE_SCENE_MODE};
8348 staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
8349 available_control_modes,
8350 3);
8351
8352 uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
8353 size = 0;
8354 count = CAM_ANTIBANDING_MODE_MAX;
8355 count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
8356 for (size_t i = 0; i < count; i++) {
8357 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
8358 gCamCapability[cameraId]->supported_antibandings[i]);
8359 if (NAME_NOT_FOUND != val) {
8360 avail_antibanding_modes[size] = (uint8_t)val;
8361 size++;
8362 }
8363
8364 }
8365 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
8366 avail_antibanding_modes,
8367 size);
8368
8369 uint8_t avail_abberation_modes[] = {
8370 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
8371 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
8372 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
8373 count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
8374 count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
8375 if (0 == count) {
8376 // If no aberration correction modes are available for a device, this advertise OFF mode
8377 size = 1;
8378 } else {
8379 // If count is not zero then atleast one among the FAST or HIGH quality is supported
8380 // So, advertize all 3 modes if atleast any one mode is supported as per the
8381 // new M requirement
8382 size = 3;
8383 }
8384 staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
8385 avail_abberation_modes,
8386 size);
8387
8388 uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
8389 size = 0;
8390 count = CAM_FOCUS_MODE_MAX;
8391 count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
8392 for (size_t i = 0; i < count; i++) {
8393 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
8394 gCamCapability[cameraId]->supported_focus_modes[i]);
8395 if (NAME_NOT_FOUND != val) {
8396 avail_af_modes[size] = (uint8_t)val;
8397 size++;
8398 }
8399 }
8400 staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
8401 avail_af_modes,
8402 size);
8403
8404 uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
8405 size = 0;
8406 count = CAM_WB_MODE_MAX;
8407 count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
8408 for (size_t i = 0; i < count; i++) {
8409 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
8410 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
8411 gCamCapability[cameraId]->supported_white_balances[i]);
8412 if (NAME_NOT_FOUND != val) {
8413 avail_awb_modes[size] = (uint8_t)val;
8414 size++;
8415 }
8416 }
8417 staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
8418 avail_awb_modes,
8419 size);
8420
8421 uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
8422 count = CAM_FLASH_FIRING_LEVEL_MAX;
8423 count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
8424 count);
8425 for (size_t i = 0; i < count; i++) {
8426 available_flash_levels[i] =
8427 gCamCapability[cameraId]->supported_firing_levels[i];
8428 }
8429 staticInfo.update(ANDROID_FLASH_FIRING_POWER,
8430 available_flash_levels, count);
8431
8432 uint8_t flashAvailable;
8433 if (gCamCapability[cameraId]->flash_available)
8434 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
8435 else
8436 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
8437 staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
8438 &flashAvailable, 1);
8439
8440 Vector<uint8_t> avail_ae_modes;
8441 count = CAM_AE_MODE_MAX;
8442 count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
8443 for (size_t i = 0; i < count; i++) {
8444 avail_ae_modes.add(gCamCapability[cameraId]->supported_ae_modes[i]);
8445 }
8446 if (flashAvailable) {
8447 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
8448 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
8449 }
8450 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
8451 avail_ae_modes.array(),
8452 avail_ae_modes.size());
8453
8454 int32_t sensitivity_range[2];
8455 sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
8456 sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
8457 staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
8458 sensitivity_range,
8459 sizeof(sensitivity_range) / sizeof(int32_t));
8460
8461 staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
8462 &gCamCapability[cameraId]->max_analog_sensitivity,
8463 1);
8464
8465 int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
8466 staticInfo.update(ANDROID_SENSOR_ORIENTATION,
8467 &sensor_orientation,
8468 1);
8469
8470 int32_t max_output_streams[] = {
8471 MAX_STALLING_STREAMS,
8472 MAX_PROCESSED_STREAMS,
8473 MAX_RAW_STREAMS};
8474 staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
8475 max_output_streams,
8476 sizeof(max_output_streams)/sizeof(max_output_streams[0]));
8477
8478 uint8_t avail_leds = 0;
8479 staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
8480 &avail_leds, 0);
8481
8482 uint8_t focus_dist_calibrated;
8483 int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
8484 gCamCapability[cameraId]->focus_dist_calibrated);
8485 if (NAME_NOT_FOUND != val) {
8486 focus_dist_calibrated = (uint8_t)val;
8487 staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
8488 &focus_dist_calibrated, 1);
8489 }
8490
8491 int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
8492 size = 0;
8493 count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
8494 MAX_TEST_PATTERN_CNT);
8495 for (size_t i = 0; i < count; i++) {
8496 int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
8497 gCamCapability[cameraId]->supported_test_pattern_modes[i]);
8498 if (NAME_NOT_FOUND != testpatternMode) {
8499 avail_testpattern_modes[size] = testpatternMode;
8500 size++;
8501 }
8502 }
8503 staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
8504 avail_testpattern_modes,
8505 size);
8506
8507 uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
8508 staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
8509 &max_pipeline_depth,
8510 1);
8511
8512 int32_t partial_result_count = PARTIAL_RESULT_COUNT;
8513 staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
8514 &partial_result_count,
8515 1);
8516
8517 int32_t max_stall_duration = MAX_REPROCESS_STALL;
8518 staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
8519
8520 Vector<uint8_t> available_capabilities;
8521 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
8522 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
8523 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
8524 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
8525 if (supportBurst) {
8526 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
8527 }
8528 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
8529 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
8530 if (hfrEnable && available_hfr_configs.array()) {
8531 available_capabilities.add(
8532 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
8533 }
8534
8535 if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
8536 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
8537 }
8538 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
8539 available_capabilities.array(),
8540 available_capabilities.size());
8541
8542 //aeLockAvailable to be set to true if capabilities has MANUAL_SENSOR or BURST_CAPTURE
8543 //Assumption is that all bayer cameras support MANUAL_SENSOR.
8544 uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
8545 ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
8546
8547 staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
8548 &aeLockAvailable, 1);
8549
8550 //awbLockAvailable to be set to true if capabilities has MANUAL_POST_PROCESSING or
8551 //BURST_CAPTURE. Assumption is that all bayer cameras support MANUAL_POST_PROCESSING.
8552 uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
8553 ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
8554
8555 staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
8556 &awbLockAvailable, 1);
8557
8558 int32_t max_input_streams = 1;
8559 staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
8560 &max_input_streams,
8561 1);
8562
8563 /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
8564 int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
8565 HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
8566 HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
8567 HAL_PIXEL_FORMAT_YCbCr_420_888};
8568 staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
8569 io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));
8570
8571 int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
8572 staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
8573 &max_latency,
8574 1);
8575
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008576#ifndef USE_HAL_3_3
8577 int32_t isp_sensitivity_range[2];
8578 isp_sensitivity_range[0] =
8579 gCamCapability[cameraId]->isp_sensitivity_range.min_sensitivity;
8580 isp_sensitivity_range[1] =
8581 gCamCapability[cameraId]->isp_sensitivity_range.max_sensitivity;
8582 staticInfo.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
8583 isp_sensitivity_range,
8584 sizeof(isp_sensitivity_range) / sizeof(isp_sensitivity_range[0]));
8585#endif
8586
Thierry Strudel3d639192016-09-09 11:52:26 -07008587 uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
8588 ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
8589 staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
8590 available_hot_pixel_modes,
8591 sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));
8592
8593 uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
8594 ANDROID_SHADING_MODE_FAST,
8595 ANDROID_SHADING_MODE_HIGH_QUALITY};
8596 staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
8597 available_shading_modes,
8598 3);
8599
8600 uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
8601 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
8602 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
8603 available_lens_shading_map_modes,
8604 2);
8605
8606 uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
8607 ANDROID_EDGE_MODE_FAST,
8608 ANDROID_EDGE_MODE_HIGH_QUALITY,
8609 ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
8610 staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
8611 available_edge_modes,
8612 sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));
8613
8614 uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
8615 ANDROID_NOISE_REDUCTION_MODE_FAST,
8616 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
8617 ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
8618 ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
8619 staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
8620 available_noise_red_modes,
8621 sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));
8622
8623 uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
8624 ANDROID_TONEMAP_MODE_FAST,
8625 ANDROID_TONEMAP_MODE_HIGH_QUALITY};
8626 staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
8627 available_tonemap_modes,
8628 sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));
8629
8630 uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
8631 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
8632 available_hot_pixel_map_modes,
8633 sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
8634
8635 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
8636 gCamCapability[cameraId]->reference_illuminant1);
8637 if (NAME_NOT_FOUND != val) {
8638 uint8_t fwkReferenceIlluminant = (uint8_t)val;
8639 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
8640 }
8641
8642 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
8643 gCamCapability[cameraId]->reference_illuminant2);
8644 if (NAME_NOT_FOUND != val) {
8645 uint8_t fwkReferenceIlluminant = (uint8_t)val;
8646 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
8647 }
8648
8649 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
8650 (void *)gCamCapability[cameraId]->forward_matrix1,
8651 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
8652
8653 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
8654 (void *)gCamCapability[cameraId]->forward_matrix2,
8655 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
8656
8657 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
8658 (void *)gCamCapability[cameraId]->color_transform1,
8659 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
8660
8661 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
8662 (void *)gCamCapability[cameraId]->color_transform2,
8663 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
8664
8665 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
8666 (void *)gCamCapability[cameraId]->calibration_transform1,
8667 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
8668
8669 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
8670 (void *)gCamCapability[cameraId]->calibration_transform2,
8671 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
8672
8673 int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
8674 ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
8675 ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
8676 ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
8677 ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
8678 ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
8679 ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
8680 ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
8681 ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
8682 ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
8683 ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
8684 ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
8685 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
8686 ANDROID_JPEG_GPS_COORDINATES,
8687 ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
8688 ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
8689 ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
8690 ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
8691 ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
8692 ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
8693 ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
8694 ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
8695 ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
8696 ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008697#ifndef USE_HAL_3_3
8698 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
8699#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07008700 ANDROID_STATISTICS_FACE_DETECT_MODE,
8701 ANDROID_STATISTICS_HISTOGRAM_MODE, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
8702 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
8703 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Samuel Ha68ba5172016-12-15 18:41:12 -08008704 ANDROID_BLACK_LEVEL_LOCK,
8705 /* DevCamDebug metadata request_keys_basic */
8706 DEVCAMDEBUG_META_ENABLE,
8707 /* DevCamDebug metadata end */
8708 };
Thierry Strudel3d639192016-09-09 11:52:26 -07008709
8710 size_t request_keys_cnt =
8711 sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
8712 Vector<int32_t> available_request_keys;
8713 available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
8714 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
8715 available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
8716 }
8717
8718 staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
8719 available_request_keys.array(), available_request_keys.size());
8720
8721 int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
8722 ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
8723 ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
8724 ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
8725 ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
8726 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
8727 ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
8728 ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
8729 ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
8730 ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
8731 ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
8732 ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
8733 ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
8734 ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
8735 ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
8736 ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
8737 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
8738 ANDROID_STATISTICS_FACE_DETECT_MODE, ANDROID_STATISTICS_HISTOGRAM_MODE,
8739 ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
8740 ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
8741 ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008742 ANDROID_STATISTICS_FACE_SCORES,
8743#ifndef USE_HAL_3_3
8744 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
8745#endif
Shuzhen Wange763e802016-03-31 10:24:29 -07008746 NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE,
Samuel Ha68ba5172016-12-15 18:41:12 -08008747 // DevCamDebug metadata result_keys_basic
8748 DEVCAMDEBUG_META_ENABLE,
8749 // DevCamDebug metadata result_keys AF
8750 DEVCAMDEBUG_AF_LENS_POSITION,
8751 DEVCAMDEBUG_AF_TOF_CONFIDENCE,
8752 DEVCAMDEBUG_AF_TOF_DISTANCE,
8753 DEVCAMDEBUG_AF_LUMA,
8754 DEVCAMDEBUG_AF_HAF_STATE,
8755 DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
8756 DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
8757 DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
8758 DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
8759 DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
8760 DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
8761 DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
8762 DEVCAMDEBUG_AF_MONITOR_REFOCUS,
8763 DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
8764 DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
8765 DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
8766 DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
8767 DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
8768 DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
8769 DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
8770 DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
8771 DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
8772 DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
8773 DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
8774 DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
8775 DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
8776 DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
8777 // DevCamDebug metadata result_keys AEC
8778 DEVCAMDEBUG_AEC_TARGET_LUMA,
8779 DEVCAMDEBUG_AEC_COMP_LUMA,
8780 DEVCAMDEBUG_AEC_AVG_LUMA,
8781 DEVCAMDEBUG_AEC_CUR_LUMA,
8782 DEVCAMDEBUG_AEC_LINECOUNT,
8783 DEVCAMDEBUG_AEC_REAL_GAIN,
8784 DEVCAMDEBUG_AEC_EXP_INDEX,
8785 DEVCAMDEBUG_AEC_LUX_IDX,
8786 // DevCamDebug metadata result_keys AWB
8787 DEVCAMDEBUG_AWB_R_GAIN,
8788 DEVCAMDEBUG_AWB_G_GAIN,
8789 DEVCAMDEBUG_AWB_B_GAIN,
8790 DEVCAMDEBUG_AWB_CCT,
8791 DEVCAMDEBUG_AWB_DECISION,
8792 /* DevCamDebug metadata end */
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008793 };
8794
Thierry Strudel3d639192016-09-09 11:52:26 -07008795 size_t result_keys_cnt =
8796 sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
8797
8798 Vector<int32_t> available_result_keys;
8799 available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
8800 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
8801 available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
8802 }
8803 if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
8804 available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
8805 available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
8806 }
8807 if (supportedFaceDetectMode == 1) {
8808 available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
8809 available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
8810 } else if ((supportedFaceDetectMode == 2) ||
8811 (supportedFaceDetectMode == 3)) {
8812 available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
8813 available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
8814 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008815#ifndef USE_HAL_3_3
8816 if (hasBlackRegions) {
8817 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
8818 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL);
8819 }
8820#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07008821 staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
8822 available_result_keys.array(), available_result_keys.size());
8823
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008824 int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
Thierry Strudel3d639192016-09-09 11:52:26 -07008825 ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
8826 ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
8827 ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
8828 ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
8829 ANDROID_SCALER_CROPPING_TYPE,
8830 ANDROID_SYNC_MAX_LATENCY,
8831 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
8832 ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
8833 ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
8834 ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
8835 ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
8836 ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
8837 ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
8838 ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
8839 ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
8840 ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
8841 ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
8842 ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
8843 ANDROID_LENS_FACING,
8844 ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
8845 ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
8846 ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
8847 ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
8848 ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
8849 ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
8850 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
8851 /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
8852 ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
8853 ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
8854 ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
8855 ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
8856 ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
8857 ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
8858 ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
8859 ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
8860 ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
8861 ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
8862 ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
8863 ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
8864 ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
8865 ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
8866 ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
8867 ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
8868 ANDROID_EDGE_AVAILABLE_EDGE_MODES,
8869 ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
8870 ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
8871 ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
8872 ANDROID_TONEMAP_MAX_CURVE_POINTS,
8873 ANDROID_CONTROL_AVAILABLE_MODES,
8874 ANDROID_CONTROL_AE_LOCK_AVAILABLE,
8875 ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
8876 ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
8877 ANDROID_SHADING_AVAILABLE_MODES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008878 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
8879#ifndef USE_HAL_3_3
8880 ANDROID_SENSOR_OPAQUE_RAW_SIZE,
8881 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
8882#endif
8883 };
8884
8885 Vector<int32_t> available_characteristics_keys;
8886 available_characteristics_keys.appendArray(characteristics_keys_basic,
8887 sizeof(characteristics_keys_basic)/sizeof(int32_t));
8888#ifndef USE_HAL_3_3
8889 if (hasBlackRegions) {
8890 available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
8891 }
8892#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07008893 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008894 available_characteristics_keys.array(),
8895 available_characteristics_keys.size());
Thierry Strudel3d639192016-09-09 11:52:26 -07008896
8897 /*available stall durations depend on the hw + sw and will be different for different devices */
8898 /*have to add for raw after implementation*/
8899 int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
8900 size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
8901
8902 Vector<int64_t> available_stall_durations;
8903 for (uint32_t j = 0; j < stall_formats_count; j++) {
8904 if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
8905 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
8906 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
8907 available_stall_durations.add(stall_formats[j]);
8908 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
8909 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
8910 available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
8911 }
8912 } else {
8913 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
8914 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
8915 available_stall_durations.add(stall_formats[j]);
8916 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
8917 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
8918 available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
8919 }
8920 }
8921 }
8922 staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
8923 available_stall_durations.array(),
8924 available_stall_durations.size());
8925
8926 //QCAMERA3_OPAQUE_RAW
8927 uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
8928 cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
8929 switch (gCamCapability[cameraId]->opaque_raw_fmt) {
8930 case LEGACY_RAW:
8931 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
8932 fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
8933 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
8934 fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
8935 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
8936 fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
8937 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
8938 break;
8939 case MIPI_RAW:
8940 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
8941 fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
8942 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
8943 fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
8944 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
8945 fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
8946 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
8947 break;
8948 default:
8949 LOGE("unknown opaque_raw_format %d",
8950 gCamCapability[cameraId]->opaque_raw_fmt);
8951 break;
8952 }
8953 staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
8954
8955 Vector<int32_t> strides;
8956 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
8957 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
8958 cam_stream_buf_plane_info_t buf_planes;
8959 strides.add(gCamCapability[cameraId]->raw_dim[i].width);
8960 strides.add(gCamCapability[cameraId]->raw_dim[i].height);
8961 mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
8962 &gCamCapability[cameraId]->padding_info, &buf_planes);
8963 strides.add(buf_planes.plane_info.mp[0].stride);
8964 }
8965 staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
8966 strides.size());
8967
Thierry Strudel04e026f2016-10-10 11:27:36 -07008968 //Video HDR default
8969 if ((gCamCapability[cameraId]->qcom_supported_feature_mask) &
8970 (CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR |
8971 CAM_QCOM_FEATURE_ZIGZAG_VIDEO_HDR | CAM_QCOM_FEATURE_SENSOR_HDR)) {
8972 int32_t vhdr_mode[] = {
8973 QCAMERA3_VIDEO_HDR_MODE_OFF,
8974 QCAMERA3_VIDEO_HDR_MODE_ON};
8975
8976 size_t vhdr_mode_count = sizeof(vhdr_mode) / sizeof(int32_t);
8977 staticInfo.update(QCAMERA3_AVAILABLE_VIDEO_HDR_MODES,
8978 vhdr_mode, vhdr_mode_count);
8979 }
8980
Thierry Strudel3d639192016-09-09 11:52:26 -07008981 staticInfo.update(QCAMERA3_DUALCAM_CALIB_META_DATA_BLOB,
8982 (const uint8_t*)&gCamCapability[cameraId]->related_cam_calibration,
8983 sizeof(gCamCapability[cameraId]->related_cam_calibration));
8984
8985 uint8_t isMonoOnly =
8986 (gCamCapability[cameraId]->color_arrangement == CAM_FILTER_ARRANGEMENT_Y);
8987 staticInfo.update(QCAMERA3_SENSOR_IS_MONO_ONLY,
8988 &isMonoOnly, 1);
8989
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008990#ifndef USE_HAL_3_3
8991 Vector<int32_t> opaque_size;
8992 for (size_t j = 0; j < scalar_formats_count; j++) {
8993 if (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE) {
8994 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
8995 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
8996 cam_stream_buf_plane_info_t buf_planes;
8997
8998 rc = mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
8999 &gCamCapability[cameraId]->padding_info, &buf_planes);
9000
9001 if (rc == 0) {
9002 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].width);
9003 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].height);
9004 opaque_size.add(buf_planes.plane_info.frame_len);
9005 }else {
9006 LOGE("raw frame calculation failed!");
9007 }
9008 }
9009 }
9010 }
9011
9012 if ((opaque_size.size() > 0) &&
9013 (opaque_size.size() % PER_CONFIGURATION_SIZE_3 == 0))
9014 staticInfo.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, opaque_size.array(), opaque_size.size());
9015 else
9016 LOGW("Warning: ANDROID_SENSOR_OPAQUE_RAW_SIZE is using rough estimation(2 bytes/pixel)");
9017#endif
9018
Thierry Strudel04e026f2016-10-10 11:27:36 -07009019 if (gCamCapability[cameraId]->supported_ir_mode_cnt > 0) {
9020 int32_t avail_ir_modes[CAM_IR_MODE_MAX];
9021 size = 0;
9022 count = CAM_IR_MODE_MAX;
9023 count = MIN(gCamCapability[cameraId]->supported_ir_mode_cnt, count);
9024 for (size_t i = 0; i < count; i++) {
9025 int val = lookupFwkName(IR_MODES_MAP, METADATA_MAP_SIZE(IR_MODES_MAP),
9026 gCamCapability[cameraId]->supported_ir_modes[i]);
9027 if (NAME_NOT_FOUND != val) {
9028 avail_ir_modes[size] = (int32_t)val;
9029 size++;
9030 }
9031 }
9032 staticInfo.update(QCAMERA3_IR_AVAILABLE_MODES,
9033 avail_ir_modes, size);
9034 }
9035
Thierry Strudel295a0ca2016-11-03 18:38:47 -07009036 if (gCamCapability[cameraId]->supported_instant_aec_modes_cnt > 0) {
9037 int32_t available_instant_aec_modes[CAM_AEC_CONVERGENCE_MAX];
9038 size = 0;
9039 count = CAM_AEC_CONVERGENCE_MAX;
9040 count = MIN(gCamCapability[cameraId]->supported_instant_aec_modes_cnt, count);
9041 for (size_t i = 0; i < count; i++) {
9042 int val = lookupFwkName(INSTANT_AEC_MODES_MAP, METADATA_MAP_SIZE(INSTANT_AEC_MODES_MAP),
9043 gCamCapability[cameraId]->supported_instant_aec_modes[i]);
9044 if (NAME_NOT_FOUND != val) {
9045 available_instant_aec_modes[size] = (int32_t)val;
9046 size++;
9047 }
9048 }
9049 staticInfo.update(QCAMERA3_INSTANT_AEC_AVAILABLE_MODES,
9050 available_instant_aec_modes, size);
9051 }
9052
Thierry Strudel3d639192016-09-09 11:52:26 -07009053 gStaticMetadata[cameraId] = staticInfo.release();
9054 return rc;
9055}
9056
9057/*===========================================================================
9058 * FUNCTION : makeTable
9059 *
9060 * DESCRIPTION: make a table of sizes
9061 *
 * PARAMETERS :
 *   @dimTable  : input array of dimensions
 *   @size      : number of valid entries in dimTable
 *   @max_size  : maximum number of dimension entries to copy
 *   @sizeTable : output array receiving flattened width/height pairs
9065 *==========================================================================*/
9066void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
9067 size_t max_size, int32_t *sizeTable)
9068{
9069 size_t j = 0;
9070 if (size > max_size) {
9071 size = max_size;
9072 }
9073 for (size_t i = 0; i < size; i++) {
9074 sizeTable[j] = dimTable[i].width;
9075 sizeTable[j+1] = dimTable[i].height;
9076 j+=2;
9077 }
9078}
9079
9080/*===========================================================================
9081 * FUNCTION : makeFPSTable
9082 *
9083 * DESCRIPTION: make a table of fps ranges
9084 *
9085 * PARAMETERS :
9086 *
9087 *==========================================================================*/
9088void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
9089 size_t max_size, int32_t *fpsRangesTable)
9090{
9091 size_t j = 0;
9092 if (size > max_size) {
9093 size = max_size;
9094 }
9095 for (size_t i = 0; i < size; i++) {
9096 fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
9097 fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
9098 j+=2;
9099 }
9100}
9101
9102/*===========================================================================
9103 * FUNCTION : makeOverridesList
9104 *
9105 * DESCRIPTION: make a list of scene mode overrides
9106 *
9107 * PARAMETERS :
9108 *
9109 *
9110 *==========================================================================*/
9111void QCamera3HardwareInterface::makeOverridesList(
9112 cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
9113 uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
9114{
9115 /*daemon will give a list of overrides for all scene modes.
9116 However we should send the fwk only the overrides for the scene modes
9117 supported by the framework*/
9118 size_t j = 0;
9119 if (size > max_size) {
9120 size = max_size;
9121 }
9122 size_t focus_count = CAM_FOCUS_MODE_MAX;
9123 focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
9124 focus_count);
9125 for (size_t i = 0; i < size; i++) {
9126 bool supt = false;
9127 size_t index = supported_indexes[i];
9128 overridesList[j] = gCamCapability[camera_id]->flash_available ?
9129 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
9130 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
9131 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
9132 overridesTable[index].awb_mode);
9133 if (NAME_NOT_FOUND != val) {
9134 overridesList[j+1] = (uint8_t)val;
9135 }
9136 uint8_t focus_override = overridesTable[index].af_mode;
9137 for (size_t k = 0; k < focus_count; k++) {
9138 if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
9139 supt = true;
9140 break;
9141 }
9142 }
9143 if (supt) {
9144 val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
9145 focus_override);
9146 if (NAME_NOT_FOUND != val) {
9147 overridesList[j+2] = (uint8_t)val;
9148 }
9149 } else {
9150 overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
9151 }
9152 j+=3;
9153 }
9154}
9155
9156/*===========================================================================
9157 * FUNCTION : filterJpegSizes
9158 *
9159 * DESCRIPTION: Returns the supported jpeg sizes based on the max dimension that
9160 * could be downscaled to
9161 *
 * PARAMETERS :
 *   @jpegSizes        : output array receiving the filtered width/height pairs
 *   @processedSizes   : input array of candidate width/height pairs
 *   @processedSizesCnt: number of int32 entries (2 per size) in processedSizes
 *   @maxCount         : maximum number of int32 entries to consider
 *   @active_array_size: active pixel array; sets the minimum acceptable size
 *   @downscale_factor : divisor applied to the active array dimensions
9164 * RETURN : length of jpegSizes array
9165 *==========================================================================*/
9166
9167size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
9168 size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
9169 uint8_t downscale_factor)
9170{
9171 if (0 == downscale_factor) {
9172 downscale_factor = 1;
9173 }
9174
9175 int32_t min_width = active_array_size.width / downscale_factor;
9176 int32_t min_height = active_array_size.height / downscale_factor;
9177 size_t jpegSizesCnt = 0;
9178 if (processedSizesCnt > maxCount) {
9179 processedSizesCnt = maxCount;
9180 }
9181 for (size_t i = 0; i < processedSizesCnt; i+=2) {
9182 if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
9183 jpegSizes[jpegSizesCnt] = processedSizes[i];
9184 jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
9185 jpegSizesCnt += 2;
9186 }
9187 }
9188 return jpegSizesCnt;
9189}
9190
9191/*===========================================================================
9192 * FUNCTION : computeNoiseModelEntryS
9193 *
9194 * DESCRIPTION: function to map a given sensitivity to the S noise
9195 * model parameters in the DNG noise model.
9196 *
9197 * PARAMETERS : sens : the sensor sensitivity
9198 *
9199 ** RETURN : S (sensor amplification) noise
9200 *
9201 *==========================================================================*/
9202double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
9203 double s = gCamCapability[mCameraId]->gradient_S * sens +
9204 gCamCapability[mCameraId]->offset_S;
9205 return ((s < 0.0) ? 0.0 : s);
9206}
9207
9208/*===========================================================================
9209 * FUNCTION : computeNoiseModelEntryO
9210 *
9211 * DESCRIPTION: function to map a given sensitivity to the O noise
9212 * model parameters in the DNG noise model.
9213 *
9214 * PARAMETERS : sens : the sensor sensitivity
9215 *
9216 ** RETURN : O (sensor readout) noise
9217 *
9218 *==========================================================================*/
9219double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
9220 int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
9221 double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
9222 1.0 : (1.0 * sens / max_analog_sens);
9223 double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
9224 gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
9225 return ((o < 0.0) ? 0.0 : o);
9226}
9227
9228/*===========================================================================
9229 * FUNCTION : getSensorSensitivity
9230 *
9231 * DESCRIPTION: convert iso_mode to an integer value
9232 *
9233 * PARAMETERS : iso_mode : the iso_mode supported by sensor
9234 *
9235 ** RETURN : sensitivity supported by sensor
9236 *
9237 *==========================================================================*/
9238int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
9239{
9240 int32_t sensitivity;
9241
9242 switch (iso_mode) {
9243 case CAM_ISO_MODE_100:
9244 sensitivity = 100;
9245 break;
9246 case CAM_ISO_MODE_200:
9247 sensitivity = 200;
9248 break;
9249 case CAM_ISO_MODE_400:
9250 sensitivity = 400;
9251 break;
9252 case CAM_ISO_MODE_800:
9253 sensitivity = 800;
9254 break;
9255 case CAM_ISO_MODE_1600:
9256 sensitivity = 1600;
9257 break;
9258 default:
9259 sensitivity = -1;
9260 break;
9261 }
9262 return sensitivity;
9263}
9264
9265/*===========================================================================
9266 * FUNCTION : getCamInfo
9267 *
9268 * DESCRIPTION: query camera capabilities
9269 *
9270 * PARAMETERS :
9271 * @cameraId : camera Id
9272 * @info : camera info struct to be filled in with camera capabilities
9273 *
9274 * RETURN : int type of status
9275 * NO_ERROR -- success
9276 * none-zero failure code
9277 *==========================================================================*/
int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
        struct camera_info *info)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_GET_CAM_INFO);
    int rc = 0;

    // gCamLock serializes lazy initialization of the per-camera capability
    // and static-metadata caches, which are globals shared across instances.
    pthread_mutex_lock(&gCamLock);
    if (NULL == gCamCapability[cameraId]) {
        // First query for this camera: fetch its capabilities once and cache.
        rc = initCapabilities(cameraId);
        if (rc < 0) {
            pthread_mutex_unlock(&gCamLock);
            return rc;
        }
    }

    if (NULL == gStaticMetadata[cameraId]) {
        // Build the static characteristics blob once; reused on later calls.
        rc = initStaticMetadata(cameraId);
        if (rc < 0) {
            pthread_mutex_unlock(&gCamLock);
            return rc;
        }
    }

    // Translate the HAL sensor position into the framework facing value.
    // Aux sensors report the same facing as their main counterpart.
    switch(gCamCapability[cameraId]->position) {
    case CAM_POSITION_BACK:
    case CAM_POSITION_BACK_AUX:
        info->facing = CAMERA_FACING_BACK;
        break;

    case CAM_POSITION_FRONT:
    case CAM_POSITION_FRONT_AUX:
        info->facing = CAMERA_FACING_FRONT;
        break;

    default:
        // Keep filling in the remaining fields, but report failure via rc.
        LOGE("Unknown position type %d for camera id:%d",
                gCamCapability[cameraId]->position, cameraId);
        rc = -1;
        break;
    }


    info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
#ifndef USE_HAL_3_3
    info->device_version = CAMERA_DEVICE_API_VERSION_3_4;
#else
    info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
#endif
    info->static_camera_characteristics = gStaticMetadata[cameraId];

    //For now assume both cameras can operate independently.
    info->conflicting_devices = NULL;
    info->conflicting_devices_length = 0;

    //resource cost is 100 * MIN(1.0, m/M),
    //where m is throughput requirement with maximum stream configuration
    //and M is CPP maximum throughput.
    // m uses the highest advertised fps across all fps ranges.
    float max_fps = 0.0;
    for (uint32_t i = 0;
            i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
        if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
            max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
    }
    float ratio = 1.0 * MAX_PROCESSED_STREAMS *
            gCamCapability[cameraId]->active_array_size.width *
            gCamCapability[cameraId]->active_array_size.height * max_fps /
            gCamCapability[cameraId]->max_pixel_bandwidth;
    info->resource_cost = 100 * MIN(1.0, ratio);
    LOGI("camera %d resource cost is %d", cameraId,
            info->resource_cost);

    pthread_mutex_unlock(&gCamLock);
    return rc;
}
9352
9353/*===========================================================================
9354 * FUNCTION : translateCapabilityToMetadata
9355 *
9356 * DESCRIPTION: translate the capability into camera_metadata_t
9357 *
9358 * PARAMETERS : type of the request
9359 *
9360 *
9361 * RETURN : success: camera_metadata_t*
9362 * failure: NULL
9363 *
9364 *==========================================================================*/
9365camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
9366{
9367 if (mDefaultMetadata[type] != NULL) {
9368 return mDefaultMetadata[type];
9369 }
9370 //first time we are handling this request
9371 //fill up the metadata structure using the wrapper class
9372 CameraMetadata settings;
9373 //translate from cam_capability_t to camera_metadata_tag_t
9374 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
9375 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
9376 int32_t defaultRequestID = 0;
9377 settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
9378
9379 /* OIS disable */
9380 char ois_prop[PROPERTY_VALUE_MAX];
9381 memset(ois_prop, 0, sizeof(ois_prop));
9382 property_get("persist.camera.ois.disable", ois_prop, "0");
9383 uint8_t ois_disable = (uint8_t)atoi(ois_prop);
9384
9385 /* Force video to use OIS */
9386 char videoOisProp[PROPERTY_VALUE_MAX];
9387 memset(videoOisProp, 0, sizeof(videoOisProp));
9388 property_get("persist.camera.ois.video", videoOisProp, "1");
9389 uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
Shuzhen Wang19463d72016-03-08 11:09:52 -08009390
9391 // Hybrid AE enable/disable
9392 char hybrid_ae_prop[PROPERTY_VALUE_MAX];
9393 memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
9394 property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
9395 const uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);
9396
Thierry Strudel3d639192016-09-09 11:52:26 -07009397 uint8_t controlIntent = 0;
9398 uint8_t focusMode;
9399 uint8_t vsMode;
9400 uint8_t optStabMode;
9401 uint8_t cacMode;
9402 uint8_t edge_mode;
9403 uint8_t noise_red_mode;
9404 uint8_t tonemap_mode;
9405 bool highQualityModeEntryAvailable = FALSE;
9406 bool fastModeEntryAvailable = FALSE;
9407 vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
9408 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
9409 switch (type) {
9410 case CAMERA3_TEMPLATE_PREVIEW:
9411 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
9412 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
9413 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
9414 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
9415 edge_mode = ANDROID_EDGE_MODE_FAST;
9416 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
9417 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
9418 break;
9419 case CAMERA3_TEMPLATE_STILL_CAPTURE:
9420 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
9421 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
9422 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
9423 edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
9424 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
9425 tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
9426 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
9427 // Order of priority for default CAC is HIGH Quality -> FAST -> OFF
9428 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
9429 if (gCamCapability[mCameraId]->aberration_modes[i] ==
9430 CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
9431 highQualityModeEntryAvailable = TRUE;
9432 } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
9433 CAM_COLOR_CORRECTION_ABERRATION_FAST) {
9434 fastModeEntryAvailable = TRUE;
9435 }
9436 }
9437 if (highQualityModeEntryAvailable) {
9438 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
9439 } else if (fastModeEntryAvailable) {
9440 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
9441 }
9442 break;
9443 case CAMERA3_TEMPLATE_VIDEO_RECORD:
9444 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
9445 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
9446 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -07009447 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
9448 edge_mode = ANDROID_EDGE_MODE_FAST;
9449 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
9450 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
9451 if (forceVideoOis)
9452 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
9453 break;
9454 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
9455 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
9456 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
9457 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -07009458 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
9459 edge_mode = ANDROID_EDGE_MODE_FAST;
9460 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
9461 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
9462 if (forceVideoOis)
9463 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
9464 break;
9465 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
9466 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
9467 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
9468 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
9469 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
9470 edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
9471 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
9472 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
9473 break;
9474 case CAMERA3_TEMPLATE_MANUAL:
9475 edge_mode = ANDROID_EDGE_MODE_FAST;
9476 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
9477 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
9478 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
9479 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
9480 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
9481 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
9482 break;
9483 default:
9484 edge_mode = ANDROID_EDGE_MODE_FAST;
9485 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
9486 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
9487 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
9488 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
9489 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
9490 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
9491 break;
9492 }
Thierry Strudel04e026f2016-10-10 11:27:36 -07009493 // Set CAC to OFF if underlying device doesn't support
9494 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
9495 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
9496 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009497 settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
9498 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
9499 settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
9500 if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
9501 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
9502 }
9503 settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
9504
9505 if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
9506 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
9507 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
9508 else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
9509 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
9510 || ois_disable)
9511 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
9512 settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
9513
9514 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
9515 &gCamCapability[mCameraId]->exposure_compensation_default, 1);
9516
9517 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
9518 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
9519
9520 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
9521 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
9522
9523 static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
9524 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
9525
9526 static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
9527 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
9528
9529 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
9530 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
9531
9532 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
9533 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
9534
9535 static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
9536 settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
9537
9538 /*flash*/
9539 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
9540 settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
9541
9542 static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
9543 settings.update(ANDROID_FLASH_FIRING_POWER,
9544 &flashFiringLevel, 1);
9545
9546 /* lens */
9547 float default_aperture = gCamCapability[mCameraId]->apertures[0];
9548 settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
9549
9550 if (gCamCapability[mCameraId]->filter_densities_count) {
9551 float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
9552 settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
9553 gCamCapability[mCameraId]->filter_densities_count);
9554 }
9555
9556 float default_focal_length = gCamCapability[mCameraId]->focal_length;
9557 settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
9558
9559 if (focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
9560 float default_focus_distance = 0;
9561 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &default_focus_distance, 1);
9562 }
9563
9564 static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
9565 settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
9566
9567 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
9568 settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
9569
9570 static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
9571 settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
9572
9573 /* face detection (default to OFF) */
9574 static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
9575 settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
9576
9577 static const uint8_t histogramMode = ANDROID_STATISTICS_HISTOGRAM_MODE_OFF;
9578 settings.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &histogramMode, 1);
9579
9580 static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
9581 settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
9582
9583 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
9584 settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
9585
9586 static const uint8_t lensShadingMode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
9587 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &lensShadingMode, 1);
9588
9589 static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
9590 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
9591
9592 /* Exposure time(Update the Min Exposure Time)*/
9593 int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
9594 settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
9595
9596 /* frame duration */
9597 static const int64_t default_frame_duration = NSEC_PER_33MSEC;
9598 settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
9599
9600 /* sensitivity */
9601 static const int32_t default_sensitivity = 100;
9602 settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009603#ifndef USE_HAL_3_3
9604 static const int32_t default_isp_sensitivity =
9605 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
9606 settings.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &default_isp_sensitivity, 1);
9607#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009608
9609 /*edge mode*/
9610 settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
9611
9612 /*noise reduction mode*/
9613 settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
9614
9615 /*color correction mode*/
9616 static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
9617 settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
9618
9619 /*transform matrix mode*/
9620 settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
9621
9622 int32_t scaler_crop_region[4];
9623 scaler_crop_region[0] = 0;
9624 scaler_crop_region[1] = 0;
9625 scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
9626 scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
9627 settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
9628
9629 static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
9630 settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
9631
9632 /*focus distance*/
9633 float focus_distance = 0.0;
9634 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
9635
9636 /*target fps range: use maximum range for picture, and maximum fixed range for video*/
Thierry Strudele80ad7c2016-12-06 10:16:27 -08009637 /* Restrict template max_fps to 30 */
Thierry Strudel3d639192016-09-09 11:52:26 -07009638 float max_range = 0.0;
9639 float max_fixed_fps = 0.0;
9640 int32_t fps_range[2] = {0, 0};
9641 for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
9642 i++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08009643 if (gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps >
9644 TEMPLATE_MAX_PREVIEW_FPS) {
9645 continue;
9646 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009647 float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
9648 gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
9649 if (type == CAMERA3_TEMPLATE_PREVIEW ||
9650 type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
9651 type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
9652 if (range > max_range) {
9653 fps_range[0] =
9654 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
9655 fps_range[1] =
9656 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
9657 max_range = range;
9658 }
9659 } else {
9660 if (range < 0.01 && max_fixed_fps <
9661 gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
9662 fps_range[0] =
9663 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
9664 fps_range[1] =
9665 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
9666 max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
9667 }
9668 }
9669 }
9670 settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
9671
9672 /*precapture trigger*/
9673 uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
9674 settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
9675
9676 /*af trigger*/
9677 uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
9678 settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
9679
9680 /* ae & af regions */
9681 int32_t active_region[] = {
9682 gCamCapability[mCameraId]->active_array_size.left,
9683 gCamCapability[mCameraId]->active_array_size.top,
9684 gCamCapability[mCameraId]->active_array_size.left +
9685 gCamCapability[mCameraId]->active_array_size.width,
9686 gCamCapability[mCameraId]->active_array_size.top +
9687 gCamCapability[mCameraId]->active_array_size.height,
9688 0};
9689 settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
9690 sizeof(active_region) / sizeof(active_region[0]));
9691 settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
9692 sizeof(active_region) / sizeof(active_region[0]));
9693
9694 /* black level lock */
9695 uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
9696 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
9697
9698 /* lens shading map mode */
9699 uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
9700 if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
9701 shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
9702 }
9703 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
9704
9705 //special defaults for manual template
9706 if (type == CAMERA3_TEMPLATE_MANUAL) {
9707 static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
9708 settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
9709
9710 static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
9711 settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
9712
9713 static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
9714 settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
9715
9716 static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
9717 settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
9718
9719 static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
9720 settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
9721
9722 static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
9723 settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
9724 }
9725
9726
9727 /* TNR
9728 * We'll use this location to determine which modes TNR will be set.
9729 * We will enable TNR to be on if either of the Preview/Video stream requires TNR
9730 * This is not to be confused with linking on a per stream basis that decision
9731 * is still on per-session basis and will be handled as part of config stream
9732 */
9733 uint8_t tnr_enable = 0;
9734
9735 if (m_bTnrPreview || m_bTnrVideo) {
9736
9737 switch (type) {
9738 case CAMERA3_TEMPLATE_VIDEO_RECORD:
9739 tnr_enable = 1;
9740 break;
9741
9742 default:
9743 tnr_enable = 0;
9744 break;
9745 }
9746
9747 int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
9748 settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
9749 settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
9750
9751 LOGD("TNR:%d with process plate %d for template:%d",
9752 tnr_enable, tnr_process_type, type);
9753 }
9754
9755 //Update Link tags to default
9756 int32_t sync_type = CAM_TYPE_STANDALONE;
9757 settings.update(QCAMERA3_DUALCAM_LINK_ENABLE, &sync_type, 1);
9758
9759 int32_t is_main = 0; //this doesn't matter as app should overwrite
9760 settings.update(QCAMERA3_DUALCAM_LINK_IS_MAIN, &is_main, 1);
9761
9762 settings.update(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID, &is_main, 1);
9763
9764 /* CDS default */
9765 char prop[PROPERTY_VALUE_MAX];
9766 memset(prop, 0, sizeof(prop));
9767 property_get("persist.camera.CDS", prop, "Auto");
9768 cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
9769 cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
9770 if (CAM_CDS_MODE_MAX == cds_mode) {
9771 cds_mode = CAM_CDS_MODE_AUTO;
9772 }
9773
9774 /* Disabling CDS in templates which have TNR enabled*/
9775 if (tnr_enable)
9776 cds_mode = CAM_CDS_MODE_OFF;
9777
9778 int32_t mode = cds_mode;
9779 settings.update(QCAMERA3_CDS_MODE, &mode, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -07009780
9781 int32_t hdr_mode = (int32_t)QCAMERA3_VIDEO_HDR_MODE_OFF;
9782 settings.update(QCAMERA3_VIDEO_HDR_MODE, &hdr_mode, 1);
9783
9784 /* IR Mode Default Off */
9785 int32_t ir_mode = (int32_t)QCAMERA3_IR_MODE_OFF;
9786 settings.update(QCAMERA3_IR_MODE, &ir_mode, 1);
9787
Thierry Strudel269c81a2016-10-12 12:13:59 -07009788 /* Manual Convergence AEC Speed is disabled by default*/
9789 float default_aec_speed = 0;
9790 settings.update(QCAMERA3_AEC_CONVERGENCE_SPEED, &default_aec_speed, 1);
9791
9792 /* Manual Convergence AWB Speed is disabled by default*/
9793 float default_awb_speed = 0;
9794 settings.update(QCAMERA3_AWB_CONVERGENCE_SPEED, &default_awb_speed, 1);
9795
Thierry Strudel295a0ca2016-11-03 18:38:47 -07009796 // Set instant AEC to normal convergence by default
9797 int32_t instant_aec_mode = (int32_t)QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE;
9798 settings.update(QCAMERA3_INSTANT_AEC_MODE, &instant_aec_mode, 1);
9799
Shuzhen Wang19463d72016-03-08 11:09:52 -08009800 /* hybrid ae */
9801 settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);
9802
Thierry Strudel3d639192016-09-09 11:52:26 -07009803 mDefaultMetadata[type] = settings.release();
9804
9805 return mDefaultMetadata[type];
9806}
9807
9808/*===========================================================================
9809 * FUNCTION : setFrameParameters
9810 *
9811 * DESCRIPTION: set parameters per frame as requested in the metadata from
9812 * framework
9813 *
9814 * PARAMETERS :
9815 * @request : request that needs to be serviced
Thierry Strudelc2ee3302016-11-17 12:33:12 -08009816 * @streamsArray : Stream ID of all the requested streams
Thierry Strudel3d639192016-09-09 11:52:26 -07009817 * @blob_request: Whether this request is a blob request or not
9818 *
 * RETURN     : success: NO_ERROR
 *              failure: BAD_VALUE (or error code from metadata translation)
9821 *==========================================================================*/
int QCamera3HardwareInterface::setFrameParameters(
        camera3_capture_request_t *request,
        cam_stream_ID_t streamsArray,
        int blob_request,
        uint32_t snapshotStreamId)
{
    /*translate from camera_metadata_t type to parm_type_t*/
    int rc = 0;
    int32_t hal_version = CAM_HAL_V3;

    // Start from an empty parameter batch; the entries below must be added
    // in this order (HAL version first, then per-frame bookkeeping).
    clear_metadata_buffer(mParameters);
    if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
        LOGE("Failed to set hal version in the parameters");
        return BAD_VALUE;
    }

    /*we need to update the frame number in the parameters*/
    if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
            request->frame_number)) {
        LOGE("Failed to set the frame number in the parameters");
        return BAD_VALUE;
    }

    /* Update stream id of all the requested buffers */
    if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
        LOGE("Failed to set stream type mask in the parameters");
        return BAD_VALUE;
    }

    // One-shot: if a debug-level change was flagged, piggyback a dummy
    // entry on this frame so the backend re-reads the debug property.
    if (mUpdateDebugLevel) {
        uint32_t dummyDebugLevel = 0;
        /* The value of dummyDebugLevel is irrelavent. On
         * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, read debug property */
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
                dummyDebugLevel)) {
            LOGE("Failed to set UPDATE_DEBUG_LEVEL");
            return BAD_VALUE;
        }
        mUpdateDebugLevel = false;
    }

    // A NULL settings pointer means the framework is repeating the previous
    // request's settings, so no translation is needed. For blob (JPEG)
    // requests the translated batch is snapshotted into mPrevParameters —
    // presumably consumed later by the snapshot/reprocess path; confirm
    // against its readers.
    if(request->settings != NULL){
        rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
        if (blob_request)
            memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
    }

    return rc;
}
9871
9872/*===========================================================================
9873 * FUNCTION : setReprocParameters
9874 *
9875 * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
9876 * return it.
9877 *
9878 * PARAMETERS :
9879 * @request : request that needs to be serviced
9880 *
 * RETURN     : success: NO_ERROR
 *              failure: BAD_VALUE (or error code from metadata translation)
9883 *==========================================================================*/
9884int32_t QCamera3HardwareInterface::setReprocParameters(
9885 camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
9886 uint32_t snapshotStreamId)
9887{
9888 /*translate from camera_metadata_t type to parm_type_t*/
9889 int rc = 0;
9890
9891 if (NULL == request->settings){
9892 LOGE("Reprocess settings cannot be NULL");
9893 return BAD_VALUE;
9894 }
9895
9896 if (NULL == reprocParam) {
9897 LOGE("Invalid reprocessing metadata buffer");
9898 return BAD_VALUE;
9899 }
9900 clear_metadata_buffer(reprocParam);
9901
9902 /*we need to update the frame number in the parameters*/
9903 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
9904 request->frame_number)) {
9905 LOGE("Failed to set the frame number in the parameters");
9906 return BAD_VALUE;
9907 }
9908
9909 rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
9910 if (rc < 0) {
9911 LOGE("Failed to translate reproc request");
9912 return rc;
9913 }
9914
9915 CameraMetadata frame_settings;
9916 frame_settings = request->settings;
9917 if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
9918 frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
9919 int32_t *crop_count =
9920 frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
9921 int32_t *crop_data =
9922 frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
9923 int32_t *roi_map =
9924 frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
9925 if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
9926 cam_crop_data_t crop_meta;
9927 memset(&crop_meta, 0, sizeof(cam_crop_data_t));
9928 crop_meta.num_of_streams = 1;
9929 crop_meta.crop_info[0].crop.left = crop_data[0];
9930 crop_meta.crop_info[0].crop.top = crop_data[1];
9931 crop_meta.crop_info[0].crop.width = crop_data[2];
9932 crop_meta.crop_info[0].crop.height = crop_data[3];
9933
9934 crop_meta.crop_info[0].roi_map.left =
9935 roi_map[0];
9936 crop_meta.crop_info[0].roi_map.top =
9937 roi_map[1];
9938 crop_meta.crop_info[0].roi_map.width =
9939 roi_map[2];
9940 crop_meta.crop_info[0].roi_map.height =
9941 roi_map[3];
9942
9943 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
9944 rc = BAD_VALUE;
9945 }
9946 LOGD("Found reprocess crop data for stream %p %dx%d, %dx%d",
9947 request->input_buffer->stream,
9948 crop_meta.crop_info[0].crop.left,
9949 crop_meta.crop_info[0].crop.top,
9950 crop_meta.crop_info[0].crop.width,
9951 crop_meta.crop_info[0].crop.height);
9952 LOGD("Found reprocess roi map data for stream %p %dx%d, %dx%d",
9953 request->input_buffer->stream,
9954 crop_meta.crop_info[0].roi_map.left,
9955 crop_meta.crop_info[0].roi_map.top,
9956 crop_meta.crop_info[0].roi_map.width,
9957 crop_meta.crop_info[0].roi_map.height);
9958 } else {
9959 LOGE("Invalid reprocess crop count %d!", *crop_count);
9960 }
9961 } else {
9962 LOGE("No crop data from matching output stream");
9963 }
9964
9965 /* These settings are not needed for regular requests so handle them specially for
9966 reprocess requests; information needed for EXIF tags */
9967 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
9968 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
9969 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
9970 if (NAME_NOT_FOUND != val) {
9971 uint32_t flashMode = (uint32_t)val;
9972 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
9973 rc = BAD_VALUE;
9974 }
9975 } else {
9976 LOGE("Could not map fwk flash mode %d to correct hal flash mode",
9977 frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
9978 }
9979 } else {
9980 LOGH("No flash mode in reprocess settings");
9981 }
9982
9983 if (frame_settings.exists(ANDROID_FLASH_STATE)) {
9984 int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
9985 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
9986 rc = BAD_VALUE;
9987 }
9988 } else {
9989 LOGH("No flash state in reprocess settings");
9990 }
9991
9992 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS)) {
9993 uint8_t *reprocessFlags =
9994 frame_settings.find(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS).data.u8;
9995 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_REPROCESS_FLAGS,
9996 *reprocessFlags)) {
9997 rc = BAD_VALUE;
9998 }
9999 }
10000
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070010001 // Add metadata which reprocess needs
10002 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB)) {
10003 cam_reprocess_info_t *repro_info =
10004 (cam_reprocess_info_t *)frame_settings.find
10005 (QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB).data.u8;
Thierry Strudel3d639192016-09-09 11:52:26 -070010006 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_SENSOR,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070010007 repro_info->sensor_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070010008 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CAMIF,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070010009 repro_info->camif_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070010010 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_ISP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070010011 repro_info->isp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070010012 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CPP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070010013 repro_info->cpp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070010014 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_FOCAL_LENGTH_RATIO,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070010015 repro_info->af_focal_length_ratio);
Thierry Strudel3d639192016-09-09 11:52:26 -070010016 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_FLIP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070010017 repro_info->pipeline_flip);
10018 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_ROI,
10019 repro_info->af_roi);
10020 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_IMG_DYN_FEAT,
10021 repro_info->dyn_mask);
Thierry Strudel3d639192016-09-09 11:52:26 -070010022 /* If there is ANDROID_JPEG_ORIENTATION in frame setting,
10023 CAM_INTF_PARM_ROTATION metadata then has been added in
10024 translateToHalMetadata. HAL need to keep this new rotation
10025 metadata. Otherwise, the old rotation info saved in the vendor tag
10026 would be used */
10027 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
10028 CAM_INTF_PARM_ROTATION, reprocParam) {
10029 LOGD("CAM_INTF_PARM_ROTATION metadata is added in translateToHalMetadata");
10030 } else {
10031 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_ROTATION,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070010032 repro_info->rotation_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070010033 }
Thierry Strudel3d639192016-09-09 11:52:26 -070010034 }
10035
10036 /* Add additional JPEG cropping information. App add QCAMERA3_JPEG_ENCODE_CROP_RECT
10037 to ask for cropping and use ROI for downscale/upscale during HW JPEG encoding.
10038 roi.width and roi.height would be the final JPEG size.
10039 For now, HAL only checks this for reprocess request */
10040 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ENABLE) &&
10041 frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_RECT)) {
10042 uint8_t *enable =
10043 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ENABLE).data.u8;
10044 if (*enable == TRUE) {
10045 int32_t *crop_data =
10046 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_RECT).data.i32;
10047 cam_stream_crop_info_t crop_meta;
10048 memset(&crop_meta, 0, sizeof(cam_stream_crop_info_t));
10049 crop_meta.stream_id = 0;
10050 crop_meta.crop.left = crop_data[0];
10051 crop_meta.crop.top = crop_data[1];
10052 crop_meta.crop.width = crop_data[2];
10053 crop_meta.crop.height = crop_data[3];
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010054 // The JPEG crop roi should match cpp output size
10055 IF_META_AVAILABLE(cam_stream_crop_info_t, cpp_crop,
10056 CAM_INTF_META_SNAP_CROP_INFO_CPP, reprocParam) {
10057 crop_meta.roi_map.left = 0;
10058 crop_meta.roi_map.top = 0;
10059 crop_meta.roi_map.width = cpp_crop->crop.width;
10060 crop_meta.roi_map.height = cpp_crop->crop.height;
Thierry Strudel3d639192016-09-09 11:52:26 -070010061 }
10062 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_ENCODE_CROP,
10063 crop_meta);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010064 LOGH("Add JPEG encode crop left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070010065 crop_meta.crop.left, crop_meta.crop.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010066 crop_meta.crop.width, crop_meta.crop.height, mCameraId);
10067 LOGH("Add JPEG encode crop ROI left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070010068 crop_meta.roi_map.left, crop_meta.roi_map.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010069 crop_meta.roi_map.width, crop_meta.roi_map.height, mCameraId);
10070
10071 // Add JPEG scale information
10072 cam_dimension_t scale_dim;
10073 memset(&scale_dim, 0, sizeof(cam_dimension_t));
10074 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ROI)) {
10075 int32_t *roi =
10076 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ROI).data.i32;
10077 scale_dim.width = roi[2];
10078 scale_dim.height = roi[3];
10079 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_SCALE_DIMENSION,
10080 scale_dim);
10081 LOGH("Add JPEG encode scale width %d, height %d, mCameraId %d",
10082 scale_dim.width, scale_dim.height, mCameraId);
10083 }
Thierry Strudel3d639192016-09-09 11:52:26 -070010084 }
10085 }
10086
10087 return rc;
10088}
10089
10090/*===========================================================================
10091 * FUNCTION : saveRequestSettings
10092 *
10093 * DESCRIPTION: Add any settings that might have changed to the request settings
10094 * and save the settings to be applied on the frame
10095 *
10096 * PARAMETERS :
10097 * @jpegMetadata : the extracted and/or modified jpeg metadata
10098 * @request : request with initial settings
10099 *
10100 * RETURN :
10101 * camera_metadata_t* : pointer to the saved request settings
10102 *==========================================================================*/
10103camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
10104 const CameraMetadata &jpegMetadata,
10105 camera3_capture_request_t *request)
10106{
10107 camera_metadata_t *resultMetadata;
10108 CameraMetadata camMetadata;
10109 camMetadata = request->settings;
10110
10111 if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
10112 int32_t thumbnail_size[2];
10113 thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
10114 thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
10115 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
10116 jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
10117 }
10118
10119 if (request->input_buffer != NULL) {
10120 uint8_t reprocessFlags = 1;
10121 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
10122 (uint8_t*)&reprocessFlags,
10123 sizeof(reprocessFlags));
10124 }
10125
10126 resultMetadata = camMetadata.release();
10127 return resultMetadata;
10128}
10129
10130/*===========================================================================
10131 * FUNCTION : setHalFpsRange
10132 *
10133 * DESCRIPTION: set FPS range parameter
10134 *
10135 *
10136 * PARAMETERS :
10137 * @settings : Metadata from framework
10138 * @hal_metadata: Metadata buffer
10139 *
10140 *
10141 * RETURN : success: NO_ERROR
10142 * failure:
10143 *==========================================================================*/
10144int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
10145 metadata_buffer_t *hal_metadata)
10146{
10147 int32_t rc = NO_ERROR;
10148 cam_fps_range_t fps_range;
10149 fps_range.min_fps = (float)
10150 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
10151 fps_range.max_fps = (float)
10152 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
10153 fps_range.video_min_fps = fps_range.min_fps;
10154 fps_range.video_max_fps = fps_range.max_fps;
10155
10156 LOGD("aeTargetFpsRange fps: [%f %f]",
10157 fps_range.min_fps, fps_range.max_fps);
10158 /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
10159 * follows:
10160 * ---------------------------------------------------------------|
10161 * Video stream is absent in configure_streams |
10162 * (Camcorder preview before the first video record |
10163 * ---------------------------------------------------------------|
10164 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
10165 * | | | vid_min/max_fps|
10166 * ---------------------------------------------------------------|
10167 * NO | [ 30, 240] | 240 | [240, 240] |
10168 * |-------------|-------------|----------------|
10169 * | [240, 240] | 240 | [240, 240] |
10170 * ---------------------------------------------------------------|
10171 * Video stream is present in configure_streams |
10172 * ---------------------------------------------------------------|
10173 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
10174 * | | | vid_min/max_fps|
10175 * ---------------------------------------------------------------|
10176 * NO | [ 30, 240] | 240 | [240, 240] |
10177 * (camcorder prev |-------------|-------------|----------------|
10178 * after video rec | [240, 240] | 240 | [240, 240] |
10179 * is stopped) | | | |
10180 * ---------------------------------------------------------------|
10181 * YES | [ 30, 240] | 240 | [240, 240] |
10182 * |-------------|-------------|----------------|
10183 * | [240, 240] | 240 | [240, 240] |
10184 * ---------------------------------------------------------------|
10185 * When Video stream is absent in configure_streams,
10186 * preview fps = sensor_fps / batchsize
10187 * Eg: for 240fps at batchSize 4, preview = 60fps
10188 * for 120fps at batchSize 4, preview = 30fps
10189 *
10190 * When video stream is present in configure_streams, preview fps is as per
10191 * the ratio of preview buffers to video buffers requested in process
10192 * capture request
10193 */
10194 mBatchSize = 0;
10195 if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
10196 fps_range.min_fps = fps_range.video_max_fps;
10197 fps_range.video_min_fps = fps_range.video_max_fps;
10198 int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
10199 fps_range.max_fps);
10200 if (NAME_NOT_FOUND != val) {
10201 cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
10202 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
10203 return BAD_VALUE;
10204 }
10205
10206 if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
10207 /* If batchmode is currently in progress and the fps changes,
10208 * set the flag to restart the sensor */
10209 if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
10210 (mHFRVideoFps != fps_range.max_fps)) {
10211 mNeedSensorRestart = true;
10212 }
10213 mHFRVideoFps = fps_range.max_fps;
10214 mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
10215 if (mBatchSize > MAX_HFR_BATCH_SIZE) {
10216 mBatchSize = MAX_HFR_BATCH_SIZE;
10217 }
10218 }
10219 LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);
10220
10221 }
10222 } else {
10223 /* HFR mode is session param in backend/ISP. This should be reset when
10224 * in non-HFR mode */
10225 cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
10226 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
10227 return BAD_VALUE;
10228 }
10229 }
10230 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
10231 return BAD_VALUE;
10232 }
10233 LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
10234 fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
10235 return rc;
10236}
10237
10238/*===========================================================================
10239 * FUNCTION : translateToHalMetadata
10240 *
10241 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
10242 *
10243 *
10244 * PARAMETERS :
10245 * @request : request sent from framework
10246 *
10247 *
10248 * RETURN : success: NO_ERROR
10249 * failure:
10250 *==========================================================================*/
10251int QCamera3HardwareInterface::translateToHalMetadata
10252 (const camera3_capture_request_t *request,
10253 metadata_buffer_t *hal_metadata,
10254 uint32_t snapshotStreamId)
10255{
10256 int rc = 0;
10257 CameraMetadata frame_settings;
10258 frame_settings = request->settings;
10259
    /* Do not change the order of the following list unless you know what you are
     * doing.
     * The order is laid out in such a way that parameters in the front of the table
     * may be used to override the parameters later in the table. Examples are:
     * 1. META_MODE should precede AEC/AWB/AF MODE
     * 2. AEC MODE should precede EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
     * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
     * 4. Any mode should precede its corresponding settings
     */
10269 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
10270 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
10271 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
10272 rc = BAD_VALUE;
10273 }
10274 rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
10275 if (rc != NO_ERROR) {
10276 LOGE("extractSceneMode failed");
10277 }
10278 }
10279
10280 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
10281 uint8_t fwk_aeMode =
10282 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
10283 uint8_t aeMode;
10284 int32_t redeye;
10285
10286 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
10287 aeMode = CAM_AE_MODE_OFF;
10288 } else {
10289 aeMode = CAM_AE_MODE_ON;
10290 }
10291 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
10292 redeye = 1;
10293 } else {
10294 redeye = 0;
10295 }
10296
10297 int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
10298 fwk_aeMode);
10299 if (NAME_NOT_FOUND != val) {
10300 int32_t flashMode = (int32_t)val;
10301 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
10302 }
10303
10304 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
10305 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
10306 rc = BAD_VALUE;
10307 }
10308 }
10309
10310 if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
10311 uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
10312 int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
10313 fwk_whiteLevel);
10314 if (NAME_NOT_FOUND != val) {
10315 uint8_t whiteLevel = (uint8_t)val;
10316 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
10317 rc = BAD_VALUE;
10318 }
10319 }
10320 }
10321
10322 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
10323 uint8_t fwk_cacMode =
10324 frame_settings.find(
10325 ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
10326 int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
10327 fwk_cacMode);
10328 if (NAME_NOT_FOUND != val) {
10329 cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
10330 bool entryAvailable = FALSE;
10331 // Check whether Frameworks set CAC mode is supported in device or not
10332 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
10333 if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
10334 entryAvailable = TRUE;
10335 break;
10336 }
10337 }
10338 LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
10339 // If entry not found then set the device supported mode instead of frameworks mode i.e,
10340 // Only HW ISP CAC + NO SW CAC : Advertise all 3 with High doing same as fast by ISP
10341 // NO HW ISP CAC + Only SW CAC : Advertise all 3 with Fast doing the same as OFF
10342 if (entryAvailable == FALSE) {
10343 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
10344 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
10345 } else {
10346 if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
10347 // High is not supported and so set the FAST as spec say's underlying
10348 // device implementation can be the same for both modes.
10349 cacMode = CAM_COLOR_CORRECTION_ABERRATION_FAST;
10350 } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
10351 // Fast is not supported and so we cannot set HIGH or FAST but choose OFF
10352 // in order to avoid the fps drop due to high quality
10353 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
10354 } else {
10355 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
10356 }
10357 }
10358 }
10359 LOGD("Final cacMode is %d", cacMode);
10360 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
10361 rc = BAD_VALUE;
10362 }
10363 } else {
10364 LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
10365 }
10366 }
10367
10368 if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
10369 uint8_t fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
10370 int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
10371 fwk_focusMode);
10372 if (NAME_NOT_FOUND != val) {
10373 uint8_t focusMode = (uint8_t)val;
10374 LOGD("set focus mode %d", focusMode);
10375 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
10376 rc = BAD_VALUE;
10377 }
10378 }
10379 }
10380
10381 if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) {
10382 float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
10383 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
10384 focalDistance)) {
10385 rc = BAD_VALUE;
10386 }
10387 }
10388
10389 if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
10390 uint8_t fwk_antibandingMode =
10391 frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
10392 int val = lookupHalName(ANTIBANDING_MODES_MAP,
10393 METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
10394 if (NAME_NOT_FOUND != val) {
10395 uint32_t hal_antibandingMode = (uint32_t)val;
Shuzhen Wangf6890e02016-08-12 14:28:54 -070010396 if (hal_antibandingMode == CAM_ANTIBANDING_MODE_AUTO) {
10397 if (m60HzZone) {
10398 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_60HZ;
10399 } else {
10400 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_50HZ;
10401 }
10402 }
Thierry Strudel3d639192016-09-09 11:52:26 -070010403 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
10404 hal_antibandingMode)) {
10405 rc = BAD_VALUE;
10406 }
10407 }
10408 }
10409
10410 if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
10411 int32_t expCompensation = frame_settings.find(
10412 ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
10413 if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
10414 expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
10415 if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
10416 expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
Zhijun He426c4d92016-12-16 14:27:50 -080010417 ALOGV("CAM_DEBUG: Setting compensation:%d", expCompensation);
Thierry Strudel3d639192016-09-09 11:52:26 -070010418 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
10419 expCompensation)) {
10420 rc = BAD_VALUE;
10421 }
10422 }
10423
10424 if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
10425 uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
10426 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
10427 rc = BAD_VALUE;
10428 }
10429 }
10430 if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
10431 rc = setHalFpsRange(frame_settings, hal_metadata);
10432 if (rc != NO_ERROR) {
10433 LOGE("setHalFpsRange failed");
10434 }
10435 }
10436
10437 if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
10438 uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
10439 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
10440 rc = BAD_VALUE;
10441 }
10442 }
10443
10444 if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
10445 uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
10446 int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
10447 fwk_effectMode);
10448 if (NAME_NOT_FOUND != val) {
10449 uint8_t effectMode = (uint8_t)val;
10450 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
10451 rc = BAD_VALUE;
10452 }
10453 }
10454 }
10455
10456 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
10457 uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
10458 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
10459 colorCorrectMode)) {
10460 rc = BAD_VALUE;
10461 }
10462 }
10463
10464 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
10465 cam_color_correct_gains_t colorCorrectGains;
10466 for (size_t i = 0; i < CC_GAIN_MAX; i++) {
10467 colorCorrectGains.gains[i] =
10468 frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
10469 }
10470 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
10471 colorCorrectGains)) {
10472 rc = BAD_VALUE;
10473 }
10474 }
10475
10476 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
10477 cam_color_correct_matrix_t colorCorrectTransform;
10478 cam_rational_type_t transform_elem;
10479 size_t num = 0;
10480 for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
10481 for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
10482 transform_elem.numerator =
10483 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
10484 transform_elem.denominator =
10485 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
10486 colorCorrectTransform.transform_matrix[i][j] = transform_elem;
10487 num++;
10488 }
10489 }
10490 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
10491 colorCorrectTransform)) {
10492 rc = BAD_VALUE;
10493 }
10494 }
10495
10496 cam_trigger_t aecTrigger;
10497 aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
10498 aecTrigger.trigger_id = -1;
10499 if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
10500 frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
10501 aecTrigger.trigger =
10502 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
10503 aecTrigger.trigger_id =
10504 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
10505 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
10506 aecTrigger)) {
10507 rc = BAD_VALUE;
10508 }
10509 LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
10510 aecTrigger.trigger, aecTrigger.trigger_id);
10511 }
10512
10513 /*af_trigger must come with a trigger id*/
10514 if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
10515 frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
10516 cam_trigger_t af_trigger;
10517 af_trigger.trigger =
10518 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
10519 af_trigger.trigger_id =
10520 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
10521 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
10522 rc = BAD_VALUE;
10523 }
10524 LOGD("AfTrigger: %d AfTriggerID: %d",
10525 af_trigger.trigger, af_trigger.trigger_id);
10526 }
10527
10528 if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
10529 int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
10530 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
10531 rc = BAD_VALUE;
10532 }
10533 }
10534 if (frame_settings.exists(ANDROID_EDGE_MODE)) {
10535 cam_edge_application_t edge_application;
10536 edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
10537 if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
10538 edge_application.sharpness = 0;
10539 } else {
10540 edge_application.sharpness = gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
10541 }
10542 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
10543 rc = BAD_VALUE;
10544 }
10545 }
10546
10547 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
10548 int32_t respectFlashMode = 1;
10549 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
10550 uint8_t fwk_aeMode =
10551 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
10552 if (fwk_aeMode > ANDROID_CONTROL_AE_MODE_ON) {
10553 respectFlashMode = 0;
10554 LOGH("AE Mode controls flash, ignore android.flash.mode");
10555 }
10556 }
10557 if (respectFlashMode) {
10558 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
10559 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
10560 LOGH("flash mode after mapping %d", val);
10561 // To check: CAM_INTF_META_FLASH_MODE usage
10562 if (NAME_NOT_FOUND != val) {
10563 uint8_t flashMode = (uint8_t)val;
10564 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
10565 rc = BAD_VALUE;
10566 }
10567 }
10568 }
10569 }
10570
10571 if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
10572 uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
10573 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
10574 rc = BAD_VALUE;
10575 }
10576 }
10577
10578 if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
10579 int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
10580 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
10581 flashFiringTime)) {
10582 rc = BAD_VALUE;
10583 }
10584 }
10585
10586 if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
10587 uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
10588 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
10589 hotPixelMode)) {
10590 rc = BAD_VALUE;
10591 }
10592 }
10593
10594 if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
10595 float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
10596 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
10597 lensAperture)) {
10598 rc = BAD_VALUE;
10599 }
10600 }
10601
10602 if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
10603 float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
10604 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
10605 filterDensity)) {
10606 rc = BAD_VALUE;
10607 }
10608 }
10609
10610 if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
10611 float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
10612 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
10613 focalLength)) {
10614 rc = BAD_VALUE;
10615 }
10616 }
10617
10618 if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
10619 uint8_t optStabMode =
10620 frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
10621 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
10622 optStabMode)) {
10623 rc = BAD_VALUE;
10624 }
10625 }
10626
10627 if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
10628 uint8_t videoStabMode =
10629 frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
10630 LOGD("videoStabMode from APP = %d", videoStabMode);
10631 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_VIDEO_STAB_MODE,
10632 videoStabMode)) {
10633 rc = BAD_VALUE;
10634 }
10635 }
10636
10637
10638 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
10639 uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
10640 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
10641 noiseRedMode)) {
10642 rc = BAD_VALUE;
10643 }
10644 }
10645
10646 if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
10647 float reprocessEffectiveExposureFactor =
10648 frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
10649 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
10650 reprocessEffectiveExposureFactor)) {
10651 rc = BAD_VALUE;
10652 }
10653 }
10654
10655 cam_crop_region_t scalerCropRegion;
10656 bool scalerCropSet = false;
10657 if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
10658 scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
10659 scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
10660 scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
10661 scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
10662
10663 // Map coordinate system from active array to sensor output.
10664 mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
10665 scalerCropRegion.width, scalerCropRegion.height);
10666
10667 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
10668 scalerCropRegion)) {
10669 rc = BAD_VALUE;
10670 }
10671 scalerCropSet = true;
10672 }
10673
10674 if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
10675 int64_t sensorExpTime =
10676 frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
10677 LOGD("setting sensorExpTime %lld", sensorExpTime);
10678 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
10679 sensorExpTime)) {
10680 rc = BAD_VALUE;
10681 }
10682 }
10683
10684 if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
10685 int64_t sensorFrameDuration =
10686 frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
10687 int64_t minFrameDuration = getMinFrameDuration(request);
10688 sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
10689 if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
10690 sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
10691 LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
10692 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
10693 sensorFrameDuration)) {
10694 rc = BAD_VALUE;
10695 }
10696 }
10697
10698 if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
10699 int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
10700 if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
10701 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
10702 if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
10703 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
10704 LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
10705 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
10706 sensorSensitivity)) {
10707 rc = BAD_VALUE;
10708 }
10709 }
10710
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010711#ifndef USE_HAL_3_3
10712 if (frame_settings.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
10713 int32_t ispSensitivity =
10714 frame_settings.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST).data.i32[0];
10715 if (ispSensitivity <
10716 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity) {
10717 ispSensitivity =
10718 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
10719 LOGD("clamp ispSensitivity to %d", ispSensitivity);
10720 }
10721 if (ispSensitivity >
10722 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity) {
10723 ispSensitivity =
10724 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity;
10725 LOGD("clamp ispSensitivity to %d", ispSensitivity);
10726 }
10727 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_ISP_SENSITIVITY,
10728 ispSensitivity)) {
10729 rc = BAD_VALUE;
10730 }
10731 }
10732#endif
10733
Thierry Strudel3d639192016-09-09 11:52:26 -070010734 if (frame_settings.exists(ANDROID_SHADING_MODE)) {
10735 uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
10736 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
10737 rc = BAD_VALUE;
10738 }
10739 }
10740
10741 if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
10742 uint8_t fwk_facedetectMode =
10743 frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
10744
10745 int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
10746 fwk_facedetectMode);
10747
10748 if (NAME_NOT_FOUND != val) {
10749 uint8_t facedetectMode = (uint8_t)val;
10750 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
10751 facedetectMode)) {
10752 rc = BAD_VALUE;
10753 }
10754 }
10755 }
10756
10757 if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) {
10758 uint8_t histogramMode =
10759 frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0];
10760 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
10761 histogramMode)) {
10762 rc = BAD_VALUE;
10763 }
10764 }
10765
10766 if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
10767 uint8_t sharpnessMapMode =
10768 frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
10769 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
10770 sharpnessMapMode)) {
10771 rc = BAD_VALUE;
10772 }
10773 }
10774
10775 if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
10776 uint8_t tonemapMode =
10777 frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
10778 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
10779 rc = BAD_VALUE;
10780 }
10781 }
10782 /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
10783 /*All tonemap channels will have the same number of points*/
10784 if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
10785 frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
10786 frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
10787 cam_rgb_tonemap_curves tonemapCurves;
10788 tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
10789 if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
10790 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
10791 tonemapCurves.tonemap_points_cnt,
10792 CAM_MAX_TONEMAP_CURVE_SIZE);
10793 tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
10794 }
10795
10796 /* ch0 = G*/
10797 size_t point = 0;
10798 cam_tonemap_curve_t tonemapCurveGreen;
10799 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
10800 for (size_t j = 0; j < 2; j++) {
10801 tonemapCurveGreen.tonemap_points[i][j] =
10802 frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
10803 point++;
10804 }
10805 }
10806 tonemapCurves.curves[0] = tonemapCurveGreen;
10807
10808 /* ch 1 = B */
10809 point = 0;
10810 cam_tonemap_curve_t tonemapCurveBlue;
10811 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
10812 for (size_t j = 0; j < 2; j++) {
10813 tonemapCurveBlue.tonemap_points[i][j] =
10814 frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
10815 point++;
10816 }
10817 }
10818 tonemapCurves.curves[1] = tonemapCurveBlue;
10819
10820 /* ch 2 = R */
10821 point = 0;
10822 cam_tonemap_curve_t tonemapCurveRed;
10823 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
10824 for (size_t j = 0; j < 2; j++) {
10825 tonemapCurveRed.tonemap_points[i][j] =
10826 frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
10827 point++;
10828 }
10829 }
10830 tonemapCurves.curves[2] = tonemapCurveRed;
10831
10832 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
10833 tonemapCurves)) {
10834 rc = BAD_VALUE;
10835 }
10836 }
10837
10838 if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
10839 uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
10840 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
10841 captureIntent)) {
10842 rc = BAD_VALUE;
10843 }
10844 }
10845
10846 if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
10847 uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
10848 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
10849 blackLevelLock)) {
10850 rc = BAD_VALUE;
10851 }
10852 }
10853
10854 if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
10855 uint8_t lensShadingMapMode =
10856 frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
10857 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
10858 lensShadingMapMode)) {
10859 rc = BAD_VALUE;
10860 }
10861 }
10862
10863 if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
10864 cam_area_t roi;
10865 bool reset = true;
10866 convertFromRegions(roi, request->settings, ANDROID_CONTROL_AE_REGIONS);
10867
10868 // Map coordinate system from active array to sensor output.
10869 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
10870 roi.rect.height);
10871
10872 if (scalerCropSet) {
10873 reset = resetIfNeededROI(&roi, &scalerCropRegion);
10874 }
10875 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
10876 rc = BAD_VALUE;
10877 }
10878 }
10879
10880 if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
10881 cam_area_t roi;
10882 bool reset = true;
10883 convertFromRegions(roi, request->settings, ANDROID_CONTROL_AF_REGIONS);
10884
10885 // Map coordinate system from active array to sensor output.
10886 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
10887 roi.rect.height);
10888
10889 if (scalerCropSet) {
10890 reset = resetIfNeededROI(&roi, &scalerCropRegion);
10891 }
10892 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
10893 rc = BAD_VALUE;
10894 }
10895 }
10896
10897 // CDS for non-HFR non-video mode
10898 if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
10899 !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
10900 int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
10901 if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
10902 LOGE("Invalid CDS mode %d!", *fwk_cds);
10903 } else {
10904 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
10905 CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
10906 rc = BAD_VALUE;
10907 }
10908 }
10909 }
10910
Thierry Strudel04e026f2016-10-10 11:27:36 -070010911 // Video HDR
10912 if (frame_settings.exists(QCAMERA3_VIDEO_HDR_MODE)) {
10913 cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
10914 frame_settings.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
10915 rc = setVideoHdrMode(mParameters, vhdr);
10916 if (rc != NO_ERROR) {
10917 LOGE("setVideoHDR is failed");
10918 }
10919 }
10920
10921 //IR
10922 if(frame_settings.exists(QCAMERA3_IR_MODE)) {
10923 cam_ir_mode_type_t fwk_ir = (cam_ir_mode_type_t)
10924 frame_settings.find(QCAMERA3_IR_MODE).data.i32[0];
10925 if ((CAM_IR_MODE_MAX <= fwk_ir) || (0 > fwk_ir)) {
10926 LOGE("Invalid IR mode %d!", fwk_ir);
10927 } else {
10928 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
10929 CAM_INTF_META_IR_MODE, fwk_ir)) {
10930 rc = BAD_VALUE;
10931 }
10932 }
10933 }
10934
Thierry Strudel269c81a2016-10-12 12:13:59 -070010935 if (frame_settings.exists(QCAMERA3_AEC_CONVERGENCE_SPEED)) {
10936 float aec_speed;
10937 aec_speed = frame_settings.find(QCAMERA3_AEC_CONVERGENCE_SPEED).data.f[0];
10938 LOGD("AEC Speed :%f", aec_speed);
10939 if ( aec_speed < 0 ) {
10940 LOGE("Invalid AEC mode %f!", aec_speed);
10941 } else {
10942 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_CONVERGENCE_SPEED,
10943 aec_speed)) {
10944 rc = BAD_VALUE;
10945 }
10946 }
10947 }
10948
10949 if (frame_settings.exists(QCAMERA3_AWB_CONVERGENCE_SPEED)) {
10950 float awb_speed;
10951 awb_speed = frame_settings.find(QCAMERA3_AWB_CONVERGENCE_SPEED).data.f[0];
10952 LOGD("AWB Speed :%f", awb_speed);
10953 if ( awb_speed < 0 ) {
10954 LOGE("Invalid AWB mode %f!", awb_speed);
10955 } else {
10956 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AWB_CONVERGENCE_SPEED,
10957 awb_speed)) {
10958 rc = BAD_VALUE;
10959 }
10960 }
10961 }
10962
Thierry Strudel3d639192016-09-09 11:52:26 -070010963 // TNR
10964 if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
10965 frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
10966 uint8_t b_TnrRequested = 0;
10967 cam_denoise_param_t tnr;
10968 tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
10969 tnr.process_plates =
10970 (cam_denoise_process_type_t)frame_settings.find(
10971 QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
10972 b_TnrRequested = tnr.denoise_enable;
10973 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
10974 rc = BAD_VALUE;
10975 }
10976 }
10977
Thierry Strudel295a0ca2016-11-03 18:38:47 -070010978 if (frame_settings.exists(QCAMERA3_EXPOSURE_METERING_MODE)) {
10979 int32_t* exposure_metering_mode =
10980 frame_settings.find(QCAMERA3_EXPOSURE_METERING_MODE).data.i32;
10981 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_ALGO_TYPE,
10982 *exposure_metering_mode)) {
10983 rc = BAD_VALUE;
10984 }
10985 }
10986
Thierry Strudel3d639192016-09-09 11:52:26 -070010987 if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
10988 int32_t fwk_testPatternMode =
10989 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
10990 int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
10991 METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
10992
10993 if (NAME_NOT_FOUND != testPatternMode) {
10994 cam_test_pattern_data_t testPatternData;
10995 memset(&testPatternData, 0, sizeof(testPatternData));
10996 testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
10997 if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
10998 frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
10999 int32_t *fwk_testPatternData =
11000 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
11001 testPatternData.r = fwk_testPatternData[0];
11002 testPatternData.b = fwk_testPatternData[3];
11003 switch (gCamCapability[mCameraId]->color_arrangement) {
11004 case CAM_FILTER_ARRANGEMENT_RGGB:
11005 case CAM_FILTER_ARRANGEMENT_GRBG:
11006 testPatternData.gr = fwk_testPatternData[1];
11007 testPatternData.gb = fwk_testPatternData[2];
11008 break;
11009 case CAM_FILTER_ARRANGEMENT_GBRG:
11010 case CAM_FILTER_ARRANGEMENT_BGGR:
11011 testPatternData.gr = fwk_testPatternData[2];
11012 testPatternData.gb = fwk_testPatternData[1];
11013 break;
11014 default:
11015 LOGE("color arrangement %d is not supported",
11016 gCamCapability[mCameraId]->color_arrangement);
11017 break;
11018 }
11019 }
11020 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
11021 testPatternData)) {
11022 rc = BAD_VALUE;
11023 }
11024 } else {
11025 LOGE("Invalid framework sensor test pattern mode %d",
11026 fwk_testPatternMode);
11027 }
11028 }
11029
11030 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
11031 size_t count = 0;
11032 camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
11033 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
11034 gps_coords.data.d, gps_coords.count, count);
11035 if (gps_coords.count != count) {
11036 rc = BAD_VALUE;
11037 }
11038 }
11039
11040 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
11041 char gps_methods[GPS_PROCESSING_METHOD_SIZE];
11042 size_t count = 0;
11043 const char *gps_methods_src = (const char *)
11044 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
11045 memset(gps_methods, '\0', sizeof(gps_methods));
11046 strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
11047 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
11048 gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
11049 if (GPS_PROCESSING_METHOD_SIZE != count) {
11050 rc = BAD_VALUE;
11051 }
11052 }
11053
11054 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
11055 int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
11056 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
11057 gps_timestamp)) {
11058 rc = BAD_VALUE;
11059 }
11060 }
11061
11062 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
11063 int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
11064 cam_rotation_info_t rotation_info;
11065 if (orientation == 0) {
11066 rotation_info.rotation = ROTATE_0;
11067 } else if (orientation == 90) {
11068 rotation_info.rotation = ROTATE_90;
11069 } else if (orientation == 180) {
11070 rotation_info.rotation = ROTATE_180;
11071 } else if (orientation == 270) {
11072 rotation_info.rotation = ROTATE_270;
11073 }
Shuzhen Wang6ec8eac2016-07-28 23:09:23 -070011074 rotation_info.device_rotation = ROTATE_0;
Thierry Strudel3d639192016-09-09 11:52:26 -070011075 rotation_info.streamId = snapshotStreamId;
11076 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
11077 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
11078 rc = BAD_VALUE;
11079 }
11080 }
11081
11082 if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
11083 uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
11084 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
11085 rc = BAD_VALUE;
11086 }
11087 }
11088
11089 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
11090 uint32_t thumb_quality = (uint32_t)
11091 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
11092 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
11093 thumb_quality)) {
11094 rc = BAD_VALUE;
11095 }
11096 }
11097
11098 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
11099 cam_dimension_t dim;
11100 dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
11101 dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
11102 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
11103 rc = BAD_VALUE;
11104 }
11105 }
11106
11107 // Internal metadata
11108 if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
11109 size_t count = 0;
11110 camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
11111 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
11112 privatedata.data.i32, privatedata.count, count);
11113 if (privatedata.count != count) {
11114 rc = BAD_VALUE;
11115 }
11116 }
11117
Thierry Strudel295a0ca2016-11-03 18:38:47 -070011118 // ISO/Exposure Priority
11119 if (frame_settings.exists(QCAMERA3_USE_ISO_EXP_PRIORITY) &&
11120 frame_settings.exists(QCAMERA3_SELECT_PRIORITY)) {
11121 cam_priority_mode_t mode =
11122 (cam_priority_mode_t)frame_settings.find(QCAMERA3_SELECT_PRIORITY).data.i32[0];
11123 if((CAM_ISO_PRIORITY == mode) || (CAM_EXP_PRIORITY == mode)) {
11124 cam_intf_parm_manual_3a_t use_iso_exp_pty;
11125 use_iso_exp_pty.previewOnly = FALSE;
11126 uint64_t* ptr = (uint64_t*)frame_settings.find(QCAMERA3_USE_ISO_EXP_PRIORITY).data.i64;
11127 use_iso_exp_pty.value = *ptr;
11128
11129 if(CAM_ISO_PRIORITY == mode) {
11130 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ISO,
11131 use_iso_exp_pty)) {
11132 rc = BAD_VALUE;
11133 }
11134 }
11135 else {
11136 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_TIME,
11137 use_iso_exp_pty)) {
11138 rc = BAD_VALUE;
11139 }
11140 }
11141 }
11142 }
11143
11144 // Saturation
11145 if (frame_settings.exists(QCAMERA3_USE_SATURATION)) {
11146 int32_t* use_saturation =
11147 frame_settings.find(QCAMERA3_USE_SATURATION).data.i32;
11148 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_SATURATION, *use_saturation)) {
11149 rc = BAD_VALUE;
11150 }
11151 }
11152
Thierry Strudel3d639192016-09-09 11:52:26 -070011153 // EV step
11154 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
11155 gCamCapability[mCameraId]->exp_compensation_step)) {
11156 rc = BAD_VALUE;
11157 }
11158
11159 // CDS info
11160 if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
11161 cam_cds_data_t *cdsData = (cam_cds_data_t *)
11162 frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
11163
11164 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11165 CAM_INTF_META_CDS_DATA, *cdsData)) {
11166 rc = BAD_VALUE;
11167 }
11168 }
11169
Shuzhen Wang19463d72016-03-08 11:09:52 -080011170 // Hybrid AE
11171 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
11172 uint8_t *hybrid_ae = (uint8_t *)
11173 frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;
11174
11175 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11176 CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
11177 rc = BAD_VALUE;
11178 }
11179 }
11180
Thierry Strudel3d639192016-09-09 11:52:26 -070011181 return rc;
11182}
11183
11184/*===========================================================================
11185 * FUNCTION : captureResultCb
11186 *
11187 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
11188 *
11189 * PARAMETERS :
11190 * @frame : frame information from mm-camera-interface
11191 * @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
11192 * @userdata: userdata
11193 *
11194 * RETURN : NONE
11195 *==========================================================================*/
11196void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
11197 camera3_stream_buffer_t *buffer,
11198 uint32_t frame_number, bool isInputBuffer, void *userdata)
11199{
11200 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
11201 if (hw == NULL) {
11202 LOGE("Invalid hw %p", hw);
11203 return;
11204 }
11205
11206 hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
11207 return;
11208}
11209
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011210/*===========================================================================
11211 * FUNCTION : setBufferErrorStatus
11212 *
11213 * DESCRIPTION: Callback handler for channels to report any buffer errors
11214 *
11215 * PARAMETERS :
11216 * @ch : Channel on which buffer error is reported from
11217 * @frame_number : frame number on which buffer error is reported on
11218 * @buffer_status : buffer error status
11219 * @userdata: userdata
11220 *
11221 * RETURN : NONE
11222 *==========================================================================*/
11223void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
11224 uint32_t frame_number, camera3_buffer_status_t err, void *userdata)
11225{
11226 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
11227 if (hw == NULL) {
11228 LOGE("Invalid hw %p", hw);
11229 return;
11230 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011231
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011232 hw->setBufferErrorStatus(ch, frame_number, err);
11233 return;
11234}
11235
11236void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
11237 uint32_t frameNumber, camera3_buffer_status_t err)
11238{
11239 LOGD("channel: %p, frame# %d, buf err: %d", ch, frameNumber, err);
11240 pthread_mutex_lock(&mMutex);
11241
11242 for (auto& req : mPendingBuffersMap.mPendingBuffersInRequest) {
11243 if (req.frame_number != frameNumber)
11244 continue;
11245 for (auto& k : req.mPendingBufferList) {
11246 if(k.stream->priv == ch) {
11247 k.bufStatus = CAMERA3_BUFFER_STATUS_ERROR;
11248 }
11249 }
11250 }
11251
11252 pthread_mutex_unlock(&mMutex);
11253 return;
11254}
Thierry Strudel3d639192016-09-09 11:52:26 -070011255/*===========================================================================
11256 * FUNCTION : initialize
11257 *
11258 * DESCRIPTION: Pass framework callback pointers to HAL
11259 *
11260 * PARAMETERS :
11261 *
11262 *
11263 * RETURN : Success : 0
11264 * Failure: -ENODEV
11265 *==========================================================================*/
11266
11267int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
11268 const camera3_callback_ops_t *callback_ops)
11269{
11270 LOGD("E");
11271 QCamera3HardwareInterface *hw =
11272 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
11273 if (!hw) {
11274 LOGE("NULL camera device");
11275 return -ENODEV;
11276 }
11277
11278 int rc = hw->initialize(callback_ops);
11279 LOGD("X");
11280 return rc;
11281}
11282
11283/*===========================================================================
11284 * FUNCTION : configure_streams
11285 *
11286 * DESCRIPTION:
11287 *
11288 * PARAMETERS :
11289 *
11290 *
11291 * RETURN : Success: 0
11292 * Failure: -EINVAL (if stream configuration is invalid)
11293 * -ENODEV (fatal error)
11294 *==========================================================================*/
11295
11296int QCamera3HardwareInterface::configure_streams(
11297 const struct camera3_device *device,
11298 camera3_stream_configuration_t *stream_list)
11299{
11300 LOGD("E");
11301 QCamera3HardwareInterface *hw =
11302 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
11303 if (!hw) {
11304 LOGE("NULL camera device");
11305 return -ENODEV;
11306 }
11307 int rc = hw->configureStreams(stream_list);
11308 LOGD("X");
11309 return rc;
11310}
11311
11312/*===========================================================================
11313 * FUNCTION : construct_default_request_settings
11314 *
11315 * DESCRIPTION: Configure a settings buffer to meet the required use case
11316 *
11317 * PARAMETERS :
11318 *
11319 *
11320 * RETURN : Success: Return valid metadata
11321 * Failure: Return NULL
11322 *==========================================================================*/
11323const camera_metadata_t* QCamera3HardwareInterface::
11324 construct_default_request_settings(const struct camera3_device *device,
11325 int type)
11326{
11327
11328 LOGD("E");
11329 camera_metadata_t* fwk_metadata = NULL;
11330 QCamera3HardwareInterface *hw =
11331 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
11332 if (!hw) {
11333 LOGE("NULL camera device");
11334 return NULL;
11335 }
11336
11337 fwk_metadata = hw->translateCapabilityToMetadata(type);
11338
11339 LOGD("X");
11340 return fwk_metadata;
11341}
11342
11343/*===========================================================================
11344 * FUNCTION : process_capture_request
11345 *
11346 * DESCRIPTION:
11347 *
11348 * PARAMETERS :
11349 *
11350 *
11351 * RETURN :
11352 *==========================================================================*/
11353int QCamera3HardwareInterface::process_capture_request(
11354 const struct camera3_device *device,
11355 camera3_capture_request_t *request)
11356{
11357 LOGD("E");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011358 CAMSCOPE_UPDATE_FLAGS(CAMSCOPE_SECTION_HAL, kpi_camscope_flags);
Thierry Strudel3d639192016-09-09 11:52:26 -070011359 QCamera3HardwareInterface *hw =
11360 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
11361 if (!hw) {
11362 LOGE("NULL camera device");
11363 return -EINVAL;
11364 }
11365
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011366 int rc = hw->orchestrateRequest(request);
Thierry Strudel3d639192016-09-09 11:52:26 -070011367 LOGD("X");
11368 return rc;
11369}
11370
11371/*===========================================================================
11372 * FUNCTION : dump
11373 *
11374 * DESCRIPTION:
11375 *
11376 * PARAMETERS :
11377 *
11378 *
11379 * RETURN :
11380 *==========================================================================*/
11381
11382void QCamera3HardwareInterface::dump(
11383 const struct camera3_device *device, int fd)
11384{
11385 /* Log level property is read when "adb shell dumpsys media.camera" is
11386 called so that the log level can be controlled without restarting
11387 the media server */
11388 getLogLevel();
11389
11390 LOGD("E");
11391 QCamera3HardwareInterface *hw =
11392 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
11393 if (!hw) {
11394 LOGE("NULL camera device");
11395 return;
11396 }
11397
11398 hw->dump(fd);
11399 LOGD("X");
11400 return;
11401}
11402
11403/*===========================================================================
11404 * FUNCTION : flush
11405 *
11406 * DESCRIPTION:
11407 *
11408 * PARAMETERS :
11409 *
11410 *
11411 * RETURN :
11412 *==========================================================================*/
11413
11414int QCamera3HardwareInterface::flush(
11415 const struct camera3_device *device)
11416{
11417 int rc;
11418 LOGD("E");
11419 QCamera3HardwareInterface *hw =
11420 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
11421 if (!hw) {
11422 LOGE("NULL camera device");
11423 return -EINVAL;
11424 }
11425
11426 pthread_mutex_lock(&hw->mMutex);
11427 // Validate current state
11428 switch (hw->mState) {
11429 case STARTED:
11430 /* valid state */
11431 break;
11432
11433 case ERROR:
11434 pthread_mutex_unlock(&hw->mMutex);
11435 hw->handleCameraDeviceError();
11436 return -ENODEV;
11437
11438 default:
11439 LOGI("Flush returned during state %d", hw->mState);
11440 pthread_mutex_unlock(&hw->mMutex);
11441 return 0;
11442 }
11443 pthread_mutex_unlock(&hw->mMutex);
11444
11445 rc = hw->flush(true /* restart channels */ );
11446 LOGD("X");
11447 return rc;
11448}
11449
11450/*===========================================================================
11451 * FUNCTION : close_camera_device
11452 *
11453 * DESCRIPTION:
11454 *
11455 * PARAMETERS :
11456 *
11457 *
11458 * RETURN :
11459 *==========================================================================*/
11460int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
11461{
11462 int ret = NO_ERROR;
11463 QCamera3HardwareInterface *hw =
11464 reinterpret_cast<QCamera3HardwareInterface *>(
11465 reinterpret_cast<camera3_device_t *>(device)->priv);
11466 if (!hw) {
11467 LOGE("NULL camera device");
11468 return BAD_VALUE;
11469 }
11470
11471 LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
11472 delete hw;
11473 LOGI("[KPI Perf]: X");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011474 CAMSCOPE_DESTROY(CAMSCOPE_SECTION_HAL);
Thierry Strudel3d639192016-09-09 11:52:26 -070011475 return ret;
11476}
11477
11478/*===========================================================================
11479 * FUNCTION : getWaveletDenoiseProcessPlate
11480 *
11481 * DESCRIPTION: query wavelet denoise process plate
11482 *
11483 * PARAMETERS : None
11484 *
11485 * RETURN : WNR prcocess plate value
11486 *==========================================================================*/
11487cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
11488{
11489 char prop[PROPERTY_VALUE_MAX];
11490 memset(prop, 0, sizeof(prop));
11491 property_get("persist.denoise.process.plates", prop, "0");
11492 int processPlate = atoi(prop);
11493 switch(processPlate) {
11494 case 0:
11495 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
11496 case 1:
11497 return CAM_WAVELET_DENOISE_CBCR_ONLY;
11498 case 2:
11499 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
11500 case 3:
11501 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
11502 default:
11503 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
11504 }
11505}
11506
11507
11508/*===========================================================================
11509 * FUNCTION : getTemporalDenoiseProcessPlate
11510 *
11511 * DESCRIPTION: query temporal denoise process plate
11512 *
11513 * PARAMETERS : None
11514 *
11515 * RETURN : TNR prcocess plate value
11516 *==========================================================================*/
11517cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
11518{
11519 char prop[PROPERTY_VALUE_MAX];
11520 memset(prop, 0, sizeof(prop));
11521 property_get("persist.tnr.process.plates", prop, "0");
11522 int processPlate = atoi(prop);
11523 switch(processPlate) {
11524 case 0:
11525 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
11526 case 1:
11527 return CAM_WAVELET_DENOISE_CBCR_ONLY;
11528 case 2:
11529 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
11530 case 3:
11531 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
11532 default:
11533 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
11534 }
11535}
11536
11537
11538/*===========================================================================
11539 * FUNCTION : extractSceneMode
11540 *
11541 * DESCRIPTION: Extract scene mode from frameworks set metadata
11542 *
11543 * PARAMETERS :
11544 * @frame_settings: CameraMetadata reference
11545 * @metaMode: ANDROID_CONTORL_MODE
11546 * @hal_metadata: hal metadata structure
11547 *
11548 * RETURN : None
11549 *==========================================================================*/
11550int32_t QCamera3HardwareInterface::extractSceneMode(
11551 const CameraMetadata &frame_settings, uint8_t metaMode,
11552 metadata_buffer_t *hal_metadata)
11553{
11554 int32_t rc = NO_ERROR;
11555
11556 if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
11557 camera_metadata_ro_entry entry =
11558 frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
11559 if (0 == entry.count)
11560 return rc;
11561
11562 uint8_t fwk_sceneMode = entry.data.u8[0];
11563
11564 int val = lookupHalName(SCENE_MODES_MAP,
11565 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
11566 fwk_sceneMode);
11567 if (NAME_NOT_FOUND != val) {
11568 uint8_t sceneMode = (uint8_t)val;
11569 LOGD("sceneMode: %d", sceneMode);
11570 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11571 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
11572 rc = BAD_VALUE;
11573 }
11574 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011575
11576 if (fwk_sceneMode == ANDROID_CONTROL_SCENE_MODE_HDR) {
11577 cam_hdr_param_t hdr_params;
11578 hdr_params.hdr_enable = 1;
11579 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
11580 hdr_params.hdr_need_1x = false;
11581 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11582 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
11583 rc = BAD_VALUE;
11584 }
11585 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011586 } else if ((ANDROID_CONTROL_MODE_OFF == metaMode) ||
11587 (ANDROID_CONTROL_MODE_AUTO == metaMode)) {
11588 uint8_t sceneMode = CAM_SCENE_MODE_OFF;
11589 LOGD("sceneMode: %d", sceneMode);
11590 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11591 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
11592 rc = BAD_VALUE;
11593 }
11594 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011595
11596 if (mForceHdrSnapshot) {
11597 cam_hdr_param_t hdr_params;
11598 hdr_params.hdr_enable = 1;
11599 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
11600 hdr_params.hdr_need_1x = false;
11601 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11602 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
11603 rc = BAD_VALUE;
11604 }
11605 }
11606
Thierry Strudel3d639192016-09-09 11:52:26 -070011607 return rc;
11608}
11609
11610/*===========================================================================
Thierry Strudel04e026f2016-10-10 11:27:36 -070011611 * FUNCTION : setVideoHdrMode
11612 *
11613 * DESCRIPTION: Set Video HDR mode from frameworks set metadata
11614 *
11615 * PARAMETERS :
11616 * @hal_metadata: hal metadata structure
11617 * @metaMode: QCAMERA3_VIDEO_HDR_MODE
11618 *
11619 * RETURN : None
11620 *==========================================================================*/
11621int32_t QCamera3HardwareInterface::setVideoHdrMode(
11622 metadata_buffer_t *hal_metadata, cam_video_hdr_mode_t vhdr)
11623{
11624 int32_t rc = NO_ERROR;
11625 if ((CAM_VIDEO_HDR_MODE_MAX <= (vhdr)) || (0 > (vhdr))) {
11626 LOGE("%s: Invalid Video HDR mode %d!", __func__, vhdr);
11627 rc = BAD_VALUE;
11628 } else {
11629 cam_sensor_hdr_type_t vhdr_type = CAM_SENSOR_HDR_MAX;
11630 if(vhdr == QCAMERA3_VIDEO_HDR_MODE_OFF) {
11631 LOGD("Setting HDR mode Off");
11632 vhdr_type = CAM_SENSOR_HDR_OFF;
11633 } else {
11634 char video_hdr_prop[PROPERTY_VALUE_MAX];
11635 memset(video_hdr_prop, 0, sizeof(video_hdr_prop));
11636 property_get("persist.camera.hdr.video", video_hdr_prop, "3");
11637 uint8_t use_hdr_video = (uint8_t)atoi(video_hdr_prop);
11638 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask &
11639 CAM_QCOM_FEATURE_SENSOR_HDR) &&
11640 (use_hdr_video == CAM_SENSOR_HDR_IN_SENSOR)) {
11641 LOGD("Setting HDR mode In Sensor");
11642 vhdr_type = CAM_SENSOR_HDR_IN_SENSOR;
11643 }
11644 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask &
11645 CAM_QCOM_FEATURE_ZIGZAG_VIDEO_HDR) &&
11646 (use_hdr_video == CAM_SENSOR_HDR_ZIGZAG)) {
11647 LOGD("Setting HDR mode Zigzag");
11648 vhdr_type = CAM_SENSOR_HDR_ZIGZAG;
11649 }
11650 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask &
11651 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) &&
11652 (use_hdr_video == CAM_SENSOR_HDR_STAGGERED)) {
11653 LOGD("Setting HDR mode Staggered");
11654 vhdr_type = CAM_SENSOR_HDR_STAGGERED;
11655 }
11656 if(vhdr_type == CAM_SENSOR_HDR_MAX) {
11657 LOGD("HDR mode not supported");
11658 rc = BAD_VALUE;
11659 }
11660 }
11661 if(rc == NO_ERROR) {
11662 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11663 CAM_INTF_PARM_SENSOR_HDR, vhdr_type)) {
11664 rc = BAD_VALUE;
11665 }
11666 }
11667 }
11668 return rc;
11669}
11670
11671/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070011672 * FUNCTION : needRotationReprocess
11673 *
11674 * DESCRIPTION: if rotation needs to be done by reprocess in pp
11675 *
11676 * PARAMETERS : none
11677 *
11678 * RETURN : true: needed
11679 * false: no need
11680 *==========================================================================*/
11681bool QCamera3HardwareInterface::needRotationReprocess()
11682{
11683 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
11684 // current rotation is not zero, and pp has the capability to process rotation
11685 LOGH("need do reprocess for rotation");
11686 return true;
11687 }
11688
11689 return false;
11690}
11691
11692/*===========================================================================
11693 * FUNCTION : needReprocess
11694 *
11695 * DESCRIPTION: if reprocess in needed
11696 *
11697 * PARAMETERS : none
11698 *
11699 * RETURN : true: needed
11700 * false: no need
11701 *==========================================================================*/
11702bool QCamera3HardwareInterface::needReprocess(cam_feature_mask_t postprocess_mask)
11703{
11704 if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
11705 // TODO: add for ZSL HDR later
11706 // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
11707 if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
11708 LOGH("need do reprocess for ZSL WNR or min PP reprocess");
11709 return true;
11710 } else {
11711 LOGH("already post processed frame");
11712 return false;
11713 }
11714 }
11715 return needRotationReprocess();
11716}
11717
11718/*===========================================================================
11719 * FUNCTION : needJpegExifRotation
11720 *
11721 * DESCRIPTION: if rotation from jpeg is needed
11722 *
11723 * PARAMETERS : none
11724 *
11725 * RETURN : true: needed
11726 * false: no need
11727 *==========================================================================*/
11728bool QCamera3HardwareInterface::needJpegExifRotation()
11729{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011730 /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
Thierry Strudel3d639192016-09-09 11:52:26 -070011731 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
11732 LOGD("Need use Jpeg EXIF Rotation");
11733 return true;
11734 }
11735 return false;
11736}
11737
11738/*===========================================================================
11739 * FUNCTION : addOfflineReprocChannel
11740 *
11741 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
11742 * coming from input channel
11743 *
11744 * PARAMETERS :
11745 * @config : reprocess configuration
11746 * @inputChHandle : pointer to the input (source) channel
11747 *
11748 *
11749 * RETURN : Ptr to the newly created channel obj. NULL if failed.
11750 *==========================================================================*/
11751QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
11752 const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
11753{
11754 int32_t rc = NO_ERROR;
11755 QCamera3ReprocessChannel *pChannel = NULL;
11756
11757 pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011758 mChannelHandle, mCameraHandle->ops, captureResultCb, setBufferErrorStatus,
11759 config.padding, CAM_QCOM_FEATURE_NONE, this, inputChHandle);
Thierry Strudel3d639192016-09-09 11:52:26 -070011760 if (NULL == pChannel) {
11761 LOGE("no mem for reprocess channel");
11762 return NULL;
11763 }
11764
11765 rc = pChannel->initialize(IS_TYPE_NONE);
11766 if (rc != NO_ERROR) {
11767 LOGE("init reprocess channel failed, ret = %d", rc);
11768 delete pChannel;
11769 return NULL;
11770 }
11771
11772 // pp feature config
11773 cam_pp_feature_config_t pp_config;
11774 memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
11775
11776 pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
11777 if (gCamCapability[mCameraId]->qcom_supported_feature_mask
11778 & CAM_QCOM_FEATURE_DSDN) {
11779 //Use CPP CDS incase h/w supports it.
11780 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
11781 pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
11782 }
11783 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
11784 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
11785 }
11786
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011787 if (config.hdr_param.hdr_enable) {
11788 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
11789 pp_config.hdr_param = config.hdr_param;
11790 }
11791
11792 if (mForceHdrSnapshot) {
11793 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
11794 pp_config.hdr_param.hdr_enable = 1;
11795 pp_config.hdr_param.hdr_need_1x = 0;
11796 pp_config.hdr_param.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
11797 }
11798
Thierry Strudel3d639192016-09-09 11:52:26 -070011799 rc = pChannel->addReprocStreamsFromSource(pp_config,
11800 config,
11801 IS_TYPE_NONE,
11802 mMetadataChannel);
11803
11804 if (rc != NO_ERROR) {
11805 delete pChannel;
11806 return NULL;
11807 }
11808 return pChannel;
11809}
11810
11811/*===========================================================================
11812 * FUNCTION : getMobicatMask
11813 *
11814 * DESCRIPTION: returns mobicat mask
11815 *
11816 * PARAMETERS : none
11817 *
11818 * RETURN : mobicat mask
11819 *
11820 *==========================================================================*/
11821uint8_t QCamera3HardwareInterface::getMobicatMask()
11822{
11823 return m_MobicatMask;
11824}
11825
11826/*===========================================================================
11827 * FUNCTION : setMobicat
11828 *
11829 * DESCRIPTION: set Mobicat on/off.
11830 *
11831 * PARAMETERS :
11832 * @params : none
11833 *
11834 * RETURN : int32_t type of status
11835 * NO_ERROR -- success
11836 * none-zero failure code
11837 *==========================================================================*/
11838int32_t QCamera3HardwareInterface::setMobicat()
11839{
11840 char value [PROPERTY_VALUE_MAX];
11841 property_get("persist.camera.mobicat", value, "0");
11842 int32_t ret = NO_ERROR;
11843 uint8_t enableMobi = (uint8_t)atoi(value);
11844
11845 if (enableMobi) {
11846 tune_cmd_t tune_cmd;
11847 tune_cmd.type = SET_RELOAD_CHROMATIX;
11848 tune_cmd.module = MODULE_ALL;
11849 tune_cmd.value = TRUE;
11850 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
11851 CAM_INTF_PARM_SET_VFE_COMMAND,
11852 tune_cmd);
11853
11854 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
11855 CAM_INTF_PARM_SET_PP_COMMAND,
11856 tune_cmd);
11857 }
11858 m_MobicatMask = enableMobi;
11859
11860 return ret;
11861}
11862
11863/*===========================================================================
11864* FUNCTION : getLogLevel
11865*
11866* DESCRIPTION: Reads the log level property into a variable
11867*
11868* PARAMETERS :
11869* None
11870*
11871* RETURN :
11872* None
11873*==========================================================================*/
11874void QCamera3HardwareInterface::getLogLevel()
11875{
11876 char prop[PROPERTY_VALUE_MAX];
11877 uint32_t globalLogLevel = 0;
11878
11879 property_get("persist.camera.hal.debug", prop, "0");
11880 int val = atoi(prop);
11881 if (0 <= val) {
11882 gCamHal3LogLevel = (uint32_t)val;
11883 }
11884
Thierry Strudel9ec39c62016-12-28 11:30:05 -080011885 property_get("persist.camera.kpi.debug", prop, "0");
Thierry Strudel3d639192016-09-09 11:52:26 -070011886 gKpiDebugLevel = atoi(prop);
11887
11888 property_get("persist.camera.global.debug", prop, "0");
11889 val = atoi(prop);
11890 if (0 <= val) {
11891 globalLogLevel = (uint32_t)val;
11892 }
11893
11894 /* Highest log level among hal.logs and global.logs is selected */
11895 if (gCamHal3LogLevel < globalLogLevel)
11896 gCamHal3LogLevel = globalLogLevel;
11897
11898 return;
11899}
11900
11901/*===========================================================================
11902 * FUNCTION : validateStreamRotations
11903 *
11904 * DESCRIPTION: Check if the rotations requested are supported
11905 *
11906 * PARAMETERS :
11907 * @stream_list : streams to be configured
11908 *
11909 * RETURN : NO_ERROR on success
11910 * -EINVAL on failure
11911 *
11912 *==========================================================================*/
11913int QCamera3HardwareInterface::validateStreamRotations(
11914 camera3_stream_configuration_t *streamList)
11915{
11916 int rc = NO_ERROR;
11917
11918 /*
11919 * Loop through all streams requested in configuration
11920 * Check if unsupported rotations have been requested on any of them
11921 */
11922 for (size_t j = 0; j < streamList->num_streams; j++){
11923 camera3_stream_t *newStream = streamList->streams[j];
11924
11925 bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
11926 bool isImplDef = (newStream->format ==
11927 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
11928 bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
11929 isImplDef);
11930
11931 if (isRotated && (!isImplDef || isZsl)) {
11932 LOGE("Error: Unsupported rotation of %d requested for stream"
11933 "type:%d and stream format:%d",
11934 newStream->rotation, newStream->stream_type,
11935 newStream->format);
11936 rc = -EINVAL;
11937 break;
11938 }
11939 }
11940
11941 return rc;
11942}
11943
11944/*===========================================================================
11945* FUNCTION : getFlashInfo
11946*
11947* DESCRIPTION: Retrieve information about whether the device has a flash.
11948*
11949* PARAMETERS :
11950* @cameraId : Camera id to query
11951* @hasFlash : Boolean indicating whether there is a flash device
11952* associated with given camera
11953* @flashNode : If a flash device exists, this will be its device node.
11954*
11955* RETURN :
11956* None
11957*==========================================================================*/
11958void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
11959 bool& hasFlash,
11960 char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
11961{
11962 cam_capability_t* camCapability = gCamCapability[cameraId];
11963 if (NULL == camCapability) {
11964 hasFlash = false;
11965 flashNode[0] = '\0';
11966 } else {
11967 hasFlash = camCapability->flash_available;
11968 strlcpy(flashNode,
11969 (char*)camCapability->flash_dev_name,
11970 QCAMERA_MAX_FILEPATH_LENGTH);
11971 }
11972}
11973
11974/*===========================================================================
11975* FUNCTION : getEepromVersionInfo
11976*
11977* DESCRIPTION: Retrieve version info of the sensor EEPROM data
11978*
11979* PARAMETERS : None
11980*
11981* RETURN : string describing EEPROM version
11982* "\0" if no such info available
11983*==========================================================================*/
11984const char *QCamera3HardwareInterface::getEepromVersionInfo()
11985{
11986 return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
11987}
11988
11989/*===========================================================================
11990* FUNCTION : getLdafCalib
11991*
11992* DESCRIPTION: Retrieve Laser AF calibration data
11993*
11994* PARAMETERS : None
11995*
11996* RETURN : Two uint32_t describing laser AF calibration data
11997* NULL if none is available.
11998*==========================================================================*/
11999const uint32_t *QCamera3HardwareInterface::getLdafCalib()
12000{
12001 if (mLdafCalibExist) {
12002 return &mLdafCalib[0];
12003 } else {
12004 return NULL;
12005 }
12006}
12007
12008/*===========================================================================
12009 * FUNCTION : dynamicUpdateMetaStreamInfo
12010 *
12011 * DESCRIPTION: This function:
12012 * (1) stops all the channels
12013 * (2) returns error on pending requests and buffers
12014 * (3) sends metastream_info in setparams
12015 * (4) starts all channels
12016 * This is useful when sensor has to be restarted to apply any
12017 * settings such as frame rate from a different sensor mode
12018 *
12019 * PARAMETERS : None
12020 *
12021 * RETURN : NO_ERROR on success
12022 * Error codes on failure
12023 *
12024 *==========================================================================*/
12025int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
12026{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012027 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_DYN_UPDATE_META_STRM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070012028 int rc = NO_ERROR;
12029
12030 LOGD("E");
12031
12032 rc = stopAllChannels();
12033 if (rc < 0) {
12034 LOGE("stopAllChannels failed");
12035 return rc;
12036 }
12037
12038 rc = notifyErrorForPendingRequests();
12039 if (rc < 0) {
12040 LOGE("notifyErrorForPendingRequests failed");
12041 return rc;
12042 }
12043
12044 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
12045 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x"
12046 "Format:%d",
12047 mStreamConfigInfo.type[i],
12048 mStreamConfigInfo.stream_sizes[i].width,
12049 mStreamConfigInfo.stream_sizes[i].height,
12050 mStreamConfigInfo.postprocess_mask[i],
12051 mStreamConfigInfo.format[i]);
12052 }
12053
12054 /* Send meta stream info once again so that ISP can start */
12055 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
12056 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
12057 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
12058 mParameters);
12059 if (rc < 0) {
12060 LOGE("set Metastreaminfo failed. Sensor mode does not change");
12061 }
12062
12063 rc = startAllChannels();
12064 if (rc < 0) {
12065 LOGE("startAllChannels failed");
12066 return rc;
12067 }
12068
12069 LOGD("X");
12070 return rc;
12071}
12072
12073/*===========================================================================
12074 * FUNCTION : stopAllChannels
12075 *
12076 * DESCRIPTION: This function stops (equivalent to stream-off) all channels
12077 *
12078 * PARAMETERS : None
12079 *
12080 * RETURN : NO_ERROR on success
12081 * Error codes on failure
12082 *
12083 *==========================================================================*/
12084int32_t QCamera3HardwareInterface::stopAllChannels()
12085{
12086 int32_t rc = NO_ERROR;
12087
12088 LOGD("Stopping all channels");
12089 // Stop the Streams/Channels
12090 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
12091 it != mStreamInfo.end(); it++) {
12092 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
12093 if (channel) {
12094 channel->stop();
12095 }
12096 (*it)->status = INVALID;
12097 }
12098
12099 if (mSupportChannel) {
12100 mSupportChannel->stop();
12101 }
12102 if (mAnalysisChannel) {
12103 mAnalysisChannel->stop();
12104 }
12105 if (mRawDumpChannel) {
12106 mRawDumpChannel->stop();
12107 }
12108 if (mMetadataChannel) {
12109 /* If content of mStreamInfo is not 0, there is metadata stream */
12110 mMetadataChannel->stop();
12111 }
12112
12113 LOGD("All channels stopped");
12114 return rc;
12115}
12116
12117/*===========================================================================
12118 * FUNCTION : startAllChannels
12119 *
12120 * DESCRIPTION: This function starts (equivalent to stream-on) all channels
12121 *
12122 * PARAMETERS : None
12123 *
12124 * RETURN : NO_ERROR on success
12125 * Error codes on failure
12126 *
12127 *==========================================================================*/
12128int32_t QCamera3HardwareInterface::startAllChannels()
12129{
12130 int32_t rc = NO_ERROR;
12131
12132 LOGD("Start all channels ");
12133 // Start the Streams/Channels
12134 if (mMetadataChannel) {
12135 /* If content of mStreamInfo is not 0, there is metadata stream */
12136 rc = mMetadataChannel->start();
12137 if (rc < 0) {
12138 LOGE("META channel start failed");
12139 return rc;
12140 }
12141 }
12142 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
12143 it != mStreamInfo.end(); it++) {
12144 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
12145 if (channel) {
12146 rc = channel->start();
12147 if (rc < 0) {
12148 LOGE("channel start failed");
12149 return rc;
12150 }
12151 }
12152 }
12153 if (mAnalysisChannel) {
12154 mAnalysisChannel->start();
12155 }
12156 if (mSupportChannel) {
12157 rc = mSupportChannel->start();
12158 if (rc < 0) {
12159 LOGE("Support channel start failed");
12160 return rc;
12161 }
12162 }
12163 if (mRawDumpChannel) {
12164 rc = mRawDumpChannel->start();
12165 if (rc < 0) {
12166 LOGE("RAW dump channel start failed");
12167 return rc;
12168 }
12169 }
12170
12171 LOGD("All channels started");
12172 return rc;
12173}
12174
12175/*===========================================================================
12176 * FUNCTION : notifyErrorForPendingRequests
12177 *
12178 * DESCRIPTION: This function sends error for all the pending requests/buffers
12179 *
12180 * PARAMETERS : None
12181 *
12182 * RETURN : Error codes
12183 * NO_ERROR on success
12184 *
12185 *==========================================================================*/
12186int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
12187{
12188 int32_t rc = NO_ERROR;
12189 unsigned int frameNum = 0;
12190 camera3_capture_result_t result;
12191 camera3_stream_buffer_t *pStream_Buf = NULL;
12192
12193 memset(&result, 0, sizeof(camera3_capture_result_t));
12194
12195 if (mPendingRequestsList.size() > 0) {
12196 pendingRequestIterator i = mPendingRequestsList.begin();
12197 frameNum = i->frame_number;
12198 } else {
12199 /* There might still be pending buffers even though there are
12200 no pending requests. Setting the frameNum to MAX so that
12201 all the buffers with smaller frame numbers are returned */
12202 frameNum = UINT_MAX;
12203 }
12204
12205 LOGH("Oldest frame num on mPendingRequestsList = %u",
12206 frameNum);
12207
12208 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
12209 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); ) {
12210
12211 if (req->frame_number < frameNum) {
12212 // Send Error notify to frameworks for each buffer for which
12213 // metadata buffer is already sent
12214 LOGH("Sending ERROR BUFFER for frame %d for %d buffer(s)",
12215 req->frame_number, req->mPendingBufferList.size());
12216
12217 pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
12218 if (NULL == pStream_Buf) {
12219 LOGE("No memory for pending buffers array");
12220 return NO_MEMORY;
12221 }
12222 memset(pStream_Buf, 0,
12223 sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());
12224 result.result = NULL;
12225 result.frame_number = req->frame_number;
12226 result.num_output_buffers = req->mPendingBufferList.size();
12227 result.output_buffers = pStream_Buf;
12228
12229 size_t index = 0;
12230 for (auto info = req->mPendingBufferList.begin();
12231 info != req->mPendingBufferList.end(); ) {
12232
12233 camera3_notify_msg_t notify_msg;
12234 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
12235 notify_msg.type = CAMERA3_MSG_ERROR;
12236 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
12237 notify_msg.message.error.error_stream = info->stream;
12238 notify_msg.message.error.frame_number = req->frame_number;
12239 pStream_Buf[index].acquire_fence = -1;
12240 pStream_Buf[index].release_fence = -1;
12241 pStream_Buf[index].buffer = info->buffer;
12242 pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
12243 pStream_Buf[index].stream = info->stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012244 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070012245 index++;
12246 // Remove buffer from list
12247 info = req->mPendingBufferList.erase(info);
12248 }
12249
12250 // Remove this request from Map
12251 LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
12252 req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
12253 req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);
12254
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012255 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -070012256
12257 delete [] pStream_Buf;
12258 } else {
12259
12260 // Go through the pending requests info and send error request to framework
12261 pendingRequestIterator i = mPendingRequestsList.begin(); //make sure i is at the beginning
12262
12263 LOGH("Sending ERROR REQUEST for frame %d", req->frame_number);
12264
12265 // Send error notify to frameworks
12266 camera3_notify_msg_t notify_msg;
12267 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
12268 notify_msg.type = CAMERA3_MSG_ERROR;
12269 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
12270 notify_msg.message.error.error_stream = NULL;
12271 notify_msg.message.error.frame_number = req->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012272 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070012273
12274 pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
12275 if (NULL == pStream_Buf) {
12276 LOGE("No memory for pending buffers array");
12277 return NO_MEMORY;
12278 }
12279 memset(pStream_Buf, 0, sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());
12280
12281 result.result = NULL;
12282 result.frame_number = req->frame_number;
12283 result.input_buffer = i->input_buffer;
12284 result.num_output_buffers = req->mPendingBufferList.size();
12285 result.output_buffers = pStream_Buf;
12286
12287 size_t index = 0;
12288 for (auto info = req->mPendingBufferList.begin();
12289 info != req->mPendingBufferList.end(); ) {
12290 pStream_Buf[index].acquire_fence = -1;
12291 pStream_Buf[index].release_fence = -1;
12292 pStream_Buf[index].buffer = info->buffer;
12293 pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
12294 pStream_Buf[index].stream = info->stream;
12295 index++;
12296 // Remove buffer from list
12297 info = req->mPendingBufferList.erase(info);
12298 }
12299
12300 // Remove this request from Map
12301 LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
12302 req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
12303 req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);
12304
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012305 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -070012306 delete [] pStream_Buf;
12307 i = erasePendingRequest(i);
12308 }
12309 }
12310
12311 /* Reset pending frame Drop list and requests list */
12312 mPendingFrameDropList.clear();
12313
12314 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
12315 req.mPendingBufferList.clear();
12316 }
12317 mPendingBuffersMap.mPendingBuffersInRequest.clear();
12318 mPendingReprocessResultList.clear();
12319 LOGH("Cleared all the pending buffers ");
12320
12321 return rc;
12322}
12323
12324bool QCamera3HardwareInterface::isOnEncoder(
12325 const cam_dimension_t max_viewfinder_size,
12326 uint32_t width, uint32_t height)
12327{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012328 return ((width > (uint32_t)max_viewfinder_size.width) ||
12329 (height > (uint32_t)max_viewfinder_size.height) ||
12330 (width > (uint32_t)VIDEO_4K_WIDTH) ||
12331 (height > (uint32_t)VIDEO_4K_HEIGHT));
Thierry Strudel3d639192016-09-09 11:52:26 -070012332}
12333
12334/*===========================================================================
12335 * FUNCTION : setBundleInfo
12336 *
12337 * DESCRIPTION: Set bundle info for all streams that are bundle.
12338 *
12339 * PARAMETERS : None
12340 *
12341 * RETURN : NO_ERROR on success
12342 * Error codes on failure
12343 *==========================================================================*/
12344int32_t QCamera3HardwareInterface::setBundleInfo()
12345{
12346 int32_t rc = NO_ERROR;
12347
12348 if (mChannelHandle) {
12349 cam_bundle_config_t bundleInfo;
12350 memset(&bundleInfo, 0, sizeof(bundleInfo));
12351 rc = mCameraHandle->ops->get_bundle_info(
12352 mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
12353 if (rc != NO_ERROR) {
12354 LOGE("get_bundle_info failed");
12355 return rc;
12356 }
12357 if (mAnalysisChannel) {
12358 mAnalysisChannel->setBundleInfo(bundleInfo);
12359 }
12360 if (mSupportChannel) {
12361 mSupportChannel->setBundleInfo(bundleInfo);
12362 }
12363 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
12364 it != mStreamInfo.end(); it++) {
12365 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
12366 channel->setBundleInfo(bundleInfo);
12367 }
12368 if (mRawDumpChannel) {
12369 mRawDumpChannel->setBundleInfo(bundleInfo);
12370 }
12371 }
12372
12373 return rc;
12374}
12375
12376/*===========================================================================
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012377 * FUNCTION : setInstantAEC
12378 *
12379 * DESCRIPTION: Set Instant AEC related params.
12380 *
12381 * PARAMETERS :
12382 * @meta: CameraMetadata reference
12383 *
12384 * RETURN : NO_ERROR on success
12385 * Error codes on failure
12386 *==========================================================================*/
12387int32_t QCamera3HardwareInterface::setInstantAEC(const CameraMetadata &meta)
12388{
12389 int32_t rc = NO_ERROR;
12390 uint8_t val = 0;
12391 char prop[PROPERTY_VALUE_MAX];
12392
12393 // First try to configure instant AEC from framework metadata
12394 if (meta.exists(QCAMERA3_INSTANT_AEC_MODE)) {
12395 val = (uint8_t)meta.find(QCAMERA3_INSTANT_AEC_MODE).data.i32[0];
12396 }
12397
12398 // If framework did not set this value, try to read from set prop.
12399 if (val == 0) {
12400 memset(prop, 0, sizeof(prop));
12401 property_get("persist.camera.instant.aec", prop, "0");
12402 val = (uint8_t)atoi(prop);
12403 }
12404
12405 if ((val >= (uint8_t)CAM_AEC_NORMAL_CONVERGENCE) &&
12406 ( val < (uint8_t)CAM_AEC_CONVERGENCE_MAX)) {
12407 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_INSTANT_AEC, val);
12408 mInstantAEC = val;
12409 mInstantAECSettledFrameNumber = 0;
12410 mInstantAecFrameIdxCount = 0;
12411 LOGH("instantAEC value set %d",val);
12412 if (mInstantAEC) {
12413 memset(prop, 0, sizeof(prop));
12414 property_get("persist.camera.ae.instant.bound", prop, "10");
12415 int32_t aec_frame_skip_cnt = atoi(prop);
12416 if (aec_frame_skip_cnt >= 0) {
12417 mAecSkipDisplayFrameBound = (uint8_t)aec_frame_skip_cnt;
12418 } else {
12419 LOGE("Invalid prop for aec frame bound %d", aec_frame_skip_cnt);
12420 rc = BAD_VALUE;
12421 }
12422 }
12423 } else {
12424 LOGE("Bad instant aec value set %d", val);
12425 rc = BAD_VALUE;
12426 }
12427 return rc;
12428}
12429
12430/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070012431 * FUNCTION : get_num_overall_buffers
12432 *
12433 * DESCRIPTION: Estimate number of pending buffers across all requests.
12434 *
12435 * PARAMETERS : None
12436 *
12437 * RETURN : Number of overall pending buffers
12438 *
12439 *==========================================================================*/
12440uint32_t PendingBuffersMap::get_num_overall_buffers()
12441{
12442 uint32_t sum_buffers = 0;
12443 for (auto &req : mPendingBuffersInRequest) {
12444 sum_buffers += req.mPendingBufferList.size();
12445 }
12446 return sum_buffers;
12447}
12448
12449/*===========================================================================
12450 * FUNCTION : removeBuf
12451 *
12452 * DESCRIPTION: Remove a matching buffer from tracker.
12453 *
12454 * PARAMETERS : @buffer: image buffer for the callback
12455 *
12456 * RETURN : None
12457 *
12458 *==========================================================================*/
12459void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
12460{
12461 bool buffer_found = false;
12462 for (auto req = mPendingBuffersInRequest.begin();
12463 req != mPendingBuffersInRequest.end(); req++) {
12464 for (auto k = req->mPendingBufferList.begin();
12465 k != req->mPendingBufferList.end(); k++ ) {
12466 if (k->buffer == buffer) {
12467 LOGD("Frame %d: Found Frame buffer %p, take it out from mPendingBufferList",
12468 req->frame_number, buffer);
12469 k = req->mPendingBufferList.erase(k);
12470 if (req->mPendingBufferList.empty()) {
12471 // Remove this request from Map
12472 req = mPendingBuffersInRequest.erase(req);
12473 }
12474 buffer_found = true;
12475 break;
12476 }
12477 }
12478 if (buffer_found) {
12479 break;
12480 }
12481 }
12482 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
12483 get_num_overall_buffers());
12484}
12485
12486/*===========================================================================
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012487 * FUNCTION : getBufErrStatus
12488 *
12489 * DESCRIPTION: get buffer error status
12490 *
12491 * PARAMETERS : @buffer: buffer handle
12492 *
12493 * RETURN : Error status
12494 *
12495 *==========================================================================*/
12496int32_t PendingBuffersMap::getBufErrStatus(buffer_handle_t *buffer)
12497{
12498 for (auto& req : mPendingBuffersInRequest) {
12499 for (auto& k : req.mPendingBufferList) {
12500 if (k.buffer == buffer)
12501 return k.bufStatus;
12502 }
12503 }
12504 return CAMERA3_BUFFER_STATUS_OK;
12505}
12506
12507/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070012508 * FUNCTION : setPAAFSupport
12509 *
12510 * DESCRIPTION: Set the preview-assisted auto focus support bit in
12511 * feature mask according to stream type and filter
12512 * arrangement
12513 *
12514 * PARAMETERS : @feature_mask: current feature mask, which may be modified
12515 * @stream_type: stream type
12516 * @filter_arrangement: filter arrangement
12517 *
12518 * RETURN : None
12519 *==========================================================================*/
12520void QCamera3HardwareInterface::setPAAFSupport(
12521 cam_feature_mask_t& feature_mask,
12522 cam_stream_type_t stream_type,
12523 cam_color_filter_arrangement_t filter_arrangement)
12524{
12525 LOGD("feature_mask=0x%llx; stream_type=%d, filter_arrangement=%d",
12526 feature_mask, stream_type, filter_arrangement);
12527
12528 switch (filter_arrangement) {
12529 case CAM_FILTER_ARRANGEMENT_RGGB:
12530 case CAM_FILTER_ARRANGEMENT_GRBG:
12531 case CAM_FILTER_ARRANGEMENT_GBRG:
12532 case CAM_FILTER_ARRANGEMENT_BGGR:
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012533 if ((stream_type == CAM_STREAM_TYPE_PREVIEW) ||
12534 (stream_type == CAM_STREAM_TYPE_ANALYSIS) ||
Thierry Strudel3d639192016-09-09 11:52:26 -070012535 (stream_type == CAM_STREAM_TYPE_VIDEO)) {
12536 feature_mask |= CAM_QCOM_FEATURE_PAAF;
12537 }
12538 break;
12539 case CAM_FILTER_ARRANGEMENT_Y:
12540 if (stream_type == CAM_STREAM_TYPE_ANALYSIS) {
12541 feature_mask |= CAM_QCOM_FEATURE_PAAF;
12542 }
12543 break;
12544 default:
12545 break;
12546 }
12547}
12548
12549/*===========================================================================
12550* FUNCTION : getSensorMountAngle
12551*
12552* DESCRIPTION: Retrieve sensor mount angle
12553*
12554* PARAMETERS : None
12555*
12556* RETURN : sensor mount angle in uint32_t
12557*==========================================================================*/
12558uint32_t QCamera3HardwareInterface::getSensorMountAngle()
12559{
12560 return gCamCapability[mCameraId]->sensor_mount_angle;
12561}
12562
12563/*===========================================================================
12564* FUNCTION : getRelatedCalibrationData
12565*
12566* DESCRIPTION: Retrieve related system calibration data
12567*
12568* PARAMETERS : None
12569*
12570* RETURN : Pointer of related system calibration data
12571*==========================================================================*/
12572const cam_related_system_calibration_data_t *QCamera3HardwareInterface::getRelatedCalibrationData()
12573{
12574 return (const cam_related_system_calibration_data_t *)
12575 &(gCamCapability[mCameraId]->related_cam_calibration);
12576}
Shuzhen Wangf6890e02016-08-12 14:28:54 -070012577
12578/*===========================================================================
12579 * FUNCTION : is60HzZone
12580 *
12581 * DESCRIPTION: Whether the phone is in zone with 60hz electricity frequency
12582 *
12583 * PARAMETERS : None
12584 *
12585 * RETURN : True if in 60Hz zone, False otherwise
12586 *==========================================================================*/
12587bool QCamera3HardwareInterface::is60HzZone()
12588{
12589 time_t t = time(NULL);
12590 struct tm lt;
12591
12592 struct tm* r = localtime_r(&t, &lt);
12593
12594 if (r == NULL || lt.tm_gmtoff <= -2*60*60 || lt.tm_gmtoff >= 8*60*60)
12595 return true;
12596 else
12597 return false;
12598}
Thierry Strudel3d639192016-09-09 11:52:26 -070012599}; //end namespace qcamera