blob: df672473a49ba7c6b1fd47df0933ac33c0cd6236 [file] [log] [blame]
Thierry Strudel3d639192016-09-09 11:52:26 -07001/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
2*
3* Redistribution and use in source and binary forms, with or without
4* modification, are permitted provided that the following conditions are
5* met:
6* * Redistributions of source code must retain the above copyright
7* notice, this list of conditions and the following disclaimer.
8* * Redistributions in binary form must reproduce the above
9* copyright notice, this list of conditions and the following
10* disclaimer in the documentation and/or other materials provided
11* with the distribution.
12* * Neither the name of The Linux Foundation nor the names of its
13* contributors may be used to endorse or promote products derived
14* from this software without specific prior written permission.
15*
16* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27*
28*/
29
30#define LOG_TAG "QCamera3HWI"
31//#define LOG_NDEBUG 0
32
33#define __STDC_LIMIT_MACROS
34
35// To remove
36#include <cutils/properties.h>
37
38// System dependencies
39#include <dlfcn.h>
40#include <fcntl.h>
41#include <stdio.h>
42#include <stdlib.h>
43#include "utils/Timers.h"
44#include "sys/ioctl.h"
Shuzhen Wangf6890e02016-08-12 14:28:54 -070045#include <time.h>
Thierry Strudel3d639192016-09-09 11:52:26 -070046#include <sync/sync.h>
47#include "gralloc_priv.h"
Thierry Strudele80ad7c2016-12-06 10:16:27 -080048#include <map>
Thierry Strudel3d639192016-09-09 11:52:26 -070049
50// Display dependencies
51#include "qdMetaData.h"
52
53// Camera dependencies
54#include "android/QCamera3External.h"
55#include "util/QCameraFlash.h"
56#include "QCamera3HWI.h"
57#include "QCamera3VendorTags.h"
58#include "QCameraTrace.h"
59
Chien-Yu Chene687bd02016-12-07 18:30:26 -080060#include "HdrPlusClientUtils.h"
61
Thierry Strudel3d639192016-09-09 11:52:26 -070062extern "C" {
63#include "mm_camera_dbg.h"
64}
Shuzhen Wangfb961e52016-11-28 11:48:02 -080065#include "cam_cond.h"
Thierry Strudel3d639192016-09-09 11:52:26 -070066
67using namespace android;
68
69namespace qcamera {
70
71#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
72
73#define EMPTY_PIPELINE_DELAY 2
74#define PARTIAL_RESULT_COUNT 2
75#define FRAME_SKIP_DELAY 0
76
77#define MAX_VALUE_8BIT ((1<<8)-1)
78#define MAX_VALUE_10BIT ((1<<10)-1)
79#define MAX_VALUE_12BIT ((1<<12)-1)
80
81#define VIDEO_4K_WIDTH 3840
82#define VIDEO_4K_HEIGHT 2160
83
84#define MAX_EIS_WIDTH 1920
85#define MAX_EIS_HEIGHT 1080
86
87#define MAX_RAW_STREAMS 1
88#define MAX_STALLING_STREAMS 1
89#define MAX_PROCESSED_STREAMS 3
90/* Batch mode is enabled only if FPS set is equal to or greater than this */
91#define MIN_FPS_FOR_BATCH_MODE (120)
92#define PREVIEW_FPS_FOR_HFR (30)
93#define DEFAULT_VIDEO_FPS (30.0)
Thierry Strudele80ad7c2016-12-06 10:16:27 -080094#define TEMPLATE_MAX_PREVIEW_FPS (30.0)
Thierry Strudel3d639192016-09-09 11:52:26 -070095#define MAX_HFR_BATCH_SIZE (8)
96#define REGIONS_TUPLE_COUNT 5
97#define HDR_PLUS_PERF_TIME_OUT (7000) // milliseconds
Thierry Strudel3d639192016-09-09 11:52:26 -070098// Set a threshold for detection of missing buffers //seconds
99#define MISSING_REQUEST_BUF_TIMEOUT 3
Chien-Yu Chene687bd02016-12-07 18:30:26 -0800100#define MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT 30
Thierry Strudel3d639192016-09-09 11:52:26 -0700101#define FLUSH_TIMEOUT 3
102#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))
103
104#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3 ( CAM_QCOM_FEATURE_DENOISE2D |\
105 CAM_QCOM_FEATURE_CROP |\
106 CAM_QCOM_FEATURE_ROTATION |\
107 CAM_QCOM_FEATURE_SHARPNESS |\
108 CAM_QCOM_FEATURE_SCALE |\
109 CAM_QCOM_FEATURE_CAC |\
110 CAM_QCOM_FEATURE_CDS )
Thierry Strudel9e74aae2016-09-22 17:10:18 -0700111/* Per configuration size for static metadata length*/
112#define PER_CONFIGURATION_SIZE_3 (3)
Thierry Strudel3d639192016-09-09 11:52:26 -0700113
114#define TIMEOUT_NEVER -1
115
Thierry Strudel04e026f2016-10-10 11:27:36 -0700116/* Face landmarks indices */
117#define LEFT_EYE_X 0
118#define LEFT_EYE_Y 1
119#define RIGHT_EYE_X 2
120#define RIGHT_EYE_Y 3
121#define MOUTH_X 4
122#define MOUTH_Y 5
123#define TOTAL_LANDMARK_INDICES 6
124
// Per-camera-id capability tables, filled in at module init (see camera_open path).
cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
// Cached static metadata blobs handed to the framework, one per camera id.
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
extern pthread_mutex_t gCamLock;
// Runtime-adjustable HAL log verbosity; refreshed via getLogLevel().
volatile uint32_t gCamHal3LogLevel = 1;
extern uint8_t gNumCameraSessions;

// Maps the CDS (Chroma Denoise Suppression) system-property string to the
// backend CDS mode enum.
const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
    {"On", CAM_CDS_MODE_ON},
    {"Off", CAM_CDS_MODE_OFF},
    {"Auto",CAM_CDS_MODE_AUTO}
};

// Vendor video-HDR metadata enum <-> backend video HDR mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_video_hdr_mode_t,
        cam_video_hdr_mode_t> QCamera3HardwareInterface::VIDEO_HDR_MODES_MAP[] = {
    { QCAMERA3_VIDEO_HDR_MODE_OFF,  CAM_VIDEO_HDR_MODE_OFF },
    { QCAMERA3_VIDEO_HDR_MODE_ON,   CAM_VIDEO_HDR_MODE_ON }
};

// Vendor IR (infrared) metadata enum <-> backend IR mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_ir_mode_t,
        cam_ir_mode_type_t> QCamera3HardwareInterface::IR_MODES_MAP [] = {
    {QCAMERA3_IR_MODE_OFF,  CAM_IR_MODE_OFF},
    {QCAMERA3_IR_MODE_ON,   CAM_IR_MODE_ON},
    {QCAMERA3_IR_MODE_AUTO, CAM_IR_MODE_AUTO}
};
Thierry Strudel3d639192016-09-09 11:52:26 -0700151
// ANDROID_CONTROL_EFFECT_MODE_* <-> backend color effect.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_effect_mode_t,
        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,       CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};

// ANDROID_CONTROL_AWB_MODE_* <-> backend white-balance mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_awb_mode_t,
        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};

// ANDROID_CONTROL_SCENE_MODE_* <-> backend scene mode.
// Note: STEADYPHOTO intentionally maps to the backend ANTISHAKE scene.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_scene_mode_t,
        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE},
    { ANDROID_CONTROL_SCENE_MODE_HDR,            CAM_SCENE_MODE_HDR}
};

// ANDROID_CONTROL_AF_MODE_* <-> backend focus mode.
// AF_MODE_OFF appears twice on purpose: both the backend OFF and FIXED focus
// modes report back to the framework as AF off.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_af_mode_t,
        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};
213
// ANDROID_COLOR_CORRECTION_ABERRATION_MODE_* <-> backend CAC mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_color_correction_aberration_mode_t,
        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};

// ANDROID_CONTROL_AE_ANTIBANDING_MODE_* <-> backend antibanding mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_antibanding_mode_t,
        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

// AE mode -> backend flash mode implied by that AE mode (e.g. plain AE ON
// means the flash stays off; AUTO_FLASH and REDEYE both map to flash AUTO).
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};

// ANDROID_FLASH_MODE_* <-> backend flash mode (explicit flash control).
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_flash_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF  },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};

// ANDROID_STATISTICS_FACE_DETECT_MODE_* <-> backend face-detect mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_statistics_face_detect_mode_t,
        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL }
};
259
// Focus-distance calibration quality reported in static metadata.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
            CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
            CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
            CAM_FOCUS_CALIBRATED }
};

// AF lens motion state <-> framework lens state.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_state_t,
        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
    { ANDROID_LENS_STATE_STATIONARY,    CAM_AF_LENS_STATE_STATIONARY},
    { ANDROID_LENS_STATE_MOVING,        CAM_AF_LENS_STATE_MOVING}
};

// Flat list of (width, height) pairs advertised as JPEG thumbnail sizes.
// The leading 0,0 entry means "no thumbnail", as required by the framework.
const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             240, 144,
                                             256, 144,
                                             240, 160,
                                             256, 154,
                                             240, 240,
                                             320, 240};

// ANDROID_SENSOR_TEST_PATTERN_MODE_* <-> backend sensor test pattern.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_test_pattern_mode_t,
        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,          CAM_TEST_PATTERN_OFF },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR,  CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,   CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,          CAM_TEST_PATTERN_PN9 },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1,      CAM_TEST_PATTERN_CUSTOM1},
};
297
/* Since there is no mapping for all the options some Android enum are not listed.
 * Also, the order in this list is important because while mapping from HAL to Android it will
 * traverse from lower to higher index which means that for HAL values that are map to different
 * Android values, the traverse logic will select the first one found.
 */
// Do NOT reorder entries: several backend AWB values map to multiple Android
// illuminants, and the first match wins on the HAL -> Android direction.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_reference_illuminant1_t,
        cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT,            CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT,   CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A,             CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55,                    CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65,                    CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75,                    CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50,                    CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN,    CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT,               CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN,               CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER,           CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER,         CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE,                  CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT,  CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT,      CAM_AWB_COLD_FLO},
};

// Requested HFR frame rate -> backend HFR mode enum.
const QCamera3HardwareInterface::QCameraMap<
        int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
    { 60, CAM_HFR_MODE_60FPS},
    { 90, CAM_HFR_MODE_90FPS},
    { 120, CAM_HFR_MODE_120FPS},
    { 150, CAM_HFR_MODE_150FPS},
    { 180, CAM_HFR_MODE_180FPS},
    { 210, CAM_HFR_MODE_210FPS},
    { 240, CAM_HFR_MODE_240FPS},
    { 480, CAM_HFR_MODE_480FPS},
};

// Vendor instant-AEC metadata enum <-> backend AEC convergence type.
const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_instant_aec_mode_t,
        cam_aec_convergence_type> QCamera3HardwareInterface::INSTANT_AEC_MODES_MAP[] = {
    { QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE, CAM_AEC_NORMAL_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_AGGRESSIVE_CONVERGENCE, CAM_AEC_AGGRESSIVE_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_FAST_CONVERGENCE, CAM_AEC_FAST_CONVERGENCE},
};
// camera3_device_ops dispatch table handed to the framework. Entries that do
// not apply to HAL3.2+ (register_stream_buffers, get_metadata_vendor_tag_ops)
// are deliberately NULL.
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    .initialize                         = QCamera3HardwareInterface::initialize,
    .configure_streams                  = QCamera3HardwareInterface::configure_streams,
    .register_stream_buffers            = NULL,
    .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
    .process_capture_request            = QCamera3HardwareInterface::process_capture_request,
    .get_metadata_vendor_tag_ops        = NULL,
    .dump                               = QCamera3HardwareInterface::dump,
    .flush                              = QCamera3HardwareInterface::flush,
    .reserved                           = {0},
};

// initialise to some default value
// Per-camera backend session ids; 0xDEADBEEF marks "no session established".
uint32_t QCamera3HardwareInterface::sessionId[] = {0xDEADBEEF, 0xDEADBEEF, 0xDEADBEEF};
357
/*===========================================================================
 * FUNCTION   : QCamera3HardwareInterface
 *
 * DESCRIPTION: constructor of QCamera3HardwareInterface. Initializes all
 *              members to safe defaults, fills in the camera3_device_t
 *              descriptor, and reads the persist.camera.* debug properties.
 *              Does NOT open the camera; that happens in openCamera().
 *
 * PARAMETERS :
 *   @cameraId  : camera ID
 *   @callbacks : camera module callbacks for framework notifications
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
        const camera_module_callbacks_t *callbacks)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mSupportChannel(NULL),
      mAnalysisChannel(NULL),
      mRawDumpChannel(NULL),
      mHdrPlusRawSrcChannel(NULL),
      mDummyBatchChannel(NULL),
      mPerfLockMgr(),
      mCommon(),
      mChannelHandle(0),
      mFirstConfiguration(true),
      mFlush(false),
      mFlushPerf(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mPrevParameters(NULL),
      m_bIsVideo(false),
      m_bIs4KVideo(false),
      m_bEisSupportedSize(false),
      m_bEisEnable(false),
      m_MobicatMask(0),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      mMetaFrameCount(0U),
      mUpdateDebugLevel(false),
      mCallbacks(callbacks),
      mCaptureIntent(0),
      mCacMode(0),
      /* DevCamDebug metadata internal m control*/
      mDevCamDebugMetaEnable(0),
      /* DevCamDebug metadata end */
      mBatchSize(0),
      mToBeQueuedVidBufs(0),
      mHFRVideoFps(DEFAULT_VIDEO_FPS),
      mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
      mFirstFrameNumberInBatch(0),
      mNeedSensorRestart(false),
      mPreviewStarted(false),
      mMinInFlightRequests(MIN_INFLIGHT_REQUESTS),
      mMaxInFlightRequests(MAX_INFLIGHT_REQUESTS),
      mInstantAEC(false),
      mResetInstantAEC(false),
      mInstantAECSettledFrameNumber(0),
      mAecSkipDisplayFrameBound(0),
      mInstantAecFrameIdxCount(0),
      mLdafCalibExist(false),
      mLastCustIntentFrmNum(-1),
      mState(CLOSED),
      mIsDeviceLinked(false),
      mIsMainCamera(true),
      mLinkedCameraId(0),
      m_pDualCamCmdHeap(NULL),
      m_pDualCamCmdPtr(NULL),
      m_bSensorHDREnabled(false)
{
    getLogLevel();
    mCommon.init(gCamCapability[cameraId]);
    // Fill in the hw_device_t / camera3_device_t descriptor returned to the
    // framework from openCamera().
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
#ifndef USE_HAL_3_3
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_4;
#else
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
#endif
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    PTHREAD_COND_INIT(&mBuffersCond);

    PTHREAD_COND_INIT(&mRequestCond);
    mPendingLiveRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

    // Getting system props of different kinds
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.raw.dump", prop, "0");
    mEnableRawDump = atoi(prop);
    property_get("persist.camera.hal3.force.hdr", prop, "0");
    mForceHdrSnapshot = atoi(prop);

    if (mEnableRawDump)
        LOGD("Raw dump from Camera HAL enabled");

    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
    memset(mLdafCalib, 0, sizeof(mLdafCalib));

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.preview", prop, "0");
    m_bTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    // Note: software TNR for preview defaults to enabled ("1"), unlike the
    // other TNR switches above/below.
    property_get("persist.camera.swtnr.preview", prop, "1");
    m_bSwTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.video", prop, "0");
    m_bTnrVideo = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.avtimer.debug", prop, "0");
    m_debug_avtimer = (uint8_t)atoi(prop);
    LOGI("AV timer enabled: %d", m_debug_avtimer);

    //Load and read GPU library.
    // Query the GPU's preferred surface pixel alignment so buffer strides
    // match what the display/GPU stack expects; fall back to 32-byte padding.
    lib_surface_utils = NULL;
    LINK_get_surface_pixel_alignment = NULL;
    mSurfaceStridePadding = CAM_PAD_TO_32;
    lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
    if (lib_surface_utils) {
        *(void **)&LINK_get_surface_pixel_alignment =
                dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
        if (LINK_get_surface_pixel_alignment) {
            mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
        }
        dlclose(lib_surface_utils);
    }

    m60HzZone = is60HzZone();
}
504
/*===========================================================================
 * FUNCTION   : ~QCamera3HardwareInterface
 *
 * DESCRIPTION: destructor of QCamera3HardwareInterface. Tears down in a
 *              strict order: unlink dual-cam, stop every stream/channel,
 *              then delete channels, send the final unconfigure, close the
 *              camera, and free remaining per-request resources.
 *
 * PARAMETERS : none
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    LOGD("E");

    int32_t rc = 0;

    // Disable power hint and enable the perf lock for close camera
    mPerfLockMgr.releasePerfLock(PERF_LOCK_POWERHINT_ENCODE);
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_CLOSE_CAMERA);

    // unlink of dualcam during close camera
    if (mIsDeviceLinked) {
        cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
                &m_pDualCamCmdPtr->bundle_info;
        m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
        m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
        // gCamLock guards the global sessionId table shared across sessions.
        pthread_mutex_lock(&gCamLock);

        if (mIsMainCamera == 1) {
            m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            // related session id should be session id of linked session
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        } else {
            m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        }
        pthread_mutex_unlock(&gCamLock);

        rc = mCameraHandle->ops->set_dual_cam_cmd(
                mCameraHandle->camera_handle);
        if (rc < 0) {
            // Best effort: log and continue with teardown regardless.
            LOGE("Dualcam: Unlink failed, but still proceed to close");
        }
    }

    /* We need to stop all streams before deleting any stream */
    if (mRawDumpChannel) {
        mRawDumpChannel->stop();
    }

    if (mHdrPlusRawSrcChannel) {
        mHdrPlusRawSrcChannel->stop();
    }

    // NOTE: 'camera3_stream_t *' objects are already freed at
    //       this stage by the framework
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel) {
            channel->stop();
        }
    }
    if (mSupportChannel)
        mSupportChannel->stop();

    if (mAnalysisChannel) {
        mAnalysisChannel->stop();
    }
    if (mMetadataChannel) {
        mMetadataChannel->stop();
    }
    if (mChannelHandle) {
        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGD("stopping channel %d", mChannelHandle);
    }

    // Second pass: every channel is stopped, now it is safe to delete them.
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel)
            delete channel;
        free (*it);
    }
    if (mSupportChannel) {
        delete mSupportChannel;
        mSupportChannel = NULL;
    }

    if (mAnalysisChannel) {
        delete mAnalysisChannel;
        mAnalysisChannel = NULL;
    }
    if (mRawDumpChannel) {
        delete mRawDumpChannel;
        mRawDumpChannel = NULL;
    }
    if (mHdrPlusRawSrcChannel) {
        delete mHdrPlusRawSrcChannel;
        mHdrPlusRawSrcChannel = NULL;
    }
    if (mDummyBatchChannel) {
        delete mDummyBatchChannel;
        mDummyBatchChannel = NULL;
    }

    // mPictureChannel is owned via mStreamInfo and was deleted above;
    // just clear the alias.
    mPictureChannel = NULL;

    if (mMetadataChannel) {
        delete mMetadataChannel;
        mMetadataChannel = NULL;
    }

    /* Clean up all channels */
    if (mCameraInitialized) {
        if(!mFirstConfiguration){
            //send the last unconfigure
            cam_stream_size_info_t stream_config_info;
            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
            stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
            stream_config_info.buffer_info.max_buffers =
                    m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
            clear_metadata_buffer(mParameters);
            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
                    stream_config_info);
            int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
            if (rc < 0) {
                LOGE("set_parms failed for unconfigure");
            }
        }
        deinitParameters();
    }

    if (mChannelHandle) {
        mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGH("deleting channel %d", mChannelHandle);
        mChannelHandle = 0;
    }

    if (mState != CLOSED)
        closeCamera();

    // Drain any requests/buffers the framework never got back.
    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        req.mPendingBufferList.clear();
    }
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end();) {
        i = erasePendingRequest(i);
    }
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    mPerfLockMgr.releasePerfLock(PERF_LOCK_CLOSE_CAMERA);

    pthread_cond_destroy(&mRequestCond);

    pthread_cond_destroy(&mBuffersCond);

    pthread_mutex_destroy(&mMutex);
    LOGD("X");
}
673
674/*===========================================================================
675 * FUNCTION : erasePendingRequest
676 *
677 * DESCRIPTION: function to erase a desired pending request after freeing any
678 * allocated memory
679 *
680 * PARAMETERS :
681 * @i : iterator pointing to pending request to be erased
682 *
683 * RETURN : iterator pointing to the next request
684 *==========================================================================*/
685QCamera3HardwareInterface::pendingRequestIterator
686 QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
687{
688 if (i->input_buffer != NULL) {
689 free(i->input_buffer);
690 i->input_buffer = NULL;
691 }
692 if (i->settings != NULL)
693 free_camera_metadata((camera_metadata_t*)i->settings);
694 return mPendingRequestsList.erase(i);
695}
696
697/*===========================================================================
698 * FUNCTION : camEvtHandle
699 *
700 * DESCRIPTION: Function registered to mm-camera-interface to handle events
701 *
702 * PARAMETERS :
703 * @camera_handle : interface layer camera handle
704 * @evt : ptr to event
705 * @user_data : user data ptr
706 *
707 * RETURN : none
708 *==========================================================================*/
709void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
710 mm_camera_event_t *evt,
711 void *user_data)
712{
713 QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
714 if (obj && evt) {
715 switch(evt->server_event_type) {
716 case CAM_EVENT_TYPE_DAEMON_DIED:
717 pthread_mutex_lock(&obj->mMutex);
718 obj->mState = ERROR;
719 pthread_mutex_unlock(&obj->mMutex);
720 LOGE("Fatal, camera daemon died");
721 break;
722
723 case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
724 LOGD("HAL got request pull from Daemon");
725 pthread_mutex_lock(&obj->mMutex);
726 obj->mWokenUpByDaemon = true;
727 obj->unblockRequestIfNecessary();
728 pthread_mutex_unlock(&obj->mMutex);
729 break;
730
731 default:
732 LOGW("Warning: Unhandled event %d",
733 evt->server_event_type);
734 break;
735 }
736 } else {
737 LOGE("NULL user_data/evt");
738 }
739}
740
741/*===========================================================================
742 * FUNCTION : openCamera
743 *
744 * DESCRIPTION: open camera
745 *
746 * PARAMETERS :
747 * @hw_device : double ptr for camera device struct
748 *
749 * RETURN : int32_t type of status
750 * NO_ERROR -- success
751 * none-zero failure code
752 *==========================================================================*/
753int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
754{
755 int rc = 0;
756 if (mState != CLOSED) {
757 *hw_device = NULL;
758 return PERMISSION_DENIED;
759 }
760
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800761 mPerfLockMgr.acquirePerfLock(PERF_LOCK_OPEN_CAMERA);
Thierry Strudel3d639192016-09-09 11:52:26 -0700762 LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
763 mCameraId);
764
765 rc = openCamera();
766 if (rc == 0) {
767 *hw_device = &mCameraDevice.common;
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800768 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -0700769 *hw_device = NULL;
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800770 }
Thierry Strudel3d639192016-09-09 11:52:26 -0700771
Thierry Strudel3d639192016-09-09 11:52:26 -0700772 LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
773 mCameraId, rc);
774
775 if (rc == NO_ERROR) {
776 mState = OPENED;
777 }
778 return rc;
779}
780
781/*===========================================================================
782 * FUNCTION : openCamera
783 *
784 * DESCRIPTION: open camera
785 *
786 * PARAMETERS : none
787 *
788 * RETURN : int32_t type of status
789 * NO_ERROR -- success
* non-zero failure code
791 *==========================================================================*/
792int QCamera3HardwareInterface::openCamera()
793{
794 int rc = 0;
795 char value[PROPERTY_VALUE_MAX];
796
Thierry Strudele80ad7c2016-12-06 10:16:27 -0800797 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_OPENCAMERA);
Thierry Strudel3d639192016-09-09 11:52:26 -0700798 if (mCameraHandle) {
799 LOGE("Failure: Camera already opened");
800 return ALREADY_EXISTS;
801 }
802
803 rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
804 if (rc < 0) {
805 LOGE("Failed to reserve flash for camera id: %d",
806 mCameraId);
807 return UNKNOWN_ERROR;
808 }
809
810 rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
811 if (rc) {
812 LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
813 return rc;
814 }
815
816 if (!mCameraHandle) {
817 LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
818 return -ENODEV;
819 }
820
821 rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
822 camEvtHandle, (void *)this);
823
824 if (rc < 0) {
825 LOGE("Error, failed to register event callback");
826 /* Not closing camera here since it is already handled in destructor */
827 return FAILED_TRANSACTION;
828 }
829
830 mExifParams.debug_params =
831 (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
832 if (mExifParams.debug_params) {
833 memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
834 } else {
835 LOGE("Out of Memory. Allocation failed for 3A debug exif params");
836 return NO_MEMORY;
837 }
838 mFirstConfiguration = true;
839
840 //Notify display HAL that a camera session is active.
841 //But avoid calling the same during bootup because camera service might open/close
842 //cameras at boot time during its initialization and display service will also internally
843 //wait for camera service to initialize first while calling this display API, resulting in a
844 //deadlock situation. Since boot time camera open/close calls are made only to fetch
845 //capabilities, no need of this display bw optimization.
846 //Use "service.bootanim.exit" property to know boot status.
847 property_get("service.bootanim.exit", value, "0");
848 if (atoi(value) == 1) {
849 pthread_mutex_lock(&gCamLock);
850 if (gNumCameraSessions++ == 0) {
851 setCameraLaunchStatus(true);
852 }
853 pthread_mutex_unlock(&gCamLock);
854 }
855
856 //fill the session id needed while linking dual cam
857 pthread_mutex_lock(&gCamLock);
858 rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
859 &sessionId[mCameraId]);
860 pthread_mutex_unlock(&gCamLock);
861
862 if (rc < 0) {
863 LOGE("Error, failed to get sessiion id");
864 return UNKNOWN_ERROR;
865 } else {
866 //Allocate related cam sync buffer
867 //this is needed for the payload that goes along with bundling cmd for related
868 //camera use cases
Thierry Strudel295a0ca2016-11-03 18:38:47 -0700869 m_pDualCamCmdHeap = new QCamera3HeapMemory(1);
870 rc = m_pDualCamCmdHeap->allocate(sizeof(cam_dual_camera_cmd_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -0700871 if(rc != OK) {
872 rc = NO_MEMORY;
873 LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
874 return NO_MEMORY;
875 }
876
877 //Map memory for related cam sync buffer
878 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
Thierry Strudel295a0ca2016-11-03 18:38:47 -0700879 CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF,
880 m_pDualCamCmdHeap->getFd(0),
881 sizeof(cam_dual_camera_cmd_info_t),
882 m_pDualCamCmdHeap->getPtr(0));
Thierry Strudel3d639192016-09-09 11:52:26 -0700883 if(rc < 0) {
884 LOGE("Dualcam: failed to map Related cam sync buffer");
885 rc = FAILED_TRANSACTION;
886 return NO_MEMORY;
887 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -0700888 m_pDualCamCmdPtr =
889 (cam_dual_camera_cmd_info_t*) DATA_PTR(m_pDualCamCmdHeap,0);
Thierry Strudel3d639192016-09-09 11:52:26 -0700890 }
891
892 LOGH("mCameraId=%d",mCameraId);
893
894 return NO_ERROR;
895}
896
897/*===========================================================================
898 * FUNCTION : closeCamera
899 *
900 * DESCRIPTION: close camera
901 *
902 * PARAMETERS : none
903 *
904 * RETURN : int32_t type of status
905 * NO_ERROR -- success
* non-zero failure code
907 *==========================================================================*/
int QCamera3HardwareInterface::closeCamera()
{
    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CLOSECAMERA);
    int rc = NO_ERROR;
    char value[PROPERTY_VALUE_MAX];

    LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
             mCameraId);

    // unmap memory for related cam sync buffer
    // NOTE(review): mCameraHandle is dereferenced without a NULL check here;
    // presumably the state machine guarantees closeCamera() only runs after a
    // successful open — confirm against the callers.
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF);
    // Release the dual-cam command heap allocated in openCamera().
    if (NULL != m_pDualCamCmdHeap) {
        m_pDualCamCmdHeap->deallocate();
        delete m_pDualCamCmdHeap;
        m_pDualCamCmdHeap = NULL;
        m_pDualCamCmdPtr = NULL;
    }

    // Close the backend session; the handle is invalid from this point on.
    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
    mCameraHandle = NULL;

    // Disconnect from HDR+ client.
    if (mHdrPlusClient != nullptr) {
        mHdrPlusClient->disconnect();
        mHdrPlusClient = nullptr;
    }

    //reset session id to some invalid id
    pthread_mutex_lock(&gCamLock);
    sessionId[mCameraId] = 0xDEADBEEF;
    pthread_mutex_unlock(&gCamLock);

    //Notify display HAL that there is no active camera session
    //but avoid calling the same during bootup. Refer to openCamera
    //for more details.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        // Last session out notifies the display that camera is no longer live.
        if (--gNumCameraSessions == 0) {
            setCameraLaunchStatus(false);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    // Free the 3A debug EXIF scratch buffer allocated in openCamera().
    if (mExifParams.debug_params) {
        free(mExifParams.debug_params);
        mExifParams.debug_params = NULL;
    }
    // Give the flash unit back to the torch HAL; failure is non-fatal.
    if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
        LOGW("Failed to release flash for camera id: %d",
            mCameraId);
    }
    mState = CLOSED;
    LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
         mCameraId, rc);
    return rc;
}
966
967/*===========================================================================
968 * FUNCTION : initialize
969 *
970 * DESCRIPTION: Initialize frameworks callback functions
971 *
972 * PARAMETERS :
973 * @callback_ops : callback function to frameworks
974 *
975 * RETURN :
976 *
977 *==========================================================================*/
978int QCamera3HardwareInterface::initialize(
979 const struct camera3_callback_ops *callback_ops)
980{
Thierry Strudele80ad7c2016-12-06 10:16:27 -0800981 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_INIT);
Thierry Strudel3d639192016-09-09 11:52:26 -0700982 int rc;
983
984 LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
985 pthread_mutex_lock(&mMutex);
986
987 // Validate current state
988 switch (mState) {
989 case OPENED:
990 /* valid state */
991 break;
992 default:
993 LOGE("Invalid state %d", mState);
994 rc = -ENODEV;
995 goto err1;
996 }
997
998 rc = initParameters();
999 if (rc < 0) {
1000 LOGE("initParamters failed %d", rc);
1001 goto err1;
1002 }
1003 mCallbackOps = callback_ops;
1004
1005 mChannelHandle = mCameraHandle->ops->add_channel(
1006 mCameraHandle->camera_handle, NULL, NULL, this);
1007 if (mChannelHandle == 0) {
1008 LOGE("add_channel failed");
1009 rc = -ENOMEM;
1010 pthread_mutex_unlock(&mMutex);
1011 return rc;
1012 }
1013
1014 pthread_mutex_unlock(&mMutex);
1015 mCameraInitialized = true;
1016 mState = INITIALIZED;
1017 LOGI("X");
1018 return 0;
1019
1020err1:
1021 pthread_mutex_unlock(&mMutex);
1022 return rc;
1023}
1024
1025/*===========================================================================
1026 * FUNCTION : validateStreamDimensions
1027 *
1028 * DESCRIPTION: Check if the configuration requested are those advertised
1029 *
1030 * PARAMETERS :
1031 * @stream_list : streams to be configured
1032 *
1033 * RETURN :
1034 *
1035 *==========================================================================*/
int QCamera3HardwareInterface::validateStreamDimensions(
        camera3_stream_configuration_t *streamList)
{
    int rc = NO_ERROR;
    size_t count = 0;

    camera3_stream_t *inputStream = NULL;
    /*
     * Loop through all streams to find input stream if it exists*
     */
    for (size_t i = 0; i< streamList->num_streams; i++) {
        if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
            if (inputStream != NULL) {
                LOGE("Error, Multiple input streams requested");
                return -EINVAL;
            }
            inputStream = streamList->streams[i];
        }
    }
    /*
     * Loop through all streams requested in configuration
     * Check if unsupported sizes have been requested on any of them
     */
    for (size_t j = 0; j < streamList->num_streams; j++) {
        bool sizeFound = false;
        camera3_stream_t *newStream = streamList->streams[j];

        // For 90/270-degree rotations the buffer dimensions are swapped
        // relative to the sensor tables, so validate the swapped size.
        uint32_t rotatedHeight = newStream->height;
        uint32_t rotatedWidth = newStream->width;
        if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
                (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
            rotatedHeight = newStream->width;
            rotatedWidth = newStream->height;
        }

        /*
         * Sizes are different for each type of stream format check against
         * appropriate table.
         */
        switch (newStream->format) {
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
        case HAL_PIXEL_FORMAT_RAW10:
            // RAW streams must match one of the advertised raw dimensions.
            count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
                        (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_BLOB:
            // JPEG (BLOB) streams validate against the picture-size table.
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
            /* Verify set size against generated sizes table */
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_YCbCr_420_888:
        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
        default:
            // ZSL / input / bidirectional streams are first matched against
            // the full active array size; the inner break only exits this
            // inner check and control falls through to the table scan below.
            if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
                    || newStream->stream_type == CAMERA3_STREAM_INPUT
                    || IS_USAGE_ZSL(newStream->usage)) {
                if (((int32_t)rotatedWidth ==
                                gCamCapability[mCameraId]->active_array_size.width) &&
                                ((int32_t)rotatedHeight ==
                                gCamCapability[mCameraId]->active_array_size.height)) {
                    sizeFound = true;
                    break;
                }
                /* We could potentially break here to enforce ZSL stream
                 * set from frameworks always is full active array size
                 * but it is not clear from the spec if framework will always
                 * follow that, also we have logic to override to full array
                 * size, so keeping the logic lenient at the moment
                 */
            }
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
                    MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                            gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                            ((int32_t)rotatedHeight ==
                            gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        } /* End of switch(newStream->format) */

        /* We error out even if a single stream has unsupported size set */
        if (!sizeFound) {
            LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
                    rotatedWidth, rotatedHeight, newStream->format,
                    gCamCapability[mCameraId]->active_array_size.width,
                    gCamCapability[mCameraId]->active_array_size.height);
            rc = -EINVAL;
            break;
        }
    } /* End of for each stream */
    return rc;
}
1147
1148/*==============================================================================
1149 * FUNCTION : isSupportChannelNeeded
1150 *
1151 * DESCRIPTION: Simple heuristic func to determine if support channels is needed
1152 *
1153 * PARAMETERS :
1154 * @stream_list : streams to be configured
1155 * @stream_config_info : the config info for streams to be configured
1156 *
 * RETURN : Boolean true/false decision
1158 *
1159 *==========================================================================*/
1160bool QCamera3HardwareInterface::isSupportChannelNeeded(
1161 camera3_stream_configuration_t *streamList,
1162 cam_stream_size_info_t stream_config_info)
1163{
1164 uint32_t i;
1165 bool pprocRequested = false;
1166 /* Check for conditions where PProc pipeline does not have any streams*/
1167 for (i = 0; i < stream_config_info.num_streams; i++) {
1168 if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
1169 stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
1170 pprocRequested = true;
1171 break;
1172 }
1173 }
1174
1175 if (pprocRequested == false )
1176 return true;
1177
1178 /* Dummy stream needed if only raw or jpeg streams present */
1179 for (i = 0; i < streamList->num_streams; i++) {
1180 switch(streamList->streams[i]->format) {
1181 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1182 case HAL_PIXEL_FORMAT_RAW10:
1183 case HAL_PIXEL_FORMAT_RAW16:
1184 case HAL_PIXEL_FORMAT_BLOB:
1185 break;
1186 default:
1187 return false;
1188 }
1189 }
1190 return true;
1191}
1192
1193/*==============================================================================
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001194 * FUNCTION : sensor_mode_info
Thierry Strudel3d639192016-09-09 11:52:26 -07001195 *
 * DESCRIPTION: Get sensor mode information based on current stream configuration
Thierry Strudel3d639192016-09-09 11:52:26 -07001197 *
1198 * PARAMETERS :
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001199 * @sensor_mode_info : sensor mode information (output)
Thierry Strudel3d639192016-09-09 11:52:26 -07001200 *
1201 * RETURN : int32_t type of status
1202 * NO_ERROR -- success
 * non-zero failure code
1204 *
1205 *==========================================================================*/
int32_t QCamera3HardwareInterface::getSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
{
    int32_t rc = NO_ERROR;

    // Compute the per-axis maximum over all configured stream sizes; the
    // backend picks a sensor mode large enough to cover this bounding box.
    cam_dimension_t max_dim = {0, 0};
    for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
        if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
            max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
        if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
            max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
    }

    // Push the max dimension to the backend first...
    clear_metadata_buffer(mParameters);

    rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
            max_dim);
    if (rc != NO_ERROR) {
        LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
        return rc;
    }

    rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
    if (rc != NO_ERROR) {
        LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
        return rc;
    }

    // ...then query the sensor mode the backend selected for it. The batch
    // buffer is reused, so it must be cleared between the set and the get.
    clear_metadata_buffer(mParameters);
    ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO);

    rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
            mParameters);
    if (rc != NO_ERROR) {
        LOGE("Failed to get CAM_INTF_PARM_SENSOR_MODE_INFO");
        return rc;
    }

    // Copy the result out into the caller-provided struct.
    READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO, sensorModeInfo);
    LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u", __FUNCTION__,
            sensorModeInfo.active_array_size.width, sensorModeInfo.active_array_size.height,
            sensorModeInfo.pixel_array_size.width, sensorModeInfo.pixel_array_size.height,
            sensorModeInfo.op_pixel_clk);

    return rc;
}
1251
1252/*==============================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07001253 * FUNCTION : addToPPFeatureMask
1254 *
1255 * DESCRIPTION: add additional features to pp feature mask based on
1256 * stream type and usecase
1257 *
1258 * PARAMETERS :
1259 * @stream_format : stream type for feature mask
1260 * @stream_idx : stream idx within postprocess_mask list to change
1261 *
1262 * RETURN : NULL
1263 *
1264 *==========================================================================*/
void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
        uint32_t stream_idx)
{
    char feature_mask_value[PROPERTY_VALUE_MAX];
    cam_feature_mask_t feature_mask;
    int args_converted;
    int property_len;

    /* Get feature mask from property */
#ifdef _LE_CAMERA_
    // LE builds default the property to SW TNR enabled.
    char swtnr_feature_mask_value[PROPERTY_VALUE_MAX];
    snprintf(swtnr_feature_mask_value, PROPERTY_VALUE_MAX, "%lld", CAM_QTI_FEATURE_SW_TNR);
    property_len = property_get("persist.camera.hal3.feature",
            feature_mask_value, swtnr_feature_mask_value);
#else
    property_len = property_get("persist.camera.hal3.feature",
            feature_mask_value, "0");
#endif
    // Accept both "0x..." hex and plain decimal property values.
    // NOTE(review): "%lld" scans into a signed long long; if
    // cam_feature_mask_t is unsigned 64-bit this is a format mismatch for
    // values with the top bit set — confirm the typedef.
    if ((property_len > 2) && (feature_mask_value[0] == '0') &&
            (feature_mask_value[1] == 'x')) {
        args_converted = sscanf(feature_mask_value, "0x%llx", &feature_mask);
    } else {
        args_converted = sscanf(feature_mask_value, "%lld", &feature_mask);
    }
    if (1 != args_converted) {
        feature_mask = 0;
        LOGE("Wrong feature mask %s", feature_mask_value);
        return;
    }

    switch (stream_format) {
    case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
        /* Add LLVD to pp feature mask only if video hint is enabled */
        // SW TNR takes priority over LLVD when both bits are requested.
        if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
            mStreamConfigInfo.postprocess_mask[stream_idx]
                    |= CAM_QTI_FEATURE_SW_TNR;
            LOGH("Added SW TNR to pp feature mask");
        } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
            mStreamConfigInfo.postprocess_mask[stream_idx]
                    |= CAM_QCOM_FEATURE_LLVD;
            LOGH("Added LLVD SeeMore to pp feature mask");
        }
        // Staggered video HDR is added whenever the sensor advertises it.
        if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
                CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
            mStreamConfigInfo.postprocess_mask[stream_idx] |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
        }
        break;
    }
    default:
        break;
    }
    LOGD("PP feature mask %llx",
            mStreamConfigInfo.postprocess_mask[stream_idx]);
}
1319
1320/*==============================================================================
1321 * FUNCTION : updateFpsInPreviewBuffer
1322 *
1323 * DESCRIPTION: update FPS information in preview buffer.
1324 *
1325 * PARAMETERS :
1326 * @metadata : pointer to metadata buffer
1327 * @frame_number: frame_number to look for in pending buffer list
1328 *
1329 * RETURN : None
1330 *
1331 *==========================================================================*/
1332void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
1333 uint32_t frame_number)
1334{
1335 // Mark all pending buffers for this particular request
1336 // with corresponding framerate information
1337 for (List<PendingBuffersInRequest>::iterator req =
1338 mPendingBuffersMap.mPendingBuffersInRequest.begin();
1339 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1340 for(List<PendingBufferInfo>::iterator j =
1341 req->mPendingBufferList.begin();
1342 j != req->mPendingBufferList.end(); j++) {
1343 QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
1344 if ((req->frame_number == frame_number) &&
1345 (channel->getStreamTypeMask() &
1346 (1U << CAM_STREAM_TYPE_PREVIEW))) {
1347 IF_META_AVAILABLE(cam_fps_range_t, float_range,
1348 CAM_INTF_PARM_FPS_RANGE, metadata) {
1349 typeof (MetaData_t::refreshrate) cameraFps = float_range->max_fps;
1350 struct private_handle_t *priv_handle =
1351 (struct private_handle_t *)(*(j->buffer));
1352 setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
1353 }
1354 }
1355 }
1356 }
1357}
1358
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001359/*==============================================================================
1360 * FUNCTION : updateTimeStampInPendingBuffers
1361 *
1362 * DESCRIPTION: update timestamp in display metadata for all pending buffers
1363 * of a frame number
1364 *
1365 * PARAMETERS :
1366 * @frame_number: frame_number. Timestamp will be set on pending buffers of this frame number
1367 * @timestamp : timestamp to be set
1368 *
1369 * RETURN : None
1370 *
1371 *==========================================================================*/
1372void QCamera3HardwareInterface::updateTimeStampInPendingBuffers(
1373 uint32_t frameNumber, nsecs_t timestamp)
1374{
1375 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
1376 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1377 if (req->frame_number != frameNumber)
1378 continue;
1379
1380 for (auto k = req->mPendingBufferList.begin();
1381 k != req->mPendingBufferList.end(); k++ ) {
1382 struct private_handle_t *priv_handle =
1383 (struct private_handle_t *) (*(k->buffer));
1384 setMetaData(priv_handle, SET_VT_TIMESTAMP, &timestamp);
1385 }
1386 }
1387 return;
1388}
1389
Thierry Strudel3d639192016-09-09 11:52:26 -07001390/*===========================================================================
1391 * FUNCTION : configureStreams
1392 *
1393 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1394 * and output streams.
1395 *
1396 * PARAMETERS :
1397 * @stream_list : streams to be configured
1398 *
1399 * RETURN :
1400 *
1401 *==========================================================================*/
1402int QCamera3HardwareInterface::configureStreams(
1403 camera3_stream_configuration_t *streamList)
1404{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001405 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS);
Thierry Strudel3d639192016-09-09 11:52:26 -07001406 int rc = 0;
1407
1408 // Acquire perfLock before configure streams
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001409 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001410 rc = configureStreamsPerfLocked(streamList);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001411 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001412
1413 return rc;
1414}
1415
1416/*===========================================================================
1417 * FUNCTION : configureStreamsPerfLocked
1418 *
1419 * DESCRIPTION: configureStreams while perfLock is held.
1420 *
1421 * PARAMETERS :
1422 * @stream_list : streams to be configured
1423 *
1424 * RETURN : int32_t type of status
1425 * NO_ERROR -- success
 * non-zero failure code
1427 *==========================================================================*/
1428int QCamera3HardwareInterface::configureStreamsPerfLocked(
1429 camera3_stream_configuration_t *streamList)
1430{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001431 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS_PERF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07001432 int rc = 0;
1433
1434 // Sanity check stream_list
1435 if (streamList == NULL) {
1436 LOGE("NULL stream configuration");
1437 return BAD_VALUE;
1438 }
1439 if (streamList->streams == NULL) {
1440 LOGE("NULL stream list");
1441 return BAD_VALUE;
1442 }
1443
1444 if (streamList->num_streams < 1) {
1445 LOGE("Bad number of streams requested: %d",
1446 streamList->num_streams);
1447 return BAD_VALUE;
1448 }
1449
1450 if (streamList->num_streams >= MAX_NUM_STREAMS) {
1451 LOGE("Maximum number of streams %d exceeded: %d",
1452 MAX_NUM_STREAMS, streamList->num_streams);
1453 return BAD_VALUE;
1454 }
1455
1456 mOpMode = streamList->operation_mode;
1457 LOGD("mOpMode: %d", mOpMode);
1458
1459 /* first invalidate all the steams in the mStreamList
1460 * if they appear again, they will be validated */
1461 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1462 it != mStreamInfo.end(); it++) {
1463 QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1464 if (channel) {
1465 channel->stop();
1466 }
1467 (*it)->status = INVALID;
1468 }
1469
1470 if (mRawDumpChannel) {
1471 mRawDumpChannel->stop();
1472 delete mRawDumpChannel;
1473 mRawDumpChannel = NULL;
1474 }
1475
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001476 if (mHdrPlusRawSrcChannel) {
1477 mHdrPlusRawSrcChannel->stop();
1478 delete mHdrPlusRawSrcChannel;
1479 mHdrPlusRawSrcChannel = NULL;
1480 }
1481
Thierry Strudel3d639192016-09-09 11:52:26 -07001482 if (mSupportChannel)
1483 mSupportChannel->stop();
1484
1485 if (mAnalysisChannel) {
1486 mAnalysisChannel->stop();
1487 }
1488 if (mMetadataChannel) {
1489 /* If content of mStreamInfo is not 0, there is metadata stream */
1490 mMetadataChannel->stop();
1491 }
1492 if (mChannelHandle) {
1493 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
1494 mChannelHandle);
1495 LOGD("stopping channel %d", mChannelHandle);
1496 }
1497
1498 pthread_mutex_lock(&mMutex);
1499
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001500 // Check if HDR+ is enabled.
1501 char prop[PROPERTY_VALUE_MAX];
1502 property_get("persist.camera.hdrplus", prop, "0");
1503 bool enableHdrPlus = atoi(prop);
1504 if (enableHdrPlus) {
1505 ALOGD("%s: HDR+ in Camera HAL enabled.", __FUNCTION__);
1506 // Connect to HDR+ client if not yet.
1507 if (mHdrPlusClient == nullptr) {
1508 mHdrPlusClient = std::make_shared<HdrPlusClient>();
1509 rc = mHdrPlusClient->connect(this);
1510 if (rc < 0) {
1511 LOGE("%s: Failed to connect to HDR+ client: %s (%d).", __FUNCTION__,
1512 strerror(-rc), rc);
1513 pthread_mutex_unlock(&mMutex);
1514 return -ENODEV;
1515 }
1516
1517 // Set static metadata.
1518 rc = mHdrPlusClient->setStaticMetadata(*gStaticMetadata[mCameraId]);
1519 if (rc < 0) {
1520 LOGE("%s: Failed set static metadata in HDR+ client: %s (%d).", __FUNCTION__,
1521 strerror(-rc), rc);
1522 pthread_mutex_unlock(&mMutex);
1523 return -ENODEV;
1524 }
1525 }
1526 } else {
1527 ALOGD("%s: HDR+ in Camera HAL disabled.", __FUNCTION__);
1528 // Disconnect from HDR+ client if HDR+ is not enabled.
1529 if (mHdrPlusClient != nullptr) {
1530 mHdrPlusClient->disconnect();
1531 mHdrPlusClient = nullptr;
1532 }
1533 }
1534
Thierry Strudel3d639192016-09-09 11:52:26 -07001535 // Check state
1536 switch (mState) {
1537 case INITIALIZED:
1538 case CONFIGURED:
1539 case STARTED:
1540 /* valid state */
1541 break;
1542 default:
1543 LOGE("Invalid state %d", mState);
1544 pthread_mutex_unlock(&mMutex);
1545 return -ENODEV;
1546 }
1547
1548 /* Check whether we have video stream */
1549 m_bIs4KVideo = false;
1550 m_bIsVideo = false;
1551 m_bEisSupportedSize = false;
1552 m_bTnrEnabled = false;
1553 bool isZsl = false;
1554 uint32_t videoWidth = 0U;
1555 uint32_t videoHeight = 0U;
1556 size_t rawStreamCnt = 0;
1557 size_t stallStreamCnt = 0;
1558 size_t processedStreamCnt = 0;
1559 // Number of streams on ISP encoder path
1560 size_t numStreamsOnEncoder = 0;
1561 size_t numYuv888OnEncoder = 0;
1562 bool bYuv888OverrideJpeg = false;
1563 cam_dimension_t largeYuv888Size = {0, 0};
1564 cam_dimension_t maxViewfinderSize = {0, 0};
1565 bool bJpegExceeds4K = false;
1566 bool bJpegOnEncoder = false;
1567 bool bUseCommonFeatureMask = false;
1568 cam_feature_mask_t commonFeatureMask = 0;
1569 bool bSmallJpegSize = false;
1570 uint32_t width_ratio;
1571 uint32_t height_ratio;
1572 maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1573 camera3_stream_t *inputStream = NULL;
1574 bool isJpeg = false;
1575 cam_dimension_t jpegSize = {0, 0};
Thierry Strudel9ec39c62016-12-28 11:30:05 -08001576 cam_dimension_t previewSize = {0, 0};
Thierry Strudel3d639192016-09-09 11:52:26 -07001577
1578 cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
1579
1580 /*EIS configuration*/
Thierry Strudel3d639192016-09-09 11:52:26 -07001581 bool oisSupported = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001582 uint8_t eis_prop_set;
1583 uint32_t maxEisWidth = 0;
1584 uint32_t maxEisHeight = 0;
1585
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001586 // Initialize all instant AEC related variables
1587 mInstantAEC = false;
1588 mResetInstantAEC = false;
1589 mInstantAECSettledFrameNumber = 0;
1590 mAecSkipDisplayFrameBound = 0;
1591 mInstantAecFrameIdxCount = 0;
1592
Thierry Strudel3d639192016-09-09 11:52:26 -07001593 memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1594
1595 size_t count = IS_TYPE_MAX;
1596 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1597 for (size_t i = 0; i < count; i++) {
1598 if ((gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001599 (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
1600 m_bEisSupported = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07001601 break;
1602 }
1603 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001604 count = CAM_OPT_STAB_MAX;
1605 count = MIN(gCamCapability[mCameraId]->optical_stab_modes_count, count);
1606 for (size_t i = 0; i < count; i++) {
1607 if (gCamCapability[mCameraId]->optical_stab_modes[i] == CAM_OPT_STAB_ON) {
1608 oisSupported = true;
1609 break;
1610 }
1611 }
1612
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001613 if (m_bEisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001614 maxEisWidth = MAX_EIS_WIDTH;
1615 maxEisHeight = MAX_EIS_HEIGHT;
1616 }
1617
1618 /* EIS setprop control */
1619 char eis_prop[PROPERTY_VALUE_MAX];
1620 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001621 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07001622 eis_prop_set = (uint8_t)atoi(eis_prop);
1623
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001624 m_bEisEnable = eis_prop_set && (!oisSupported && m_bEisSupported) &&
Thierry Strudel3d639192016-09-09 11:52:26 -07001625 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
1626
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001627 LOGD("m_bEisEnable: %d, eis_prop_set: %d, m_bEisSupported: %d, oisSupported:%d ",
1628 m_bEisEnable, eis_prop_set, m_bEisSupported, oisSupported);
1629
Thierry Strudel3d639192016-09-09 11:52:26 -07001630 /* stream configurations */
1631 for (size_t i = 0; i < streamList->num_streams; i++) {
1632 camera3_stream_t *newStream = streamList->streams[i];
1633 LOGI("stream[%d] type = %d, format = %d, width = %d, "
1634 "height = %d, rotation = %d, usage = 0x%x",
1635 i, newStream->stream_type, newStream->format,
1636 newStream->width, newStream->height, newStream->rotation,
1637 newStream->usage);
1638 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1639 newStream->stream_type == CAMERA3_STREAM_INPUT){
1640 isZsl = true;
1641 }
1642 if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1643 inputStream = newStream;
1644 }
1645
1646 if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
1647 isJpeg = true;
1648 jpegSize.width = newStream->width;
1649 jpegSize.height = newStream->height;
1650 if (newStream->width > VIDEO_4K_WIDTH ||
1651 newStream->height > VIDEO_4K_HEIGHT)
1652 bJpegExceeds4K = true;
1653 }
1654
1655 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1656 (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1657 m_bIsVideo = true;
1658 videoWidth = newStream->width;
1659 videoHeight = newStream->height;
1660 if ((VIDEO_4K_WIDTH <= newStream->width) &&
1661 (VIDEO_4K_HEIGHT <= newStream->height)) {
1662 m_bIs4KVideo = true;
1663 }
1664 m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
1665 (newStream->height <= maxEisHeight);
1666 }
1667 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1668 newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
1669 switch (newStream->format) {
1670 case HAL_PIXEL_FORMAT_BLOB:
1671 stallStreamCnt++;
1672 if (isOnEncoder(maxViewfinderSize, newStream->width,
1673 newStream->height)) {
1674 numStreamsOnEncoder++;
1675 bJpegOnEncoder = true;
1676 }
1677 width_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.width,
1678 newStream->width);
1679 height_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.height,
1680 newStream->height);;
1681 FATAL_IF(gCamCapability[mCameraId]->max_downscale_factor == 0,
1682 "FATAL: max_downscale_factor cannot be zero and so assert");
1683 if ( (width_ratio > gCamCapability[mCameraId]->max_downscale_factor) ||
1684 (height_ratio > gCamCapability[mCameraId]->max_downscale_factor)) {
1685 LOGH("Setting small jpeg size flag to true");
1686 bSmallJpegSize = true;
1687 }
1688 break;
1689 case HAL_PIXEL_FORMAT_RAW10:
1690 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1691 case HAL_PIXEL_FORMAT_RAW16:
1692 rawStreamCnt++;
1693 break;
1694 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1695 processedStreamCnt++;
1696 if (isOnEncoder(maxViewfinderSize, newStream->width,
1697 newStream->height)) {
1698 if (newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL &&
1699 !IS_USAGE_ZSL(newStream->usage)) {
1700 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1701 }
1702 numStreamsOnEncoder++;
1703 }
1704 break;
1705 case HAL_PIXEL_FORMAT_YCbCr_420_888:
1706 processedStreamCnt++;
1707 if (isOnEncoder(maxViewfinderSize, newStream->width,
1708 newStream->height)) {
1709 // If Yuv888 size is not greater than 4K, set feature mask
1710 // to SUPERSET so that it support concurrent request on
1711 // YUV and JPEG.
1712 if (newStream->width <= VIDEO_4K_WIDTH &&
1713 newStream->height <= VIDEO_4K_HEIGHT) {
1714 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1715 }
1716 numStreamsOnEncoder++;
1717 numYuv888OnEncoder++;
1718 largeYuv888Size.width = newStream->width;
1719 largeYuv888Size.height = newStream->height;
1720 }
1721 break;
1722 default:
1723 processedStreamCnt++;
1724 if (isOnEncoder(maxViewfinderSize, newStream->width,
1725 newStream->height)) {
1726 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1727 numStreamsOnEncoder++;
1728 }
1729 break;
1730 }
1731
1732 }
1733 }
1734
1735 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
1736 gCamCapability[mCameraId]->position == CAM_POSITION_FRONT_AUX ||
1737 !m_bIsVideo) {
1738 m_bEisEnable = false;
1739 }
1740
1741 /* Logic to enable/disable TNR based on specific config size/etc.*/
1742 if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
1743 ((videoWidth == 1920 && videoHeight == 1080) ||
1744 (videoWidth == 1280 && videoHeight == 720)) &&
1745 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
1746 m_bTnrEnabled = true;
1747
1748 /* Check if num_streams is sane */
1749 if (stallStreamCnt > MAX_STALLING_STREAMS ||
1750 rawStreamCnt > MAX_RAW_STREAMS ||
1751 processedStreamCnt > MAX_PROCESSED_STREAMS) {
1752 LOGE("Invalid stream configu: stall: %d, raw: %d, processed %d",
1753 stallStreamCnt, rawStreamCnt, processedStreamCnt);
1754 pthread_mutex_unlock(&mMutex);
1755 return -EINVAL;
1756 }
1757 /* Check whether we have zsl stream or 4k video case */
Thierry Strudel9ec39c62016-12-28 11:30:05 -08001758 if (isZsl && m_bIs4KVideo) {
1759 LOGE("Currently invalid configuration ZSL & 4K Video!");
Thierry Strudel3d639192016-09-09 11:52:26 -07001760 pthread_mutex_unlock(&mMutex);
1761 return -EINVAL;
1762 }
1763 /* Check if stream sizes are sane */
1764 if (numStreamsOnEncoder > 2) {
1765 LOGE("Number of streams on ISP encoder path exceeds limits of 2");
1766 pthread_mutex_unlock(&mMutex);
1767 return -EINVAL;
1768 } else if (1 < numStreamsOnEncoder){
1769 bUseCommonFeatureMask = true;
1770 LOGH("Multiple streams above max viewfinder size, common mask needed");
1771 }
1772
1773 /* Check if BLOB size is greater than 4k in 4k recording case */
1774 if (m_bIs4KVideo && bJpegExceeds4K) {
1775 LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
1776 pthread_mutex_unlock(&mMutex);
1777 return -EINVAL;
1778 }
1779
1780 // When JPEG and preview streams share VFE output, CPP will not apply CAC2
1781 // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
1782 // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
1783 // is not true. Otherwise testMandatoryOutputCombinations will fail with following
1784 // configurations:
1785 // {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
1786 // {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
1787 // (These two configurations will not have CAC2 enabled even in HQ modes.)
1788 if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
1789 ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
1790 __func__);
1791 pthread_mutex_unlock(&mMutex);
1792 return -EINVAL;
1793 }
1794
1795 // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
1796 // the YUV stream's size is greater or equal to the JPEG size, set common
1797 // postprocess mask to NONE, so that we can take advantage of postproc bypass.
1798 if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
1799 jpegSize.width, jpegSize.height) &&
1800 largeYuv888Size.width > jpegSize.width &&
1801 largeYuv888Size.height > jpegSize.height) {
1802 bYuv888OverrideJpeg = true;
1803 } else if (!isJpeg && numStreamsOnEncoder > 1) {
1804 commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1805 }
1806
1807 LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %llx",
1808 maxViewfinderSize.width, maxViewfinderSize.height, isZsl, bUseCommonFeatureMask,
1809 commonFeatureMask);
1810 LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
1811 numStreamsOnEncoder, processedStreamCnt, stallStreamCnt, bSmallJpegSize);
1812
1813 rc = validateStreamDimensions(streamList);
1814 if (rc == NO_ERROR) {
1815 rc = validateStreamRotations(streamList);
1816 }
1817 if (rc != NO_ERROR) {
1818 LOGE("Invalid stream configuration requested!");
1819 pthread_mutex_unlock(&mMutex);
1820 return rc;
1821 }
1822
1823 camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
1824 for (size_t i = 0; i < streamList->num_streams; i++) {
1825 camera3_stream_t *newStream = streamList->streams[i];
1826 LOGH("newStream type = %d, stream format = %d "
1827 "stream size : %d x %d, stream rotation = %d",
1828 newStream->stream_type, newStream->format,
1829 newStream->width, newStream->height, newStream->rotation);
1830 //if the stream is in the mStreamList validate it
1831 bool stream_exists = false;
1832 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
1833 it != mStreamInfo.end(); it++) {
1834 if ((*it)->stream == newStream) {
1835 QCamera3ProcessingChannel *channel =
1836 (QCamera3ProcessingChannel*)(*it)->stream->priv;
1837 stream_exists = true;
1838 if (channel)
1839 delete channel;
1840 (*it)->status = VALID;
1841 (*it)->stream->priv = NULL;
1842 (*it)->channel = NULL;
1843 }
1844 }
1845 if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
1846 //new stream
1847 stream_info_t* stream_info;
1848 stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
1849 if (!stream_info) {
1850 LOGE("Could not allocate stream info");
1851 rc = -ENOMEM;
1852 pthread_mutex_unlock(&mMutex);
1853 return rc;
1854 }
1855 stream_info->stream = newStream;
1856 stream_info->status = VALID;
1857 stream_info->channel = NULL;
1858 mStreamInfo.push_back(stream_info);
1859 }
1860 /* Covers Opaque ZSL and API1 F/W ZSL */
1861 if (IS_USAGE_ZSL(newStream->usage)
1862 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
1863 if (zslStream != NULL) {
1864 LOGE("Multiple input/reprocess streams requested!");
1865 pthread_mutex_unlock(&mMutex);
1866 return BAD_VALUE;
1867 }
1868 zslStream = newStream;
1869 }
1870 /* Covers YUV reprocess */
1871 if (inputStream != NULL) {
1872 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
1873 && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
1874 && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
1875 && inputStream->width == newStream->width
1876 && inputStream->height == newStream->height) {
1877 if (zslStream != NULL) {
1878 /* This scenario indicates multiple YUV streams with same size
1879 * as input stream have been requested, since zsl stream handle
1880 * is solely use for the purpose of overriding the size of streams
1881 * which share h/w streams we will just make a guess here as to
1882 * which of the stream is a ZSL stream, this will be refactored
1883 * once we make generic logic for streams sharing encoder output
1884 */
1885 LOGH("Warning, Multiple ip/reprocess streams requested!");
1886 }
1887 zslStream = newStream;
1888 }
1889 }
1890 }
1891
1892 /* If a zsl stream is set, we know that we have configured at least one input or
1893 bidirectional stream */
1894 if (NULL != zslStream) {
1895 mInputStreamInfo.dim.width = (int32_t)zslStream->width;
1896 mInputStreamInfo.dim.height = (int32_t)zslStream->height;
1897 mInputStreamInfo.format = zslStream->format;
1898 mInputStreamInfo.usage = zslStream->usage;
1899 LOGD("Input stream configured! %d x %d, format %d, usage %d",
1900 mInputStreamInfo.dim.width,
1901 mInputStreamInfo.dim.height,
1902 mInputStreamInfo.format, mInputStreamInfo.usage);
1903 }
1904
1905 cleanAndSortStreamInfo();
1906 if (mMetadataChannel) {
1907 delete mMetadataChannel;
1908 mMetadataChannel = NULL;
1909 }
1910 if (mSupportChannel) {
1911 delete mSupportChannel;
1912 mSupportChannel = NULL;
1913 }
1914
1915 if (mAnalysisChannel) {
1916 delete mAnalysisChannel;
1917 mAnalysisChannel = NULL;
1918 }
1919
1920 if (mDummyBatchChannel) {
1921 delete mDummyBatchChannel;
1922 mDummyBatchChannel = NULL;
1923 }
1924
1925 //Create metadata channel and initialize it
1926 cam_feature_mask_t metadataFeatureMask = CAM_QCOM_FEATURE_NONE;
1927 setPAAFSupport(metadataFeatureMask, CAM_STREAM_TYPE_METADATA,
1928 gCamCapability[mCameraId]->color_arrangement);
1929 mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
1930 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001931 setBufferErrorStatus, &padding_info, metadataFeatureMask, this);
Thierry Strudel3d639192016-09-09 11:52:26 -07001932 if (mMetadataChannel == NULL) {
1933 LOGE("failed to allocate metadata channel");
1934 rc = -ENOMEM;
1935 pthread_mutex_unlock(&mMutex);
1936 return rc;
1937 }
1938 rc = mMetadataChannel->initialize(IS_TYPE_NONE);
1939 if (rc < 0) {
1940 LOGE("metadata channel initialization failed");
1941 delete mMetadataChannel;
1942 mMetadataChannel = NULL;
1943 pthread_mutex_unlock(&mMutex);
1944 return rc;
1945 }
1946
Thierry Strudel3d639192016-09-09 11:52:26 -07001947 bool isRawStreamRequested = false;
1948 memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
1949 /* Allocate channel objects for the requested streams */
1950 for (size_t i = 0; i < streamList->num_streams; i++) {
1951 camera3_stream_t *newStream = streamList->streams[i];
1952 uint32_t stream_usage = newStream->usage;
1953 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
1954 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
1955 struct camera_info *p_info = NULL;
1956 pthread_mutex_lock(&gCamLock);
1957 p_info = get_cam_info(mCameraId, &mStreamConfigInfo.sync_type);
1958 pthread_mutex_unlock(&gCamLock);
1959 if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
1960 || IS_USAGE_ZSL(newStream->usage)) &&
1961 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
1962 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
1963 if (bUseCommonFeatureMask) {
1964 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1965 commonFeatureMask;
1966 } else {
1967 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1968 CAM_QCOM_FEATURE_NONE;
1969 }
1970
1971 } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
1972 LOGH("Input stream configured, reprocess config");
1973 } else {
1974 //for non zsl streams find out the format
1975 switch (newStream->format) {
1976 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
1977 {
1978 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1979 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1980 /* add additional features to pp feature mask */
1981 addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
1982 mStreamConfigInfo.num_streams);
1983
1984 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
1985 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
1986 CAM_STREAM_TYPE_VIDEO;
1987 if (m_bTnrEnabled && m_bTnrVideo) {
1988 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
1989 CAM_QCOM_FEATURE_CPP_TNR;
1990 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
1991 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
1992 ~CAM_QCOM_FEATURE_CDS;
1993 }
1994 } else {
1995 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
1996 CAM_STREAM_TYPE_PREVIEW;
1997 if (m_bTnrEnabled && m_bTnrPreview) {
1998 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
1999 CAM_QCOM_FEATURE_CPP_TNR;
2000 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2001 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2002 ~CAM_QCOM_FEATURE_CDS;
2003 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002004 if(!m_bSwTnrPreview) {
2005 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2006 ~CAM_QTI_FEATURE_SW_TNR;
2007 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002008 padding_info.width_padding = mSurfaceStridePadding;
2009 padding_info.height_padding = CAM_PAD_TO_2;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002010 previewSize.width = (int32_t)newStream->width;
2011 previewSize.height = (int32_t)newStream->height;
Thierry Strudel3d639192016-09-09 11:52:26 -07002012 }
2013 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
2014 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
2015 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2016 newStream->height;
2017 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2018 newStream->width;
2019 }
2020 }
2021 break;
2022 case HAL_PIXEL_FORMAT_YCbCr_420_888:
2023 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
2024 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2025 if (bUseCommonFeatureMask)
2026 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2027 commonFeatureMask;
2028 else
2029 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2030 CAM_QCOM_FEATURE_NONE;
2031 } else {
2032 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2033 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2034 }
2035 break;
2036 case HAL_PIXEL_FORMAT_BLOB:
2037 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
2038 // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
2039 if ((m_bIs4KVideo && !isZsl) || (bSmallJpegSize && !isZsl)) {
2040 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2041 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2042 } else {
2043 if (bUseCommonFeatureMask &&
2044 isOnEncoder(maxViewfinderSize, newStream->width,
2045 newStream->height)) {
2046 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
2047 } else {
2048 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2049 }
2050 }
2051 if (isZsl) {
2052 if (zslStream) {
2053 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2054 (int32_t)zslStream->width;
2055 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2056 (int32_t)zslStream->height;
2057 } else {
2058 LOGE("Error, No ZSL stream identified");
2059 pthread_mutex_unlock(&mMutex);
2060 return -EINVAL;
2061 }
2062 } else if (m_bIs4KVideo) {
2063 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)videoWidth;
2064 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)videoHeight;
2065 } else if (bYuv888OverrideJpeg) {
2066 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2067 (int32_t)largeYuv888Size.width;
2068 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2069 (int32_t)largeYuv888Size.height;
2070 }
2071 break;
2072 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2073 case HAL_PIXEL_FORMAT_RAW16:
2074 case HAL_PIXEL_FORMAT_RAW10:
2075 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2076 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2077 isRawStreamRequested = true;
2078 break;
2079 default:
2080 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
2081 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2082 break;
2083 }
2084 }
2085
2086 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2087 (cam_stream_type_t) mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2088 gCamCapability[mCameraId]->color_arrangement);
2089
2090 if (newStream->priv == NULL) {
2091 //New stream, construct channel
2092 switch (newStream->stream_type) {
2093 case CAMERA3_STREAM_INPUT:
2094 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
2095 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
2096 break;
2097 case CAMERA3_STREAM_BIDIRECTIONAL:
2098 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
2099 GRALLOC_USAGE_HW_CAMERA_WRITE;
2100 break;
2101 case CAMERA3_STREAM_OUTPUT:
2102 /* For video encoding stream, set read/write rarely
2103 * flag so that they may be set to un-cached */
2104 if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
2105 newStream->usage |=
2106 (GRALLOC_USAGE_SW_READ_RARELY |
2107 GRALLOC_USAGE_SW_WRITE_RARELY |
2108 GRALLOC_USAGE_HW_CAMERA_WRITE);
2109 else if (IS_USAGE_ZSL(newStream->usage))
2110 {
2111 LOGD("ZSL usage flag skipping");
2112 }
2113 else if (newStream == zslStream
2114 || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
2115 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
2116 } else
2117 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
2118 break;
2119 default:
2120 LOGE("Invalid stream_type %d", newStream->stream_type);
2121 break;
2122 }
2123
2124 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
2125 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
2126 QCamera3ProcessingChannel *channel = NULL;
2127 switch (newStream->format) {
2128 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2129 if ((newStream->usage &
2130 private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
2131 (streamList->operation_mode ==
2132 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2133 ) {
2134 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2135 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002136 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002137 this,
2138 newStream,
2139 (cam_stream_type_t)
2140 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2141 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2142 mMetadataChannel,
2143 0); //heap buffers are not required for HFR video channel
2144 if (channel == NULL) {
2145 LOGE("allocation of channel failed");
2146 pthread_mutex_unlock(&mMutex);
2147 return -ENOMEM;
2148 }
2149 //channel->getNumBuffers() will return 0 here so use
2150 //MAX_INFLIGH_HFR_REQUESTS
2151 newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
2152 newStream->priv = channel;
2153 LOGI("num video buffers in HFR mode: %d",
2154 MAX_INFLIGHT_HFR_REQUESTS);
2155 } else {
2156 /* Copy stream contents in HFR preview only case to create
2157 * dummy batch channel so that sensor streaming is in
2158 * HFR mode */
2159 if (!m_bIsVideo && (streamList->operation_mode ==
2160 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
2161 mDummyBatchStream = *newStream;
2162 }
2163 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2164 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002165 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002166 this,
2167 newStream,
2168 (cam_stream_type_t)
2169 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2170 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2171 mMetadataChannel,
2172 MAX_INFLIGHT_REQUESTS);
2173 if (channel == NULL) {
2174 LOGE("allocation of channel failed");
2175 pthread_mutex_unlock(&mMutex);
2176 return -ENOMEM;
2177 }
2178 newStream->max_buffers = channel->getNumBuffers();
2179 newStream->priv = channel;
2180 }
2181 break;
2182 case HAL_PIXEL_FORMAT_YCbCr_420_888: {
2183 channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
2184 mChannelHandle,
2185 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002186 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002187 this,
2188 newStream,
2189 (cam_stream_type_t)
2190 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2191 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2192 mMetadataChannel);
2193 if (channel == NULL) {
2194 LOGE("allocation of YUV channel failed");
2195 pthread_mutex_unlock(&mMutex);
2196 return -ENOMEM;
2197 }
2198 newStream->max_buffers = channel->getNumBuffers();
2199 newStream->priv = channel;
2200 break;
2201 }
2202 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2203 case HAL_PIXEL_FORMAT_RAW16:
2204 case HAL_PIXEL_FORMAT_RAW10:
2205 mRawChannel = new QCamera3RawChannel(
2206 mCameraHandle->camera_handle, mChannelHandle,
2207 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002208 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002209 this, newStream,
2210 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2211 mMetadataChannel,
2212 (newStream->format == HAL_PIXEL_FORMAT_RAW16));
2213 if (mRawChannel == NULL) {
2214 LOGE("allocation of raw channel failed");
2215 pthread_mutex_unlock(&mMutex);
2216 return -ENOMEM;
2217 }
2218 newStream->max_buffers = mRawChannel->getNumBuffers();
2219 newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
2220 break;
2221 case HAL_PIXEL_FORMAT_BLOB:
2222 // Max live snapshot inflight buffer is 1. This is to mitigate
2223 // frame drop issues for video snapshot. The more buffers being
2224 // allocated, the more frame drops there are.
2225 mPictureChannel = new QCamera3PicChannel(
2226 mCameraHandle->camera_handle, mChannelHandle,
2227 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002228 setBufferErrorStatus, &padding_info, this, newStream,
Thierry Strudel3d639192016-09-09 11:52:26 -07002229 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2230 m_bIs4KVideo, isZsl, mMetadataChannel,
2231 (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
2232 if (mPictureChannel == NULL) {
2233 LOGE("allocation of channel failed");
2234 pthread_mutex_unlock(&mMutex);
2235 return -ENOMEM;
2236 }
2237 newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
2238 newStream->max_buffers = mPictureChannel->getNumBuffers();
2239 mPictureChannel->overrideYuvSize(
2240 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
2241 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
2242 break;
2243
2244 default:
2245 LOGE("not a supported format 0x%x", newStream->format);
2246 break;
2247 }
2248 } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
2249 newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
2250 } else {
2251 LOGE("Error, Unknown stream type");
2252 pthread_mutex_unlock(&mMutex);
2253 return -EINVAL;
2254 }
2255
2256 QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
2257 if (channel != NULL && channel->isUBWCEnabled()) {
2258 cam_format_t fmt = channel->getStreamDefaultFormat(
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07002259 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2260 newStream->width, newStream->height);
Thierry Strudel3d639192016-09-09 11:52:26 -07002261 if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
2262 newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
2263 }
2264 }
2265
2266 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2267 it != mStreamInfo.end(); it++) {
2268 if ((*it)->stream == newStream) {
2269 (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
2270 break;
2271 }
2272 }
2273 } else {
2274 // Channel already exists for this stream
2275 // Do nothing for now
2276 }
2277 padding_info = gCamCapability[mCameraId]->padding_info;
2278
2279 /* Do not add entries for input stream in metastream info
2280 * since there is no real stream associated with it
2281 */
2282 if (newStream->stream_type != CAMERA3_STREAM_INPUT)
2283 mStreamConfigInfo.num_streams++;
2284 }
2285
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002286 // Create analysis stream all the time, even when h/w support is not available
2287 {
2288 cam_feature_mask_t analysisFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2289 setPAAFSupport(analysisFeatureMask, CAM_STREAM_TYPE_ANALYSIS,
2290 gCamCapability[mCameraId]->color_arrangement);
2291 cam_analysis_info_t analysisInfo;
2292 int32_t ret = NO_ERROR;
2293 ret = mCommon.getAnalysisInfo(
2294 FALSE,
2295 analysisFeatureMask,
2296 &analysisInfo);
2297 if (ret == NO_ERROR) {
2298 cam_dimension_t analysisDim;
2299 analysisDim = mCommon.getMatchingDimension(previewSize,
2300 analysisInfo.analysis_recommended_res);
2301
2302 mAnalysisChannel = new QCamera3SupportChannel(
2303 mCameraHandle->camera_handle,
2304 mChannelHandle,
2305 mCameraHandle->ops,
2306 &analysisInfo.analysis_padding_info,
2307 analysisFeatureMask,
2308 CAM_STREAM_TYPE_ANALYSIS,
2309 &analysisDim,
2310 (analysisInfo.analysis_format
2311 == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
2312 : CAM_FORMAT_YUV_420_NV21),
2313 analysisInfo.hw_analysis_supported,
2314 gCamCapability[mCameraId]->color_arrangement,
2315 this,
2316 0); // force buffer count to 0
2317 } else {
2318 LOGW("getAnalysisInfo failed, ret = %d", ret);
2319 }
2320 if (!mAnalysisChannel) {
2321 LOGW("Analysis channel cannot be created");
2322 }
2323 }
2324
Thierry Strudel3d639192016-09-09 11:52:26 -07002325 //RAW DUMP channel
2326 if (mEnableRawDump && isRawStreamRequested == false){
2327 cam_dimension_t rawDumpSize;
2328 rawDumpSize = getMaxRawSize(mCameraId);
2329 cam_feature_mask_t rawDumpFeatureMask = CAM_QCOM_FEATURE_NONE;
2330 setPAAFSupport(rawDumpFeatureMask,
2331 CAM_STREAM_TYPE_RAW,
2332 gCamCapability[mCameraId]->color_arrangement);
2333 mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
2334 mChannelHandle,
2335 mCameraHandle->ops,
2336 rawDumpSize,
2337 &padding_info,
2338 this, rawDumpFeatureMask);
2339 if (!mRawDumpChannel) {
2340 LOGE("Raw Dump channel cannot be created");
2341 pthread_mutex_unlock(&mMutex);
2342 return -ENOMEM;
2343 }
2344 }
2345
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002346 // Initialize HDR+ Raw Source channel.
2347 if (mHdrPlusClient != nullptr) {
2348 if (isRawStreamRequested || mRawDumpChannel) {
2349 ALOGE("%s: Enabling HDR+ while RAW output stream is configured is not supported.",
2350 __FUNCTION__);
2351 mHdrPlusClient->disconnect();
2352 mHdrPlusClient = nullptr;
2353 } else {
2354 cam_dimension_t rawSize = getMaxRawSize(mCameraId);
2355 cam_feature_mask_t hdrPlusRawFeatureMask = CAM_QCOM_FEATURE_NONE;
2356 setPAAFSupport(hdrPlusRawFeatureMask,
2357 CAM_STREAM_TYPE_RAW,
2358 gCamCapability[mCameraId]->color_arrangement);
2359 mHdrPlusRawSrcChannel = new QCamera3HdrPlusRawSrcChannel(mCameraHandle->camera_handle,
2360 mChannelHandle,
2361 mCameraHandle->ops,
2362 rawSize,
2363 &padding_info,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08002364 this, hdrPlusRawFeatureMask,
2365 mHdrPlusClient,
2366 kPbRaw10InputStreamId);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002367 if (!mHdrPlusRawSrcChannel) {
2368 LOGE("HDR+ Raw Source channel cannot be created");
2369 pthread_mutex_unlock(&mMutex);
2370 return -ENOMEM;
2371 }
2372 }
2373 }
2374
Thierry Strudel3d639192016-09-09 11:52:26 -07002375
2376 if (mAnalysisChannel) {
2377 cam_analysis_info_t analysisInfo;
2378 memset(&analysisInfo, 0, sizeof(cam_analysis_info_t));
2379 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2380 CAM_STREAM_TYPE_ANALYSIS;
2381 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2382 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2383 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2384 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2385 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002386 rc = mCommon.getAnalysisInfo(FALSE,
Thierry Strudel3d639192016-09-09 11:52:26 -07002387 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2388 &analysisInfo);
2389 if (rc != NO_ERROR) {
2390 LOGE("getAnalysisInfo failed, ret = %d", rc);
2391 pthread_mutex_unlock(&mMutex);
2392 return rc;
2393 }
2394 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002395 mCommon.getMatchingDimension(previewSize,
2396 analysisInfo.analysis_recommended_res);
Thierry Strudel3d639192016-09-09 11:52:26 -07002397 mStreamConfigInfo.num_streams++;
2398 }
2399
2400 if (isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
2401 cam_analysis_info_t supportInfo;
2402 memset(&supportInfo, 0, sizeof(cam_analysis_info_t));
2403 cam_feature_mask_t callbackFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2404 setPAAFSupport(callbackFeatureMask,
2405 CAM_STREAM_TYPE_CALLBACK,
2406 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002407 int32_t ret = NO_ERROR;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002408 ret = mCommon.getAnalysisInfo(FALSE, callbackFeatureMask, &supportInfo);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002409 if (ret != NO_ERROR) {
2410 /* Ignore the error for Mono camera
2411 * because the PAAF bit mask is only set
2412 * for CAM_STREAM_TYPE_ANALYSIS stream type
2413 */
2414 if (gCamCapability[mCameraId]->color_arrangement != CAM_FILTER_ARRANGEMENT_Y) {
2415 LOGW("getAnalysisInfo failed, ret = %d", ret);
2416 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002417 }
2418 mSupportChannel = new QCamera3SupportChannel(
2419 mCameraHandle->camera_handle,
2420 mChannelHandle,
2421 mCameraHandle->ops,
2422 &gCamCapability[mCameraId]->padding_info,
2423 callbackFeatureMask,
2424 CAM_STREAM_TYPE_CALLBACK,
2425 &QCamera3SupportChannel::kDim,
2426 CAM_FORMAT_YUV_420_NV21,
2427 supportInfo.hw_analysis_supported,
2428 gCamCapability[mCameraId]->color_arrangement,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002429 this, 0);
Thierry Strudel3d639192016-09-09 11:52:26 -07002430 if (!mSupportChannel) {
2431 LOGE("dummy channel cannot be created");
2432 pthread_mutex_unlock(&mMutex);
2433 return -ENOMEM;
2434 }
2435 }
2436
2437 if (mSupportChannel) {
2438 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2439 QCamera3SupportChannel::kDim;
2440 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2441 CAM_STREAM_TYPE_CALLBACK;
2442 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2443 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2444 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2445 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2446 gCamCapability[mCameraId]->color_arrangement);
2447 mStreamConfigInfo.num_streams++;
2448 }
2449
2450 if (mRawDumpChannel) {
2451 cam_dimension_t rawSize;
2452 rawSize = getMaxRawSize(mCameraId);
2453 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2454 rawSize;
2455 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2456 CAM_STREAM_TYPE_RAW;
2457 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2458 CAM_QCOM_FEATURE_NONE;
2459 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2460 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2461 gCamCapability[mCameraId]->color_arrangement);
2462 mStreamConfigInfo.num_streams++;
2463 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002464
2465 if (mHdrPlusRawSrcChannel) {
2466 cam_dimension_t rawSize;
2467 rawSize = getMaxRawSize(mCameraId);
2468 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] = rawSize;
2469 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2470 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2471 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2472 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2473 gCamCapability[mCameraId]->color_arrangement);
2474 mStreamConfigInfo.num_streams++;
2475 }
2476
Thierry Strudel3d639192016-09-09 11:52:26 -07002477 /* In HFR mode, if video stream is not added, create a dummy channel so that
2478 * ISP can create a batch mode even for preview only case. This channel is
2479 * never 'start'ed (no stream-on), it is only 'initialized' */
2480 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2481 !m_bIsVideo) {
2482 cam_feature_mask_t dummyFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2483 setPAAFSupport(dummyFeatureMask,
2484 CAM_STREAM_TYPE_VIDEO,
2485 gCamCapability[mCameraId]->color_arrangement);
2486 mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2487 mChannelHandle,
2488 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002489 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002490 this,
2491 &mDummyBatchStream,
2492 CAM_STREAM_TYPE_VIDEO,
2493 dummyFeatureMask,
2494 mMetadataChannel);
2495 if (NULL == mDummyBatchChannel) {
2496 LOGE("creation of mDummyBatchChannel failed."
2497 "Preview will use non-hfr sensor mode ");
2498 }
2499 }
2500 if (mDummyBatchChannel) {
2501 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2502 mDummyBatchStream.width;
2503 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2504 mDummyBatchStream.height;
2505 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2506 CAM_STREAM_TYPE_VIDEO;
2507 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2508 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2509 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2510 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2511 gCamCapability[mCameraId]->color_arrangement);
2512 mStreamConfigInfo.num_streams++;
2513 }
2514
2515 mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
2516 mStreamConfigInfo.buffer_info.max_buffers =
2517 m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
2518
2519 /* Initialize mPendingRequestInfo and mPendingBuffersMap */
2520 for (pendingRequestIterator i = mPendingRequestsList.begin();
2521 i != mPendingRequestsList.end();) {
2522 i = erasePendingRequest(i);
2523 }
2524 mPendingFrameDropList.clear();
2525 // Initialize/Reset the pending buffers list
2526 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
2527 req.mPendingBufferList.clear();
2528 }
2529 mPendingBuffersMap.mPendingBuffersInRequest.clear();
2530
Thierry Strudel3d639192016-09-09 11:52:26 -07002531 mCurJpegMeta.clear();
2532 //Get min frame duration for this streams configuration
2533 deriveMinFrameDuration();
2534
2535 // Update state
2536 mState = CONFIGURED;
2537
2538 pthread_mutex_unlock(&mMutex);
2539
2540 return rc;
2541}
2542
2543/*===========================================================================
2544 * FUNCTION : validateCaptureRequest
2545 *
2546 * DESCRIPTION: validate a capture request from camera service
2547 *
2548 * PARAMETERS :
2549 * @request : request from framework to process
2550 *
2551 * RETURN :
2552 *
2553 *==========================================================================*/
2554int QCamera3HardwareInterface::validateCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002555 camera3_capture_request_t *request,
2556 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07002557{
2558 ssize_t idx = 0;
2559 const camera3_stream_buffer_t *b;
2560 CameraMetadata meta;
2561
2562 /* Sanity check the request */
2563 if (request == NULL) {
2564 LOGE("NULL capture request");
2565 return BAD_VALUE;
2566 }
2567
2568 if ((request->settings == NULL) && (mState == CONFIGURED)) {
2569 /*settings cannot be null for the first request*/
2570 return BAD_VALUE;
2571 }
2572
2573 uint32_t frameNumber = request->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002574 if ((request->num_output_buffers < 1 || request->output_buffers == NULL)
2575 && (internallyRequestedStreams.size() == 0)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002576 LOGE("Request %d: No output buffers provided!",
2577 __FUNCTION__, frameNumber);
2578 return BAD_VALUE;
2579 }
2580 if (request->num_output_buffers >= MAX_NUM_STREAMS) {
2581 LOGE("Number of buffers %d equals or is greater than maximum number of streams!",
2582 request->num_output_buffers, MAX_NUM_STREAMS);
2583 return BAD_VALUE;
2584 }
2585 if (request->input_buffer != NULL) {
2586 b = request->input_buffer;
2587 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2588 LOGE("Request %d: Buffer %ld: Status not OK!",
2589 frameNumber, (long)idx);
2590 return BAD_VALUE;
2591 }
2592 if (b->release_fence != -1) {
2593 LOGE("Request %d: Buffer %ld: Has a release fence!",
2594 frameNumber, (long)idx);
2595 return BAD_VALUE;
2596 }
2597 if (b->buffer == NULL) {
2598 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
2599 frameNumber, (long)idx);
2600 return BAD_VALUE;
2601 }
2602 }
2603
2604 // Validate all buffers
2605 b = request->output_buffers;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002606 while (idx < (ssize_t)request->num_output_buffers) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002607 QCamera3ProcessingChannel *channel =
2608 static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
2609 if (channel == NULL) {
2610 LOGE("Request %d: Buffer %ld: Unconfigured stream!",
2611 frameNumber, (long)idx);
2612 return BAD_VALUE;
2613 }
2614 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2615 LOGE("Request %d: Buffer %ld: Status not OK!",
2616 frameNumber, (long)idx);
2617 return BAD_VALUE;
2618 }
2619 if (b->release_fence != -1) {
2620 LOGE("Request %d: Buffer %ld: Has a release fence!",
2621 frameNumber, (long)idx);
2622 return BAD_VALUE;
2623 }
2624 if (b->buffer == NULL) {
2625 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
2626 frameNumber, (long)idx);
2627 return BAD_VALUE;
2628 }
2629 if (*(b->buffer) == NULL) {
2630 LOGE("Request %d: Buffer %ld: NULL private handle!",
2631 frameNumber, (long)idx);
2632 return BAD_VALUE;
2633 }
2634 idx++;
2635 b = request->output_buffers + idx;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002636 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002637 return NO_ERROR;
2638}
2639
2640/*===========================================================================
2641 * FUNCTION : deriveMinFrameDuration
2642 *
2643 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
2644 * on currently configured streams.
2645 *
2646 * PARAMETERS : NONE
2647 *
2648 * RETURN : NONE
2649 *
2650 *==========================================================================*/
2651void QCamera3HardwareInterface::deriveMinFrameDuration()
2652{
2653 int32_t maxJpegDim, maxProcessedDim, maxRawDim;
2654
2655 maxJpegDim = 0;
2656 maxProcessedDim = 0;
2657 maxRawDim = 0;
2658
2659 // Figure out maximum jpeg, processed, and raw dimensions
2660 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
2661 it != mStreamInfo.end(); it++) {
2662
2663 // Input stream doesn't have valid stream_type
2664 if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
2665 continue;
2666
2667 int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
2668 if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
2669 if (dimension > maxJpegDim)
2670 maxJpegDim = dimension;
2671 } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
2672 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
2673 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
2674 if (dimension > maxRawDim)
2675 maxRawDim = dimension;
2676 } else {
2677 if (dimension > maxProcessedDim)
2678 maxProcessedDim = dimension;
2679 }
2680 }
2681
2682 size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
2683 MAX_SIZES_CNT);
2684
2685 //Assume all jpeg dimensions are in processed dimensions.
2686 if (maxJpegDim > maxProcessedDim)
2687 maxProcessedDim = maxJpegDim;
2688 //Find the smallest raw dimension that is greater or equal to jpeg dimension
2689 if (maxProcessedDim > maxRawDim) {
2690 maxRawDim = INT32_MAX;
2691
2692 for (size_t i = 0; i < count; i++) {
2693 int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
2694 gCamCapability[mCameraId]->raw_dim[i].height;
2695 if (dimension >= maxProcessedDim && dimension < maxRawDim)
2696 maxRawDim = dimension;
2697 }
2698 }
2699
2700 //Find minimum durations for processed, jpeg, and raw
2701 for (size_t i = 0; i < count; i++) {
2702 if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
2703 gCamCapability[mCameraId]->raw_dim[i].height) {
2704 mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
2705 break;
2706 }
2707 }
2708 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
2709 for (size_t i = 0; i < count; i++) {
2710 if (maxProcessedDim ==
2711 gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
2712 gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
2713 mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
2714 mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
2715 break;
2716 }
2717 }
2718}
2719
2720/*===========================================================================
2721 * FUNCTION : getMinFrameDuration
2722 *
2723 * DESCRIPTION: get minimum frame duration based on the current maximum frame durations
2724 * and current request configuration.
2725 *
2726 * PARAMETERS : @request: request sent by the frameworks
2727 *
2728 * RETURN : min frame duration for a particular request
2729 *
2730 *==========================================================================*/
2731int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
2732{
2733 bool hasJpegStream = false;
2734 bool hasRawStream = false;
2735 for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
2736 const camera3_stream_t *stream = request->output_buffers[i].stream;
2737 if (stream->format == HAL_PIXEL_FORMAT_BLOB)
2738 hasJpegStream = true;
2739 else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
2740 stream->format == HAL_PIXEL_FORMAT_RAW10 ||
2741 stream->format == HAL_PIXEL_FORMAT_RAW16)
2742 hasRawStream = true;
2743 }
2744
2745 if (!hasJpegStream)
2746 return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
2747 else
2748 return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
2749}
2750
2751/*===========================================================================
2752 * FUNCTION : handleBuffersDuringFlushLock
2753 *
2754 * DESCRIPTION: Account for buffers returned from back-end during flush
2755 * This function is executed while mMutex is held by the caller.
2756 *
2757 * PARAMETERS :
2758 * @buffer: image buffer for the callback
2759 *
2760 * RETURN :
2761 *==========================================================================*/
2762void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
2763{
2764 bool buffer_found = false;
2765 for (List<PendingBuffersInRequest>::iterator req =
2766 mPendingBuffersMap.mPendingBuffersInRequest.begin();
2767 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
2768 for (List<PendingBufferInfo>::iterator i =
2769 req->mPendingBufferList.begin();
2770 i != req->mPendingBufferList.end(); i++) {
2771 if (i->buffer == buffer->buffer) {
2772 mPendingBuffersMap.numPendingBufsAtFlush--;
2773 LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
2774 buffer->buffer, req->frame_number,
2775 mPendingBuffersMap.numPendingBufsAtFlush);
2776 buffer_found = true;
2777 break;
2778 }
2779 }
2780 if (buffer_found) {
2781 break;
2782 }
2783 }
2784 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
2785 //signal the flush()
2786 LOGD("All buffers returned to HAL. Continue flush");
2787 pthread_cond_signal(&mBuffersCond);
2788 }
2789}
2790
Thierry Strudel3d639192016-09-09 11:52:26 -07002791/*===========================================================================
2792 * FUNCTION : handleBatchMetadata
2793 *
2794 * DESCRIPTION: Handles metadata buffer callback in batch mode
2795 *
2796 * PARAMETERS : @metadata_buf: metadata buffer
2797 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
2798 * the meta buf in this method
2799 *
2800 * RETURN :
2801 *
2802 *==========================================================================*/
void QCamera3HardwareInterface::handleBatchMetadata(
        mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BATCH_METADATA);

    if (NULL == metadata_buf) {
        LOGE("metadata_buf is NULL");
        return;
    }
    /* In batch mode, the metadata will contain the frame number and timestamp of
     * the last frame in the batch. Eg: a batch containing buffers from request
     * 5,6,7 and 8 will have frame number and timestamp corresponding to 8.
     * multiple process_capture_requests => 1 set_param => 1 handleBatchMetadata =>
     * multiple process_capture_results */
    metadata_buffer_t *metadata =
            (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
    int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
    uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
    uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
    uint32_t frame_number = 0, urgent_frame_number = 0;
    int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
    bool invalid_metadata = false;
    size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
    size_t loopCount = 1;

    // Extract the batch's (last-frame) bookkeeping entries from the metadata.
    int32_t *p_frame_number_valid =
            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
    uint32_t *p_frame_number =
            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
    int64_t *p_capture_time =
            POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
    int32_t *p_urgent_frame_number_valid =
            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
    uint32_t *p_urgent_frame_number =
            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);

    // Missing entries mark the whole buffer invalid, but processing still
    // continues below so pipeline depth accounting stays correct.
    if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
            (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
            (NULL == p_urgent_frame_number)) {
        LOGE("Invalid metadata");
        invalid_metadata = true;
    } else {
        frame_number_valid = *p_frame_number_valid;
        last_frame_number = *p_frame_number;
        last_frame_capture_time = *p_capture_time;
        urgent_frame_number_valid = *p_urgent_frame_number_valid;
        last_urgent_frame_number = *p_urgent_frame_number;
    }

    /* In batchmode, when no video buffers are requested, set_parms are sent
     * for every capture_request. The difference between consecutive urgent
     * frame numbers and frame numbers should be used to interpolate the
     * corresponding frame numbers and time stamps */
    pthread_mutex_lock(&mMutex);
    if (urgent_frame_number_valid) {
        // Map the batch's last urgent frame number back to the first frame
        // number of that batch via mPendingBatchMap.
        ssize_t idx = mPendingBatchMap.indexOfKey(last_urgent_frame_number);
        if(idx < 0) {
            LOGE("Invalid urgent frame number received: %d. Irrecoverable error",
                last_urgent_frame_number);
            mState = ERROR;
            pthread_mutex_unlock(&mMutex);
            return;
        }
        first_urgent_frame_number = mPendingBatchMap.valueAt(idx);
        urgentFrameNumDiff = last_urgent_frame_number + 1 -
                first_urgent_frame_number;

        LOGD("urgent_frm: valid: %d frm_num: %d - %d",
                urgent_frame_number_valid,
                first_urgent_frame_number, last_urgent_frame_number);
    }

    if (frame_number_valid) {
        // Same mapping for the regular frame number; the batch entry is
        // removed here because this is the final callback for the batch.
        ssize_t idx = mPendingBatchMap.indexOfKey(last_frame_number);
        if(idx < 0) {
            LOGE("Invalid frame number received: %d. Irrecoverable error",
                last_frame_number);
            mState = ERROR;
            pthread_mutex_unlock(&mMutex);
            return;
        }
        first_frame_number = mPendingBatchMap.valueAt(idx);
        frameNumDiff = last_frame_number + 1 -
                first_frame_number;
        mPendingBatchMap.removeItem(last_frame_number);

        LOGD("frm: valid: %d frm_num: %d - %d",
                frame_number_valid,
                first_frame_number, last_frame_number);

    }
    pthread_mutex_unlock(&mMutex);

    if (urgent_frame_number_valid || frame_number_valid) {
        // One loop iteration per frame in the batch; a diff larger than the
        // configured batch size indicates dropped bookkeeping upstream.
        loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
        if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
            LOGE("urgentFrameNumDiff: %d urgentFrameNum: %d",
                    urgentFrameNumDiff, last_urgent_frame_number);
        if (frameNumDiff > MAX_HFR_BATCH_SIZE)
            LOGE("frameNumDiff: %d frameNum: %d",
                    frameNumDiff, last_frame_number);
    }

    for (size_t i = 0; i < loopCount; i++) {
        /* handleMetadataWithLock is called even for invalid_metadata for
         * pipeline depth calculation */
        if (!invalid_metadata) {
            /* Infer frame number. Batch metadata contains frame number of the
             * last frame */
            if (urgent_frame_number_valid) {
                if (i < urgentFrameNumDiff) {
                    // Rewrite the in-place metadata with the interpolated
                    // urgent frame number for this iteration.
                    urgent_frame_number =
                            first_urgent_frame_number + i;
                    LOGD("inferred urgent frame_number: %d",
                            urgent_frame_number);
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                            CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
                } else {
                    /* This is to handle when urgentFrameNumDiff < frameNumDiff */
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                            CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
                }
            }

            /* Infer frame number. Batch metadata contains frame number of the
             * last frame */
            if (frame_number_valid) {
                if (i < frameNumDiff) {
                    frame_number = first_frame_number + i;
                    LOGD("inferred frame_number: %d", frame_number);
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                            CAM_INTF_META_FRAME_NUMBER, frame_number);
                } else {
                    /* This is to handle when urgentFrameNumDiff > frameNumDiff */
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                             CAM_INTF_META_FRAME_NUMBER_VALID, 0);
                }
            }

            if (last_frame_capture_time) {
                //Infer timestamp: back-compute the first frame's capture time
                //from the last frame's, then step forward by the HFR frame
                //interval for each iteration.
                first_frame_capture_time = last_frame_capture_time -
                        (((loopCount - 1) * NSEC_PER_SEC) / (double) mHFRVideoFps);
                capture_time =
                        first_frame_capture_time + (i * NSEC_PER_SEC / (double) mHFRVideoFps);
                ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                        CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
                LOGD("batch capture_time: %lld, capture_time: %lld",
                        last_frame_capture_time, capture_time);
            }
        }
        // Buf-done/free is deferred to the end of the batch, so each
        // per-frame call passes false here.
        pthread_mutex_lock(&mMutex);
        handleMetadataWithLock(metadata_buf,
                false /* free_and_bufdone_meta_buf */,
                (i == 0) /* first metadata in the batch metadata */);
        pthread_mutex_unlock(&mMutex);
    }

    /* BufDone metadata buffer */
    if (free_and_bufdone_meta_buf) {
        mMetadataChannel->bufDone(metadata_buf);
        free(metadata_buf);
    }
}
2967
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002968void QCamera3HardwareInterface::notifyError(uint32_t frameNumber,
2969 camera3_error_msg_code_t errorCode)
2970{
2971 camera3_notify_msg_t notify_msg;
2972 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
2973 notify_msg.type = CAMERA3_MSG_ERROR;
2974 notify_msg.message.error.error_code = errorCode;
2975 notify_msg.message.error.error_stream = NULL;
2976 notify_msg.message.error.frame_number = frameNumber;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002977 orchestrateNotify(&notify_msg);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002978
2979 return;
2980}
Thierry Strudel3d639192016-09-09 11:52:26 -07002981/*===========================================================================
2982 * FUNCTION : handleMetadataWithLock
2983 *
2984 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
2985 *
2986 * PARAMETERS : @metadata_buf: metadata buffer
2987 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
2988 * the meta buf in this method
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002989 * @firstMetadataInBatch: Boolean to indicate whether this is the
2990 * first metadata in a batch. Valid only for batch mode
Thierry Strudel3d639192016-09-09 11:52:26 -07002991 *
2992 * RETURN :
2993 *
2994 *==========================================================================*/
2995void QCamera3HardwareInterface::handleMetadataWithLock(
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002996 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
2997 bool firstMetadataInBatch)
Thierry Strudel3d639192016-09-09 11:52:26 -07002998{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002999 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_METADATA_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003000 if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
3001 //during flush do not send metadata from this thread
3002 LOGD("not sending metadata during flush or when mState is error");
3003 if (free_and_bufdone_meta_buf) {
3004 mMetadataChannel->bufDone(metadata_buf);
3005 free(metadata_buf);
3006 }
3007 return;
3008 }
3009
3010 //not in flush
3011 metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3012 int32_t frame_number_valid, urgent_frame_number_valid;
3013 uint32_t frame_number, urgent_frame_number;
3014 int64_t capture_time;
3015 nsecs_t currentSysTime;
3016
3017 int32_t *p_frame_number_valid =
3018 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3019 uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3020 int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3021 int32_t *p_urgent_frame_number_valid =
3022 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3023 uint32_t *p_urgent_frame_number =
3024 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3025 IF_META_AVAILABLE(cam_stream_ID_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
3026 metadata) {
3027 LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
3028 *p_frame_number_valid, *p_frame_number);
3029 }
3030
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003031 camera_metadata_t *resultMetadata = nullptr;
3032
Thierry Strudel3d639192016-09-09 11:52:26 -07003033 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
3034 (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
3035 LOGE("Invalid metadata");
3036 if (free_and_bufdone_meta_buf) {
3037 mMetadataChannel->bufDone(metadata_buf);
3038 free(metadata_buf);
3039 }
3040 goto done_metadata;
3041 }
3042 frame_number_valid = *p_frame_number_valid;
3043 frame_number = *p_frame_number;
3044 capture_time = *p_capture_time;
3045 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3046 urgent_frame_number = *p_urgent_frame_number;
3047 currentSysTime = systemTime(CLOCK_MONOTONIC);
3048
3049 // Detect if buffers from any requests are overdue
3050 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003051 int64_t timeout;
3052 {
3053 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
3054 // If there is a pending HDR+ request, the following requests may be blocked until the
3055 // HDR+ request is done. So allow a longer timeout.
3056 timeout = (mHdrPlusPendingRequests.size() > 0) ?
3057 MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT : MISSING_REQUEST_BUF_TIMEOUT;
3058 }
3059
3060 if ( (currentSysTime - req.timestamp) > s2ns(timeout) ) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003061 for (auto &missed : req.mPendingBufferList) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003062 assert(missed.stream->priv);
3063 if (missed.stream->priv) {
3064 QCamera3Channel *ch = (QCamera3Channel *)(missed.stream->priv);
3065 assert(ch->mStreams[0]);
3066 if (ch->mStreams[0]) {
3067 LOGE("Cancel missing frame = %d, buffer = %p,"
3068 "stream type = %d, stream format = %d",
3069 req.frame_number, missed.buffer,
3070 ch->mStreams[0]->getMyType(), missed.stream->format);
3071 ch->timeoutFrame(req.frame_number);
3072 }
3073 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003074 }
3075 }
3076 }
3077 //Partial result on process_capture_result for timestamp
3078 if (urgent_frame_number_valid) {
3079 LOGD("valid urgent frame_number = %u, capture_time = %lld",
3080 urgent_frame_number, capture_time);
3081
3082 //Recieved an urgent Frame Number, handle it
3083 //using partial results
3084 for (pendingRequestIterator i =
3085 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3086 LOGD("Iterator Frame = %d urgent frame = %d",
3087 i->frame_number, urgent_frame_number);
3088
3089 if ((!i->input_buffer) && (i->frame_number < urgent_frame_number) &&
3090 (i->partial_result_cnt == 0)) {
3091 LOGE("Error: HAL missed urgent metadata for frame number %d",
3092 i->frame_number);
3093 }
3094
3095 if (i->frame_number == urgent_frame_number &&
3096 i->bUrgentReceived == 0) {
3097
3098 camera3_capture_result_t result;
3099 memset(&result, 0, sizeof(camera3_capture_result_t));
3100
3101 i->partial_result_cnt++;
3102 i->bUrgentReceived = 1;
3103 // Extract 3A metadata
3104 result.result =
3105 translateCbUrgentMetadataToResultMetadata(metadata);
3106 // Populate metadata result
3107 result.frame_number = urgent_frame_number;
3108 result.num_output_buffers = 0;
3109 result.output_buffers = NULL;
3110 result.partial_result = i->partial_result_cnt;
3111
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003112 if (mHdrPlusClient != nullptr) {
3113 // Notify HDR+ client about the partial metadata.
3114 mHdrPlusClient->notifyFrameMetadata(result.frame_number, *result.result,
3115 result.partial_result == PARTIAL_RESULT_COUNT);
3116 }
3117
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003118 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07003119 LOGD("urgent frame_number = %u, capture_time = %lld",
3120 result.frame_number, capture_time);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003121 if (mResetInstantAEC && mInstantAECSettledFrameNumber == 0) {
3122 // Instant AEC settled for this frame.
3123 LOGH("instant AEC settled for frame number %d", urgent_frame_number);
3124 mInstantAECSettledFrameNumber = urgent_frame_number;
3125 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003126 free_camera_metadata((camera_metadata_t *)result.result);
3127 break;
3128 }
3129 }
3130 }
3131
3132 if (!frame_number_valid) {
3133 LOGD("Not a valid normal frame number, used as SOF only");
3134 if (free_and_bufdone_meta_buf) {
3135 mMetadataChannel->bufDone(metadata_buf);
3136 free(metadata_buf);
3137 }
3138 goto done_metadata;
3139 }
3140 LOGH("valid frame_number = %u, capture_time = %lld",
3141 frame_number, capture_time);
3142
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003143 // Check whether any stream buffer corresponding to this is dropped or not
3144 // If dropped, then send the ERROR_BUFFER for the corresponding stream
3145 // OR check if instant AEC is enabled, then need to drop frames untill AEC is settled.
3146 for (auto & pendingRequest : mPendingRequestsList) {
3147 if (p_cam_frame_drop || (mInstantAEC || pendingRequest.frame_number <
3148 mInstantAECSettledFrameNumber)) {
3149 camera3_notify_msg_t notify_msg = {};
3150 for (auto & buffer : pendingRequest.buffers) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003151 bool dropFrame = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003152 QCamera3ProcessingChannel *channel =
3153 (QCamera3ProcessingChannel *)buffer.stream->priv;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003154 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003155 if (p_cam_frame_drop) {
3156 for (uint32_t k = 0; k < p_cam_frame_drop->num_streams; k++) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003157 if (streamID == p_cam_frame_drop->stream_request[k].streamID) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003158 // Got the stream ID for drop frame.
3159 dropFrame = true;
3160 break;
3161 }
3162 }
3163 } else {
3164 // This is instant AEC case.
3165 // For instant AEC drop the stream untill AEC is settled.
3166 dropFrame = true;
3167 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003168
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003169 if (dropFrame) {
3170 // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
3171 if (p_cam_frame_drop) {
3172 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003173 LOGE("Start of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003174 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003175 } else {
3176 // For instant AEC, inform frame drop and frame number
3177 LOGH("Start of reporting error frame#=%u for instant AEC, streamID=%u, "
3178 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003179 pendingRequest.frame_number, streamID,
3180 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003181 }
3182 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003183 notify_msg.message.error.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003184 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003185 notify_msg.message.error.error_stream = buffer.stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003186 orchestrateNotify(&notify_msg);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003187 if (p_cam_frame_drop) {
3188 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003189 LOGE("End of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003190 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003191 } else {
3192 // For instant AEC, inform frame drop and frame number
3193 LOGH("End of reporting error frame#=%u for instant AEC, streamID=%u, "
3194 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003195 pendingRequest.frame_number, streamID,
3196 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003197 }
3198 PendingFrameDropInfo PendingFrameDrop;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003199 PendingFrameDrop.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003200 PendingFrameDrop.stream_ID = streamID;
3201 // Add the Frame drop info to mPendingFrameDropList
3202 mPendingFrameDropList.push_back(PendingFrameDrop);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003203 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003204 }
3205 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003206 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003207
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003208 for (auto & pendingRequest : mPendingRequestsList) {
3209 // Find the pending request with the frame number.
3210 if (pendingRequest.frame_number == frame_number) {
3211 // Update the sensor timestamp.
3212 pendingRequest.timestamp = capture_time;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003213
Thierry Strudel3d639192016-09-09 11:52:26 -07003214
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003215 /* Set the timestamp in display metadata so that clients aware of
3216 private_handle such as VT can use this un-modified timestamps.
3217 Camera framework is unaware of this timestamp and cannot change this */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003218 updateTimeStampInPendingBuffers(pendingRequest.frame_number, pendingRequest.timestamp);
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003219
Thierry Strudel3d639192016-09-09 11:52:26 -07003220 // Find channel requiring metadata, meaning internal offline postprocess
3221 // is needed.
3222 //TODO: for now, we don't support two streams requiring metadata at the same time.
3223 // (because we are not making copies, and metadata buffer is not reference counted.
3224 bool internalPproc = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003225 for (pendingBufferIterator iter = pendingRequest.buffers.begin();
3226 iter != pendingRequest.buffers.end(); iter++) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003227 if (iter->need_metadata) {
3228 internalPproc = true;
3229 QCamera3ProcessingChannel *channel =
3230 (QCamera3ProcessingChannel *)iter->stream->priv;
3231 channel->queueReprocMetadata(metadata_buf);
3232 break;
3233 }
3234 }
3235
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003236 for (auto itr = pendingRequest.internalRequestList.begin();
3237 itr != pendingRequest.internalRequestList.end(); itr++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003238 if (itr->need_metadata) {
3239 internalPproc = true;
3240 QCamera3ProcessingChannel *channel =
3241 (QCamera3ProcessingChannel *)itr->stream->priv;
3242 channel->queueReprocMetadata(metadata_buf);
3243 break;
3244 }
3245 }
3246
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003247 resultMetadata = translateFromHalMetadata(metadata,
3248 pendingRequest.timestamp, pendingRequest.request_id,
3249 pendingRequest.jpegMetadata, pendingRequest.pipeline_depth,
3250 pendingRequest.capture_intent,
Samuel Ha68ba5172016-12-15 18:41:12 -08003251 /* DevCamDebug metadata translateFromHalMetadata function call*/
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003252 pendingRequest.DevCamDebug_meta_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08003253 /* DevCamDebug metadata end */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003254 internalPproc, pendingRequest.fwkCacMode,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003255 firstMetadataInBatch);
Thierry Strudel3d639192016-09-09 11:52:26 -07003256
3257 saveExifParams(metadata);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003258 updateFpsInPreviewBuffer(metadata, pendingRequest.frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003259
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003260 if (pendingRequest.blob_request) {
3261 //Dump tuning metadata if enabled and available
3262 char prop[PROPERTY_VALUE_MAX];
3263 memset(prop, 0, sizeof(prop));
3264 property_get("persist.camera.dumpmetadata", prop, "0");
3265 int32_t enabled = atoi(prop);
3266 if (enabled && metadata->is_tuning_params_valid) {
3267 dumpMetadataToFile(metadata->tuning_params,
3268 mMetaFrameCount,
3269 enabled,
3270 "Snapshot",
3271 frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003272 }
3273 }
3274
3275 if (!internalPproc) {
3276 LOGD("couldn't find need_metadata for this metadata");
3277 // Return metadata buffer
3278 if (free_and_bufdone_meta_buf) {
3279 mMetadataChannel->bufDone(metadata_buf);
3280 free(metadata_buf);
3281 }
3282 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003283
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003284 break;
Thierry Strudel3d639192016-09-09 11:52:26 -07003285 }
3286 }
3287
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003288 // Try to send out shutter callbacks and capture results.
3289 handlePendingResultsWithLock(frame_number, resultMetadata);
3290 return;
3291
Thierry Strudel3d639192016-09-09 11:52:26 -07003292done_metadata:
3293 for (pendingRequestIterator i = mPendingRequestsList.begin();
3294 i != mPendingRequestsList.end() ;i++) {
3295 i->pipeline_depth++;
3296 }
3297 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
3298 unblockRequestIfNecessary();
3299}
3300
3301/*===========================================================================
3302 * FUNCTION : hdrPlusPerfLock
3303 *
3304 * DESCRIPTION: perf lock for HDR+ using custom intent
3305 *
3306 * PARAMETERS : @metadata_buf: Metadata super_buf pointer
3307 *
3308 * RETURN : None
3309 *
3310 *==========================================================================*/
3311void QCamera3HardwareInterface::hdrPlusPerfLock(
3312 mm_camera_super_buf_t *metadata_buf)
3313{
3314 if (NULL == metadata_buf) {
3315 LOGE("metadata_buf is NULL");
3316 return;
3317 }
3318 metadata_buffer_t *metadata =
3319 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3320 int32_t *p_frame_number_valid =
3321 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3322 uint32_t *p_frame_number =
3323 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3324
3325 if (p_frame_number_valid == NULL || p_frame_number == NULL) {
3326 LOGE("%s: Invalid metadata", __func__);
3327 return;
3328 }
3329
3330 //acquire perf lock for 5 sec after the last HDR frame is captured
3331 if ((p_frame_number_valid != NULL) && *p_frame_number_valid) {
3332 if ((p_frame_number != NULL) &&
3333 (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003334 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT, HDR_PLUS_PERF_TIME_OUT);
Thierry Strudel3d639192016-09-09 11:52:26 -07003335 }
3336 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003337}
3338
3339/*===========================================================================
3340 * FUNCTION : handleInputBufferWithLock
3341 *
3342 * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
3343 *
3344 * PARAMETERS : @frame_number: frame number of the input buffer
3345 *
3346 * RETURN :
3347 *
3348 *==========================================================================*/
void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_IN_BUF_LKD);
    // Locate the pending (reprocess) request matching this frame number.
    pendingRequestIterator i = mPendingRequestsList.begin();
    while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
        i++;
    }
    if (i != mPendingRequestsList.end() && i->input_buffer) {
        //found the right request
        // Send the shutter notification exactly once per request. The shutter
        // timestamp is taken from ANDROID_SENSOR_TIMESTAMP in the request's
        // own settings; if absent, fall back to the current monotonic time.
        if (!i->shutter_notified) {
            CameraMetadata settings;
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
            nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
            if(i->settings) {
                settings = i->settings;
                if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
                    capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
                } else {
                    LOGE("No timestamp in input settings! Using current one.");
                }
            } else {
                LOGE("Input settings missing!");
            }

            notify_msg.type = CAMERA3_MSG_SHUTTER;
            notify_msg.message.shutter.frame_number = frame_number;
            notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
            orchestrateNotify(&notify_msg);
            i->shutter_notified = true;
            LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
                        i->frame_number, notify_msg.message.shutter.timestamp);
        }

        // Wait for (and close) the input buffer's release fence before
        // returning it to the framework.
        if (i->input_buffer->release_fence != -1) {
           int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
           close(i->input_buffer->release_fence);
           if (rc != OK) {
               LOGE("input buffer sync wait failed %d", rc);
           }
        }

        // For a reprocess request the result metadata is the request's own
        // settings, delivered as a single (non-partial) result together with
        // the input buffer.
        camera3_capture_result result;
        memset(&result, 0, sizeof(camera3_capture_result));
        result.frame_number = frame_number;
        result.result = i->settings;
        result.input_buffer = i->input_buffer;
        result.partial_result = PARTIAL_RESULT_COUNT;

        orchestrateResult(&result);
        LOGD("Input request metadata and input buffer frame_number = %u",
                        i->frame_number);
        // The request is fully satisfied; remove it from the pending list.
        i = erasePendingRequest(i);
    } else {
        LOGE("Could not find input request for frame number %d", frame_number);
    }
}
3406
3407/*===========================================================================
3408 * FUNCTION : handleBufferWithLock
3409 *
3410 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
3411 *
3412 * PARAMETERS : @buffer: image buffer for the callback
3413 * @frame_number: frame number of the image buffer
3414 *
3415 * RETURN :
3416 *
3417 *==========================================================================*/
void QCamera3HardwareInterface::handleBufferWithLock(
    camera3_stream_buffer_t *buffer, uint32_t frame_number)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BUF_LKD);

    // A returned BLOB (JPEG) buffer means the snapshot completed; drop the
    // perf lock taken for the capture.
    if (buffer->stream->format == HAL_PIXEL_FORMAT_BLOB) {
        mPerfLockMgr.releasePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
    }

    /* Nothing to be done during error state */
    if ((ERROR == mState) || (DEINIT == mState)) {
        return;
    }
    if (mFlushPerf) {
        handleBuffersDuringFlushLock(buffer);
        return;
    }
    //not in flush
    // If the frame number doesn't exist in the pending request list,
    // directly send the buffer to the frameworks, and update pending buffers map
    // Otherwise, book-keep the buffer.
    pendingRequestIterator i = mPendingRequestsList.begin();
    while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
        i++;
    }
    if (i == mPendingRequestsList.end()) {
        // Verify all pending requests frame_numbers are greater
        for (pendingRequestIterator j = mPendingRequestsList.begin();
                j != mPendingRequestsList.end(); j++) {
            if ((j->frame_number < frame_number) && !(j->input_buffer)) {
                LOGW("Error: pending live frame number %d is smaller than %d",
                         j->frame_number, frame_number);
            }
        }
        // Buffer-only result: no metadata, partial_result 0.
        camera3_capture_result_t result;
        memset(&result, 0, sizeof(camera3_capture_result_t));
        result.result = NULL;
        result.frame_number = frame_number;
        result.num_output_buffers = 1;
        result.partial_result = 0;
        // If this (stream, frame) pair was flagged as dropped, mark the
        // buffer with STATUS_ERROR and consume the drop-list entry.
        for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
                m != mPendingFrameDropList.end(); m++) {
            QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
            uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
            if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
                buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
                LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
                         frame_number, streamID);
                m = mPendingFrameDropList.erase(m);
                break;
            }
        }
        // Merge in any error status recorded for this buffer handle.
        buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
        result.output_buffers = buffer;
        LOGH("result frame_number = %d, buffer = %p",
                 frame_number, buffer->buffer);

        mPendingBuffersMap.removeBuf(buffer->buffer);

        orchestrateResult(&result);
    } else {
        // Reprocess request: wait on and close the input buffer's release
        // fence before the output buffer is cached/sent.
        if (i->input_buffer) {
            if (i->input_buffer->release_fence != -1) {
               int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
               close(i->input_buffer->release_fence);
               if (rc != OK) {
                   LOGE("input buffer sync wait failed %d", rc);
               }
            }
        }

        // Put buffer into the pending request
        // (a heap copy is cached; it is freed when the result is assembled
        // in handlePendingResultsWithLock)
        for (auto &requestedBuffer : i->buffers) {
            if (requestedBuffer.stream == buffer->stream) {
                if (requestedBuffer.buffer != nullptr) {
                    LOGE("Error: buffer is already set");
                } else {
                    requestedBuffer.buffer = (camera3_stream_buffer_t *)malloc(
                        sizeof(camera3_stream_buffer_t));
                    *(requestedBuffer.buffer) = *buffer;
                    LOGH("cache buffer %p at result frame_number %u",
                        buffer->buffer, frame_number);
                }
            }
        }

        if (i->input_buffer) {
            // For a reprocessing request, try to send out shutter callback and result metadata.
            handlePendingResultsWithLock(frame_number, nullptr);
        }
    }

    // First preview buffer out: release startup perf locks and switch to the
    // encode power hint.
    if (mPreviewStarted == false) {
        QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
        if ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask()) {
            mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
            mPerfLockMgr.releasePerfLock(PERF_LOCK_OPEN_CAMERA);
            mPreviewStarted = true;

            // Set power hint for preview
            mPerfLockMgr.acquirePerfLock(PERF_LOCK_POWERHINT_ENCODE, 0);
        }
    }
}
3522
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003523void QCamera3HardwareInterface::handlePendingResultsWithLock(uint32_t frameNumber,
3524 const camera_metadata_t *resultMetadata)
3525{
3526 // Find the pending request for this result metadata.
3527 auto requestIter = mPendingRequestsList.begin();
3528 while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != frameNumber) {
3529 requestIter++;
3530 }
3531
3532 if (requestIter == mPendingRequestsList.end()) {
3533 ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, frameNumber);
3534 return;
3535 }
3536
3537 // Update the result metadata
3538 requestIter->resultMetadata = resultMetadata;
3539
3540 // Check what type of request this is.
3541 bool liveRequest = false;
3542 if (requestIter->hdrplus) {
3543 // HDR+ request doesn't have partial results.
3544 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
3545 } else if (requestIter->input_buffer != nullptr) {
3546 // Reprocessing request result is the same as settings.
3547 requestIter->resultMetadata = requestIter->settings;
3548 // Reprocessing request doesn't have partial results.
3549 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
3550 } else {
3551 liveRequest = true;
3552 requestIter->partial_result_cnt++;
3553 mPendingLiveRequest--;
3554
3555 // For a live request, send the metadata to HDR+ client.
3556 if (mHdrPlusClient != nullptr) {
3557 mHdrPlusClient->notifyFrameMetadata(frameNumber, *resultMetadata,
3558 requestIter->partial_result_cnt == PARTIAL_RESULT_COUNT);
3559 }
3560 }
3561
3562 // The pending requests are ordered by increasing frame numbers. The shutter callback and
3563 // result metadata are ready to be sent if all previous pending requests are ready to be sent.
3564 bool readyToSend = true;
3565
3566 // Iterate through the pending requests to send out shutter callbacks and results that are
3567 // ready. Also if this result metadata belongs to a live request, notify errors for previous
3568 // live requests that don't have result metadata yet.
3569 auto iter = mPendingRequestsList.begin();
3570 while (iter != mPendingRequestsList.end()) {
3571 // Check if current pending request is ready. If it's not ready, the following pending
3572 // requests are also not ready.
3573 if (readyToSend && iter->resultMetadata == nullptr) {
3574 readyToSend = false;
3575 }
3576
3577 bool thisLiveRequest = iter->hdrplus == false && iter->input_buffer == nullptr;
3578
3579 std::vector<camera3_stream_buffer_t> outputBuffers;
3580
3581 camera3_capture_result_t result = {};
3582 result.frame_number = iter->frame_number;
3583 result.result = iter->resultMetadata;
3584 result.partial_result = iter->partial_result_cnt;
3585
3586 // If this pending buffer has result metadata, we may be able to send out shutter callback
3587 // and result metadata.
3588 if (iter->resultMetadata != nullptr) {
3589 if (!readyToSend) {
3590 // If any of the previous pending request is not ready, this pending request is
3591 // also not ready to send in order to keep shutter callbacks and result metadata
3592 // in order.
3593 iter++;
3594 continue;
3595 }
3596
3597 // Invoke shutter callback if not yet.
3598 if (!iter->shutter_notified) {
3599 int64_t timestamp = systemTime(CLOCK_MONOTONIC);
3600
3601 // Find the timestamp in HDR+ result metadata
3602 camera_metadata_ro_entry_t entry;
3603 status_t res = find_camera_metadata_ro_entry(iter->resultMetadata,
3604 ANDROID_SENSOR_TIMESTAMP, &entry);
3605 if (res != OK) {
3606 ALOGE("%s: Cannot find sensor timestamp for frame number %d: %s (%d)",
3607 __FUNCTION__, iter->frame_number, strerror(-res), res);
3608 } else {
3609 timestamp = entry.data.i64[0];
3610 }
3611
3612 camera3_notify_msg_t notify_msg = {};
3613 notify_msg.type = CAMERA3_MSG_SHUTTER;
3614 notify_msg.message.shutter.frame_number = iter->frame_number;
3615 notify_msg.message.shutter.timestamp = timestamp;
3616 orchestrateNotify(&notify_msg);
3617 iter->shutter_notified = true;
3618 }
3619
3620 result.input_buffer = iter->input_buffer;
3621
3622 // Prepare output buffer array
3623 for (auto bufferInfoIter = iter->buffers.begin();
3624 bufferInfoIter != iter->buffers.end(); bufferInfoIter++) {
3625 if (bufferInfoIter->buffer != nullptr) {
3626
3627 QCamera3Channel *channel =
3628 (QCamera3Channel *)bufferInfoIter->buffer->stream->priv;
3629 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
3630
3631 // Check if this buffer is a dropped frame.
3632 auto frameDropIter = mPendingFrameDropList.begin();
3633 while (frameDropIter != mPendingFrameDropList.end()) {
3634 if((frameDropIter->stream_ID == streamID) &&
3635 (frameDropIter->frame_number == frameNumber)) {
3636 bufferInfoIter->buffer->status = CAMERA3_BUFFER_STATUS_ERROR;
3637 LOGE("Stream STATUS_ERROR frame_number=%u, streamID=%u", frameNumber,
3638 streamID);
3639 mPendingFrameDropList.erase(frameDropIter);
3640 break;
3641 } else {
3642 frameDropIter++;
3643 }
3644 }
3645
3646 // Check buffer error status
3647 bufferInfoIter->buffer->status |= mPendingBuffersMap.getBufErrStatus(
3648 bufferInfoIter->buffer->buffer);
3649 mPendingBuffersMap.removeBuf(bufferInfoIter->buffer->buffer);
3650
3651 outputBuffers.push_back(*(bufferInfoIter->buffer));
3652 free(bufferInfoIter->buffer);
3653 bufferInfoIter->buffer = NULL;
3654 }
3655 }
3656
3657 result.output_buffers = outputBuffers.size() > 0 ? &outputBuffers[0] : nullptr;
3658 result.num_output_buffers = outputBuffers.size();
3659 } else if (iter->frame_number < frameNumber && liveRequest && thisLiveRequest) {
3660 // If the result metadata belongs to a live request, notify errors for previous pending
3661 // live requests.
3662 mPendingLiveRequest--;
3663
3664 CameraMetadata dummyMetadata;
3665 dummyMetadata.update(ANDROID_REQUEST_ID, &(iter->request_id), 1);
3666 result.result = dummyMetadata.release();
3667
3668 notifyError(iter->frame_number, CAMERA3_MSG_ERROR_RESULT);
3669 } else {
3670 iter++;
3671 continue;
3672 }
3673
3674 orchestrateResult(&result);
3675
3676 // For reprocessing, result metadata is the same as settings so do not free it here to
3677 // avoid double free.
3678 if (result.result != iter->settings) {
3679 free_camera_metadata((camera_metadata_t *)result.result);
3680 }
3681 iter->resultMetadata = nullptr;
3682 iter = erasePendingRequest(iter);
3683 }
3684
3685 if (liveRequest) {
3686 for (auto &iter : mPendingRequestsList) {
3687 // Increment pipeline depth for the following pending requests.
3688 if (iter.frame_number > frameNumber) {
3689 iter.pipeline_depth++;
3690 }
3691 }
3692 }
3693
3694 unblockRequestIfNecessary();
3695}
3696
Thierry Strudel3d639192016-09-09 11:52:26 -07003697/*===========================================================================
3698 * FUNCTION : unblockRequestIfNecessary
3699 *
3700 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
3701 * that mMutex is held when this function is called.
3702 *
3703 * PARAMETERS :
3704 *
3705 * RETURN :
3706 *
3707 *==========================================================================*/
void QCamera3HardwareInterface::unblockRequestIfNecessary()
{
    // Unblock process_capture_request
    // Per the function header, mMutex is already held by the caller, so the
    // signal is issued under the lock that guards the waited-on condition.
    pthread_cond_signal(&mRequestCond);
}
3713
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003714/*===========================================================================
3715 * FUNCTION : isHdrSnapshotRequest
3716 *
3717 * DESCRIPTION: Function to determine if the request is for a HDR snapshot
3718 *
3719 * PARAMETERS : camera3 request structure
3720 *
3721 * RETURN : boolean decision variable
3722 *
3723 *==========================================================================*/
3724bool QCamera3HardwareInterface::isHdrSnapshotRequest(camera3_capture_request *request)
3725{
3726 if (request == NULL) {
3727 LOGE("Invalid request handle");
3728 assert(0);
3729 return false;
3730 }
3731
3732 if (!mForceHdrSnapshot) {
3733 CameraMetadata frame_settings;
3734 frame_settings = request->settings;
3735
3736 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
3737 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
3738 if (metaMode != ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
3739 return false;
3740 }
3741 } else {
3742 return false;
3743 }
3744
3745 if (frame_settings.exists(ANDROID_CONTROL_SCENE_MODE)) {
3746 uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
3747 if (fwk_sceneMode != ANDROID_CONTROL_SCENE_MODE_HDR) {
3748 return false;
3749 }
3750 } else {
3751 return false;
3752 }
3753 }
3754
3755 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
3756 if (request->output_buffers[i].stream->format
3757 == HAL_PIXEL_FORMAT_BLOB) {
3758 return true;
3759 }
3760 }
3761
3762 return false;
3763}
3764/*===========================================================================
3765 * FUNCTION : orchestrateRequest
3766 *
3767 * DESCRIPTION: Orchestrates a capture request from camera service
3768 *
3769 * PARAMETERS :
3770 * @request : request from framework to process
3771 *
3772 * RETURN : Error status codes
3773 *
3774 *==========================================================================*/
int32_t QCamera3HardwareInterface::orchestrateRequest(
        camera3_capture_request_t *request)
{

    // Remember the framework's view of the request so it can be restored
    // after the internal multi-shot sequence mutates it in place.
    uint32_t originalFrameNumber = request->frame_number;
    uint32_t originalOutputCount = request->num_output_buffers;
    const camera_metadata_t *original_settings = request->settings;
    List<InternalRequest> internallyRequestedStreams;
    List<InternalRequest> emptyInternalList;

    if (isHdrSnapshotRequest(request) && request->input_buffer == NULL) {
        LOGD("Framework requested:%d buffers in HDR snapshot", request->num_output_buffers);
        uint32_t internalFrameNumber;
        CameraMetadata modified_meta;


        /* Add Blob channel to list of internally requested streams */
        for (uint32_t i = 0; i < request->num_output_buffers; i++) {
            if (request->output_buffers[i].stream->format
                    == HAL_PIXEL_FORMAT_BLOB) {
                InternalRequest streamRequested;
                streamRequested.meteringOnly = 1;
                streamRequested.need_metadata = 0;
                streamRequested.stream = request->output_buffers[i].stream;
                internallyRequestedStreams.push_back(streamRequested);
            }
        }
        // Internal shots carry no framework output buffers.
        request->num_output_buffers = 0;
        auto itr = internallyRequestedStreams.begin();

        /* Modify setting to set compensation */
        modified_meta = request->settings;
        int32_t expCompensation = GB_HDR_HALF_STEP_EV;
        uint8_t aeLock = 1;
        modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
        modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
        camera_metadata_t *modified_settings = modified_meta.release();
        request->settings = modified_settings;

        /* Capture Settling & -2x frame */
        // Internal frame: gets a generated internal number not mapped back
        // to any framework frame number.
        _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
        request->frame_number = internalFrameNumber;
        processCaptureRequest(request, internallyRequestedStreams);

        // The framework-visible shot: restore output buffers and map the
        // internal number to the original framework frame number.
        request->num_output_buffers = originalOutputCount;
        _orchestrationDb.allocStoreInternalFrameNumber(originalFrameNumber, internalFrameNumber);
        request->frame_number = internalFrameNumber;
        processCaptureRequest(request, emptyInternalList);
        request->num_output_buffers = 0;

        // NOTE(review): assigning the released buffer back into a
        // CameraMetadata appears to clone it, which would leave the previous
        // modified_settings buffer unfreed — possible leak; confirm
        // CameraMetadata ownership semantics for operator=(camera_metadata_t*).
        modified_meta = modified_settings;
        expCompensation = 0;
        aeLock = 1;
        modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
        modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
        modified_settings = modified_meta.release();
        request->settings = modified_settings;

        /* Capture Settling & 0X frame */

        // First 0X shot is metering-only (no reprocess metadata needed).
        itr = internallyRequestedStreams.begin();
        if (itr == internallyRequestedStreams.end()) {
            LOGE("Error Internally Requested Stream list is empty");
            assert(0);
        } else {
            itr->need_metadata = 0;
            itr->meteringOnly = 1;
        }

        _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
        request->frame_number = internalFrameNumber;
        processCaptureRequest(request, internallyRequestedStreams);

        // Second 0X shot captures the actual frame and needs metadata.
        itr = internallyRequestedStreams.begin();
        if (itr == internallyRequestedStreams.end()) {
            ALOGE("Error Internally Requested Stream list is empty");
            assert(0);
        } else {
            itr->need_metadata = 1;
            itr->meteringOnly = 0;
        }

        _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
        request->frame_number = internalFrameNumber;
        processCaptureRequest(request, internallyRequestedStreams);

        /* Capture 2X frame*/
        modified_meta = modified_settings;
        expCompensation = GB_HDR_2X_STEP_EV;
        aeLock = 1;
        modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
        modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
        modified_settings = modified_meta.release();
        request->settings = modified_settings;

        // 2X settling shot: metering only.
        itr = internallyRequestedStreams.begin();
        if (itr == internallyRequestedStreams.end()) {
            ALOGE("Error Internally Requested Stream list is empty");
            assert(0);
        } else {
            itr->need_metadata = 0;
            itr->meteringOnly = 1;
        }
        _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
        request->frame_number = internalFrameNumber;
        processCaptureRequest(request, internallyRequestedStreams);

        // 2X capture shot: actual frame with metadata.
        itr = internallyRequestedStreams.begin();
        if (itr == internallyRequestedStreams.end()) {
            ALOGE("Error Internally Requested Stream list is empty");
            assert(0);
        } else {
            itr->need_metadata = 1;
            itr->meteringOnly = 0;
        }

        _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
        request->frame_number = internalFrameNumber;
        processCaptureRequest(request, internallyRequestedStreams);


        /* Capture 2X on original streaming config*/
        internallyRequestedStreams.clear();

        /* Restore original settings pointer */
        request->settings = original_settings;
    } else {
        // Normal (non-HDR-snapshot) path: single pass-through request with a
        // translated internal frame number.
        uint32_t internalFrameNumber;
        _orchestrationDb.allocStoreInternalFrameNumber(request->frame_number, internalFrameNumber);
        request->frame_number = internalFrameNumber;
        return processCaptureRequest(request, internallyRequestedStreams);
    }

    return NO_ERROR;
}
3910
3911/*===========================================================================
3912 * FUNCTION : orchestrateResult
3913 *
3914 * DESCRIPTION: Orchestrates a capture result to camera service
3915 *
3916 * PARAMETERS :
3917 * @request : request from framework to process
3918 *
3919 * RETURN :
3920 *
3921 *==========================================================================*/
3922void QCamera3HardwareInterface::orchestrateResult(
3923 camera3_capture_result_t *result)
3924{
3925 uint32_t frameworkFrameNumber;
3926 int32_t rc = _orchestrationDb.getFrameworkFrameNumber(result->frame_number,
3927 frameworkFrameNumber);
3928 if (rc != NO_ERROR) {
3929 LOGE("Cannot find translated frameworkFrameNumber");
3930 assert(0);
3931 } else {
3932 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
3933 LOGD("CAM_DEBUG Internal Request drop the result");
3934 } else {
3935 result->frame_number = frameworkFrameNumber;
3936 mCallbackOps->process_capture_result(mCallbackOps, result);
3937 }
3938 }
3939}
3940
3941/*===========================================================================
3942 * FUNCTION : orchestrateNotify
3943 *
3944 * DESCRIPTION: Orchestrates a notify to camera service
3945 *
3946 * PARAMETERS :
3947 * @request : request from framework to process
3948 *
3949 * RETURN :
3950 *
3951 *==========================================================================*/
3952void QCamera3HardwareInterface::orchestrateNotify(camera3_notify_msg_t *notify_msg)
3953{
3954 uint32_t frameworkFrameNumber;
3955 uint32_t internalFrameNumber = notify_msg->message.shutter.frame_number;
3956 int32_t rc = _orchestrationDb.getFrameworkFrameNumber(internalFrameNumber,
3957 frameworkFrameNumber);
3958 if (rc != NO_ERROR) {
3959 LOGE("Cannot find translated frameworkFrameNumber");
3960 assert(0);
3961 } else {
3962 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
3963 LOGE("CAM_DEBUG Internal Request drop the notifyCb");
3964 } else {
3965 notify_msg->message.shutter.frame_number = frameworkFrameNumber;
3966 mCallbackOps->notify(mCallbackOps, notify_msg);
3967 }
3968 }
3969}
3970
3971/*===========================================================================
3972 * FUNCTION : FrameNumberRegistry
3973 *
3974 * DESCRIPTION: Constructor
3975 *
3976 * PARAMETERS :
3977 *
3978 * RETURN :
3979 *
3980 *==========================================================================*/
3981FrameNumberRegistry::FrameNumberRegistry()
3982{
3983 _nextFreeInternalNumber = INTERNAL_FRAME_STARTING_NUMBER;
3984}
3985
3986/*===========================================================================
3987 * FUNCTION : ~FrameNumberRegistry
3988 *
3989 * DESCRIPTION: Destructor
3990 *
3991 * PARAMETERS :
3992 *
3993 * RETURN :
3994 *
3995 *==========================================================================*/
3996FrameNumberRegistry::~FrameNumberRegistry()
3997{
3998}
3999
4000/*===========================================================================
4001 * FUNCTION : PurgeOldEntriesLocked
4002 *
4003 * DESCRIPTION: Maintainance function to trigger LRU cleanup mechanism
4004 *
4005 * PARAMETERS :
4006 *
4007 * RETURN : NONE
4008 *
4009 *==========================================================================*/
4010void FrameNumberRegistry::purgeOldEntriesLocked()
4011{
4012 while (_register.begin() != _register.end()) {
4013 auto itr = _register.begin();
4014 if (itr->first < (_nextFreeInternalNumber - FRAME_REGISTER_LRU_SIZE)) {
4015 _register.erase(itr);
4016 } else {
4017 return;
4018 }
4019 }
4020}
4021
4022/*===========================================================================
4023 * FUNCTION : allocStoreInternalFrameNumber
4024 *
4025 * DESCRIPTION: Method to note down a framework request and associate a new
4026 * internal request number against it
4027 *
4028 * PARAMETERS :
4029 * @fFrameNumber: Identifier given by framework
4030 * @internalFN : Output parameter which will have the newly generated internal
4031 * entry
4032 *
4033 * RETURN : Error code
4034 *
4035 *==========================================================================*/
4036int32_t FrameNumberRegistry::allocStoreInternalFrameNumber(uint32_t frameworkFrameNumber,
4037 uint32_t &internalFrameNumber)
4038{
4039 Mutex::Autolock lock(mRegistryLock);
4040 internalFrameNumber = _nextFreeInternalNumber++;
4041 LOGD("Storing ff#:%d, with internal:%d", frameworkFrameNumber, internalFrameNumber);
4042 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, frameworkFrameNumber));
4043 purgeOldEntriesLocked();
4044 return NO_ERROR;
4045}
4046
4047/*===========================================================================
4048 * FUNCTION : generateStoreInternalFrameNumber
4049 *
4050 * DESCRIPTION: Method to associate a new internal request number independent
4051 * of any associate with framework requests
4052 *
4053 * PARAMETERS :
4054 * @internalFrame#: Output parameter which will have the newly generated internal
4055 *
4056 *
4057 * RETURN : Error code
4058 *
4059 *==========================================================================*/
4060int32_t FrameNumberRegistry::generateStoreInternalFrameNumber(uint32_t &internalFrameNumber)
4061{
4062 Mutex::Autolock lock(mRegistryLock);
4063 internalFrameNumber = _nextFreeInternalNumber++;
4064 LOGD("Generated internal framenumber:%d", internalFrameNumber);
4065 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, EMPTY_FRAMEWORK_FRAME_NUMBER));
4066 purgeOldEntriesLocked();
4067 return NO_ERROR;
4068}
4069
4070/*===========================================================================
4071 * FUNCTION : getFrameworkFrameNumber
4072 *
4073 * DESCRIPTION: Method to query the framework framenumber given an internal #
4074 *
4075 * PARAMETERS :
4076 * @internalFrame#: Internal reference
4077 * @frameworkframenumber: Output parameter holding framework frame entry
4078 *
4079 * RETURN : Error code
4080 *
4081 *==========================================================================*/
4082int32_t FrameNumberRegistry::getFrameworkFrameNumber(uint32_t internalFrameNumber,
4083 uint32_t &frameworkFrameNumber)
4084{
4085 Mutex::Autolock lock(mRegistryLock);
4086 auto itr = _register.find(internalFrameNumber);
4087 if (itr == _register.end()) {
4088 LOGE("CAM_DEBUG: Cannot find internal#: %d", internalFrameNumber);
4089 return -ENOENT;
4090 }
4091
4092 frameworkFrameNumber = itr->second;
4093 purgeOldEntriesLocked();
4094 return NO_ERROR;
4095}
Thierry Strudel3d639192016-09-09 11:52:26 -07004096
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004097status_t QCamera3HardwareInterface::fillPbStreamConfig(
4098 pbcamera::StreamConfiguration *config, uint32_t pbStreamId, int pbStreamFormat,
4099 QCamera3Channel *channel, uint32_t streamIndex) {
4100 if (config == nullptr) {
4101 LOGE("%s: config is null", __FUNCTION__);
4102 return BAD_VALUE;
4103 }
4104
4105 if (channel == nullptr) {
4106 LOGE("%s: channel is null", __FUNCTION__);
4107 return BAD_VALUE;
4108 }
4109
4110 QCamera3Stream *stream = channel->getStreamByIndex(streamIndex);
4111 if (stream == nullptr) {
4112 LOGE("%s: Failed to get stream %d in channel.", __FUNCTION__, streamIndex);
4113 return NAME_NOT_FOUND;
4114 }
4115
4116 const cam_stream_info_t* streamInfo = stream->getStreamInfo();
4117 if (streamInfo == nullptr) {
4118 LOGE("%s: Failed to get stream info for stream %d in channel.", __FUNCTION__, streamIndex);
4119 return NAME_NOT_FOUND;
4120 }
4121
4122 config->id = pbStreamId;
4123 config->image.width = streamInfo->dim.width;
4124 config->image.height = streamInfo->dim.height;
4125 config->image.padding = 0;
4126 config->image.format = pbStreamFormat;
4127
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004128 uint32_t totalPlaneSize = 0;
4129
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004130 // Fill plane information.
4131 for (uint32_t i = 0; i < streamInfo->buf_planes.plane_info.num_planes; i++) {
4132 pbcamera::PlaneConfiguration plane;
4133 plane.stride = streamInfo->buf_planes.plane_info.mp[i].stride_in_bytes;
4134 plane.scanline = streamInfo->buf_planes.plane_info.mp[i].scanline;
4135 config->image.planes.push_back(plane);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004136
4137 totalPlaneSize += (plane.stride * plane.scanline);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004138 }
4139
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004140 config->image.padding = streamInfo->buf_planes.plane_info.frame_len - totalPlaneSize;
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004141 return OK;
4142}
4143
Thierry Strudel3d639192016-09-09 11:52:26 -07004144/*===========================================================================
4145 * FUNCTION : processCaptureRequest
4146 *
4147 * DESCRIPTION: process a capture request from camera service
4148 *
4149 * PARAMETERS :
4150 * @request : request from framework to process
4151 *
4152 * RETURN :
4153 *
4154 *==========================================================================*/
4155int QCamera3HardwareInterface::processCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004156 camera3_capture_request_t *request,
4157 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07004158{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004159 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PROC_CAP_REQ);
Thierry Strudel3d639192016-09-09 11:52:26 -07004160 int rc = NO_ERROR;
4161 int32_t request_id;
4162 CameraMetadata meta;
Thierry Strudel3d639192016-09-09 11:52:26 -07004163 bool isVidBufRequested = false;
4164 camera3_stream_buffer_t *pInputBuffer = NULL;
4165
4166 pthread_mutex_lock(&mMutex);
4167
4168 // Validate current state
4169 switch (mState) {
4170 case CONFIGURED:
4171 case STARTED:
4172 /* valid state */
4173 break;
4174
4175 case ERROR:
4176 pthread_mutex_unlock(&mMutex);
4177 handleCameraDeviceError();
4178 return -ENODEV;
4179
4180 default:
4181 LOGE("Invalid state %d", mState);
4182 pthread_mutex_unlock(&mMutex);
4183 return -ENODEV;
4184 }
4185
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004186 rc = validateCaptureRequest(request, internallyRequestedStreams);
Thierry Strudel3d639192016-09-09 11:52:26 -07004187 if (rc != NO_ERROR) {
4188 LOGE("incoming request is not valid");
4189 pthread_mutex_unlock(&mMutex);
4190 return rc;
4191 }
4192
4193 meta = request->settings;
4194
4195 // For first capture request, send capture intent, and
4196 // stream on all streams
4197 if (mState == CONFIGURED) {
4198 // send an unconfigure to the backend so that the isp
4199 // resources are deallocated
4200 if (!mFirstConfiguration) {
4201 cam_stream_size_info_t stream_config_info;
4202 int32_t hal_version = CAM_HAL_V3;
4203 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
4204 stream_config_info.buffer_info.min_buffers =
4205 MIN_INFLIGHT_REQUESTS;
4206 stream_config_info.buffer_info.max_buffers =
4207 m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
4208 clear_metadata_buffer(mParameters);
4209 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4210 CAM_INTF_PARM_HAL_VERSION, hal_version);
4211 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4212 CAM_INTF_META_STREAM_INFO, stream_config_info);
4213 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4214 mParameters);
4215 if (rc < 0) {
4216 LOGE("set_parms for unconfigure failed");
4217 pthread_mutex_unlock(&mMutex);
4218 return rc;
4219 }
4220 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004221 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07004222 /* get eis information for stream configuration */
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004223 cam_is_type_t isTypeVideo, isTypePreview, is_type=IS_TYPE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07004224 char is_type_value[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004225 property_get("persist.camera.is_type", is_type_value, "4");
4226 isTypeVideo = static_cast<cam_is_type_t>(atoi(is_type_value));
4227 // Make default value for preview IS_TYPE as IS_TYPE_EIS_2_0
4228 property_get("persist.camera.is_type_preview", is_type_value, "4");
4229 isTypePreview = static_cast<cam_is_type_t>(atoi(is_type_value));
4230 LOGD("isTypeVideo: %d isTypePreview: %d", isTypeVideo, isTypePreview);
Thierry Strudel3d639192016-09-09 11:52:26 -07004231
4232 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4233 int32_t hal_version = CAM_HAL_V3;
4234 uint8_t captureIntent =
4235 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4236 mCaptureIntent = captureIntent;
4237 clear_metadata_buffer(mParameters);
4238 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
4239 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
4240 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004241 if (mFirstConfiguration) {
4242 // configure instant AEC
4243 // Instant AEC is a session based parameter and it is needed only
4244 // once per complete session after open camera.
4245 // i.e. This is set only once for the first capture request, after open camera.
4246 setInstantAEC(meta);
4247 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004248 uint8_t fwkVideoStabMode=0;
4249 if (meta.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
4250 fwkVideoStabMode = meta.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
4251 }
4252
4253 // If EIS setprop is enabled & if first capture setting has EIS enabled then only
4254 // turn it on for video/preview
4255 bool setEis = m_bEisEnable && fwkVideoStabMode && m_bEisSupportedSize &&
4256 (isTypeVideo >= IS_TYPE_EIS_2_0);
Thierry Strudel3d639192016-09-09 11:52:26 -07004257 int32_t vsMode;
4258 vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
4259 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
4260 rc = BAD_VALUE;
4261 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004262 LOGD("setEis %d", setEis);
4263 bool eis3Supported = false;
4264 size_t count = IS_TYPE_MAX;
4265 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
4266 for (size_t i = 0; i < count; i++) {
4267 if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0) {
4268 eis3Supported = true;
4269 break;
4270 }
4271 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004272
4273 //IS type will be 0 unless EIS is supported. If EIS is supported
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004274 //it could either be 4 or 5 depending on the stream and video size
Thierry Strudel3d639192016-09-09 11:52:26 -07004275 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4276 if (setEis) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004277 if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_PREVIEW) {
4278 is_type = isTypePreview;
4279 } else if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_VIDEO ) {
4280 if ( (isTypeVideo == IS_TYPE_EIS_3_0) && (eis3Supported == FALSE) ) {
4281 LOGW(" EIS_3.0 is not supported and so setting EIS_2.0");
Thierry Strudel3d639192016-09-09 11:52:26 -07004282 is_type = IS_TYPE_EIS_2_0;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004283 } else {
4284 is_type = isTypeVideo;
Thierry Strudel3d639192016-09-09 11:52:26 -07004285 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004286 } else {
4287 is_type = IS_TYPE_NONE;
4288 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004289 mStreamConfigInfo.is_type[i] = is_type;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004290 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004291 mStreamConfigInfo.is_type[i] = IS_TYPE_NONE;
4292 }
4293 }
4294
4295 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4296 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
4297
4298 int32_t tintless_value = 1;
4299 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4300 CAM_INTF_PARM_TINTLESS, tintless_value);
4301 //Disable CDS for HFR mode or if DIS/EIS is on.
4302 //CDS is a session parameter in the backend/ISP, so need to be set/reset
4303 //after every configure_stream
4304 if ((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
4305 (m_bIsVideo)) {
4306 int32_t cds = CAM_CDS_MODE_OFF;
4307 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4308 CAM_INTF_PARM_CDS_MODE, cds))
4309 LOGE("Failed to disable CDS for HFR mode");
4310
4311 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004312
4313 if (m_debug_avtimer || meta.exists(QCAMERA3_USE_AV_TIMER)) {
4314 uint8_t* use_av_timer = NULL;
4315
4316 if (m_debug_avtimer){
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004317 LOGI(" Enabling AV timer through setprop");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004318 use_av_timer = &m_debug_avtimer;
4319 }
4320 else{
4321 use_av_timer =
4322 meta.find(QCAMERA3_USE_AV_TIMER).data.u8;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004323 if (use_av_timer) {
4324 LOGI("Enabling AV timer through Metadata: use_av_timer: %d", *use_av_timer);
4325 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004326 }
4327
4328 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
4329 rc = BAD_VALUE;
4330 }
4331 }
4332
Thierry Strudel3d639192016-09-09 11:52:26 -07004333 setMobicat();
4334
4335 /* Set fps and hfr mode while sending meta stream info so that sensor
4336 * can configure appropriate streaming mode */
4337 mHFRVideoFps = DEFAULT_VIDEO_FPS;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004338 mMinInFlightRequests = MIN_INFLIGHT_REQUESTS;
4339 mMaxInFlightRequests = MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004340 if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
4341 rc = setHalFpsRange(meta, mParameters);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004342 if (rc == NO_ERROR) {
4343 int32_t max_fps =
4344 (int32_t) meta.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
Zhijun He21b864a2016-06-24 13:41:19 -07004345 if (max_fps == 60 || mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004346 mMinInFlightRequests = MIN_INFLIGHT_60FPS_REQUESTS;
4347 }
4348 /* For HFR, more buffers are dequeued upfront to improve the performance */
4349 if (mBatchSize) {
4350 mMinInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
4351 mMaxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
4352 }
4353 }
4354 else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004355 LOGE("setHalFpsRange failed");
4356 }
4357 }
4358 if (meta.exists(ANDROID_CONTROL_MODE)) {
4359 uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
4360 rc = extractSceneMode(meta, metaMode, mParameters);
4361 if (rc != NO_ERROR) {
4362 LOGE("extractSceneMode failed");
4363 }
4364 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004365 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07004366
Thierry Strudel04e026f2016-10-10 11:27:36 -07004367 if (meta.exists(QCAMERA3_VIDEO_HDR_MODE)) {
4368 cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
4369 meta.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
4370 rc = setVideoHdrMode(mParameters, vhdr);
4371 if (rc != NO_ERROR) {
4372 LOGE("setVideoHDR is failed");
4373 }
4374 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004375
Thierry Strudel3d639192016-09-09 11:52:26 -07004376 //TODO: validate the arguments, HSV scenemode should have only the
4377 //advertised fps ranges
4378
4379 /*set the capture intent, hal version, tintless, stream info,
4380 *and disenable parameters to the backend*/
4381 LOGD("set_parms META_STREAM_INFO " );
4382 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4383 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x "
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004384 "Format:%d is_type: %d",
Thierry Strudel3d639192016-09-09 11:52:26 -07004385 mStreamConfigInfo.type[i],
4386 mStreamConfigInfo.stream_sizes[i].width,
4387 mStreamConfigInfo.stream_sizes[i].height,
4388 mStreamConfigInfo.postprocess_mask[i],
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004389 mStreamConfigInfo.format[i],
4390 mStreamConfigInfo.is_type[i]);
Thierry Strudel3d639192016-09-09 11:52:26 -07004391 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004392
Thierry Strudel3d639192016-09-09 11:52:26 -07004393 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4394 mParameters);
4395 if (rc < 0) {
4396 LOGE("set_parms failed for hal version, stream info");
4397 }
4398
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004399 cam_sensor_mode_info_t sensor_mode_info;
4400 memset(&sensor_mode_info, 0, sizeof(sensor_mode_info));
4401 rc = getSensorModeInfo(sensor_mode_info);
Thierry Strudel3d639192016-09-09 11:52:26 -07004402 if (rc != NO_ERROR) {
4403 LOGE("Failed to get sensor output size");
4404 pthread_mutex_unlock(&mMutex);
4405 goto error_exit;
4406 }
4407
4408 mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
4409 gCamCapability[mCameraId]->active_array_size.height,
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004410 sensor_mode_info.active_array_size.width,
4411 sensor_mode_info.active_array_size.height);
Thierry Strudel3d639192016-09-09 11:52:26 -07004412
4413 /* Set batchmode before initializing channel. Since registerBuffer
4414 * internally initializes some of the channels, better set batchmode
4415 * even before first register buffer */
4416 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4417 it != mStreamInfo.end(); it++) {
4418 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
4419 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
4420 && mBatchSize) {
4421 rc = channel->setBatchSize(mBatchSize);
4422 //Disable per frame map unmap for HFR/batchmode case
4423 rc |= channel->setPerFrameMapUnmap(false);
4424 if (NO_ERROR != rc) {
4425 LOGE("Channel init failed %d", rc);
4426 pthread_mutex_unlock(&mMutex);
4427 goto error_exit;
4428 }
4429 }
4430 }
4431
4432 //First initialize all streams
4433 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4434 it != mStreamInfo.end(); it++) {
4435 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
4436 if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
4437 ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004438 setEis) {
4439 for (size_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4440 if ( (1U << mStreamConfigInfo.type[i]) == channel->getStreamTypeMask() ) {
4441 is_type = mStreamConfigInfo.is_type[i];
4442 break;
4443 }
4444 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004445 rc = channel->initialize(is_type);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004446 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004447 rc = channel->initialize(IS_TYPE_NONE);
4448 }
4449 if (NO_ERROR != rc) {
4450 LOGE("Channel initialization failed %d", rc);
4451 pthread_mutex_unlock(&mMutex);
4452 goto error_exit;
4453 }
4454 }
4455
4456 if (mRawDumpChannel) {
4457 rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
4458 if (rc != NO_ERROR) {
4459 LOGE("Error: Raw Dump Channel init failed");
4460 pthread_mutex_unlock(&mMutex);
4461 goto error_exit;
4462 }
4463 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004464 if (mHdrPlusRawSrcChannel) {
4465 rc = mHdrPlusRawSrcChannel->initialize(IS_TYPE_NONE);
4466 if (rc != NO_ERROR) {
4467 LOGE("Error: HDR+ RAW Source Channel init failed");
4468 pthread_mutex_unlock(&mMutex);
4469 goto error_exit;
4470 }
4471 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004472 if (mSupportChannel) {
4473 rc = mSupportChannel->initialize(IS_TYPE_NONE);
4474 if (rc < 0) {
4475 LOGE("Support channel initialization failed");
4476 pthread_mutex_unlock(&mMutex);
4477 goto error_exit;
4478 }
4479 }
4480 if (mAnalysisChannel) {
4481 rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
4482 if (rc < 0) {
4483 LOGE("Analysis channel initialization failed");
4484 pthread_mutex_unlock(&mMutex);
4485 goto error_exit;
4486 }
4487 }
4488 if (mDummyBatchChannel) {
4489 rc = mDummyBatchChannel->setBatchSize(mBatchSize);
4490 if (rc < 0) {
4491 LOGE("mDummyBatchChannel setBatchSize failed");
4492 pthread_mutex_unlock(&mMutex);
4493 goto error_exit;
4494 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004495 rc = mDummyBatchChannel->initialize(IS_TYPE_NONE);
Thierry Strudel3d639192016-09-09 11:52:26 -07004496 if (rc < 0) {
4497 LOGE("mDummyBatchChannel initialization failed");
4498 pthread_mutex_unlock(&mMutex);
4499 goto error_exit;
4500 }
4501 }
4502
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004503 if (mHdrPlusClient != nullptr) {
4504 pbcamera::InputConfiguration inputConfig;
4505 std::vector<pbcamera::StreamConfiguration> outputStreamConfigs;
4506
4507 // Configure HDR+ client streams.
4508 // Get input config.
4509 if (mHdrPlusRawSrcChannel) {
4510 // HDR+ input buffers will be provided by HAL.
4511 rc = fillPbStreamConfig(&inputConfig.streamConfig, kPbRaw10InputStreamId,
4512 HAL_PIXEL_FORMAT_RAW10, mHdrPlusRawSrcChannel, /*stream index*/0);
4513 if (rc != OK) {
4514 LOGE("%s: Failed to get fill stream config for HDR+ raw src stream.",
4515 __FUNCTION__);
4516 pthread_mutex_unlock(&mMutex);
4517 goto error_exit;
4518 }
4519
4520 inputConfig.isSensorInput = false;
4521 } else {
4522 // Sensor MIPI will send data to Easel.
4523 inputConfig.isSensorInput = true;
4524 inputConfig.sensorMode.pixelArrayWidth =
4525 sensor_mode_info.pixel_array_size.width;
4526 inputConfig.sensorMode.pixelArrayHeight =
4527 sensor_mode_info.pixel_array_size.height;
4528 inputConfig.sensorMode.activeArrayWidth =
4529 sensor_mode_info.active_array_size.width;
4530 inputConfig.sensorMode.activeArrayHeight =
4531 sensor_mode_info.active_array_size.height;
4532 inputConfig.sensorMode.outputPixelClkHz =
4533 sensor_mode_info.op_pixel_clk;
4534 }
4535
4536 // Get output configurations.
4537 // Easel may need to output RAW16 buffers if mRawChannel was created.
4538 if (mRawChannel != nullptr) {
4539 pbcamera::StreamConfiguration outputConfig;
4540 rc = fillPbStreamConfig(&outputConfig, kPbRaw16OutputStreamId,
4541 HAL_PIXEL_FORMAT_RAW16, mRawChannel, /*stream index*/0);
4542 if (rc != OK) {
4543 LOGE("%s: Failed to get fill stream config for raw stream.", __FUNCTION__);
4544 pthread_mutex_unlock(&mMutex);
4545 goto error_exit;
4546 }
4547 outputStreamConfigs.push_back(outputConfig);
4548 }
4549
4550 // Easel may need to output YUV output buffers if mPictureChannel was created.
4551 if (mPictureChannel != nullptr) {
4552 pbcamera::StreamConfiguration outputConfig;
4553 rc = fillPbStreamConfig(&outputConfig, kPbYuvOutputStreamId,
4554 HAL_PIXEL_FORMAT_YCrCb_420_SP, mPictureChannel, /*stream index*/0);
4555 if (rc != OK) {
4556 LOGE("%s: Failed to get fill stream config for YUV stream.", __FUNCTION__);
4557 pthread_mutex_unlock(&mMutex);
4558 goto error_exit;
4559 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004560
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004561 outputStreamConfigs.push_back(outputConfig);
4562 }
4563
4564 // TODO: consider other channels for YUV output buffers.
4565
4566 rc = mHdrPlusClient->configureStreams(inputConfig, outputStreamConfigs);
4567 if (rc != OK) {
4568 LOGE("%d: Failed to configure streams with HDR+ client: %s (%d)", __FUNCTION__,
4569 strerror(-rc), rc);
4570 pthread_mutex_unlock(&mMutex);
4571 goto error_exit;
4572 }
4573 }
4574
Thierry Strudel3d639192016-09-09 11:52:26 -07004575 // Set bundle info
4576 rc = setBundleInfo();
4577 if (rc < 0) {
4578 LOGE("setBundleInfo failed %d", rc);
4579 pthread_mutex_unlock(&mMutex);
4580 goto error_exit;
4581 }
4582
4583 //update settings from app here
4584 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
4585 mIsDeviceLinked = meta.find(QCAMERA3_DUALCAM_LINK_ENABLE).data.u8[0];
4586 LOGH("Dualcam: setting On=%d id =%d", mIsDeviceLinked, mCameraId);
4587 }
4588 if (meta.exists(QCAMERA3_DUALCAM_LINK_IS_MAIN)) {
4589 mIsMainCamera = meta.find(QCAMERA3_DUALCAM_LINK_IS_MAIN).data.u8[0];
4590 LOGH("Dualcam: Is this main camera = %d id =%d", mIsMainCamera, mCameraId);
4591 }
4592 if (meta.exists(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID)) {
4593 mLinkedCameraId = meta.find(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID).data.u8[0];
4594 LOGH("Dualcam: Linked camera Id %d id =%d", mLinkedCameraId, mCameraId);
4595
4596 if ( (mLinkedCameraId >= MM_CAMERA_MAX_NUM_SENSORS) &&
4597 (mLinkedCameraId != mCameraId) ) {
4598 LOGE("Dualcam: mLinkedCameraId %d is invalid, current cam id = %d",
4599 mLinkedCameraId, mCameraId);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004600 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07004601 goto error_exit;
4602 }
4603 }
4604
4605 // add bundle related cameras
4606 LOGH("%s: Dualcam: id =%d, mIsDeviceLinked=%d", __func__,mCameraId, mIsDeviceLinked);
4607 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004608 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
4609 &m_pDualCamCmdPtr->bundle_info;
4610 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07004611 if (mIsDeviceLinked)
4612 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_ON;
4613 else
4614 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
4615
4616 pthread_mutex_lock(&gCamLock);
4617
4618 if (sessionId[mLinkedCameraId] == 0xDEADBEEF) {
4619 LOGE("Dualcam: Invalid Session Id ");
4620 pthread_mutex_unlock(&gCamLock);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004621 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07004622 goto error_exit;
4623 }
4624
4625 if (mIsMainCamera == 1) {
4626 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
4627 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07004628 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004629 m_pRelCamSyncBuf->cam_role = CAM_ROLE_BAYER;
Thierry Strudel3d639192016-09-09 11:52:26 -07004630 // related session id should be session id of linked session
4631 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
4632 } else {
4633 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
4634 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07004635 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004636 m_pRelCamSyncBuf->cam_role = CAM_ROLE_MONO;
Thierry Strudel3d639192016-09-09 11:52:26 -07004637 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
4638 }
4639 pthread_mutex_unlock(&gCamLock);
4640
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004641 rc = mCameraHandle->ops->set_dual_cam_cmd(
4642 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07004643 if (rc < 0) {
4644 LOGE("Dualcam: link failed");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004645 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07004646 goto error_exit;
4647 }
4648 }
4649
4650 //Then start them.
4651 LOGH("Start META Channel");
4652 rc = mMetadataChannel->start();
4653 if (rc < 0) {
4654 LOGE("META channel start failed");
4655 pthread_mutex_unlock(&mMutex);
4656 goto error_exit;
4657 }
4658
4659 if (mAnalysisChannel) {
4660 rc = mAnalysisChannel->start();
4661 if (rc < 0) {
4662 LOGE("Analysis channel start failed");
4663 mMetadataChannel->stop();
4664 pthread_mutex_unlock(&mMutex);
4665 goto error_exit;
4666 }
4667 }
4668
4669 if (mSupportChannel) {
4670 rc = mSupportChannel->start();
4671 if (rc < 0) {
4672 LOGE("Support channel start failed");
4673 mMetadataChannel->stop();
4674 /* Although support and analysis are mutually exclusive today
4675 adding it in anycase for future proofing */
4676 if (mAnalysisChannel) {
4677 mAnalysisChannel->stop();
4678 }
4679 pthread_mutex_unlock(&mMutex);
4680 goto error_exit;
4681 }
4682 }
4683 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4684 it != mStreamInfo.end(); it++) {
4685 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
4686 LOGH("Start Processing Channel mask=%d",
4687 channel->getStreamTypeMask());
4688 rc = channel->start();
4689 if (rc < 0) {
4690 LOGE("channel start failed");
4691 pthread_mutex_unlock(&mMutex);
4692 goto error_exit;
4693 }
4694 }
4695
4696 if (mRawDumpChannel) {
4697 LOGD("Starting raw dump stream");
4698 rc = mRawDumpChannel->start();
4699 if (rc != NO_ERROR) {
4700 LOGE("Error Starting Raw Dump Channel");
4701 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4702 it != mStreamInfo.end(); it++) {
4703 QCamera3Channel *channel =
4704 (QCamera3Channel *)(*it)->stream->priv;
4705 LOGH("Stopping Processing Channel mask=%d",
4706 channel->getStreamTypeMask());
4707 channel->stop();
4708 }
4709 if (mSupportChannel)
4710 mSupportChannel->stop();
4711 if (mAnalysisChannel) {
4712 mAnalysisChannel->stop();
4713 }
4714 mMetadataChannel->stop();
4715 pthread_mutex_unlock(&mMutex);
4716 goto error_exit;
4717 }
4718 }
4719
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004720 if (mHdrPlusRawSrcChannel) {
4721 LOGD("Starting HDR+ RAW stream");
4722 rc = mHdrPlusRawSrcChannel->start();
4723 if (rc != NO_ERROR) {
4724 LOGE("Error Starting HDR+ RAW Channel");
4725 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4726 it != mStreamInfo.end(); it++) {
4727 QCamera3Channel *channel =
4728 (QCamera3Channel *)(*it)->stream->priv;
4729 LOGH("Stopping Processing Channel mask=%d",
4730 channel->getStreamTypeMask());
4731 channel->stop();
4732 }
4733 if (mSupportChannel)
4734 mSupportChannel->stop();
4735 if (mAnalysisChannel) {
4736 mAnalysisChannel->stop();
4737 }
4738 if (mRawDumpChannel) {
4739 mRawDumpChannel->stop();
4740 }
4741 mMetadataChannel->stop();
4742 pthread_mutex_unlock(&mMutex);
4743 goto error_exit;
4744 }
4745 }
4746
Thierry Strudel3d639192016-09-09 11:52:26 -07004747 if (mChannelHandle) {
4748
4749 rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
4750 mChannelHandle);
4751 if (rc != NO_ERROR) {
4752 LOGE("start_channel failed %d", rc);
4753 pthread_mutex_unlock(&mMutex);
4754 goto error_exit;
4755 }
4756 }
4757
4758 goto no_error;
4759error_exit:
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004760 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07004761 return rc;
4762no_error:
Thierry Strudel3d639192016-09-09 11:52:26 -07004763 mWokenUpByDaemon = false;
4764 mPendingLiveRequest = 0;
4765 mFirstConfiguration = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07004766 }
4767
4768 uint32_t frameNumber = request->frame_number;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004769 cam_stream_ID_t streamsArray;
Thierry Strudel3d639192016-09-09 11:52:26 -07004770
4771 if (mFlushPerf) {
4772 //we cannot accept any requests during flush
4773 LOGE("process_capture_request cannot proceed during flush");
4774 pthread_mutex_unlock(&mMutex);
4775 return NO_ERROR; //should return an error
4776 }
4777
4778 if (meta.exists(ANDROID_REQUEST_ID)) {
4779 request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
4780 mCurrentRequestId = request_id;
4781 LOGD("Received request with id: %d", request_id);
4782 } else if (mState == CONFIGURED || mCurrentRequestId == -1){
4783 LOGE("Unable to find request id field, \
4784 & no previous id available");
4785 pthread_mutex_unlock(&mMutex);
4786 return NAME_NOT_FOUND;
4787 } else {
4788 LOGD("Re-using old request id");
4789 request_id = mCurrentRequestId;
4790 }
4791
4792 LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
4793 request->num_output_buffers,
4794 request->input_buffer,
4795 frameNumber);
4796 // Acquire all request buffers first
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004797 streamsArray.num_streams = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07004798 int blob_request = 0;
4799 uint32_t snapshotStreamId = 0;
4800 for (size_t i = 0; i < request->num_output_buffers; i++) {
4801 const camera3_stream_buffer_t& output = request->output_buffers[i];
4802 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
4803
4804 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004805 //FIXME??:Call function to store local copy of jpeg data for encode params.
Thierry Strudel3d639192016-09-09 11:52:26 -07004806 blob_request = 1;
4807 snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
4808 }
4809
4810 if (output.acquire_fence != -1) {
4811 rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
4812 close(output.acquire_fence);
4813 if (rc != OK) {
4814 LOGE("sync wait failed %d", rc);
4815 pthread_mutex_unlock(&mMutex);
4816 return rc;
4817 }
4818 }
4819
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004820 streamsArray.stream_request[streamsArray.num_streams++].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07004821 channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel3d639192016-09-09 11:52:26 -07004822
4823 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
4824 isVidBufRequested = true;
4825 }
4826 }
4827
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004828 //FIXME: Add checks to ensure to dups in validateCaptureRequest
4829 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
4830 itr++) {
4831 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
4832 streamsArray.stream_request[streamsArray.num_streams++].streamID =
4833 channel->getStreamID(channel->getStreamTypeMask());
4834
4835 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
4836 isVidBufRequested = true;
4837 }
4838 }
4839
Thierry Strudel3d639192016-09-09 11:52:26 -07004840 if (blob_request) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004841 KPI_ATRACE_CAMSCOPE_INT("SNAPSHOT", CAMSCOPE_HAL3_SNAPSHOT, 1);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004842 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
Thierry Strudel3d639192016-09-09 11:52:26 -07004843 }
4844 if (blob_request && mRawDumpChannel) {
4845 LOGD("Trigger Raw based on blob request if Raw dump is enabled");
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004846 streamsArray.stream_request[streamsArray.num_streams].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07004847 mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004848 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
Thierry Strudel3d639192016-09-09 11:52:26 -07004849 }
4850
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004851 {
4852 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
4853 // Request a RAW buffer if
4854 // 1. mHdrPlusRawSrcChannel is valid.
4855 // 2. frameNumber is multiples of kHdrPlusRawPeriod (in order to limit RAW capture rate.)
4856 // 3. There is no pending HDR+ request.
4857 if (mHdrPlusRawSrcChannel && frameNumber % kHdrPlusRawPeriod == 0 &&
4858 mHdrPlusPendingRequests.size() == 0) {
4859 streamsArray.stream_request[streamsArray.num_streams].streamID =
4860 mHdrPlusRawSrcChannel->getStreamID(mHdrPlusRawSrcChannel->getStreamTypeMask());
4861 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
4862 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004863 }
4864
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004865 //extract capture intent
4866 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4867 mCaptureIntent =
4868 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4869 }
4870
4871 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
4872 mCacMode =
4873 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
4874 }
4875
4876 bool hdrPlusRequest = false;
Chien-Yu Chen92724a82017-01-06 11:50:30 -08004877 HdrPlusPendingRequest pendingHdrPlusRequest = {};
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004878
4879 // Decide if this is an HDR+ capture request.
4880 if (mHdrPlusClient != nullptr &&
4881 mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE) {
4882 bool highQualityPostProcessing = true;
4883
4884 // Check noise reduction mode is high quality.
4885 if (!meta.exists(ANDROID_NOISE_REDUCTION_MODE) ||
4886 meta.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0] !=
4887 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY) {
4888 highQualityPostProcessing = false;
4889 }
4890
4891 // Check edge mode is high quality.
4892 if (!meta.exists(ANDROID_EDGE_MODE) ||
4893 meta.find(ANDROID_EDGE_MODE).data.u8[0] !=
4894 ANDROID_EDGE_MODE_HIGH_QUALITY) {
4895 highQualityPostProcessing = false;
4896 }
4897
4898 // If all post processing is high quality, this still capture request is an HDR+ request.
4899 // TODO: support more than a single JPEG output buffer.
4900 if (highQualityPostProcessing && request->num_output_buffers == 1 &&
4901 request->output_buffers[0].stream->format == HAL_PIXEL_FORMAT_BLOB) {
4902 auto frame = std::make_shared<mm_camera_buf_def_t>();
4903
4904 // Get a YUV buffer from pic channel.
4905 QCamera3PicChannel *picChannel =
4906 (QCamera3PicChannel*)request->output_buffers[0].stream->priv;
4907 rc = picChannel->getYuvBufferForRequest(frame.get(), frameNumber);
4908 if (rc != OK) {
4909 ALOGE("%s: Getting an available YUV buffer from pic channel failed: %s (%d)",
4910 __FUNCTION__, strerror(-rc), rc);
4911 pthread_mutex_unlock(&mMutex);
4912 return rc;
4913 }
4914
4915 pbcamera::StreamBuffer buffer;
4916 buffer.streamId = kPbYuvOutputStreamId;
4917 buffer.data = frame->buffer;
4918 buffer.dataSize = frame->frame_len;
4919
4920 pbcamera::CaptureRequest pbRequest;
4921 pbRequest.id = frameNumber;
4922 pbRequest.outputBuffers.push_back(buffer);
4923
4924 // Submit an HDR+ capture request to HDR+ service.
4925 rc = mHdrPlusClient->submitCaptureRequest(&pbRequest);
4926 if (rc != OK) {
4927 ALOGE("%s: %d: Submitting a capture request failed: %s (%d)", __FUNCTION__,
4928 __LINE__, strerror(-rc), rc);
4929 }
4930
4931 hdrPlusRequest = true;
4932
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004933 pendingHdrPlusRequest.yuvBuffer = frame;
4934 pendingHdrPlusRequest.frameworkOutputBuffers.push_back(request->output_buffers[0]);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004935 } else {
4936 ALOGD("%s: Fall back to non HDR+ capture request. high quality: %d, number of "
4937 "output buffers: %d", __FUNCTION__, highQualityPostProcessing,
4938 request->num_output_buffers);
4939 }
4940 }
4941
Chien-Yu Chen92724a82017-01-06 11:50:30 -08004942 if (hdrPlusRequest) {
4943 // For a HDR+ request, just set the frame parameters.
4944 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
4945 if (rc < 0) {
4946 LOGE("fail to set frame parameters");
4947 pthread_mutex_unlock(&mMutex);
4948 return rc;
4949 }
4950 } else if(request->input_buffer == NULL) {
Thierry Strudel3d639192016-09-09 11:52:26 -07004951 /* Parse the settings:
4952 * - For every request in NORMAL MODE
4953 * - For every request in HFR mode during preview only case
4954 * - For first request of every batch in HFR mode during video
4955 * recording. In batchmode the same settings except frame number is
4956 * repeated in each request of the batch.
4957 */
4958 if (!mBatchSize ||
4959 (mBatchSize && !isVidBufRequested) ||
4960 (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004961 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
Thierry Strudel3d639192016-09-09 11:52:26 -07004962 if (rc < 0) {
4963 LOGE("fail to set frame parameters");
4964 pthread_mutex_unlock(&mMutex);
4965 return rc;
4966 }
4967 }
4968 /* For batchMode HFR, setFrameParameters is not called for every
4969 * request. But only frame number of the latest request is parsed.
4970 * Keep track of first and last frame numbers in a batch so that
4971 * metadata for the frame numbers of batch can be duplicated in
4972 * handleBatchMetadta */
4973 if (mBatchSize) {
4974 if (!mToBeQueuedVidBufs) {
4975 //start of the batch
4976 mFirstFrameNumberInBatch = request->frame_number;
4977 }
4978 if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4979 CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
4980 LOGE("Failed to set the frame number in the parameters");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004981 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07004982 return BAD_VALUE;
4983 }
4984 }
4985 if (mNeedSensorRestart) {
4986 /* Unlock the mutex as restartSensor waits on the channels to be
4987 * stopped, which in turn calls stream callback functions -
4988 * handleBufferWithLock and handleMetadataWithLock */
4989 pthread_mutex_unlock(&mMutex);
4990 rc = dynamicUpdateMetaStreamInfo();
4991 if (rc != NO_ERROR) {
4992 LOGE("Restarting the sensor failed");
4993 return BAD_VALUE;
4994 }
4995 mNeedSensorRestart = false;
4996 pthread_mutex_lock(&mMutex);
4997 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004998 if(mResetInstantAEC) {
4999 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5000 CAM_INTF_PARM_INSTANT_AEC, (uint8_t)CAM_AEC_NORMAL_CONVERGENCE);
5001 mResetInstantAEC = false;
5002 }
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005003 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005004 if (request->input_buffer->acquire_fence != -1) {
5005 rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
5006 close(request->input_buffer->acquire_fence);
5007 if (rc != OK) {
5008 LOGE("input buffer sync wait failed %d", rc);
5009 pthread_mutex_unlock(&mMutex);
5010 return rc;
5011 }
5012 }
5013 }
5014
5015 if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
5016 mLastCustIntentFrmNum = frameNumber;
5017 }
5018 /* Update pending request list and pending buffers map */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005019 PendingRequestInfo pendingRequest = {};
Thierry Strudel3d639192016-09-09 11:52:26 -07005020 pendingRequestIterator latestRequest;
5021 pendingRequest.frame_number = frameNumber;
5022 pendingRequest.num_buffers = request->num_output_buffers;
5023 pendingRequest.request_id = request_id;
5024 pendingRequest.blob_request = blob_request;
5025 pendingRequest.timestamp = 0;
5026 pendingRequest.bUrgentReceived = 0;
5027 if (request->input_buffer) {
5028 pendingRequest.input_buffer =
5029 (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
5030 *(pendingRequest.input_buffer) = *(request->input_buffer);
5031 pInputBuffer = pendingRequest.input_buffer;
5032 } else {
5033 pendingRequest.input_buffer = NULL;
5034 pInputBuffer = NULL;
5035 }
5036
5037 pendingRequest.pipeline_depth = 0;
5038 pendingRequest.partial_result_cnt = 0;
5039 extractJpegMetadata(mCurJpegMeta, request);
5040 pendingRequest.jpegMetadata = mCurJpegMeta;
5041 pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
5042 pendingRequest.shutter_notified = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005043 pendingRequest.capture_intent = mCaptureIntent;
Samuel Ha68ba5172016-12-15 18:41:12 -08005044 /* DevCamDebug metadata processCaptureRequest */
5045 if (meta.exists(DEVCAMDEBUG_META_ENABLE)) {
5046 mDevCamDebugMetaEnable =
5047 meta.find(DEVCAMDEBUG_META_ENABLE).data.u8[0];
5048 }
5049 pendingRequest.DevCamDebug_meta_enable = mDevCamDebugMetaEnable;
5050 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07005051
5052 //extract CAC info
5053 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5054 mCacMode =
5055 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5056 }
5057 pendingRequest.fwkCacMode = mCacMode;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005058 pendingRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005059
5060 PendingBuffersInRequest bufsForCurRequest;
5061 bufsForCurRequest.frame_number = frameNumber;
5062 // Mark current timestamp for the new request
5063 bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005064 bufsForCurRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005065
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005066 if (hdrPlusRequest) {
5067 // Save settings for this request.
5068 pendingHdrPlusRequest.settings = std::make_shared<metadata_buffer_t>();
5069 memcpy(pendingHdrPlusRequest.settings.get(), mParameters, sizeof(metadata_buffer_t));
5070
5071 // Add to pending HDR+ request queue.
5072 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5073 mHdrPlusPendingRequests.emplace(frameNumber, pendingHdrPlusRequest);
5074
5075 ALOGD("%s: frame number %u is an HDR+ request.", __FUNCTION__, frameNumber);
5076 }
5077
Thierry Strudel3d639192016-09-09 11:52:26 -07005078 for (size_t i = 0; i < request->num_output_buffers; i++) {
5079 RequestedBufferInfo requestedBuf;
5080 memset(&requestedBuf, 0, sizeof(requestedBuf));
5081 requestedBuf.stream = request->output_buffers[i].stream;
5082 requestedBuf.buffer = NULL;
5083 pendingRequest.buffers.push_back(requestedBuf);
5084
5085 // Add to buffer handle the pending buffers list
5086 PendingBufferInfo bufferInfo;
5087 bufferInfo.buffer = request->output_buffers[i].buffer;
5088 bufferInfo.stream = request->output_buffers[i].stream;
5089 bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
5090 QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
5091 LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
5092 frameNumber, bufferInfo.buffer,
5093 channel->getStreamTypeMask(), bufferInfo.stream->format);
5094 }
5095 // Add this request packet into mPendingBuffersMap
5096 mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
5097 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
5098 mPendingBuffersMap.get_num_overall_buffers());
5099
5100 latestRequest = mPendingRequestsList.insert(
5101 mPendingRequestsList.end(), pendingRequest);
5102 if(mFlush) {
5103 LOGI("mFlush is true");
5104 pthread_mutex_unlock(&mMutex);
5105 return NO_ERROR;
5106 }
5107
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005108 // If this is not an HDR+ request, send the request to metadata and each output buffer's
5109 // channel.
5110 if (!hdrPlusRequest) {
5111 int indexUsed;
5112 // Notify metadata channel we receive a request
5113 mMetadataChannel->request(NULL, frameNumber, indexUsed);
Thierry Strudel3d639192016-09-09 11:52:26 -07005114
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005115 if(request->input_buffer != NULL){
5116 LOGD("Input request, frame_number %d", frameNumber);
5117 rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
5118 if (NO_ERROR != rc) {
5119 LOGE("fail to set reproc parameters");
5120 pthread_mutex_unlock(&mMutex);
5121 return rc;
5122 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005123 }
5124
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005125 // Call request on other streams
5126 uint32_t streams_need_metadata = 0;
5127 pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
5128 for (size_t i = 0; i < request->num_output_buffers; i++) {
5129 const camera3_stream_buffer_t& output = request->output_buffers[i];
5130 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5131
5132 if (channel == NULL) {
5133 LOGW("invalid channel pointer for stream");
5134 continue;
5135 }
5136
5137 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
5138 LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
5139 output.buffer, request->input_buffer, frameNumber);
5140 if(request->input_buffer != NULL){
Thierry Strudel3d639192016-09-09 11:52:26 -07005141 rc = channel->request(output.buffer, frameNumber,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005142 pInputBuffer, &mReprocMeta, indexUsed, false, false);
5143 if (rc < 0) {
5144 LOGE("Fail to request on picture channel");
5145 pthread_mutex_unlock(&mMutex);
5146 return rc;
5147 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005148 } else {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005149 LOGD("snapshot request with buffer %p, frame_number %d",
5150 output.buffer, frameNumber);
5151 if (!request->settings) {
5152 rc = channel->request(output.buffer, frameNumber,
5153 NULL, mPrevParameters, indexUsed);
5154 } else {
5155 rc = channel->request(output.buffer, frameNumber,
5156 NULL, mParameters, indexUsed);
5157 }
5158 if (rc < 0) {
5159 LOGE("Fail to request on picture channel");
5160 pthread_mutex_unlock(&mMutex);
5161 return rc;
5162 }
5163
5164 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5165 uint32_t j = 0;
5166 for (j = 0; j < streamsArray.num_streams; j++) {
5167 if (streamsArray.stream_request[j].streamID == streamId) {
5168 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5169 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5170 else
5171 streamsArray.stream_request[j].buf_index = indexUsed;
5172 break;
5173 }
5174 }
5175 if (j == streamsArray.num_streams) {
5176 LOGE("Did not find matching stream to update index");
5177 assert(0);
5178 }
5179
5180 pendingBufferIter->need_metadata = true;
5181 streams_need_metadata++;
Thierry Strudel3d639192016-09-09 11:52:26 -07005182 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005183 } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
5184 bool needMetadata = false;
5185 QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
5186 rc = yuvChannel->request(output.buffer, frameNumber,
5187 pInputBuffer, (pInputBuffer ? &mReprocMeta : mParameters),
5188 needMetadata, indexUsed, false, false);
Thierry Strudel3d639192016-09-09 11:52:26 -07005189 if (rc < 0) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005190 LOGE("Fail to request on YUV channel");
Thierry Strudel3d639192016-09-09 11:52:26 -07005191 pthread_mutex_unlock(&mMutex);
5192 return rc;
5193 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005194
5195 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5196 uint32_t j = 0;
5197 for (j = 0; j < streamsArray.num_streams; j++) {
5198 if (streamsArray.stream_request[j].streamID == streamId) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005199 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5200 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5201 else
5202 streamsArray.stream_request[j].buf_index = indexUsed;
5203 break;
5204 }
5205 }
5206 if (j == streamsArray.num_streams) {
5207 LOGE("Did not find matching stream to update index");
5208 assert(0);
5209 }
5210
5211 pendingBufferIter->need_metadata = needMetadata;
5212 if (needMetadata)
5213 streams_need_metadata += 1;
5214 LOGD("calling YUV channel request, need_metadata is %d",
5215 needMetadata);
5216 } else {
5217 LOGD("request with buffer %p, frame_number %d",
5218 output.buffer, frameNumber);
5219
5220 rc = channel->request(output.buffer, frameNumber, indexUsed);
5221
5222 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5223 uint32_t j = 0;
5224 for (j = 0; j < streamsArray.num_streams; j++) {
5225 if (streamsArray.stream_request[j].streamID == streamId) {
5226 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5227 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5228 else
5229 streamsArray.stream_request[j].buf_index = indexUsed;
5230 break;
5231 }
5232 }
5233 if (j == streamsArray.num_streams) {
5234 LOGE("Did not find matching stream to update index");
5235 assert(0);
5236 }
5237
5238 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5239 && mBatchSize) {
5240 mToBeQueuedVidBufs++;
5241 if (mToBeQueuedVidBufs == mBatchSize) {
5242 channel->queueBatchBuf();
5243 }
5244 }
5245 if (rc < 0) {
5246 LOGE("request failed");
5247 pthread_mutex_unlock(&mMutex);
5248 return rc;
5249 }
5250 }
5251 pendingBufferIter++;
5252 }
5253
5254 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5255 itr++) {
5256 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5257
5258 if (channel == NULL) {
5259 LOGE("invalid channel pointer for stream");
5260 assert(0);
5261 return BAD_VALUE;
5262 }
5263
5264 InternalRequest requestedStream;
5265 requestedStream = (*itr);
5266
5267
5268 if ((*itr).stream->format == HAL_PIXEL_FORMAT_BLOB) {
5269 LOGD("snapshot request internally input buffer %p, frame_number %d",
5270 request->input_buffer, frameNumber);
5271 if(request->input_buffer != NULL){
5272 rc = channel->request(NULL, frameNumber,
5273 pInputBuffer, &mReprocMeta, indexUsed, true,
5274 requestedStream.meteringOnly);
5275 if (rc < 0) {
5276 LOGE("Fail to request on picture channel");
5277 pthread_mutex_unlock(&mMutex);
5278 return rc;
5279 }
5280 } else {
5281 LOGD("snapshot request with frame_number %d", frameNumber);
5282 if (!request->settings) {
5283 rc = channel->request(NULL, frameNumber,
5284 NULL, mPrevParameters, indexUsed, true,
5285 requestedStream.meteringOnly);
5286 } else {
5287 rc = channel->request(NULL, frameNumber,
5288 NULL, mParameters, indexUsed, true, requestedStream.meteringOnly);
5289 }
5290 if (rc < 0) {
5291 LOGE("Fail to request on picture channel");
5292 pthread_mutex_unlock(&mMutex);
5293 return rc;
5294 }
5295
5296 if ((*itr).meteringOnly != 1) {
5297 requestedStream.need_metadata = 1;
5298 streams_need_metadata++;
5299 }
5300 }
5301
5302 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5303 uint32_t j = 0;
5304 for (j = 0; j < streamsArray.num_streams; j++) {
5305 if (streamsArray.stream_request[j].streamID == streamId) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005306 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5307 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5308 else
5309 streamsArray.stream_request[j].buf_index = indexUsed;
5310 break;
5311 }
5312 }
5313 if (j == streamsArray.num_streams) {
5314 LOGE("Did not find matching stream to update index");
5315 assert(0);
5316 }
5317
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005318 } else {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005319 LOGE("Internal requests not supported on this stream type");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005320 assert(0);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005321 return INVALID_OPERATION;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005322 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005323 latestRequest->internalRequestList.push_back(requestedStream);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005324 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005325
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005326 //If 2 streams have need_metadata set to true, fail the request, unless
5327 //we copy/reference count the metadata buffer
5328 if (streams_need_metadata > 1) {
5329 LOGE("not supporting request in which two streams requires"
5330 " 2 HAL metadata for reprocessing");
5331 pthread_mutex_unlock(&mMutex);
5332 return -EINVAL;
5333 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005334
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005335 if (request->input_buffer == NULL) {
5336 /* Set the parameters to backend:
5337 * - For every request in NORMAL MODE
5338 * - For every request in HFR mode during preview only case
5339 * - Once every batch in HFR mode during video recording
5340 */
5341 if (!mBatchSize ||
5342 (mBatchSize && !isVidBufRequested) ||
5343 (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
5344 LOGD("set_parms batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
5345 mBatchSize, isVidBufRequested,
5346 mToBeQueuedVidBufs);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005347
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005348 if(mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize)) {
5349 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5350 uint32_t m = 0;
5351 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5352 if (streamsArray.stream_request[k].streamID ==
5353 mBatchedStreamsArray.stream_request[m].streamID)
5354 break;
5355 }
5356 if (m == mBatchedStreamsArray.num_streams) {
5357 mBatchedStreamsArray.stream_request\
5358 [mBatchedStreamsArray.num_streams].streamID =
5359 streamsArray.stream_request[k].streamID;
5360 mBatchedStreamsArray.stream_request\
5361 [mBatchedStreamsArray.num_streams].buf_index =
5362 streamsArray.stream_request[k].buf_index;
5363 mBatchedStreamsArray.num_streams =
5364 mBatchedStreamsArray.num_streams + 1;
5365 }
5366 }
5367 streamsArray = mBatchedStreamsArray;
5368 }
5369 /* Update stream id of all the requested buffers */
5370 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID,
5371 streamsArray)) {
5372 LOGE("Failed to set stream type mask in the parameters");
5373 return BAD_VALUE;
5374 }
5375
5376 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5377 mParameters);
5378 if (rc < 0) {
5379 LOGE("set_parms failed");
5380 }
5381 /* reset to zero coz, the batch is queued */
5382 mToBeQueuedVidBufs = 0;
5383 mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
5384 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
5385 } else if (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs != mBatchSize)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005386 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5387 uint32_t m = 0;
5388 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5389 if (streamsArray.stream_request[k].streamID ==
5390 mBatchedStreamsArray.stream_request[m].streamID)
5391 break;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005392 }
5393 if (m == mBatchedStreamsArray.num_streams) {
5394 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5395 streamID = streamsArray.stream_request[k].streamID;
5396 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5397 buf_index = streamsArray.stream_request[k].buf_index;
5398 mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
5399 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005400 }
5401 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005402 mPendingLiveRequest++;
Thierry Strudel3d639192016-09-09 11:52:26 -07005403 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005404 }
5405
5406 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
5407
5408 mState = STARTED;
5409 // Added a timed condition wait
5410 struct timespec ts;
5411 uint8_t isValidTimeout = 1;
Shuzhen Wangfb961e52016-11-28 11:48:02 -08005412 rc = clock_gettime(CLOCK_MONOTONIC, &ts);
Thierry Strudel3d639192016-09-09 11:52:26 -07005413 if (rc < 0) {
5414 isValidTimeout = 0;
5415 LOGE("Error reading the real time clock!!");
5416 }
5417 else {
5418 // Make timeout as 5 sec for request to be honored
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005419 int64_t timeout = 5;
5420 {
5421 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5422 // If there is a pending HDR+ request, the following requests may be blocked until the
5423 // HDR+ request is done. So allow a longer timeout.
5424 if (mHdrPlusPendingRequests.size() > 0) {
5425 timeout = MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT;
5426 }
5427 }
5428 ts.tv_sec += timeout;
Thierry Strudel3d639192016-09-09 11:52:26 -07005429 }
5430 //Block on conditional variable
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005431 while ((mPendingLiveRequest >= mMinInFlightRequests) && !pInputBuffer &&
Thierry Strudel3d639192016-09-09 11:52:26 -07005432 (mState != ERROR) && (mState != DEINIT)) {
5433 if (!isValidTimeout) {
5434 LOGD("Blocking on conditional wait");
5435 pthread_cond_wait(&mRequestCond, &mMutex);
5436 }
5437 else {
5438 LOGD("Blocking on timed conditional wait");
5439 rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
5440 if (rc == ETIMEDOUT) {
5441 rc = -ENODEV;
5442 LOGE("Unblocked on timeout!!!!");
5443 break;
5444 }
5445 }
5446 LOGD("Unblocked");
5447 if (mWokenUpByDaemon) {
5448 mWokenUpByDaemon = false;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005449 if (mPendingLiveRequest < mMaxInFlightRequests)
Thierry Strudel3d639192016-09-09 11:52:26 -07005450 break;
5451 }
5452 }
5453 pthread_mutex_unlock(&mMutex);
5454
5455 return rc;
5456}
5457
/*===========================================================================
 * FUNCTION   : dump
 *
 * DESCRIPTION: Dumps the HAL's pending state (pending capture requests,
 *              pending buffers per request, and the pending frame-drop
 *              list) to the given file descriptor. Triggered via
 *              "dumpsys media.camera"; also arms a debug-level refresh.
 *
 * PARAMETERS :
 *   @fd      : file descriptor to which the state tables are written
 *
 * RETURN     : None
 *==========================================================================*/
void QCamera3HardwareInterface::dump(int fd)
{
    // Hold mMutex for the whole dump so the pending lists cannot be mutated
    // by the capture/result path while we walk them.
    pthread_mutex_lock(&mMutex);
    dprintf(fd, "\n Camera HAL3 information Begin \n");

    // Table 1: one row per outstanding capture request.
    dprintf(fd, "\nNumber of pending requests: %zu \n",
            mPendingRequestsList.size());
    dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
    dprintf(fd, " Frame | Number of Buffers | Req Id: | Blob Req | Input buffer present\n");
    dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
    for(pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end(); i++) {
        dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
                i->frame_number, i->num_buffers, i->request_id, i->blob_request,
                i->input_buffer);
    }
    // Table 2: one row per buffer still owned by the HAL, keyed by frame and
    // the owning channel's stream-type mask.
    dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
            mPendingBuffersMap.get_num_overall_buffers());
    dprintf(fd, "-------+------------------\n");
    dprintf(fd, " Frame | Stream type mask \n");
    dprintf(fd, "-------+------------------\n");
    for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        for(auto &j : req.mPendingBufferList) {
            QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
            dprintf(fd, " %5d | %11d \n",
                    req.frame_number, channel->getStreamTypeMask());
        }
    }
    dprintf(fd, "-------+------------------\n");

    // Table 3: frame/stream pairs currently queued in mPendingFrameDropList.
    dprintf(fd, "\nPending frame drop list: %zu\n",
            mPendingFrameDropList.size());
    dprintf(fd, "-------+-----------\n");
    dprintf(fd, " Frame | Stream ID \n");
    dprintf(fd, "-------+-----------\n");
    for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
            i != mPendingFrameDropList.end(); i++) {
        dprintf(fd, " %5d | %9d \n",
                i->frame_number, i->stream_ID);
    }
    dprintf(fd, "-------+-----------\n");

    dprintf(fd, "\n Camera HAL3 information End \n");

    /* use dumpsys media.camera as trigger to send update debug level event */
    mUpdateDebugLevel = true;
    pthread_mutex_unlock(&mMutex);
    return;
}
5517
5518/*===========================================================================
5519 * FUNCTION : flush
5520 *
5521 * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
5522 * conditionally restarts channels
5523 *
5524 * PARAMETERS :
5525 * @ restartChannels: re-start all channels
5526 *
5527 *
5528 * RETURN :
5529 * 0 on success
5530 * Error code on failure
5531 *==========================================================================*/
5532int QCamera3HardwareInterface::flush(bool restartChannels)
5533{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08005534 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07005535 int32_t rc = NO_ERROR;
5536
5537 LOGD("Unblocking Process Capture Request");
5538 pthread_mutex_lock(&mMutex);
5539 mFlush = true;
5540 pthread_mutex_unlock(&mMutex);
5541
5542 rc = stopAllChannels();
5543 // unlink of dualcam
5544 if (mIsDeviceLinked) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005545 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
5546 &m_pDualCamCmdPtr->bundle_info;
5547 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005548 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
5549 pthread_mutex_lock(&gCamLock);
5550
5551 if (mIsMainCamera == 1) {
5552 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
5553 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005554 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07005555 // related session id should be session id of linked session
5556 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5557 } else {
5558 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
5559 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005560 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07005561 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5562 }
5563 pthread_mutex_unlock(&gCamLock);
5564
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005565 rc = mCameraHandle->ops->set_dual_cam_cmd(
5566 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07005567 if (rc < 0) {
5568 LOGE("Dualcam: Unlink failed, but still proceed to close");
5569 }
5570 }
5571
5572 if (rc < 0) {
5573 LOGE("stopAllChannels failed");
5574 return rc;
5575 }
5576 if (mChannelHandle) {
5577 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
5578 mChannelHandle);
5579 }
5580
5581 // Reset bundle info
5582 rc = setBundleInfo();
5583 if (rc < 0) {
5584 LOGE("setBundleInfo failed %d", rc);
5585 return rc;
5586 }
5587
5588 // Mutex Lock
5589 pthread_mutex_lock(&mMutex);
5590
5591 // Unblock process_capture_request
5592 mPendingLiveRequest = 0;
5593 pthread_cond_signal(&mRequestCond);
5594
5595 rc = notifyErrorForPendingRequests();
5596 if (rc < 0) {
5597 LOGE("notifyErrorForPendingRequests failed");
5598 pthread_mutex_unlock(&mMutex);
5599 return rc;
5600 }
5601
5602 mFlush = false;
5603
5604 // Start the Streams/Channels
5605 if (restartChannels) {
5606 rc = startAllChannels();
5607 if (rc < 0) {
5608 LOGE("startAllChannels failed");
5609 pthread_mutex_unlock(&mMutex);
5610 return rc;
5611 }
5612 }
5613
5614 if (mChannelHandle) {
5615 mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
5616 mChannelHandle);
5617 if (rc < 0) {
5618 LOGE("start_channel failed");
5619 pthread_mutex_unlock(&mMutex);
5620 return rc;
5621 }
5622 }
5623
5624 pthread_mutex_unlock(&mMutex);
5625
5626 return 0;
5627}
5628
5629/*===========================================================================
5630 * FUNCTION : flushPerf
5631 *
5632 * DESCRIPTION: This is the performance optimization version of flush that does
5633 * not use stream off, rather flushes the system
5634 *
5635 * PARAMETERS :
5636 *
5637 *
5638 * RETURN : 0 : success
5639 * -EINVAL: input is malformed (device is not valid)
5640 * -ENODEV: if the device has encountered a serious error
5641 *==========================================================================*/
int QCamera3HardwareInterface::flushPerf()
{
    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
    int32_t rc = 0;
    struct timespec timeout;
    bool timed_wait = false;

    pthread_mutex_lock(&mMutex);
    mFlushPerf = true;
    // Snapshot of outstanding buffers; the flush completes once they have
    // all been returned by the backend (mBuffersCond is signalled as the
    // count drains).
    mPendingBuffersMap.numPendingBufsAtFlush =
            mPendingBuffersMap.get_num_overall_buffers();
    LOGD("Calling flush. Wait for %d buffers to return",
            mPendingBuffersMap.numPendingBufsAtFlush);

    /* send the flush event to the backend */
    rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
    if (rc < 0) {
        LOGE("Error in flush: IOCTL failure");
        mFlushPerf = false;
        pthread_mutex_unlock(&mMutex);
        return -ENODEV;
    }

    if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
        // Nothing outstanding — flush already effectively complete.
        LOGD("No pending buffers in HAL, return flush");
        mFlushPerf = false;
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    /* wait on a signal that buffers were received */
    // NOTE(review): uses CLOCK_MONOTONIC — assumes mBuffersCond was created
    // with a condattr whose clock is CLOCK_MONOTONIC; confirm at init time,
    // otherwise the timedwait deadline is computed against the wrong clock.
    rc = clock_gettime(CLOCK_MONOTONIC, &timeout);
    if (rc < 0) {
        LOGE("Error reading the real time clock, cannot use timed wait");
    } else {
        timeout.tv_sec += FLUSH_TIMEOUT;
        timed_wait = true;
    }

    //Block on conditional variable
    while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
        LOGD("Waiting on mBuffersCond");
        if (!timed_wait) {
            rc = pthread_cond_wait(&mBuffersCond, &mMutex);
            if (rc != 0) {
                LOGE("pthread_cond_wait failed due to rc = %s",
                        strerror(rc));
                break;
            }
        } else {
            rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
            if (rc != 0) {
                LOGE("pthread_cond_timedwait failed due to rc = %s",
                        strerror(rc));
                break;
            }
        }
    }
    if (rc != 0) {
        // Wait failed or timed out before all buffers came back.
        mFlushPerf = false;
        pthread_mutex_unlock(&mMutex);
        return -ENODEV;
    }

    LOGD("Received buffers, now safe to return them");

    //make sure the channels handle flush
    //currently only required for the picture channel to release snapshot resources
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3Channel *channel = (*it)->channel;
        if (channel) {
            rc = channel->flush();
            if (rc) {
                LOGE("Flushing the channels failed with error %d", rc);
                // even though the channel flush failed we need to continue and
                // return the buffers we have to the framework, however the return
                // value will be an error
                rc = -ENODEV;
            }
        }
    }

    /* notify the frameworks and send errored results */
    // NOTE(review): an -ENODEV set by a failed channel->flush() above is
    // overwritten by the call below — confirm whether that is intended.
    rc = notifyErrorForPendingRequests();
    if (rc < 0) {
        LOGE("notifyErrorForPendingRequests failed");
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    //unblock process_capture_request
    mPendingLiveRequest = 0;
    unblockRequestIfNecessary();

    mFlushPerf = false;
    pthread_mutex_unlock(&mMutex);
    LOGD ("Flush Operation complete. rc = %d", rc);
    return rc;
}
5742
5743/*===========================================================================
5744 * FUNCTION : handleCameraDeviceError
5745 *
5746 * DESCRIPTION: This function calls internal flush and notifies the error to
5747 * framework and updates the state variable.
5748 *
5749 * PARAMETERS : None
5750 *
5751 * RETURN : NO_ERROR on Success
5752 * Error code on failure
5753 *==========================================================================*/
int32_t QCamera3HardwareInterface::handleCameraDeviceError()
{
    int32_t rc = NO_ERROR;

    {
        // mFlushLock is held for the whole stop/DEINIT transition —
        // presumably to serialize this internal flush against concurrent
        // flush() callers; verify against the lock's other users.
        Mutex::Autolock lock(mFlushLock);
        pthread_mutex_lock(&mMutex);
        if (mState != ERROR) {
            //if mState != ERROR, nothing to be done
            pthread_mutex_unlock(&mMutex);
            return NO_ERROR;
        }
        // Drop mMutex before calling flush(), which acquires mMutex itself.
        pthread_mutex_unlock(&mMutex);

        rc = flush(false /* restart channels */);
        if (NO_ERROR != rc) {
            LOGE("internal flush to handle mState = ERROR failed");
        }

        // Device is unusable from here on.
        pthread_mutex_lock(&mMutex);
        mState = DEINIT;
        pthread_mutex_unlock(&mMutex);
    }

    // Report a device-level error to the framework: frame_number 0 and a
    // NULL stream, as the error is not tied to any particular request.
    camera3_notify_msg_t notify_msg;
    memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
    notify_msg.type = CAMERA3_MSG_ERROR;
    notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
    notify_msg.message.error.error_stream = NULL;
    notify_msg.message.error.frame_number = 0;
    orchestrateNotify(&notify_msg);

    return rc;
}
5788
5789/*===========================================================================
5790 * FUNCTION : captureResultCb
5791 *
5792 * DESCRIPTION: Callback handler for all capture result
5793 * (streams, as well as metadata)
5794 *
5795 * PARAMETERS :
5796 * @metadata : metadata information
5797 * @buffer : actual gralloc buffer to be returned to frameworks.
5798 * NULL if metadata.
5799 *
5800 * RETURN : NONE
5801 *==========================================================================*/
5802void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
5803 camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
5804{
5805 if (metadata_buf) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005806 pthread_mutex_lock(&mMutex);
5807 uint8_t batchSize = mBatchSize;
5808 pthread_mutex_unlock(&mMutex);
5809 if (batchSize) {
Thierry Strudel3d639192016-09-09 11:52:26 -07005810 handleBatchMetadata(metadata_buf,
5811 true /* free_and_bufdone_meta_buf */);
5812 } else { /* mBatchSize = 0 */
5813 hdrPlusPerfLock(metadata_buf);
5814 pthread_mutex_lock(&mMutex);
5815 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005816 true /* free_and_bufdone_meta_buf */,
5817 false /* first frame of batch metadata */ );
Thierry Strudel3d639192016-09-09 11:52:26 -07005818 pthread_mutex_unlock(&mMutex);
5819 }
5820 } else if (isInputBuffer) {
5821 pthread_mutex_lock(&mMutex);
5822 handleInputBufferWithLock(frame_number);
5823 pthread_mutex_unlock(&mMutex);
5824 } else {
5825 pthread_mutex_lock(&mMutex);
5826 handleBufferWithLock(buffer, frame_number);
5827 pthread_mutex_unlock(&mMutex);
5828 }
5829 return;
5830}
5831
5832/*===========================================================================
5833 * FUNCTION : getReprocessibleOutputStreamId
5834 *
5835 * DESCRIPTION: Get source output stream id for the input reprocess stream
5836 * based on size and format, which would be the largest
5837 * output stream if an input stream exists.
5838 *
5839 * PARAMETERS :
5840 * @id : return the stream id if found
5841 *
5842 * RETURN : int32_t type of status
5843 * NO_ERROR -- success
5844 * none-zero failure code
5845 *==========================================================================*/
5846int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
5847{
5848 /* check if any output or bidirectional stream with the same size and format
5849 and return that stream */
5850 if ((mInputStreamInfo.dim.width > 0) &&
5851 (mInputStreamInfo.dim.height > 0)) {
5852 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5853 it != mStreamInfo.end(); it++) {
5854
5855 camera3_stream_t *stream = (*it)->stream;
5856 if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
5857 (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
5858 (stream->format == mInputStreamInfo.format)) {
5859 // Usage flag for an input stream and the source output stream
5860 // may be different.
5861 LOGD("Found reprocessible output stream! %p", *it);
5862 LOGD("input stream usage 0x%x, current stream usage 0x%x",
5863 stream->usage, mInputStreamInfo.usage);
5864
5865 QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
5866 if (channel != NULL && channel->mStreams[0]) {
5867 id = channel->mStreams[0]->getMyServerID();
5868 return NO_ERROR;
5869 }
5870 }
5871 }
5872 } else {
5873 LOGD("No input stream, so no reprocessible output stream");
5874 }
5875 return NAME_NOT_FOUND;
5876}
5877
5878/*===========================================================================
5879 * FUNCTION : lookupFwkName
5880 *
5881 * DESCRIPTION: In case the enum is not same in fwk and backend
5882 * make sure the parameter is correctly propogated
5883 *
5884 * PARAMETERS :
5885 * @arr : map between the two enums
5886 * @len : len of the map
5887 * @hal_name : name of the hal_parm to map
5888 *
5889 * RETURN : int type of status
5890 * fwk_name -- success
5891 * none-zero failure code
5892 *==========================================================================*/
5893template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
5894 size_t len, halType hal_name)
5895{
5896
5897 for (size_t i = 0; i < len; i++) {
5898 if (arr[i].hal_name == hal_name) {
5899 return arr[i].fwk_name;
5900 }
5901 }
5902
5903 /* Not able to find matching framework type is not necessarily
5904 * an error case. This happens when mm-camera supports more attributes
5905 * than the frameworks do */
5906 LOGH("Cannot find matching framework type");
5907 return NAME_NOT_FOUND;
5908}
5909
5910/*===========================================================================
5911 * FUNCTION : lookupHalName
5912 *
5913 * DESCRIPTION: In case the enum is not same in fwk and backend
5914 * make sure the parameter is correctly propogated
5915 *
5916 * PARAMETERS :
5917 * @arr : map between the two enums
5918 * @len : len of the map
5919 * @fwk_name : name of the hal_parm to map
5920 *
5921 * RETURN : int32_t type of status
5922 * hal_name -- success
5923 * none-zero failure code
5924 *==========================================================================*/
5925template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
5926 size_t len, fwkType fwk_name)
5927{
5928 for (size_t i = 0; i < len; i++) {
5929 if (arr[i].fwk_name == fwk_name) {
5930 return arr[i].hal_name;
5931 }
5932 }
5933
5934 LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
5935 return NAME_NOT_FOUND;
5936}
5937
5938/*===========================================================================
5939 * FUNCTION : lookupProp
5940 *
5941 * DESCRIPTION: lookup a value by its name
5942 *
5943 * PARAMETERS :
5944 * @arr : map between the two enums
5945 * @len : size of the map
5946 * @name : name to be looked up
5947 *
5948 * RETURN : Value if found
5949 * CAM_CDS_MODE_MAX if not found
5950 *==========================================================================*/
5951template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
5952 size_t len, const char *name)
5953{
5954 if (name) {
5955 for (size_t i = 0; i < len; i++) {
5956 if (!strcmp(arr[i].desc, name)) {
5957 return arr[i].val;
5958 }
5959 }
5960 }
5961 return CAM_CDS_MODE_MAX;
5962}
5963
5964/*===========================================================================
 * FUNCTION   : translateFromHalMetadata
 *
 * DESCRIPTION: Translates metadata received from the backend into the
 *              framework camera_metadata_t format.
5967 *
5968 * PARAMETERS :
5969 * @metadata : metadata information from callback
5970 * @timestamp: metadata buffer timestamp
5971 * @request_id: request id
5972 * @jpegMetadata: additional jpeg metadata
Samuel Ha68ba5172016-12-15 18:41:12 -08005973 * @DevCamDebug_meta_enable: enable DevCamDebug meta
5974 * // DevCamDebug metadata end
Thierry Strudel3d639192016-09-09 11:52:26 -07005975 * @pprocDone: whether internal offline postprocsesing is done
5976 *
5977 * RETURN : camera_metadata_t*
5978 * metadata in a format specified by fwk
5979 *==========================================================================*/
5980camera_metadata_t*
5981QCamera3HardwareInterface::translateFromHalMetadata(
5982 metadata_buffer_t *metadata,
5983 nsecs_t timestamp,
5984 int32_t request_id,
5985 const CameraMetadata& jpegMetadata,
5986 uint8_t pipeline_depth,
5987 uint8_t capture_intent,
Samuel Ha68ba5172016-12-15 18:41:12 -08005988 /* DevCamDebug metadata translateFromHalMetadata argument */
5989 uint8_t DevCamDebug_meta_enable,
5990 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07005991 bool pprocDone,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005992 uint8_t fwk_cacMode,
5993 bool firstMetadataInBatch)
Thierry Strudel3d639192016-09-09 11:52:26 -07005994{
5995 CameraMetadata camMetadata;
5996 camera_metadata_t *resultMetadata;
5997
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005998 if (mBatchSize && !firstMetadataInBatch) {
5999 /* In batch mode, use cached metadata from the first metadata
6000 in the batch */
6001 camMetadata.clear();
6002 camMetadata = mCachedMetadata;
6003 }
6004
Thierry Strudel3d639192016-09-09 11:52:26 -07006005 if (jpegMetadata.entryCount())
6006 camMetadata.append(jpegMetadata);
6007
6008 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
6009 camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
6010 camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pipeline_depth, 1);
6011 camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &capture_intent, 1);
Samuel Ha68ba5172016-12-15 18:41:12 -08006012 if (mBatchSize == 0) {
6013 // DevCamDebug metadata translateFromHalMetadata. Only update this one for non-HFR mode
6014 camMetadata.update(DEVCAMDEBUG_META_ENABLE, &DevCamDebug_meta_enable, 1);
6015 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006016
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006017 if (mBatchSize && !firstMetadataInBatch) {
6018 /* In batch mode, use cached metadata instead of parsing metadata buffer again */
6019 resultMetadata = camMetadata.release();
6020 return resultMetadata;
6021 }
6022
Samuel Ha68ba5172016-12-15 18:41:12 -08006023 // atrace_begin(ATRACE_TAG_ALWAYS, "DevCamDebugInfo");
6024 // Only update DevCameraDebug metadta conditionally: non-HFR mode and it is enabled.
6025 if (mBatchSize == 0 && DevCamDebug_meta_enable != 0) {
6026 // DevCamDebug metadata translateFromHalMetadata AF
6027 IF_META_AVAILABLE(int32_t, DevCamDebug_af_lens_position,
6028 CAM_INTF_META_DEV_CAM_AF_LENS_POSITION, metadata) {
6029 int32_t fwk_DevCamDebug_af_lens_position = *DevCamDebug_af_lens_position;
6030 camMetadata.update(DEVCAMDEBUG_AF_LENS_POSITION, &fwk_DevCamDebug_af_lens_position, 1);
6031 }
6032 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_confidence,
6033 CAM_INTF_META_DEV_CAM_AF_TOF_CONFIDENCE, metadata) {
6034 int32_t fwk_DevCamDebug_af_tof_confidence = *DevCamDebug_af_tof_confidence;
6035 camMetadata.update(DEVCAMDEBUG_AF_TOF_CONFIDENCE, &fwk_DevCamDebug_af_tof_confidence, 1);
6036 }
6037 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_distance,
6038 CAM_INTF_META_DEV_CAM_AF_TOF_DISTANCE, metadata) {
6039 int32_t fwk_DevCamDebug_af_tof_distance = *DevCamDebug_af_tof_distance;
6040 camMetadata.update(DEVCAMDEBUG_AF_TOF_DISTANCE, &fwk_DevCamDebug_af_tof_distance, 1);
6041 }
6042 IF_META_AVAILABLE(int32_t, DevCamDebug_af_luma,
6043 CAM_INTF_META_DEV_CAM_AF_LUMA, metadata) {
6044 int32_t fwk_DevCamDebug_af_luma = *DevCamDebug_af_luma;
6045 camMetadata.update(DEVCAMDEBUG_AF_LUMA, &fwk_DevCamDebug_af_luma, 1);
6046 }
6047 IF_META_AVAILABLE(int32_t, DevCamDebug_af_haf_state,
6048 CAM_INTF_META_DEV_CAM_AF_HAF_STATE, metadata) {
6049 int32_t fwk_DevCamDebug_af_haf_state = *DevCamDebug_af_haf_state;
6050 camMetadata.update(DEVCAMDEBUG_AF_HAF_STATE, &fwk_DevCamDebug_af_haf_state, 1);
6051 }
6052 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_target_pos,
6053 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_TARGET_POS, metadata) {
6054 int32_t fwk_DevCamDebug_af_monitor_pdaf_target_pos =
6055 *DevCamDebug_af_monitor_pdaf_target_pos;
6056 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
6057 &fwk_DevCamDebug_af_monitor_pdaf_target_pos, 1);
6058 }
6059 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_confidence,
6060 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_CONFIDENCE, metadata) {
6061 int32_t fwk_DevCamDebug_af_monitor_pdaf_confidence =
6062 *DevCamDebug_af_monitor_pdaf_confidence;
6063 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
6064 &fwk_DevCamDebug_af_monitor_pdaf_confidence, 1);
6065 }
6066 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_refocus,
6067 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_REFOCUS, metadata) {
6068 int32_t fwk_DevCamDebug_af_monitor_pdaf_refocus = *DevCamDebug_af_monitor_pdaf_refocus;
6069 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
6070 &fwk_DevCamDebug_af_monitor_pdaf_refocus, 1);
6071 }
6072 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_target_pos,
6073 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_TARGET_POS, metadata) {
6074 int32_t fwk_DevCamDebug_af_monitor_tof_target_pos =
6075 *DevCamDebug_af_monitor_tof_target_pos;
6076 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
6077 &fwk_DevCamDebug_af_monitor_tof_target_pos, 1);
6078 }
6079 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_confidence,
6080 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_CONFIDENCE, metadata) {
6081 int32_t fwk_DevCamDebug_af_monitor_tof_confidence =
6082 *DevCamDebug_af_monitor_tof_confidence;
6083 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
6084 &fwk_DevCamDebug_af_monitor_tof_confidence, 1);
6085 }
6086 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_refocus,
6087 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_REFOCUS, metadata) {
6088 int32_t fwk_DevCamDebug_af_monitor_tof_refocus = *DevCamDebug_af_monitor_tof_refocus;
6089 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
6090 &fwk_DevCamDebug_af_monitor_tof_refocus, 1);
6091 }
6092 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_type_select,
6093 CAM_INTF_META_DEV_CAM_AF_MONITOR_TYPE_SELECT, metadata) {
6094 int32_t fwk_DevCamDebug_af_monitor_type_select = *DevCamDebug_af_monitor_type_select;
6095 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
6096 &fwk_DevCamDebug_af_monitor_type_select, 1);
6097 }
6098 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_refocus,
6099 CAM_INTF_META_DEV_CAM_AF_MONITOR_REFOCUS, metadata) {
6100 int32_t fwk_DevCamDebug_af_monitor_refocus = *DevCamDebug_af_monitor_refocus;
6101 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_REFOCUS,
6102 &fwk_DevCamDebug_af_monitor_refocus, 1);
6103 }
6104 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_target_pos,
6105 CAM_INTF_META_DEV_CAM_AF_MONITOR_TARGET_POS, metadata) {
6106 int32_t fwk_DevCamDebug_af_monitor_target_pos = *DevCamDebug_af_monitor_target_pos;
6107 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
6108 &fwk_DevCamDebug_af_monitor_target_pos, 1);
6109 }
6110 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_target_pos,
6111 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_TARGET_POS, metadata) {
6112 int32_t fwk_DevCamDebug_af_search_pdaf_target_pos =
6113 *DevCamDebug_af_search_pdaf_target_pos;
6114 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
6115 &fwk_DevCamDebug_af_search_pdaf_target_pos, 1);
6116 }
6117 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_next_pos,
6118 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEXT_POS, metadata) {
6119 int32_t fwk_DevCamDebug_af_search_pdaf_next_pos = *DevCamDebug_af_search_pdaf_next_pos;
6120 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
6121 &fwk_DevCamDebug_af_search_pdaf_next_pos, 1);
6122 }
6123 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_near_pos,
6124 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEAR_POS, metadata) {
6125 int32_t fwk_DevCamDebug_af_search_pdaf_near_pos = *DevCamDebug_af_search_pdaf_near_pos;
6126 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
6127 &fwk_DevCamDebug_af_search_pdaf_near_pos, 1);
6128 }
6129 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_far_pos,
6130 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_FAR_POS, metadata) {
6131 int32_t fwk_DevCamDebug_af_search_pdaf_far_pos = *DevCamDebug_af_search_pdaf_far_pos;
6132 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
6133 &fwk_DevCamDebug_af_search_pdaf_far_pos, 1);
6134 }
6135 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_confidence,
6136 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_CONFIDENCE, metadata) {
6137 int32_t fwk_DevCamDebug_af_search_pdaf_confidence = *DevCamDebug_af_search_pdaf_confidence;
6138 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
6139 &fwk_DevCamDebug_af_search_pdaf_confidence, 1);
6140 }
6141 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_target_pos,
6142 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_TARGET_POS, metadata) {
6143 int32_t fwk_DevCamDebug_af_search_tof_target_pos =
6144 *DevCamDebug_af_search_tof_target_pos;
6145 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
6146 &fwk_DevCamDebug_af_search_tof_target_pos, 1);
6147 }
6148 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_next_pos,
6149 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEXT_POS, metadata) {
6150 int32_t fwk_DevCamDebug_af_search_tof_next_pos = *DevCamDebug_af_search_tof_next_pos;
6151 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
6152 &fwk_DevCamDebug_af_search_tof_next_pos, 1);
6153 }
6154 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_near_pos,
6155 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEAR_POS, metadata) {
6156 int32_t fwk_DevCamDebug_af_search_tof_near_pos = *DevCamDebug_af_search_tof_near_pos;
6157 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
6158 &fwk_DevCamDebug_af_search_tof_near_pos, 1);
6159 }
6160 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_far_pos,
6161 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_FAR_POS, metadata) {
6162 int32_t fwk_DevCamDebug_af_search_tof_far_pos = *DevCamDebug_af_search_tof_far_pos;
6163 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
6164 &fwk_DevCamDebug_af_search_tof_far_pos, 1);
6165 }
6166 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_confidence,
6167 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_CONFIDENCE, metadata) {
6168 int32_t fwk_DevCamDebug_af_search_tof_confidence = *DevCamDebug_af_search_tof_confidence;
6169 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
6170 &fwk_DevCamDebug_af_search_tof_confidence, 1);
6171 }
6172 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_type_select,
6173 CAM_INTF_META_DEV_CAM_AF_SEARCH_TYPE_SELECT, metadata) {
6174 int32_t fwk_DevCamDebug_af_search_type_select = *DevCamDebug_af_search_type_select;
6175 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
6176 &fwk_DevCamDebug_af_search_type_select, 1);
6177 }
6178 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_next_pos,
6179 CAM_INTF_META_DEV_CAM_AF_SEARCH_NEXT_POS, metadata) {
6180 int32_t fwk_DevCamDebug_af_search_next_pos = *DevCamDebug_af_search_next_pos;
6181 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
6182 &fwk_DevCamDebug_af_search_next_pos, 1);
6183 }
6184 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_target_pos,
6185 CAM_INTF_META_DEV_CAM_AF_SEARCH_TARGET_POS, metadata) {
6186 int32_t fwk_DevCamDebug_af_search_target_pos = *DevCamDebug_af_search_target_pos;
6187 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
6188 &fwk_DevCamDebug_af_search_target_pos, 1);
6189 }
6190 // DevCamDebug metadata translateFromHalMetadata AEC
6191 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_target_luma,
6192 CAM_INTF_META_DEV_CAM_AEC_TARGET_LUMA, metadata) {
6193 int32_t fwk_DevCamDebug_aec_target_luma = *DevCamDebug_aec_target_luma;
6194 camMetadata.update(DEVCAMDEBUG_AEC_TARGET_LUMA, &fwk_DevCamDebug_aec_target_luma, 1);
6195 }
6196 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_comp_luma,
6197 CAM_INTF_META_DEV_CAM_AEC_COMP_LUMA, metadata) {
6198 int32_t fwk_DevCamDebug_aec_comp_luma = *DevCamDebug_aec_comp_luma;
6199 camMetadata.update(DEVCAMDEBUG_AEC_COMP_LUMA, &fwk_DevCamDebug_aec_comp_luma, 1);
6200 }
6201 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_avg_luma,
6202 CAM_INTF_META_DEV_CAM_AEC_AVG_LUMA, metadata) {
6203 int32_t fwk_DevCamDebug_aec_avg_luma = *DevCamDebug_aec_avg_luma;
6204 camMetadata.update(DEVCAMDEBUG_AEC_AVG_LUMA, &fwk_DevCamDebug_aec_avg_luma, 1);
6205 }
6206 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_cur_luma,
6207 CAM_INTF_META_DEV_CAM_AEC_CUR_LUMA, metadata) {
6208 int32_t fwk_DevCamDebug_aec_cur_luma = *DevCamDebug_aec_cur_luma;
6209 camMetadata.update(DEVCAMDEBUG_AEC_CUR_LUMA, &fwk_DevCamDebug_aec_cur_luma, 1);
6210 }
6211 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_linecount,
6212 CAM_INTF_META_DEV_CAM_AEC_LINECOUNT, metadata) {
6213 int32_t fwk_DevCamDebug_aec_linecount = *DevCamDebug_aec_linecount;
6214 camMetadata.update(DEVCAMDEBUG_AEC_LINECOUNT, &fwk_DevCamDebug_aec_linecount, 1);
6215 }
6216 IF_META_AVAILABLE(float, DevCamDebug_aec_real_gain,
6217 CAM_INTF_META_DEV_CAM_AEC_REAL_GAIN, metadata) {
6218 float fwk_DevCamDebug_aec_real_gain = *DevCamDebug_aec_real_gain;
6219 camMetadata.update(DEVCAMDEBUG_AEC_REAL_GAIN, &fwk_DevCamDebug_aec_real_gain, 1);
6220 }
6221 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_exp_index,
6222 CAM_INTF_META_DEV_CAM_AEC_EXP_INDEX, metadata) {
6223 int32_t fwk_DevCamDebug_aec_exp_index = *DevCamDebug_aec_exp_index;
6224 camMetadata.update(DEVCAMDEBUG_AEC_EXP_INDEX, &fwk_DevCamDebug_aec_exp_index, 1);
6225 }
6226 IF_META_AVAILABLE(float, DevCamDebug_aec_lux_idx,
6227 CAM_INTF_META_DEV_CAM_AEC_LUX_IDX, metadata) {
6228 float fwk_DevCamDebug_aec_lux_idx = *DevCamDebug_aec_lux_idx;
6229 camMetadata.update(DEVCAMDEBUG_AEC_LUX_IDX, &fwk_DevCamDebug_aec_lux_idx, 1);
6230 }
6231 // DevCamDebug metadata translateFromHalMetadata AWB
6232 IF_META_AVAILABLE(float, DevCamDebug_awb_r_gain,
6233 CAM_INTF_META_DEV_CAM_AWB_R_GAIN, metadata) {
6234 float fwk_DevCamDebug_awb_r_gain = *DevCamDebug_awb_r_gain;
6235 camMetadata.update(DEVCAMDEBUG_AWB_R_GAIN, &fwk_DevCamDebug_awb_r_gain, 1);
6236 }
6237 IF_META_AVAILABLE(float, DevCamDebug_awb_g_gain,
6238 CAM_INTF_META_DEV_CAM_AWB_G_GAIN, metadata) {
6239 float fwk_DevCamDebug_awb_g_gain = *DevCamDebug_awb_g_gain;
6240 camMetadata.update(DEVCAMDEBUG_AWB_G_GAIN, &fwk_DevCamDebug_awb_g_gain, 1);
6241 }
6242 IF_META_AVAILABLE(float, DevCamDebug_awb_b_gain,
6243 CAM_INTF_META_DEV_CAM_AWB_B_GAIN, metadata) {
6244 float fwk_DevCamDebug_awb_b_gain = *DevCamDebug_awb_b_gain;
6245 camMetadata.update(DEVCAMDEBUG_AWB_B_GAIN, &fwk_DevCamDebug_awb_b_gain, 1);
6246 }
6247 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_cct,
6248 CAM_INTF_META_DEV_CAM_AWB_CCT, metadata) {
6249 int32_t fwk_DevCamDebug_awb_cct = *DevCamDebug_awb_cct;
6250 camMetadata.update(DEVCAMDEBUG_AWB_CCT, &fwk_DevCamDebug_awb_cct, 1);
6251 }
6252 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_decision,
6253 CAM_INTF_META_DEV_CAM_AWB_DECISION, metadata) {
6254 int32_t fwk_DevCamDebug_awb_decision = *DevCamDebug_awb_decision;
6255 camMetadata.update(DEVCAMDEBUG_AWB_DECISION, &fwk_DevCamDebug_awb_decision, 1);
6256 }
6257 }
6258 // atrace_end(ATRACE_TAG_ALWAYS);
6259
Thierry Strudel3d639192016-09-09 11:52:26 -07006260 IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
6261 int64_t fwk_frame_number = *frame_number;
6262 camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
6263 }
6264
6265 IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
6266 int32_t fps_range[2];
6267 fps_range[0] = (int32_t)float_range->min_fps;
6268 fps_range[1] = (int32_t)float_range->max_fps;
6269 camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
6270 fps_range, 2);
6271 LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
6272 fps_range[0], fps_range[1]);
6273 }
6274
6275 IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
6276 camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
6277 }
6278
6279 IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
6280 int val = (uint8_t)lookupFwkName(SCENE_MODES_MAP,
6281 METADATA_MAP_SIZE(SCENE_MODES_MAP),
6282 *sceneMode);
6283 if (NAME_NOT_FOUND != val) {
6284 uint8_t fwkSceneMode = (uint8_t)val;
6285 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
6286 LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
6287 fwkSceneMode);
6288 }
6289 }
6290
6291 IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
6292 uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
6293 camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
6294 }
6295
6296 IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
6297 uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
6298 camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
6299 }
6300
6301 IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
6302 uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
6303 camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
6304 }
6305
6306 IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
6307 CAM_INTF_META_EDGE_MODE, metadata) {
6308 camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
6309 }
6310
6311 IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
6312 uint8_t fwk_flashPower = (uint8_t) *flashPower;
6313 camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
6314 }
6315
6316 IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
6317 camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
6318 }
6319
6320 IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
6321 if (0 <= *flashState) {
6322 uint8_t fwk_flashState = (uint8_t) *flashState;
6323 if (!gCamCapability[mCameraId]->flash_available) {
6324 fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
6325 }
6326 camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
6327 }
6328 }
6329
6330 IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
6331 int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
6332 if (NAME_NOT_FOUND != val) {
6333 uint8_t fwk_flashMode = (uint8_t)val;
6334 camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
6335 }
6336 }
6337
6338 IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
6339 uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
6340 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
6341 }
6342
6343 IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
6344 camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
6345 }
6346
6347 IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
6348 camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
6349 }
6350
6351 IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
6352 camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
6353 }
6354
6355 IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
6356 uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
6357 camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
6358 }
6359
6360 IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
6361 uint8_t fwk_videoStab = (uint8_t) *videoStab;
6362 LOGD("fwk_videoStab = %d", fwk_videoStab);
6363 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
6364 } else {
6365 // Regardless of Video stab supports or not, CTS is expecting the EIS result to be non NULL
6366 // and so hardcoding the Video Stab result to OFF mode.
6367 uint8_t fwkVideoStabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
6368 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006369 LOGD("EIS result default to OFF mode");
Thierry Strudel3d639192016-09-09 11:52:26 -07006370 }
6371
6372 IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
6373 uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
6374 camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
6375 }
6376
6377 IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
6378 camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
6379 }
6380
Thierry Strudel3d639192016-09-09 11:52:26 -07006381 IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
6382 CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006383 float fwk_blackLevelInd[BLACK_LEVEL_PATTERN_CNT];
Thierry Strudel3d639192016-09-09 11:52:26 -07006384
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006385 adjustBlackLevelForCFA(blackLevelAppliedPattern->cam_black_level, fwk_blackLevelInd,
6386 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07006387
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006388 LOGD("applied dynamicblackLevel in RGGB order = %f %f %f %f",
Thierry Strudel3d639192016-09-09 11:52:26 -07006389 blackLevelAppliedPattern->cam_black_level[0],
6390 blackLevelAppliedPattern->cam_black_level[1],
6391 blackLevelAppliedPattern->cam_black_level[2],
6392 blackLevelAppliedPattern->cam_black_level[3]);
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006393 camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd,
6394 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006395
6396#ifndef USE_HAL_3_3
6397 // Update the ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
Zhijun Heb753c672016-06-15 14:50:48 -07006398 // Need convert the internal 12 bit depth to sensor 10 bit sensor raw
6399 // depth space.
6400 fwk_blackLevelInd[0] /= 4.0;
6401 fwk_blackLevelInd[1] /= 4.0;
6402 fwk_blackLevelInd[2] /= 4.0;
6403 fwk_blackLevelInd[3] /= 4.0;
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006404 camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd,
6405 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006406#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07006407 }
6408
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006409#ifndef USE_HAL_3_3
6410 // Fixed whitelevel is used by ISP/Sensor
6411 camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
6412 &gCamCapability[mCameraId]->white_level, 1);
6413#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07006414
6415 IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
6416 CAM_INTF_META_SCALER_CROP_REGION, metadata) {
6417 int32_t scalerCropRegion[4];
6418 scalerCropRegion[0] = hScalerCropRegion->left;
6419 scalerCropRegion[1] = hScalerCropRegion->top;
6420 scalerCropRegion[2] = hScalerCropRegion->width;
6421 scalerCropRegion[3] = hScalerCropRegion->height;
6422
6423 // Adjust crop region from sensor output coordinate system to active
6424 // array coordinate system.
6425 mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
6426 scalerCropRegion[2], scalerCropRegion[3]);
6427
6428 camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
6429 }
6430
6431 IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
6432 LOGD("sensorExpTime = %lld", *sensorExpTime);
6433 camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
6434 }
6435
6436 IF_META_AVAILABLE(int64_t, sensorFameDuration,
6437 CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
6438 LOGD("sensorFameDuration = %lld", *sensorFameDuration);
6439 camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
6440 }
6441
6442 IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
6443 CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
6444 LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
6445 camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
6446 sensorRollingShutterSkew, 1);
6447 }
6448
6449 IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
6450 LOGD("sensorSensitivity = %d", *sensorSensitivity);
6451 camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
6452
6453 //calculate the noise profile based on sensitivity
6454 double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
6455 double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
6456 double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
6457 for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
6458 noise_profile[i] = noise_profile_S;
6459 noise_profile[i+1] = noise_profile_O;
6460 }
6461 LOGD("noise model entry (S, O) is (%f, %f)",
6462 noise_profile_S, noise_profile_O);
6463 camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
6464 (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
6465 }
6466
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006467#ifndef USE_HAL_3_3
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07006468 int32_t fwk_ispSensitivity = 100;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006469 IF_META_AVAILABLE(int32_t, ispSensitivity, CAM_INTF_META_ISP_SENSITIVITY, metadata) {
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07006470 fwk_ispSensitivity = (int32_t) *ispSensitivity;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006471 }
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07006472 IF_META_AVAILABLE(float, postStatsSensitivity, CAM_INTF_META_ISP_POST_STATS_SENSITIVITY, metadata) {
6473 fwk_ispSensitivity = (int32_t) (*postStatsSensitivity * fwk_ispSensitivity);
6474 }
6475 camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &fwk_ispSensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006476#endif
6477
Thierry Strudel3d639192016-09-09 11:52:26 -07006478 IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
6479 uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
6480 camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
6481 }
6482
6483 IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
6484 int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
6485 *faceDetectMode);
6486 if (NAME_NOT_FOUND != val) {
6487 uint8_t fwk_faceDetectMode = (uint8_t)val;
6488 camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
6489
6490 if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
6491 IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
6492 CAM_INTF_META_FACE_DETECTION, metadata) {
6493 uint8_t numFaces = MIN(
6494 faceDetectionInfo->num_faces_detected, MAX_ROI);
6495 int32_t faceIds[MAX_ROI];
6496 uint8_t faceScores[MAX_ROI];
6497 int32_t faceRectangles[MAX_ROI * 4];
6498 int32_t faceLandmarks[MAX_ROI * 6];
6499 size_t j = 0, k = 0;
6500
6501 for (size_t i = 0; i < numFaces; i++) {
6502 faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
6503 // Adjust crop region from sensor output coordinate system to active
6504 // array coordinate system.
6505 cam_rect_t& rect = faceDetectionInfo->faces[i].face_boundary;
6506 mCropRegionMapper.toActiveArray(rect.left, rect.top,
6507 rect.width, rect.height);
6508
6509 convertToRegions(faceDetectionInfo->faces[i].face_boundary,
6510 faceRectangles+j, -1);
6511
6512 j+= 4;
6513 }
6514 if (numFaces <= 0) {
6515 memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
6516 memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
6517 memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
6518 memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
6519 }
6520
6521 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
6522 numFaces);
6523 camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
6524 faceRectangles, numFaces * 4U);
6525 if (fwk_faceDetectMode ==
6526 ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
6527 IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
6528 CAM_INTF_META_FACE_LANDMARK, metadata) {
6529
6530 for (size_t i = 0; i < numFaces; i++) {
6531 // Map the co-ordinate sensor output coordinate system to active
6532 // array coordinate system.
6533 mCropRegionMapper.toActiveArray(
6534 landmarks->face_landmarks[i].left_eye_center.x,
6535 landmarks->face_landmarks[i].left_eye_center.y);
6536 mCropRegionMapper.toActiveArray(
6537 landmarks->face_landmarks[i].right_eye_center.x,
6538 landmarks->face_landmarks[i].right_eye_center.y);
6539 mCropRegionMapper.toActiveArray(
6540 landmarks->face_landmarks[i].mouth_center.x,
6541 landmarks->face_landmarks[i].mouth_center.y);
6542
6543 convertLandmarks(landmarks->face_landmarks[i], faceLandmarks+k);
Thierry Strudel04e026f2016-10-10 11:27:36 -07006544 k+= TOTAL_LANDMARK_INDICES;
6545 }
6546 } else {
6547 for (size_t i = 0; i < numFaces; i++) {
6548 setInvalidLandmarks(faceLandmarks+k);
6549 k+= TOTAL_LANDMARK_INDICES;
Thierry Strudel3d639192016-09-09 11:52:26 -07006550 }
6551 }
6552
6553 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
6554 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
6555 faceLandmarks, numFaces * 6U);
6556 }
6557 }
6558 }
6559 }
6560 }
6561
6562 IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
6563 uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
6564 camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &fwk_histogramMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006565
6566 if (fwk_histogramMode == ANDROID_STATISTICS_HISTOGRAM_MODE_ON) {
6567 IF_META_AVAILABLE(cam_hist_stats_t, stats_data, CAM_INTF_META_HISTOGRAM, metadata) {
6568 // process histogram statistics info
6569 uint32_t hist_buf[3][CAM_HISTOGRAM_STATS_SIZE];
6570 uint32_t hist_size = sizeof(cam_histogram_data_t::hist_buf);
6571 cam_histogram_data_t rHistData, gHistData, bHistData;
6572 memset(&rHistData, 0, sizeof(rHistData));
6573 memset(&gHistData, 0, sizeof(gHistData));
6574 memset(&bHistData, 0, sizeof(bHistData));
6575
6576 switch (stats_data->type) {
6577 case CAM_HISTOGRAM_TYPE_BAYER:
6578 switch (stats_data->bayer_stats.data_type) {
6579 case CAM_STATS_CHANNEL_GR:
6580 rHistData = gHistData = bHistData = stats_data->bayer_stats.gr_stats;
6581 break;
6582 case CAM_STATS_CHANNEL_GB:
6583 rHistData = gHistData = bHistData = stats_data->bayer_stats.gb_stats;
6584 break;
6585 case CAM_STATS_CHANNEL_B:
6586 rHistData = gHistData = bHistData = stats_data->bayer_stats.b_stats;
6587 break;
6588 case CAM_STATS_CHANNEL_ALL:
6589 rHistData = stats_data->bayer_stats.r_stats;
6590 //Framework expects only 3 channels. So, for now,
6591 //use gb stats for G channel.
6592 gHistData = stats_data->bayer_stats.gb_stats;
6593 bHistData = stats_data->bayer_stats.b_stats;
6594 break;
6595 case CAM_STATS_CHANNEL_Y:
6596 case CAM_STATS_CHANNEL_R:
6597 default:
6598 rHistData = gHistData = bHistData = stats_data->bayer_stats.r_stats;
6599 break;
6600 }
6601 break;
6602 case CAM_HISTOGRAM_TYPE_YUV:
6603 rHistData = gHistData = bHistData = stats_data->yuv_stats;
6604 break;
6605 }
6606
6607 memcpy(hist_buf, rHistData.hist_buf, hist_size);
6608 memcpy(hist_buf[1], gHistData.hist_buf, hist_size);
6609 memcpy(hist_buf[2], bHistData.hist_buf, hist_size);
6610
6611 camMetadata.update(ANDROID_STATISTICS_HISTOGRAM, (int32_t*)hist_buf, hist_size*3);
6612 }
6613 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006614 }
6615
6616 IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
6617 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
6618 uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
6619 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
6620 }
6621
6622 IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
6623 CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
6624 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
6625 CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
6626 }
6627
6628 IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
6629 CAM_INTF_META_LENS_SHADING_MAP, metadata) {
6630 size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
6631 CAM_MAX_SHADING_MAP_HEIGHT);
6632 size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
6633 CAM_MAX_SHADING_MAP_WIDTH);
6634 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
6635 lensShadingMap->lens_shading, 4U * map_width * map_height);
6636 }
6637
6638 IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
6639 uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
6640 camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
6641 }
6642
6643 IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
6644 //Populate CAM_INTF_META_TONEMAP_CURVES
6645 /* ch0 = G, ch 1 = B, ch 2 = R*/
6646 if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
6647 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
6648 tonemap->tonemap_points_cnt,
6649 CAM_MAX_TONEMAP_CURVE_SIZE);
6650 tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
6651 }
6652
6653 camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
6654 &tonemap->curves[0].tonemap_points[0][0],
6655 tonemap->tonemap_points_cnt * 2);
6656
6657 camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
6658 &tonemap->curves[1].tonemap_points[0][0],
6659 tonemap->tonemap_points_cnt * 2);
6660
6661 camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
6662 &tonemap->curves[2].tonemap_points[0][0],
6663 tonemap->tonemap_points_cnt * 2);
6664 }
6665
6666 IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
6667 CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
6668 camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
6669 CC_GAIN_MAX);
6670 }
6671
6672 IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
6673 CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
6674 camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
6675 (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
6676 CC_MATRIX_COLS * CC_MATRIX_ROWS);
6677 }
6678
6679 IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
6680 CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
6681 if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
6682 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
6683 toneCurve->tonemap_points_cnt,
6684 CAM_MAX_TONEMAP_CURVE_SIZE);
6685 toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
6686 }
6687 camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
6688 (float*)toneCurve->curve.tonemap_points,
6689 toneCurve->tonemap_points_cnt * 2);
6690 }
6691
6692 IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
6693 CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
6694 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
6695 predColorCorrectionGains->gains, 4);
6696 }
6697
6698 IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
6699 CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
6700 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
6701 (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
6702 CC_MATRIX_ROWS * CC_MATRIX_COLS);
6703 }
6704
6705 IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
6706 camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
6707 }
6708
6709 IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
6710 uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
6711 camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
6712 }
6713
6714 IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
6715 uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
6716 camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
6717 }
6718
6719 IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
6720 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
6721 *effectMode);
6722 if (NAME_NOT_FOUND != val) {
6723 uint8_t fwk_effectMode = (uint8_t)val;
6724 camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
6725 }
6726 }
6727
6728 IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
6729 CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
6730 int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
6731 METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
6732 if (NAME_NOT_FOUND != fwk_testPatternMode) {
6733 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
6734 }
6735 int32_t fwk_testPatternData[4];
6736 fwk_testPatternData[0] = testPatternData->r;
6737 fwk_testPatternData[3] = testPatternData->b;
6738 switch (gCamCapability[mCameraId]->color_arrangement) {
6739 case CAM_FILTER_ARRANGEMENT_RGGB:
6740 case CAM_FILTER_ARRANGEMENT_GRBG:
6741 fwk_testPatternData[1] = testPatternData->gr;
6742 fwk_testPatternData[2] = testPatternData->gb;
6743 break;
6744 case CAM_FILTER_ARRANGEMENT_GBRG:
6745 case CAM_FILTER_ARRANGEMENT_BGGR:
6746 fwk_testPatternData[2] = testPatternData->gr;
6747 fwk_testPatternData[1] = testPatternData->gb;
6748 break;
6749 default:
6750 LOGE("color arrangement %d is not supported",
6751 gCamCapability[mCameraId]->color_arrangement);
6752 break;
6753 }
6754 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
6755 }
6756
6757 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
6758 camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
6759 }
6760
6761 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
6762 String8 str((const char *)gps_methods);
6763 camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
6764 }
6765
6766 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
6767 camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
6768 }
6769
6770 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
6771 camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
6772 }
6773
6774 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
6775 uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
6776 camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
6777 }
6778
6779 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
6780 uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
6781 camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
6782 }
6783
6784 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
6785 int32_t fwk_thumb_size[2];
6786 fwk_thumb_size[0] = thumb_size->width;
6787 fwk_thumb_size[1] = thumb_size->height;
6788 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
6789 }
6790
6791 IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
6792 camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
6793 privateData,
6794 MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
6795 }
6796
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006797 IF_META_AVAILABLE(int32_t, meteringMode, CAM_INTF_PARM_AEC_ALGO_TYPE, metadata) {
6798 camMetadata.update(QCAMERA3_EXPOSURE_METERING_MODE,
6799 meteringMode, 1);
6800 }
6801
Thierry Strudel3d639192016-09-09 11:52:26 -07006802 if (metadata->is_tuning_params_valid) {
6803 uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
6804 uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
6805 metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
6806
6807
6808 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
6809 sizeof(uint32_t));
6810 data += sizeof(uint32_t);
6811
6812 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
6813 sizeof(uint32_t));
6814 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
6815 data += sizeof(uint32_t);
6816
6817 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
6818 sizeof(uint32_t));
6819 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
6820 data += sizeof(uint32_t);
6821
6822 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
6823 sizeof(uint32_t));
6824 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
6825 data += sizeof(uint32_t);
6826
6827 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
6828 sizeof(uint32_t));
6829 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
6830 data += sizeof(uint32_t);
6831
6832 metadata->tuning_params.tuning_mod3_data_size = 0;
6833 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
6834 sizeof(uint32_t));
6835 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
6836 data += sizeof(uint32_t);
6837
6838 size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
6839 TUNING_SENSOR_DATA_MAX);
6840 memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
6841 count);
6842 data += count;
6843
6844 count = MIN(metadata->tuning_params.tuning_vfe_data_size,
6845 TUNING_VFE_DATA_MAX);
6846 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
6847 count);
6848 data += count;
6849
6850 count = MIN(metadata->tuning_params.tuning_cpp_data_size,
6851 TUNING_CPP_DATA_MAX);
6852 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
6853 count);
6854 data += count;
6855
6856 count = MIN(metadata->tuning_params.tuning_cac_data_size,
6857 TUNING_CAC_DATA_MAX);
6858 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
6859 count);
6860 data += count;
6861
6862 camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
6863 (int32_t *)(void *)tuning_meta_data_blob,
6864 (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
6865 }
6866
6867 IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
6868 CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
6869 camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
6870 (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
6871 NEUTRAL_COL_POINTS);
6872 }
6873
6874 IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
6875 uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
6876 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
6877 }
6878
6879 IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
6880 int32_t aeRegions[REGIONS_TUPLE_COUNT];
6881 // Adjust crop region from sensor output coordinate system to active
6882 // array coordinate system.
6883 mCropRegionMapper.toActiveArray(hAeRegions->rect.left, hAeRegions->rect.top,
6884 hAeRegions->rect.width, hAeRegions->rect.height);
6885
6886 convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
6887 camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
6888 REGIONS_TUPLE_COUNT);
6889 LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
6890 aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
6891 hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width,
6892 hAeRegions->rect.height);
6893 }
6894
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07006895 IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
6896 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
6897 if (NAME_NOT_FOUND != val) {
6898 uint8_t fwkAfMode = (uint8_t)val;
6899 camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
6900 LOGD("Metadata : ANDROID_CONTROL_AF_MODE %d", val);
6901 } else {
6902 LOGH("Metadata not found : ANDROID_CONTROL_AF_MODE %d",
6903 val);
6904 }
6905 }
6906
Thierry Strudel3d639192016-09-09 11:52:26 -07006907 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
6908 uint8_t fwk_afState = (uint8_t) *afState;
6909 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07006910 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
Thierry Strudel3d639192016-09-09 11:52:26 -07006911 }
6912
6913 IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
6914 camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
6915 }
6916
6917 IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
6918 camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
6919 }
6920
6921 IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
6922 uint8_t fwk_lensState = *lensState;
6923 camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
6924 }
6925
6926 IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
6927 /*af regions*/
6928 int32_t afRegions[REGIONS_TUPLE_COUNT];
6929 // Adjust crop region from sensor output coordinate system to active
6930 // array coordinate system.
6931 mCropRegionMapper.toActiveArray(hAfRegions->rect.left, hAfRegions->rect.top,
6932 hAfRegions->rect.width, hAfRegions->rect.height);
6933
6934 convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
6935 camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
6936 REGIONS_TUPLE_COUNT);
6937 LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
6938 afRegions[0], afRegions[1], afRegions[2], afRegions[3],
6939 hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width,
6940 hAfRegions->rect.height);
6941 }
6942
6943 IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
Shuzhen Wangf6890e02016-08-12 14:28:54 -07006944 uint32_t ab_mode = *hal_ab_mode;
6945 if (ab_mode == CAM_ANTIBANDING_MODE_AUTO_60HZ ||
6946 ab_mode == CAM_ANTIBANDING_MODE_AUTO_50HZ) {
6947 ab_mode = CAM_ANTIBANDING_MODE_AUTO;
6948 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006949 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
Shuzhen Wangf6890e02016-08-12 14:28:54 -07006950 ab_mode);
Thierry Strudel3d639192016-09-09 11:52:26 -07006951 if (NAME_NOT_FOUND != val) {
6952 uint8_t fwk_ab_mode = (uint8_t)val;
6953 camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
6954 }
6955 }
6956
6957 IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
6958 int val = lookupFwkName(SCENE_MODES_MAP,
6959 METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
6960 if (NAME_NOT_FOUND != val) {
6961 uint8_t fwkBestshotMode = (uint8_t)val;
6962 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
6963 LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
6964 } else {
6965 LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
6966 }
6967 }
6968
6969 IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
6970 uint8_t fwk_mode = (uint8_t) *mode;
6971 camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
6972 }
6973
6974 /* Constant metadata values to be update*/
6975 uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
6976 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
6977
6978 uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
6979 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
6980
6981 int32_t hotPixelMap[2];
6982 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
6983
6984 // CDS
6985 IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
6986 camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
6987 }
6988
Thierry Strudel04e026f2016-10-10 11:27:36 -07006989 IF_META_AVAILABLE(cam_sensor_hdr_type_t, vhdr, CAM_INTF_PARM_SENSOR_HDR, metadata) {
6990 int32_t fwk_hdr;
6991 if(*vhdr == CAM_SENSOR_HDR_OFF) {
6992 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_OFF;
6993 } else {
6994 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_ON;
6995 }
6996 camMetadata.update(QCAMERA3_VIDEO_HDR_MODE, &fwk_hdr, 1);
6997 }
6998
6999 IF_META_AVAILABLE(cam_ir_mode_type_t, ir, CAM_INTF_META_IR_MODE, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007000 int32_t fwk_ir = (int32_t) *ir;
7001 camMetadata.update(QCAMERA3_IR_MODE, &fwk_ir, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007002 }
7003
Thierry Strudel269c81a2016-10-12 12:13:59 -07007004 // AEC SPEED
7005 IF_META_AVAILABLE(float, aec, CAM_INTF_META_AEC_CONVERGENCE_SPEED, metadata) {
7006 camMetadata.update(QCAMERA3_AEC_CONVERGENCE_SPEED, aec, 1);
7007 }
7008
7009 // AWB SPEED
7010 IF_META_AVAILABLE(float, awb, CAM_INTF_META_AWB_CONVERGENCE_SPEED, metadata) {
7011 camMetadata.update(QCAMERA3_AWB_CONVERGENCE_SPEED, awb, 1);
7012 }
7013
Thierry Strudel3d639192016-09-09 11:52:26 -07007014 // TNR
7015 IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
7016 uint8_t tnr_enable = tnr->denoise_enable;
7017 int32_t tnr_process_type = (int32_t)tnr->process_plates;
7018
7019 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
7020 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
7021 }
7022
7023 // Reprocess crop data
7024 IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
7025 uint8_t cnt = crop_data->num_of_streams;
7026 if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
7027 // mm-qcamera-daemon only posts crop_data for streams
7028 // not linked to pproc. So no valid crop metadata is not
7029 // necessarily an error case.
7030 LOGD("No valid crop metadata entries");
7031 } else {
7032 uint32_t reproc_stream_id;
7033 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7034 LOGD("No reprocessible stream found, ignore crop data");
7035 } else {
7036 int rc = NO_ERROR;
7037 Vector<int32_t> roi_map;
7038 int32_t *crop = new int32_t[cnt*4];
7039 if (NULL == crop) {
7040 rc = NO_MEMORY;
7041 }
7042 if (NO_ERROR == rc) {
7043 int32_t streams_found = 0;
7044 for (size_t i = 0; i < cnt; i++) {
7045 if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
7046 if (pprocDone) {
7047 // HAL already does internal reprocessing,
7048 // either via reprocessing before JPEG encoding,
7049 // or offline postprocessing for pproc bypass case.
7050 crop[0] = 0;
7051 crop[1] = 0;
7052 crop[2] = mInputStreamInfo.dim.width;
7053 crop[3] = mInputStreamInfo.dim.height;
7054 } else {
7055 crop[0] = crop_data->crop_info[i].crop.left;
7056 crop[1] = crop_data->crop_info[i].crop.top;
7057 crop[2] = crop_data->crop_info[i].crop.width;
7058 crop[3] = crop_data->crop_info[i].crop.height;
7059 }
7060 roi_map.add(crop_data->crop_info[i].roi_map.left);
7061 roi_map.add(crop_data->crop_info[i].roi_map.top);
7062 roi_map.add(crop_data->crop_info[i].roi_map.width);
7063 roi_map.add(crop_data->crop_info[i].roi_map.height);
7064 streams_found++;
7065 LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
7066 crop[0], crop[1], crop[2], crop[3]);
7067 LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
7068 crop_data->crop_info[i].roi_map.left,
7069 crop_data->crop_info[i].roi_map.top,
7070 crop_data->crop_info[i].roi_map.width,
7071 crop_data->crop_info[i].roi_map.height);
7072 break;
7073
7074 }
7075 }
7076 camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
7077 &streams_found, 1);
7078 camMetadata.update(QCAMERA3_CROP_REPROCESS,
7079 crop, (size_t)(streams_found * 4));
7080 if (roi_map.array()) {
7081 camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
7082 roi_map.array(), roi_map.size());
7083 }
7084 }
7085 if (crop) {
7086 delete [] crop;
7087 }
7088 }
7089 }
7090 }
7091
7092 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
7093 // Regardless of CAC supports or not, CTS is expecting the CAC result to be non NULL and
7094 // so hardcoding the CAC result to OFF mode.
7095 uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7096 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
7097 } else {
7098 IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
7099 int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
7100 *cacMode);
7101 if (NAME_NOT_FOUND != val) {
7102 uint8_t resultCacMode = (uint8_t)val;
7103 // check whether CAC result from CB is equal to Framework set CAC mode
7104 // If not equal then set the CAC mode came in corresponding request
7105 if (fwk_cacMode != resultCacMode) {
7106 resultCacMode = fwk_cacMode;
7107 }
7108 LOGD("fwk_cacMode=%d resultCacMode=%d", fwk_cacMode, resultCacMode);
7109 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
7110 } else {
7111 LOGE("Invalid CAC camera parameter: %d", *cacMode);
7112 }
7113 }
7114 }
7115
7116 // Post blob of cam_cds_data through vendor tag.
7117 IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
7118 uint8_t cnt = cdsInfo->num_of_streams;
7119 cam_cds_data_t cdsDataOverride;
7120 memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
7121 cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
7122 cdsDataOverride.num_of_streams = 1;
7123 if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
7124 uint32_t reproc_stream_id;
7125 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7126 LOGD("No reprocessible stream found, ignore cds data");
7127 } else {
7128 for (size_t i = 0; i < cnt; i++) {
7129 if (cdsInfo->cds_info[i].stream_id ==
7130 reproc_stream_id) {
7131 cdsDataOverride.cds_info[0].cds_enable =
7132 cdsInfo->cds_info[i].cds_enable;
7133 break;
7134 }
7135 }
7136 }
7137 } else {
7138 LOGD("Invalid stream count %d in CDS_DATA", cnt);
7139 }
7140 camMetadata.update(QCAMERA3_CDS_INFO,
7141 (uint8_t *)&cdsDataOverride,
7142 sizeof(cam_cds_data_t));
7143 }
7144
7145 // Ldaf calibration data
7146 if (!mLdafCalibExist) {
7147 IF_META_AVAILABLE(uint32_t, ldafCalib,
7148 CAM_INTF_META_LDAF_EXIF, metadata) {
7149 mLdafCalibExist = true;
7150 mLdafCalib[0] = ldafCalib[0];
7151 mLdafCalib[1] = ldafCalib[1];
7152 LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
7153 ldafCalib[0], ldafCalib[1]);
7154 }
7155 }
7156
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007157 // Reprocess and DDM debug data through vendor tag
7158 cam_reprocess_info_t repro_info;
7159 memset(&repro_info, 0, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007160 IF_META_AVAILABLE(cam_stream_crop_info_t, sensorCropInfo,
7161 CAM_INTF_META_SNAP_CROP_INFO_SENSOR, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007162 memcpy(&(repro_info.sensor_crop_info), sensorCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007163 }
7164 IF_META_AVAILABLE(cam_stream_crop_info_t, camifCropInfo,
7165 CAM_INTF_META_SNAP_CROP_INFO_CAMIF, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007166 memcpy(&(repro_info.camif_crop_info), camifCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007167 }
7168 IF_META_AVAILABLE(cam_stream_crop_info_t, ispCropInfo,
7169 CAM_INTF_META_SNAP_CROP_INFO_ISP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007170 memcpy(&(repro_info.isp_crop_info), ispCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007171 }
7172 IF_META_AVAILABLE(cam_stream_crop_info_t, cppCropInfo,
7173 CAM_INTF_META_SNAP_CROP_INFO_CPP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007174 memcpy(&(repro_info.cpp_crop_info), cppCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007175 }
7176 IF_META_AVAILABLE(cam_focal_length_ratio_t, ratio,
7177 CAM_INTF_META_AF_FOCAL_LENGTH_RATIO, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007178 memcpy(&(repro_info.af_focal_length_ratio), ratio, sizeof(cam_focal_length_ratio_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007179 }
7180 IF_META_AVAILABLE(int32_t, flip, CAM_INTF_PARM_FLIP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007181 memcpy(&(repro_info.pipeline_flip), flip, sizeof(int32_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007182 }
7183 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
7184 CAM_INTF_PARM_ROTATION, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007185 memcpy(&(repro_info.rotation_info), rotationInfo, sizeof(cam_rotation_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007186 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007187 IF_META_AVAILABLE(cam_area_t, afRoi, CAM_INTF_META_AF_ROI, metadata) {
7188 memcpy(&(repro_info.af_roi), afRoi, sizeof(cam_area_t));
7189 }
7190 IF_META_AVAILABLE(cam_dyn_img_data_t, dynMask, CAM_INTF_META_IMG_DYN_FEAT, metadata) {
7191 memcpy(&(repro_info.dyn_mask), dynMask, sizeof(cam_dyn_img_data_t));
7192 }
7193 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB,
7194 (uint8_t *)&repro_info, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007195
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007196 // INSTANT AEC MODE
7197 IF_META_AVAILABLE(uint8_t, instant_aec_mode,
7198 CAM_INTF_PARM_INSTANT_AEC, metadata) {
7199 camMetadata.update(QCAMERA3_INSTANT_AEC_MODE, instant_aec_mode, 1);
7200 }
7201
Shuzhen Wange763e802016-03-31 10:24:29 -07007202 // AF scene change
7203 IF_META_AVAILABLE(uint8_t, afSceneChange, CAM_INTF_META_AF_SCENE_CHANGE, metadata) {
7204 camMetadata.update(NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE, afSceneChange, 1);
7205 }
7206
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007207 /* In batch mode, cache the first metadata in the batch */
7208 if (mBatchSize && firstMetadataInBatch) {
7209 mCachedMetadata.clear();
7210 mCachedMetadata = camMetadata;
7211 }
7212
Thierry Strudel3d639192016-09-09 11:52:26 -07007213 resultMetadata = camMetadata.release();
7214 return resultMetadata;
7215}
7216
7217/*===========================================================================
7218 * FUNCTION : saveExifParams
7219 *
 * DESCRIPTION: Cache 3A/stats EXIF debug parameters found in the HAL metadata
 *              into mExifParams for later use by the JPEG encoder.
7221 *
7222 * PARAMETERS :
7223 * @metadata : metadata information from callback
7224 *
7225 * RETURN : none
7226 *
7227 *==========================================================================*/
7228void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
7229{
7230 IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
7231 CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
7232 if (mExifParams.debug_params) {
7233 mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
7234 mExifParams.debug_params->ae_debug_params_valid = TRUE;
7235 }
7236 }
7237 IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
7238 CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
7239 if (mExifParams.debug_params) {
7240 mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
7241 mExifParams.debug_params->awb_debug_params_valid = TRUE;
7242 }
7243 }
7244 IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
7245 CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
7246 if (mExifParams.debug_params) {
7247 mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
7248 mExifParams.debug_params->af_debug_params_valid = TRUE;
7249 }
7250 }
7251 IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
7252 CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
7253 if (mExifParams.debug_params) {
7254 mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
7255 mExifParams.debug_params->asd_debug_params_valid = TRUE;
7256 }
7257 }
7258 IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
7259 CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
7260 if (mExifParams.debug_params) {
7261 mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
7262 mExifParams.debug_params->stats_debug_params_valid = TRUE;
7263 }
7264 }
7265 IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t,bestats_exif_debug_params,
7266 CAM_INTF_META_EXIF_DEBUG_BESTATS, metadata) {
7267 if (mExifParams.debug_params) {
7268 mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
7269 mExifParams.debug_params->bestats_debug_params_valid = TRUE;
7270 }
7271 }
7272 IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
7273 CAM_INTF_META_EXIF_DEBUG_BHIST, metadata) {
7274 if (mExifParams.debug_params) {
7275 mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
7276 mExifParams.debug_params->bhist_debug_params_valid = TRUE;
7277 }
7278 }
7279 IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
7280 CAM_INTF_META_EXIF_DEBUG_3A_TUNING, metadata) {
7281 if (mExifParams.debug_params) {
7282 mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
7283 mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
7284 }
7285 }
7286}
7287
7288/*===========================================================================
7289 * FUNCTION : get3AExifParams
7290 *
 * DESCRIPTION: Accessor for the 3A EXIF parameters most recently cached by
 *              saveExifParams()/metadata translation.
7292 *
7293 * PARAMETERS : none
7294 *
7295 *
7296 * RETURN : mm_jpeg_exif_params_t
7297 *
7298 *==========================================================================*/
mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
{
    // Plain accessor: returns (by value) the EXIF parameters cached on this
    // instance; the caller gets its own copy.
    return mExifParams;
}
7303
7304/*===========================================================================
7305 * FUNCTION : translateCbUrgentMetadataToResultMetadata
7306 *
 * DESCRIPTION: Translate the urgent (partial-result) 3A entries of the HAL
 *              metadata buffer into framework result metadata.
7308 *
7309 * PARAMETERS :
7310 * @metadata : metadata information from callback
7311 *
7312 * RETURN : camera_metadata_t*
7313 * metadata in a format specified by fwk
7314 *==========================================================================*/
camera_metadata_t*
QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
                                 (metadata_buffer_t *metadata)
{
    CameraMetadata camMetadata;
    camera_metadata_t *resultMetadata;


    // AWB state, forwarded as-is (enum values are numerically aligned).
    IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
        uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
        camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
        LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
    }

    // AE precapture trigger and its id are echoed back to the framework.
    IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
        camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
                &aecTrigger->trigger, 1);
        camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
                &aecTrigger->trigger_id, 1);
        LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
                 aecTrigger->trigger);
        LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
                aecTrigger->trigger_id);
    }

    // AE state, forwarded as-is.
    IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
        uint8_t fwk_ae_state = (uint8_t) *ae_state;
        camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
        LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
    }

    // AF trigger and its id are echoed back to the framework.
    IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
        camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
                &af_trigger->trigger, 1);
        LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
                 af_trigger->trigger);
        camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &af_trigger->trigger_id, 1);
        LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
                af_trigger->trigger_id);
    }

    // AWB mode: map the HAL enum to the framework enum via the lookup table.
    IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
        int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
                METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
        if (NAME_NOT_FOUND != val) {
            uint8_t fwkWhiteBalanceMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
            LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
        } else {
            LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
        }
    }

    // ANDROID_CONTROL_AE_MODE has no single HAL counterpart; it is deduced
    // from three separate parameters, checked in priority order:
    // redeye reduction > flash auto/on > plain AE on/off.
    uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
    uint32_t aeMode = CAM_AE_MODE_MAX;
    int32_t flashMode = CAM_FLASH_MODE_MAX;
    int32_t redeye = -1;
    IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
        aeMode = *pAeMode;
    }
    IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
        flashMode = *pFlashMode;
    }
    IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
        redeye = *pRedeye;
    }

    if (1 == redeye) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
        int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
                flashMode);
        if (NAME_NOT_FOUND != val) {
            fwk_aeMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
        } else {
            LOGE("Unsupported flash mode %d", flashMode);
        }
    } else if (aeMode == CAM_AE_MODE_ON) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else if (aeMode == CAM_AE_MODE_OFF) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else {
        LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
              "flashMode:%d, aeMode:%u!!!",
                 redeye, flashMode, aeMode);
    }
    // Instant AEC: count frames until AEC reports settled or the configured
    // bound is reached, then arm mResetInstantAEC so the feature is turned
    // off. NOTE: mutates member state (mInstantAEC / mResetInstantAEC /
    // mInstantAecFrameIdxCount).
    if (mInstantAEC) {
        // Increment frame Idx count untill a bound reached for instant AEC.
        mInstantAecFrameIdxCount++;
        IF_META_AVAILABLE(cam_3a_params_t, ae_params,
                CAM_INTF_META_AEC_INFO, metadata) {
            LOGH("ae_params->settled = %d",ae_params->settled);
            // If AEC settled, or if number of frames reached bound value,
            // should reset instant AEC.
            if (ae_params->settled ||
                    (mInstantAecFrameIdxCount > mAecSkipDisplayFrameBound)) {
                LOGH("AEC settled or Frames reached instantAEC bound, resetting instantAEC");
                mInstantAEC = false;
                mResetInstantAEC = true;
                mInstantAecFrameIdxCount = 0;
            }
        }
    }
    resultMetadata = camMetadata.release();
    return resultMetadata;
}
7425
7426/*===========================================================================
7427 * FUNCTION : dumpMetadataToFile
7428 *
7429 * DESCRIPTION: Dumps tuning metadata to file system
7430 *
7431 * PARAMETERS :
7432 * @meta : tuning metadata
7433 * @dumpFrameCount : current dump frame count
7434 * @enabled : Enable mask
7435 *
7436 *==========================================================================*/
7437void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
7438 uint32_t &dumpFrameCount,
7439 bool enabled,
7440 const char *type,
7441 uint32_t frameNumber)
7442{
7443 //Some sanity checks
7444 if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
7445 LOGE("Tuning sensor data size bigger than expected %d: %d",
7446 meta.tuning_sensor_data_size,
7447 TUNING_SENSOR_DATA_MAX);
7448 return;
7449 }
7450
7451 if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
7452 LOGE("Tuning VFE data size bigger than expected %d: %d",
7453 meta.tuning_vfe_data_size,
7454 TUNING_VFE_DATA_MAX);
7455 return;
7456 }
7457
7458 if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
7459 LOGE("Tuning CPP data size bigger than expected %d: %d",
7460 meta.tuning_cpp_data_size,
7461 TUNING_CPP_DATA_MAX);
7462 return;
7463 }
7464
7465 if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
7466 LOGE("Tuning CAC data size bigger than expected %d: %d",
7467 meta.tuning_cac_data_size,
7468 TUNING_CAC_DATA_MAX);
7469 return;
7470 }
7471 //
7472
7473 if(enabled){
7474 char timeBuf[FILENAME_MAX];
7475 char buf[FILENAME_MAX];
7476 memset(buf, 0, sizeof(buf));
7477 memset(timeBuf, 0, sizeof(timeBuf));
7478 time_t current_time;
7479 struct tm * timeinfo;
7480 time (&current_time);
7481 timeinfo = localtime (&current_time);
7482 if (timeinfo != NULL) {
7483 strftime (timeBuf, sizeof(timeBuf),
7484 QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
7485 }
7486 String8 filePath(timeBuf);
7487 snprintf(buf,
7488 sizeof(buf),
7489 "%dm_%s_%d.bin",
7490 dumpFrameCount,
7491 type,
7492 frameNumber);
7493 filePath.append(buf);
7494 int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
7495 if (file_fd >= 0) {
7496 ssize_t written_len = 0;
7497 meta.tuning_data_version = TUNING_DATA_VERSION;
7498 void *data = (void *)((uint8_t *)&meta.tuning_data_version);
7499 written_len += write(file_fd, data, sizeof(uint32_t));
7500 data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
7501 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
7502 written_len += write(file_fd, data, sizeof(uint32_t));
7503 data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
7504 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
7505 written_len += write(file_fd, data, sizeof(uint32_t));
7506 data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
7507 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
7508 written_len += write(file_fd, data, sizeof(uint32_t));
7509 data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
7510 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
7511 written_len += write(file_fd, data, sizeof(uint32_t));
7512 meta.tuning_mod3_data_size = 0;
7513 data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
7514 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
7515 written_len += write(file_fd, data, sizeof(uint32_t));
7516 size_t total_size = meta.tuning_sensor_data_size;
7517 data = (void *)((uint8_t *)&meta.data);
7518 written_len += write(file_fd, data, total_size);
7519 total_size = meta.tuning_vfe_data_size;
7520 data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
7521 written_len += write(file_fd, data, total_size);
7522 total_size = meta.tuning_cpp_data_size;
7523 data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
7524 written_len += write(file_fd, data, total_size);
7525 total_size = meta.tuning_cac_data_size;
7526 data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
7527 written_len += write(file_fd, data, total_size);
7528 close(file_fd);
7529 }else {
7530 LOGE("fail to open file for metadata dumping");
7531 }
7532 }
7533}
7534
7535/*===========================================================================
7536 * FUNCTION : cleanAndSortStreamInfo
7537 *
7538 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
7539 * and sort them such that raw stream is at the end of the list
7540 * This is a workaround for camera daemon constraint.
7541 *
7542 * PARAMETERS : None
7543 *
7544 *==========================================================================*/
7545void QCamera3HardwareInterface::cleanAndSortStreamInfo()
7546{
7547 List<stream_info_t *> newStreamInfo;
7548
7549 /*clean up invalid streams*/
7550 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
7551 it != mStreamInfo.end();) {
7552 if(((*it)->status) == INVALID){
7553 QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
7554 delete channel;
7555 free(*it);
7556 it = mStreamInfo.erase(it);
7557 } else {
7558 it++;
7559 }
7560 }
7561
7562 // Move preview/video/callback/snapshot streams into newList
7563 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
7564 it != mStreamInfo.end();) {
7565 if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
7566 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
7567 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
7568 newStreamInfo.push_back(*it);
7569 it = mStreamInfo.erase(it);
7570 } else
7571 it++;
7572 }
7573 // Move raw streams into newList
7574 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
7575 it != mStreamInfo.end();) {
7576 newStreamInfo.push_back(*it);
7577 it = mStreamInfo.erase(it);
7578 }
7579
7580 mStreamInfo = newStreamInfo;
7581}
7582
7583/*===========================================================================
7584 * FUNCTION : extractJpegMetadata
7585 *
7586 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
7587 * JPEG metadata is cached in HAL, and return as part of capture
7588 * result when metadata is returned from camera daemon.
7589 *
7590 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
7591 * @request: capture request
7592 *
7593 *==========================================================================*/
7594void QCamera3HardwareInterface::extractJpegMetadata(
7595 CameraMetadata& jpegMetadata,
7596 const camera3_capture_request_t *request)
7597{
7598 CameraMetadata frame_settings;
7599 frame_settings = request->settings;
7600
7601 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
7602 jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
7603 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
7604 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
7605
7606 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
7607 jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
7608 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
7609 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
7610
7611 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
7612 jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
7613 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
7614 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
7615
7616 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
7617 jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
7618 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
7619 frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
7620
7621 if (frame_settings.exists(ANDROID_JPEG_QUALITY))
7622 jpegMetadata.update(ANDROID_JPEG_QUALITY,
7623 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
7624 frame_settings.find(ANDROID_JPEG_QUALITY).count);
7625
7626 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
7627 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
7628 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
7629 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
7630
7631 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
7632 int32_t thumbnail_size[2];
7633 thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
7634 thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
7635 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
7636 int32_t orientation =
7637 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007638 if ((!needJpegExifRotation()) && ((orientation == 90) || (orientation == 270))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07007639 //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
7640 int32_t temp;
7641 temp = thumbnail_size[0];
7642 thumbnail_size[0] = thumbnail_size[1];
7643 thumbnail_size[1] = temp;
7644 }
7645 }
7646 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
7647 thumbnail_size,
7648 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
7649 }
7650
7651}
7652
7653/*===========================================================================
7654 * FUNCTION : convertToRegions
7655 *
7656 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
7657 *
7658 * PARAMETERS :
7659 * @rect : cam_rect_t struct to convert
7660 * @region : int32_t destination array
7661 * @weight : if we are converting from cam_area_t, weight is valid
7662 * else weight = -1
7663 *
7664 *==========================================================================*/
7665void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
7666 int32_t *region, int weight)
7667{
7668 region[0] = rect.left;
7669 region[1] = rect.top;
7670 region[2] = rect.left + rect.width;
7671 region[3] = rect.top + rect.height;
7672 if (weight > -1) {
7673 region[4] = weight;
7674 }
7675}
7676
7677/*===========================================================================
7678 * FUNCTION : convertFromRegions
7679 *
7680 * DESCRIPTION: helper method to convert from array to cam_rect_t
7681 *
7682 * PARAMETERS :
7683 * @rect : cam_rect_t struct to convert
7684 * @region : int32_t destination array
7685 * @weight : if we are converting from cam_area_t, weight is valid
7686 * else weight = -1
7687 *
7688 *==========================================================================*/
7689void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
Chien-Yu Chen92724a82017-01-06 11:50:30 -08007690 const CameraMetadata &frame_settings, uint32_t tag)
Thierry Strudel3d639192016-09-09 11:52:26 -07007691{
Thierry Strudel3d639192016-09-09 11:52:26 -07007692 int32_t x_min = frame_settings.find(tag).data.i32[0];
7693 int32_t y_min = frame_settings.find(tag).data.i32[1];
7694 int32_t x_max = frame_settings.find(tag).data.i32[2];
7695 int32_t y_max = frame_settings.find(tag).data.i32[3];
7696 roi.weight = frame_settings.find(tag).data.i32[4];
7697 roi.rect.left = x_min;
7698 roi.rect.top = y_min;
7699 roi.rect.width = x_max - x_min;
7700 roi.rect.height = y_max - y_min;
7701}
7702
7703/*===========================================================================
7704 * FUNCTION : resetIfNeededROI
7705 *
7706 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
7707 * crop region
7708 *
7709 * PARAMETERS :
7710 * @roi : cam_area_t struct to resize
7711 * @scalerCropRegion : cam_crop_region_t region to compare against
7712 *
7713 *
7714 *==========================================================================*/
7715bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
7716 const cam_crop_region_t* scalerCropRegion)
7717{
7718 int32_t roi_x_max = roi->rect.width + roi->rect.left;
7719 int32_t roi_y_max = roi->rect.height + roi->rect.top;
7720 int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
7721 int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
7722
7723 /* According to spec weight = 0 is used to indicate roi needs to be disabled
7724 * without having this check the calculations below to validate if the roi
7725 * is inside scalar crop region will fail resulting in the roi not being
7726 * reset causing algorithm to continue to use stale roi window
7727 */
7728 if (roi->weight == 0) {
7729 return true;
7730 }
7731
7732 if ((roi_x_max < scalerCropRegion->left) ||
7733 // right edge of roi window is left of scalar crop's left edge
7734 (roi_y_max < scalerCropRegion->top) ||
7735 // bottom edge of roi window is above scalar crop's top edge
7736 (roi->rect.left > crop_x_max) ||
7737 // left edge of roi window is beyond(right) of scalar crop's right edge
7738 (roi->rect.top > crop_y_max)){
7739 // top edge of roi windo is above scalar crop's top edge
7740 return false;
7741 }
7742 if (roi->rect.left < scalerCropRegion->left) {
7743 roi->rect.left = scalerCropRegion->left;
7744 }
7745 if (roi->rect.top < scalerCropRegion->top) {
7746 roi->rect.top = scalerCropRegion->top;
7747 }
7748 if (roi_x_max > crop_x_max) {
7749 roi_x_max = crop_x_max;
7750 }
7751 if (roi_y_max > crop_y_max) {
7752 roi_y_max = crop_y_max;
7753 }
7754 roi->rect.width = roi_x_max - roi->rect.left;
7755 roi->rect.height = roi_y_max - roi->rect.top;
7756 return true;
7757}
7758
7759/*===========================================================================
7760 * FUNCTION : convertLandmarks
7761 *
7762 * DESCRIPTION: helper method to extract the landmarks from face detection info
7763 *
7764 * PARAMETERS :
7765 * @landmark_data : input landmark data to be converted
7766 * @landmarks : int32_t destination array
7767 *
7768 *
7769 *==========================================================================*/
7770void QCamera3HardwareInterface::convertLandmarks(
7771 cam_face_landmarks_info_t landmark_data,
7772 int32_t *landmarks)
7773{
Thierry Strudel04e026f2016-10-10 11:27:36 -07007774 if (landmark_data.is_left_eye_valid) {
7775 landmarks[LEFT_EYE_X] = (int32_t)landmark_data.left_eye_center.x;
7776 landmarks[LEFT_EYE_Y] = (int32_t)landmark_data.left_eye_center.y;
7777 } else {
7778 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
7779 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
7780 }
7781
7782 if (landmark_data.is_right_eye_valid) {
7783 landmarks[RIGHT_EYE_X] = (int32_t)landmark_data.right_eye_center.x;
7784 landmarks[RIGHT_EYE_Y] = (int32_t)landmark_data.right_eye_center.y;
7785 } else {
7786 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
7787 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
7788 }
7789
7790 if (landmark_data.is_mouth_valid) {
7791 landmarks[MOUTH_X] = (int32_t)landmark_data.mouth_center.x;
7792 landmarks[MOUTH_Y] = (int32_t)landmark_data.mouth_center.y;
7793 } else {
7794 landmarks[MOUTH_X] = FACE_INVALID_POINT;
7795 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
7796 }
7797}
7798
7799/*===========================================================================
7800 * FUNCTION : setInvalidLandmarks
7801 *
7802 * DESCRIPTION: helper method to set invalid landmarks
7803 *
7804 * PARAMETERS :
7805 * @landmarks : int32_t destination array
7806 *
7807 *
7808 *==========================================================================*/
7809void QCamera3HardwareInterface::setInvalidLandmarks(
7810 int32_t *landmarks)
7811{
7812 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
7813 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
7814 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
7815 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
7816 landmarks[MOUTH_X] = FACE_INVALID_POINT;
7817 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
Thierry Strudel3d639192016-09-09 11:52:26 -07007818}
7819
7820#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007821
7822/*===========================================================================
7823 * FUNCTION : getCapabilities
7824 *
7825 * DESCRIPTION: query camera capability from back-end
7826 *
7827 * PARAMETERS :
7828 * @ops : mm-interface ops structure
7829 * @cam_handle : camera handle for which we need capability
7830 *
7831 * RETURN : ptr type of capability structure
7832 * capability for success
7833 * NULL for failure
7834 *==========================================================================*/
cam_capability_t *QCamera3HardwareInterface::getCapabilities(mm_camera_ops_t *ops,
    uint32_t cam_handle)
{
    int rc = NO_ERROR;
    QCamera3HeapMemory *capabilityHeap = NULL;
    cam_capability_t *cap_ptr = NULL;

    if (ops == NULL) {
        LOGE("Invalid arguments");
        return NULL;
    }

    // Scratch heap buffer that the back-end fills with the capability data.
    // It is always torn down before returning; the caller receives a
    // malloc'd copy (cap_ptr) that the caller owns and must free().
    capabilityHeap = new QCamera3HeapMemory(1);
    if (capabilityHeap == NULL) {
        LOGE("creation of capabilityHeap failed");
        return NULL;
    }

    /* Allocate memory for capability buffer */
    rc = capabilityHeap->allocate(sizeof(cam_capability_t));
    if(rc != OK) {
        LOGE("No memory for cappability");
        goto allocate_failed;
    }

    /* Map memory for capability buffer */
    // Zero the buffer first so any field the back-end does not populate
    // reads as 0 rather than garbage.
    memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));

    rc = ops->map_buf(cam_handle,
            CAM_MAPPING_BUF_TYPE_CAPABILITY, capabilityHeap->getFd(0),
            sizeof(cam_capability_t), capabilityHeap->getPtr(0));
    if(rc < 0) {
        LOGE("failed to map capability buffer");
        rc = FAILED_TRANSACTION;
        goto map_failed;
    }

    /* Query Capability */
    // The back-end writes the capability data into the buffer mapped above.
    rc = ops->query_capability(cam_handle);
    if(rc < 0) {
        LOGE("failed to query capability");
        rc = FAILED_TRANSACTION;
        goto query_failed;
    }

    cap_ptr = (cam_capability_t *)malloc(sizeof(cam_capability_t));
    if (cap_ptr == NULL) {
        LOGE("out of memory");
        rc = NO_MEMORY;
        goto query_failed;
    }

    // Snapshot the queried data into the caller-owned copy.
    memset(cap_ptr, 0, sizeof(cam_capability_t));
    memcpy(cap_ptr, DATA_PTR(capabilityHeap, 0), sizeof(cam_capability_t));

    // Clear the analysis-stream padding offsets in the returned copy.
    int index;
    for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
        cam_analysis_info_t *p_analysis_info = &cap_ptr->analysis_info[index];
        p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
        p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
    }

    // Cleanup ladder: each label undoes one successful step above. On the
    // success path rc == NO_ERROR and all three labels simply release the
    // scratch heap, leaving cap_ptr intact for the caller.
query_failed:
    ops->unmap_buf(cam_handle, CAM_MAPPING_BUF_TYPE_CAPABILITY);
map_failed:
    capabilityHeap->deallocate();
allocate_failed:
    delete capabilityHeap;

    if (rc != NO_ERROR) {
        return NULL;
    } else {
        return cap_ptr;
    }
}
7910
Thierry Strudel3d639192016-09-09 11:52:26 -07007911/*===========================================================================
7912 * FUNCTION : initCapabilities
7913 *
7914 * DESCRIPTION: initialize camera capabilities in static data struct
7915 *
7916 * PARAMETERS :
7917 * @cameraId : camera Id
7918 *
7919 * RETURN : int32_t type of status
7920 * NO_ERROR -- success
7921 * none-zero failure code
7922 *==========================================================================*/
7923int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
7924{
7925 int rc = 0;
7926 mm_camera_vtbl_t *cameraHandle = NULL;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007927 uint32_t handle = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07007928
7929 rc = camera_open((uint8_t)cameraId, &cameraHandle);
7930 if (rc) {
7931 LOGE("camera_open failed. rc = %d", rc);
7932 goto open_failed;
7933 }
7934 if (!cameraHandle) {
7935 LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
7936 goto open_failed;
7937 }
7938
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007939 handle = get_main_camera_handle(cameraHandle->camera_handle);
7940 gCamCapability[cameraId] = getCapabilities(cameraHandle->ops, handle);
7941 if (gCamCapability[cameraId] == NULL) {
7942 rc = FAILED_TRANSACTION;
7943 goto failed_op;
Thierry Strudel3d639192016-09-09 11:52:26 -07007944 }
7945
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007946 gCamCapability[cameraId]->camera_index = cameraId;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007947 if (is_dual_camera_by_idx(cameraId)) {
7948 handle = get_aux_camera_handle(cameraHandle->camera_handle);
7949 gCamCapability[cameraId]->aux_cam_cap =
7950 getCapabilities(cameraHandle->ops, handle);
7951 if (gCamCapability[cameraId]->aux_cam_cap == NULL) {
7952 rc = FAILED_TRANSACTION;
7953 free(gCamCapability[cameraId]);
7954 goto failed_op;
7955 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08007956
7957 // Copy the main camera capability to main_cam_cap struct
7958 gCamCapability[cameraId]->main_cam_cap =
7959 (cam_capability_t *)malloc(sizeof(cam_capability_t));
7960 if (gCamCapability[cameraId]->main_cam_cap == NULL) {
7961 LOGE("out of memory");
7962 rc = NO_MEMORY;
7963 goto failed_op;
7964 }
7965 memcpy(gCamCapability[cameraId]->main_cam_cap, gCamCapability[cameraId],
7966 sizeof(cam_capability_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007967 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007968failed_op:
Thierry Strudel3d639192016-09-09 11:52:26 -07007969 cameraHandle->ops->close_camera(cameraHandle->camera_handle);
7970 cameraHandle = NULL;
7971open_failed:
7972 return rc;
7973}
7974
7975/*==========================================================================
7976 * FUNCTION : get3Aversion
7977 *
7978 * DESCRIPTION: get the Q3A S/W version
7979 *
7980 * PARAMETERS :
7981 * @sw_version: Reference of Q3A structure which will hold version info upon
7982 * return
7983 *
7984 * RETURN : None
7985 *
7986 *==========================================================================*/
7987void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
7988{
7989 if(gCamCapability[mCameraId])
7990 sw_version = gCamCapability[mCameraId]->q3a_version;
7991 else
7992 LOGE("Capability structure NULL!");
7993}
7994
7995
7996/*===========================================================================
7997 * FUNCTION : initParameters
7998 *
7999 * DESCRIPTION: initialize camera parameters
8000 *
8001 * PARAMETERS :
8002 *
8003 * RETURN : int32_t type of status
8004 * NO_ERROR -- success
8005 * none-zero failure code
8006 *==========================================================================*/
8007int QCamera3HardwareInterface::initParameters()
8008{
8009 int rc = 0;
8010
8011 //Allocate Set Param Buffer
8012 mParamHeap = new QCamera3HeapMemory(1);
8013 rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
8014 if(rc != OK) {
8015 rc = NO_MEMORY;
8016 LOGE("Failed to allocate SETPARM Heap memory");
8017 delete mParamHeap;
8018 mParamHeap = NULL;
8019 return rc;
8020 }
8021
8022 //Map memory for parameters buffer
8023 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
8024 CAM_MAPPING_BUF_TYPE_PARM_BUF,
8025 mParamHeap->getFd(0),
8026 sizeof(metadata_buffer_t),
8027 (metadata_buffer_t *) DATA_PTR(mParamHeap,0));
8028 if(rc < 0) {
8029 LOGE("failed to map SETPARM buffer");
8030 rc = FAILED_TRANSACTION;
8031 mParamHeap->deallocate();
8032 delete mParamHeap;
8033 mParamHeap = NULL;
8034 return rc;
8035 }
8036
8037 mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
8038
8039 mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
8040 return rc;
8041}
8042
8043/*===========================================================================
8044 * FUNCTION : deinitParameters
8045 *
8046 * DESCRIPTION: de-initialize camera parameters
8047 *
8048 * PARAMETERS :
8049 *
8050 * RETURN : NONE
8051 *==========================================================================*/
8052void QCamera3HardwareInterface::deinitParameters()
8053{
8054 mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
8055 CAM_MAPPING_BUF_TYPE_PARM_BUF);
8056
8057 mParamHeap->deallocate();
8058 delete mParamHeap;
8059 mParamHeap = NULL;
8060
8061 mParameters = NULL;
8062
8063 free(mPrevParameters);
8064 mPrevParameters = NULL;
8065}
8066
8067/*===========================================================================
8068 * FUNCTION : calcMaxJpegSize
8069 *
8070 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
8071 *
8072 * PARAMETERS :
8073 *
8074 * RETURN : max_jpeg_size
8075 *==========================================================================*/
8076size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
8077{
8078 size_t max_jpeg_size = 0;
8079 size_t temp_width, temp_height;
8080 size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
8081 MAX_SIZES_CNT);
8082 for (size_t i = 0; i < count; i++) {
8083 temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
8084 temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
8085 if (temp_width * temp_height > max_jpeg_size ) {
8086 max_jpeg_size = temp_width * temp_height;
8087 }
8088 }
8089 max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
8090 return max_jpeg_size;
8091}
8092
8093/*===========================================================================
8094 * FUNCTION : getMaxRawSize
8095 *
8096 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
8097 *
8098 * PARAMETERS :
8099 *
8100 * RETURN : Largest supported Raw Dimension
8101 *==========================================================================*/
8102cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
8103{
8104 int max_width = 0;
8105 cam_dimension_t maxRawSize;
8106
8107 memset(&maxRawSize, 0, sizeof(cam_dimension_t));
8108 for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
8109 if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
8110 max_width = gCamCapability[camera_id]->raw_dim[i].width;
8111 maxRawSize = gCamCapability[camera_id]->raw_dim[i];
8112 }
8113 }
8114 return maxRawSize;
8115}
8116
8117
8118/*===========================================================================
8119 * FUNCTION : calcMaxJpegDim
8120 *
8121 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
8122 *
8123 * PARAMETERS :
8124 *
8125 * RETURN : max_jpeg_dim
8126 *==========================================================================*/
8127cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
8128{
8129 cam_dimension_t max_jpeg_dim;
8130 cam_dimension_t curr_jpeg_dim;
8131 max_jpeg_dim.width = 0;
8132 max_jpeg_dim.height = 0;
8133 curr_jpeg_dim.width = 0;
8134 curr_jpeg_dim.height = 0;
8135 for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
8136 curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
8137 curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
8138 if (curr_jpeg_dim.width * curr_jpeg_dim.height >
8139 max_jpeg_dim.width * max_jpeg_dim.height ) {
8140 max_jpeg_dim.width = curr_jpeg_dim.width;
8141 max_jpeg_dim.height = curr_jpeg_dim.height;
8142 }
8143 }
8144 return max_jpeg_dim;
8145}
8146
8147/*===========================================================================
8148 * FUNCTION : addStreamConfig
8149 *
8150 * DESCRIPTION: adds the stream configuration to the array
8151 *
8152 * PARAMETERS :
8153 * @available_stream_configs : pointer to stream configuration array
8154 * @scalar_format : scalar format
8155 * @dim : configuration dimension
8156 * @config_type : input or output configuration type
8157 *
8158 * RETURN : NONE
8159 *==========================================================================*/
8160void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
8161 int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
8162{
8163 available_stream_configs.add(scalar_format);
8164 available_stream_configs.add(dim.width);
8165 available_stream_configs.add(dim.height);
8166 available_stream_configs.add(config_type);
8167}
8168
8169/*===========================================================================
8170 * FUNCTION : suppportBurstCapture
8171 *
8172 * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
8173 *
8174 * PARAMETERS :
8175 * @cameraId : camera Id
8176 *
8177 * RETURN : true if camera supports BURST_CAPTURE
8178 * false otherwise
8179 *==========================================================================*/
8180bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
8181{
8182 const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
8183 const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
8184 const int32_t highResWidth = 3264;
8185 const int32_t highResHeight = 2448;
8186
8187 if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
8188 // Maximum resolution images cannot be captured at >= 10fps
8189 // -> not supporting BURST_CAPTURE
8190 return false;
8191 }
8192
8193 if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
8194 // Maximum resolution images can be captured at >= 20fps
8195 // --> supporting BURST_CAPTURE
8196 return true;
8197 }
8198
8199 // Find the smallest highRes resolution, or largest resolution if there is none
8200 size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
8201 MAX_SIZES_CNT);
8202 size_t highRes = 0;
8203 while ((highRes + 1 < totalCnt) &&
8204 (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
8205 gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
8206 highResWidth * highResHeight)) {
8207 highRes++;
8208 }
8209 if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
8210 return true;
8211 } else {
8212 return false;
8213 }
8214}
8215
8216/*===========================================================================
8217 * FUNCTION : initStaticMetadata
8218 *
8219 * DESCRIPTION: initialize the static metadata
8220 *
8221 * PARAMETERS :
8222 * @cameraId : camera Id
8223 *
8224 * RETURN : int32_t type of status
8225 * 0 -- success
8226 * non-zero failure code
8227 *==========================================================================*/
8228int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
8229{
8230 int rc = 0;
8231 CameraMetadata staticInfo;
8232 size_t count = 0;
8233 bool limitedDevice = false;
8234 char prop[PROPERTY_VALUE_MAX];
8235 bool supportBurst = false;
8236
8237 supportBurst = supportBurstCapture(cameraId);
8238
8239 /* If sensor is YUV sensor (no raw support) or if per-frame control is not
8240 * guaranteed or if min fps of max resolution is less than 20 fps, its
8241 * advertised as limited device*/
8242 limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
8243 (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
8244 (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
8245 !supportBurst;
8246
8247 uint8_t supportedHwLvl = limitedDevice ?
8248 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008249#ifndef USE_HAL_3_3
8250 // LEVEL_3 - This device will support level 3.
8251 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
8252#else
Thierry Strudel3d639192016-09-09 11:52:26 -07008253 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008254#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07008255
8256 staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
8257 &supportedHwLvl, 1);
8258
8259 bool facingBack = false;
8260 if ((gCamCapability[cameraId]->position == CAM_POSITION_BACK) ||
8261 (gCamCapability[cameraId]->position == CAM_POSITION_BACK_AUX)) {
8262 facingBack = true;
8263 }
8264 /*HAL 3 only*/
8265 staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
8266 &gCamCapability[cameraId]->min_focus_distance, 1);
8267
8268 staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
8269 &gCamCapability[cameraId]->hyper_focal_distance, 1);
8270
8271 /*should be using focal lengths but sensor doesn't provide that info now*/
8272 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
8273 &gCamCapability[cameraId]->focal_length,
8274 1);
8275
8276 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
8277 gCamCapability[cameraId]->apertures,
8278 MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
8279
8280 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
8281 gCamCapability[cameraId]->filter_densities,
8282 MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
8283
8284
8285 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
8286 (uint8_t *)gCamCapability[cameraId]->optical_stab_modes,
8287 MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count));
8288
8289 int32_t lens_shading_map_size[] = {
8290 MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
8291 MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
8292 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
8293 lens_shading_map_size,
8294 sizeof(lens_shading_map_size)/sizeof(int32_t));
8295
8296 staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
8297 gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
8298
8299 staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
8300 gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
8301
8302 staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
8303 &gCamCapability[cameraId]->max_frame_duration, 1);
8304
8305 camera_metadata_rational baseGainFactor = {
8306 gCamCapability[cameraId]->base_gain_factor.numerator,
8307 gCamCapability[cameraId]->base_gain_factor.denominator};
8308 staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
8309 &baseGainFactor, 1);
8310
8311 staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
8312 (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
8313
8314 int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
8315 gCamCapability[cameraId]->pixel_array_size.height};
8316 staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
8317 pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
8318
8319 int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
8320 gCamCapability[cameraId]->active_array_size.top,
8321 gCamCapability[cameraId]->active_array_size.width,
8322 gCamCapability[cameraId]->active_array_size.height};
8323 staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
8324 active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
8325
8326 staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
8327 &gCamCapability[cameraId]->white_level, 1);
8328
Shuzhen Wanga5da1022016-07-13 20:18:42 -07008329 int32_t adjusted_bl_per_cfa[BLACK_LEVEL_PATTERN_CNT];
8330 adjustBlackLevelForCFA(gCamCapability[cameraId]->black_level_pattern, adjusted_bl_per_cfa,
8331 gCamCapability[cameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07008332 staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
Shuzhen Wanga5da1022016-07-13 20:18:42 -07008333 adjusted_bl_per_cfa, BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel3d639192016-09-09 11:52:26 -07008334
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008335#ifndef USE_HAL_3_3
8336 bool hasBlackRegions = false;
8337 if (gCamCapability[cameraId]->optical_black_region_count > MAX_OPTICAL_BLACK_REGIONS) {
8338 LOGW("black_region_count: %d is bounded to %d",
8339 gCamCapability[cameraId]->optical_black_region_count, MAX_OPTICAL_BLACK_REGIONS);
8340 gCamCapability[cameraId]->optical_black_region_count = MAX_OPTICAL_BLACK_REGIONS;
8341 }
8342 if (gCamCapability[cameraId]->optical_black_region_count != 0) {
8343 int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
8344 for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i++) {
8345 opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
8346 }
8347 staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
8348 opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
8349 hasBlackRegions = true;
8350 }
8351#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07008352 staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
8353 &gCamCapability[cameraId]->flash_charge_duration, 1);
8354
8355 staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
8356 &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
8357
Shuzhen Wang98d5efb2016-09-07 18:08:22 -07008358 uint8_t timestampSource = (gCamCapability[cameraId]->timestamp_calibrated ?
8359 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME :
8360 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN);
Thierry Strudel3d639192016-09-09 11:52:26 -07008361 staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
8362 &timestampSource, 1);
8363
8364 staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
8365 &gCamCapability[cameraId]->histogram_size, 1);
8366
8367 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
8368 &gCamCapability[cameraId]->max_histogram_count, 1);
8369
8370 int32_t sharpness_map_size[] = {
8371 gCamCapability[cameraId]->sharpness_map_size.width,
8372 gCamCapability[cameraId]->sharpness_map_size.height};
8373
8374 staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
8375 sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
8376
8377 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
8378 &gCamCapability[cameraId]->max_sharpness_map_value, 1);
8379
8380 int32_t scalar_formats[] = {
8381 ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
8382 ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
8383 ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
8384 ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
8385 HAL_PIXEL_FORMAT_RAW10,
8386 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
8387 size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(int32_t);
8388 staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
8389 scalar_formats,
8390 scalar_formats_count);
8391
8392 int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
8393 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
8394 makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
8395 count, MAX_SIZES_CNT, available_processed_sizes);
8396 staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
8397 available_processed_sizes, count * 2);
8398
8399 int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
8400 count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
8401 makeTable(gCamCapability[cameraId]->raw_dim,
8402 count, MAX_SIZES_CNT, available_raw_sizes);
8403 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
8404 available_raw_sizes, count * 2);
8405
8406 int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
8407 count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
8408 makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
8409 count, MAX_SIZES_CNT, available_fps_ranges);
8410 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
8411 available_fps_ranges, count * 2);
8412
8413 camera_metadata_rational exposureCompensationStep = {
8414 gCamCapability[cameraId]->exp_compensation_step.numerator,
8415 gCamCapability[cameraId]->exp_compensation_step.denominator};
8416 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
8417 &exposureCompensationStep, 1);
8418
8419 Vector<uint8_t> availableVstabModes;
8420 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
8421 char eis_prop[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008422 bool eisSupported = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07008423 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008424 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07008425 uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008426 count = IS_TYPE_MAX;
8427 count = MIN(gCamCapability[cameraId]->supported_is_types_cnt, count);
8428 for (size_t i = 0; i < count; i++) {
8429 if ((gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
8430 (gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
8431 eisSupported = true;
8432 break;
8433 }
8434 }
8435 if (facingBack && eis_prop_set && eisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07008436 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
8437 }
8438 staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
8439 availableVstabModes.array(), availableVstabModes.size());
8440
8441 /*HAL 1 and HAL 3 common*/
8442 uint32_t zoomSteps = gCamCapability[cameraId]->zoom_ratio_tbl_cnt;
8443 uint32_t maxZoomStep = gCamCapability[cameraId]->zoom_ratio_tbl[zoomSteps - 1];
8444 uint32_t minZoomStep = 100; //as per HAL1/API1 spec
8445 float maxZoom = maxZoomStep/minZoomStep;
8446 staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
8447 &maxZoom, 1);
8448
8449 uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
8450 staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
8451
8452 int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
8453 if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
8454 max3aRegions[2] = 0; /* AF not supported */
8455 staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
8456 max3aRegions, 3);
8457
8458 /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
8459 memset(prop, 0, sizeof(prop));
8460 property_get("persist.camera.facedetect", prop, "1");
8461 uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
8462 LOGD("Support face detection mode: %d",
8463 supportedFaceDetectMode);
8464
8465 int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
Thierry Strudel04e026f2016-10-10 11:27:36 -07008466 /* support mode should be OFF if max number of face is 0 */
8467 if (maxFaces <= 0) {
8468 supportedFaceDetectMode = 0;
8469 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008470 Vector<uint8_t> availableFaceDetectModes;
8471 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
8472 if (supportedFaceDetectMode == 1) {
8473 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
8474 } else if (supportedFaceDetectMode == 2) {
8475 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
8476 } else if (supportedFaceDetectMode == 3) {
8477 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
8478 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
8479 } else {
8480 maxFaces = 0;
8481 }
8482 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
8483 availableFaceDetectModes.array(),
8484 availableFaceDetectModes.size());
8485 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
8486 (int32_t *)&maxFaces, 1);
8487
8488 int32_t exposureCompensationRange[] = {
8489 gCamCapability[cameraId]->exposure_compensation_min,
8490 gCamCapability[cameraId]->exposure_compensation_max};
8491 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
8492 exposureCompensationRange,
8493 sizeof(exposureCompensationRange)/sizeof(int32_t));
8494
8495 uint8_t lensFacing = (facingBack) ?
8496 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
8497 staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
8498
8499 staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
8500 available_thumbnail_sizes,
8501 sizeof(available_thumbnail_sizes)/sizeof(int32_t));
8502
8503 /*all sizes will be clubbed into this tag*/
8504 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
8505 /*android.scaler.availableStreamConfigurations*/
8506 Vector<int32_t> available_stream_configs;
8507 cam_dimension_t active_array_dim;
8508 active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
8509 active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
8510 /* Add input/output stream configurations for each scalar formats*/
8511 for (size_t j = 0; j < scalar_formats_count; j++) {
8512 switch (scalar_formats[j]) {
8513 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
8514 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
8515 case HAL_PIXEL_FORMAT_RAW10:
8516 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
8517 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
8518 addStreamConfig(available_stream_configs, scalar_formats[j],
8519 gCamCapability[cameraId]->raw_dim[i],
8520 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
8521 }
8522 break;
8523 case HAL_PIXEL_FORMAT_BLOB:
8524 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
8525 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
8526 addStreamConfig(available_stream_configs, scalar_formats[j],
8527 gCamCapability[cameraId]->picture_sizes_tbl[i],
8528 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
8529 }
8530 break;
8531 case HAL_PIXEL_FORMAT_YCbCr_420_888:
8532 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
8533 default:
8534 cam_dimension_t largest_picture_size;
8535 memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
8536 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
8537 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
8538 addStreamConfig(available_stream_configs, scalar_formats[j],
8539 gCamCapability[cameraId]->picture_sizes_tbl[i],
8540 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
8541 /* Book keep largest */
8542 if (gCamCapability[cameraId]->picture_sizes_tbl[i].width
8543 >= largest_picture_size.width &&
8544 gCamCapability[cameraId]->picture_sizes_tbl[i].height
8545 >= largest_picture_size.height)
8546 largest_picture_size = gCamCapability[cameraId]->picture_sizes_tbl[i];
8547 }
8548 /* For the below 2 formats we also support input streams for reprocessing; advertise those */
8549 if (scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
8550 scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) {
8551 addStreamConfig(available_stream_configs, scalar_formats[j],
8552 largest_picture_size,
8553 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
8554 }
8555 break;
8556 }
8557 }
8558
8559 staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
8560 available_stream_configs.array(), available_stream_configs.size());
8561 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
8562 staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
8563
8564 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
8565 staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
8566
8567 /* android.scaler.availableMinFrameDurations */
8568 Vector<int64_t> available_min_durations;
8569 for (size_t j = 0; j < scalar_formats_count; j++) {
8570 switch (scalar_formats[j]) {
8571 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
8572 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
8573 case HAL_PIXEL_FORMAT_RAW10:
8574 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
8575 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
8576 available_min_durations.add(scalar_formats[j]);
8577 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
8578 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
8579 available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
8580 }
8581 break;
8582 default:
8583 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
8584 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
8585 available_min_durations.add(scalar_formats[j]);
8586 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
8587 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
8588 available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
8589 }
8590 break;
8591 }
8592 }
8593 staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
8594 available_min_durations.array(), available_min_durations.size());
8595
8596 Vector<int32_t> available_hfr_configs;
8597 for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
8598 int32_t fps = 0;
8599 switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
8600 case CAM_HFR_MODE_60FPS:
8601 fps = 60;
8602 break;
8603 case CAM_HFR_MODE_90FPS:
8604 fps = 90;
8605 break;
8606 case CAM_HFR_MODE_120FPS:
8607 fps = 120;
8608 break;
8609 case CAM_HFR_MODE_150FPS:
8610 fps = 150;
8611 break;
8612 case CAM_HFR_MODE_180FPS:
8613 fps = 180;
8614 break;
8615 case CAM_HFR_MODE_210FPS:
8616 fps = 210;
8617 break;
8618 case CAM_HFR_MODE_240FPS:
8619 fps = 240;
8620 break;
8621 case CAM_HFR_MODE_480FPS:
8622 fps = 480;
8623 break;
8624 case CAM_HFR_MODE_OFF:
8625 case CAM_HFR_MODE_MAX:
8626 default:
8627 break;
8628 }
8629
8630 /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
8631 if (fps >= MIN_FPS_FOR_BATCH_MODE) {
8632 /* For each HFR frame rate, need to advertise one variable fps range
8633 * and one fixed fps range per dimension. Eg: for 120 FPS, advertise [30, 120]
8634 * and [120, 120]. While camcorder preview alone is running [30, 120] is
8635 * set by the app. When video recording is started, [120, 120] is
8636 * set. This way sensor configuration does not change when recording
8637 * is started */
8638
8639 /* (width, height, fps_min, fps_max, batch_size_max) */
8640 for (size_t j = 0; j < gCamCapability[cameraId]->hfr_tbl[i].dim_cnt &&
8641 j < MAX_SIZES_CNT; j++) {
8642 available_hfr_configs.add(
8643 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
8644 available_hfr_configs.add(
8645 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
8646 available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
8647 available_hfr_configs.add(fps);
8648 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
8649
8650 /* (width, height, fps_min, fps_max, batch_size_max) */
8651 available_hfr_configs.add(
8652 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
8653 available_hfr_configs.add(
8654 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
8655 available_hfr_configs.add(fps);
8656 available_hfr_configs.add(fps);
8657 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
8658 }
8659 }
8660 }
8661 //Advertise HFR capability only if the property is set
8662 memset(prop, 0, sizeof(prop));
8663 property_get("persist.camera.hal3hfr.enable", prop, "1");
8664 uint8_t hfrEnable = (uint8_t)atoi(prop);
8665
8666 if(hfrEnable && available_hfr_configs.array()) {
8667 staticInfo.update(
8668 ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
8669 available_hfr_configs.array(), available_hfr_configs.size());
8670 }
8671
8672 int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
8673 staticInfo.update(ANDROID_JPEG_MAX_SIZE,
8674 &max_jpeg_size, 1);
8675
8676 uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
8677 size_t size = 0;
8678 count = CAM_EFFECT_MODE_MAX;
8679 count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
8680 for (size_t i = 0; i < count; i++) {
8681 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
8682 gCamCapability[cameraId]->supported_effects[i]);
8683 if (NAME_NOT_FOUND != val) {
8684 avail_effects[size] = (uint8_t)val;
8685 size++;
8686 }
8687 }
8688 staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
8689 avail_effects,
8690 size);
8691
8692 uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
8693 uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
8694 size_t supported_scene_modes_cnt = 0;
8695 count = CAM_SCENE_MODE_MAX;
8696 count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
8697 for (size_t i = 0; i < count; i++) {
8698 if (gCamCapability[cameraId]->supported_scene_modes[i] !=
8699 CAM_SCENE_MODE_OFF) {
8700 int val = lookupFwkName(SCENE_MODES_MAP,
8701 METADATA_MAP_SIZE(SCENE_MODES_MAP),
8702 gCamCapability[cameraId]->supported_scene_modes[i]);
Mansoor Aftab58465fa2017-01-26 15:02:44 -08008703
Thierry Strudel3d639192016-09-09 11:52:26 -07008704 if (NAME_NOT_FOUND != val) {
8705 avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
8706 supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
8707 supported_scene_modes_cnt++;
8708 }
8709 }
8710 }
8711 staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
8712 avail_scene_modes,
8713 supported_scene_modes_cnt);
8714
8715 uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
8716 makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
8717 supported_scene_modes_cnt,
8718 CAM_SCENE_MODE_MAX,
8719 scene_mode_overrides,
8720 supported_indexes,
8721 cameraId);
8722
8723 if (supported_scene_modes_cnt == 0) {
8724 supported_scene_modes_cnt = 1;
8725 avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
8726 }
8727
8728 staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
8729 scene_mode_overrides, supported_scene_modes_cnt * 3);
8730
8731 uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
8732 ANDROID_CONTROL_MODE_AUTO,
8733 ANDROID_CONTROL_MODE_USE_SCENE_MODE};
8734 staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
8735 available_control_modes,
8736 3);
8737
8738 uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
8739 size = 0;
8740 count = CAM_ANTIBANDING_MODE_MAX;
8741 count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
8742 for (size_t i = 0; i < count; i++) {
8743 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
8744 gCamCapability[cameraId]->supported_antibandings[i]);
8745 if (NAME_NOT_FOUND != val) {
8746 avail_antibanding_modes[size] = (uint8_t)val;
8747 size++;
8748 }
8749
8750 }
8751 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
8752 avail_antibanding_modes,
8753 size);
8754
8755 uint8_t avail_abberation_modes[] = {
8756 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
8757 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
8758 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
8759 count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
8760 count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
8761 if (0 == count) {
8762 // If no aberration correction modes are available for a device, advertise only the OFF mode
8763 size = 1;
8764 } else {
8765 // If count is not zero then at least one of FAST or HIGH quality is supported
8766 // So, advertise all 3 modes if at least any one mode is supported as per the
8767 // new M requirement
8768 size = 3;
8769 }
8770 staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
8771 avail_abberation_modes,
8772 size);
8773
8774 uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
8775 size = 0;
8776 count = CAM_FOCUS_MODE_MAX;
8777 count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
8778 for (size_t i = 0; i < count; i++) {
8779 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
8780 gCamCapability[cameraId]->supported_focus_modes[i]);
8781 if (NAME_NOT_FOUND != val) {
8782 avail_af_modes[size] = (uint8_t)val;
8783 size++;
8784 }
8785 }
8786 staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
8787 avail_af_modes,
8788 size);
8789
8790 uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
8791 size = 0;
8792 count = CAM_WB_MODE_MAX;
8793 count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
8794 for (size_t i = 0; i < count; i++) {
8795 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
8796 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
8797 gCamCapability[cameraId]->supported_white_balances[i]);
8798 if (NAME_NOT_FOUND != val) {
8799 avail_awb_modes[size] = (uint8_t)val;
8800 size++;
8801 }
8802 }
8803 staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
8804 avail_awb_modes,
8805 size);
8806
8807 uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
8808 count = CAM_FLASH_FIRING_LEVEL_MAX;
8809 count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
8810 count);
8811 for (size_t i = 0; i < count; i++) {
8812 available_flash_levels[i] =
8813 gCamCapability[cameraId]->supported_firing_levels[i];
8814 }
8815 staticInfo.update(ANDROID_FLASH_FIRING_POWER,
8816 available_flash_levels, count);
8817
8818 uint8_t flashAvailable;
8819 if (gCamCapability[cameraId]->flash_available)
8820 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
8821 else
8822 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
8823 staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
8824 &flashAvailable, 1);
8825
8826 Vector<uint8_t> avail_ae_modes;
8827 count = CAM_AE_MODE_MAX;
8828 count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
8829 for (size_t i = 0; i < count; i++) {
8830 avail_ae_modes.add(gCamCapability[cameraId]->supported_ae_modes[i]);
8831 }
8832 if (flashAvailable) {
8833 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
8834 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
8835 }
8836 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
8837 avail_ae_modes.array(),
8838 avail_ae_modes.size());
8839
8840 int32_t sensitivity_range[2];
8841 sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
8842 sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
8843 staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
8844 sensitivity_range,
8845 sizeof(sensitivity_range) / sizeof(int32_t));
8846
8847 staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
8848 &gCamCapability[cameraId]->max_analog_sensitivity,
8849 1);
8850
8851 int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
8852 staticInfo.update(ANDROID_SENSOR_ORIENTATION,
8853 &sensor_orientation,
8854 1);
8855
8856 int32_t max_output_streams[] = {
8857 MAX_STALLING_STREAMS,
8858 MAX_PROCESSED_STREAMS,
8859 MAX_RAW_STREAMS};
8860 staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
8861 max_output_streams,
8862 sizeof(max_output_streams)/sizeof(max_output_streams[0]));
8863
8864 uint8_t avail_leds = 0;
8865 staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
8866 &avail_leds, 0);
8867
8868 uint8_t focus_dist_calibrated;
8869 int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
8870 gCamCapability[cameraId]->focus_dist_calibrated);
8871 if (NAME_NOT_FOUND != val) {
8872 focus_dist_calibrated = (uint8_t)val;
8873 staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
8874 &focus_dist_calibrated, 1);
8875 }
8876
8877 int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
8878 size = 0;
8879 count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
8880 MAX_TEST_PATTERN_CNT);
8881 for (size_t i = 0; i < count; i++) {
8882 int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
8883 gCamCapability[cameraId]->supported_test_pattern_modes[i]);
8884 if (NAME_NOT_FOUND != testpatternMode) {
8885 avail_testpattern_modes[size] = testpatternMode;
8886 size++;
8887 }
8888 }
8889 staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
8890 avail_testpattern_modes,
8891 size);
8892
8893 uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
8894 staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
8895 &max_pipeline_depth,
8896 1);
8897
8898 int32_t partial_result_count = PARTIAL_RESULT_COUNT;
8899 staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
8900 &partial_result_count,
8901 1);
8902
8903 int32_t max_stall_duration = MAX_REPROCESS_STALL;
8904 staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
8905
8906 Vector<uint8_t> available_capabilities;
8907 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
8908 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
8909 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
8910 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
8911 if (supportBurst) {
8912 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
8913 }
8914 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
8915 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
8916 if (hfrEnable && available_hfr_configs.array()) {
8917 available_capabilities.add(
8918 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
8919 }
8920
8921 if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
8922 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
8923 }
8924 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
8925 available_capabilities.array(),
8926 available_capabilities.size());
8927
8928 //aeLockAvailable to be set to true if capabilities has MANUAL_SENSOR or BURST_CAPTURE
8929 //Assumption is that all bayer cameras support MANUAL_SENSOR.
8930 uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
8931 ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
8932
8933 staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
8934 &aeLockAvailable, 1);
8935
8936 //awbLockAvailable to be set to true if capabilities has MANUAL_POST_PROCESSING or
8937 //BURST_CAPTURE. Assumption is that all bayer cameras support MANUAL_POST_PROCESSING.
8938 uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
8939 ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
8940
8941 staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
8942 &awbLockAvailable, 1);
8943
8944 int32_t max_input_streams = 1;
8945 staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
8946 &max_input_streams,
8947 1);
8948
8949 /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
8950 int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
8951 HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
8952 HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
8953 HAL_PIXEL_FORMAT_YCbCr_420_888};
8954 staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
8955 io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));
8956
8957 int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
8958 staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
8959 &max_latency,
8960 1);
8961
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008962#ifndef USE_HAL_3_3
8963 int32_t isp_sensitivity_range[2];
8964 isp_sensitivity_range[0] =
8965 gCamCapability[cameraId]->isp_sensitivity_range.min_sensitivity;
8966 isp_sensitivity_range[1] =
8967 gCamCapability[cameraId]->isp_sensitivity_range.max_sensitivity;
8968 staticInfo.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
8969 isp_sensitivity_range,
8970 sizeof(isp_sensitivity_range) / sizeof(isp_sensitivity_range[0]));
8971#endif
8972
Thierry Strudel3d639192016-09-09 11:52:26 -07008973 uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
8974 ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
8975 staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
8976 available_hot_pixel_modes,
8977 sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));
8978
8979 uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
8980 ANDROID_SHADING_MODE_FAST,
8981 ANDROID_SHADING_MODE_HIGH_QUALITY};
8982 staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
8983 available_shading_modes,
8984 3);
8985
8986 uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
8987 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
8988 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
8989 available_lens_shading_map_modes,
8990 2);
8991
8992 uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
8993 ANDROID_EDGE_MODE_FAST,
8994 ANDROID_EDGE_MODE_HIGH_QUALITY,
8995 ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
8996 staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
8997 available_edge_modes,
8998 sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));
8999
9000 uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
9001 ANDROID_NOISE_REDUCTION_MODE_FAST,
9002 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
9003 ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
9004 ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
9005 staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
9006 available_noise_red_modes,
9007 sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));
9008
9009 uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
9010 ANDROID_TONEMAP_MODE_FAST,
9011 ANDROID_TONEMAP_MODE_HIGH_QUALITY};
9012 staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
9013 available_tonemap_modes,
9014 sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));
9015
9016 uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
9017 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
9018 available_hot_pixel_map_modes,
9019 sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
9020
9021 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
9022 gCamCapability[cameraId]->reference_illuminant1);
9023 if (NAME_NOT_FOUND != val) {
9024 uint8_t fwkReferenceIlluminant = (uint8_t)val;
9025 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
9026 }
9027
9028 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
9029 gCamCapability[cameraId]->reference_illuminant2);
9030 if (NAME_NOT_FOUND != val) {
9031 uint8_t fwkReferenceIlluminant = (uint8_t)val;
9032 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
9033 }
9034
9035 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
9036 (void *)gCamCapability[cameraId]->forward_matrix1,
9037 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
9038
9039 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
9040 (void *)gCamCapability[cameraId]->forward_matrix2,
9041 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
9042
9043 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
9044 (void *)gCamCapability[cameraId]->color_transform1,
9045 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
9046
9047 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
9048 (void *)gCamCapability[cameraId]->color_transform2,
9049 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
9050
9051 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
9052 (void *)gCamCapability[cameraId]->calibration_transform1,
9053 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
9054
9055 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
9056 (void *)gCamCapability[cameraId]->calibration_transform2,
9057 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
9058
9059 int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
9060 ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
9061 ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
9062 ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
9063 ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
9064 ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
9065 ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
9066 ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
9067 ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
9068 ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
9069 ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
9070 ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
9071 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
9072 ANDROID_JPEG_GPS_COORDINATES,
9073 ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
9074 ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
9075 ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
9076 ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
9077 ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
9078 ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
9079 ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
9080 ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
9081 ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
9082 ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009083#ifndef USE_HAL_3_3
9084 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
9085#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009086 ANDROID_STATISTICS_FACE_DETECT_MODE,
9087 ANDROID_STATISTICS_HISTOGRAM_MODE, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
9088 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
9089 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Samuel Ha68ba5172016-12-15 18:41:12 -08009090 ANDROID_BLACK_LEVEL_LOCK,
9091 /* DevCamDebug metadata request_keys_basic */
9092 DEVCAMDEBUG_META_ENABLE,
9093 /* DevCamDebug metadata end */
9094 };
Thierry Strudel3d639192016-09-09 11:52:26 -07009095
9096 size_t request_keys_cnt =
9097 sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
9098 Vector<int32_t> available_request_keys;
9099 available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
9100 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
9101 available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
9102 }
9103
9104 staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
9105 available_request_keys.array(), available_request_keys.size());
9106
9107 int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
9108 ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
9109 ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
9110 ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
9111 ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
9112 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
9113 ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
9114 ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
9115 ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
9116 ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
9117 ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
9118 ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
9119 ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
9120 ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
9121 ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
9122 ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
9123 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
9124 ANDROID_STATISTICS_FACE_DETECT_MODE, ANDROID_STATISTICS_HISTOGRAM_MODE,
9125 ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
9126 ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
9127 ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009128 ANDROID_STATISTICS_FACE_SCORES,
9129#ifndef USE_HAL_3_3
9130 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
9131#endif
Shuzhen Wange763e802016-03-31 10:24:29 -07009132 NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE,
Samuel Ha68ba5172016-12-15 18:41:12 -08009133 // DevCamDebug metadata result_keys_basic
9134 DEVCAMDEBUG_META_ENABLE,
9135 // DevCamDebug metadata result_keys AF
9136 DEVCAMDEBUG_AF_LENS_POSITION,
9137 DEVCAMDEBUG_AF_TOF_CONFIDENCE,
9138 DEVCAMDEBUG_AF_TOF_DISTANCE,
9139 DEVCAMDEBUG_AF_LUMA,
9140 DEVCAMDEBUG_AF_HAF_STATE,
9141 DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
9142 DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
9143 DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
9144 DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
9145 DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
9146 DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
9147 DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
9148 DEVCAMDEBUG_AF_MONITOR_REFOCUS,
9149 DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
9150 DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
9151 DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
9152 DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
9153 DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
9154 DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
9155 DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
9156 DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
9157 DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
9158 DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
9159 DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
9160 DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
9161 DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
9162 DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
9163 // DevCamDebug metadata result_keys AEC
9164 DEVCAMDEBUG_AEC_TARGET_LUMA,
9165 DEVCAMDEBUG_AEC_COMP_LUMA,
9166 DEVCAMDEBUG_AEC_AVG_LUMA,
9167 DEVCAMDEBUG_AEC_CUR_LUMA,
9168 DEVCAMDEBUG_AEC_LINECOUNT,
9169 DEVCAMDEBUG_AEC_REAL_GAIN,
9170 DEVCAMDEBUG_AEC_EXP_INDEX,
9171 DEVCAMDEBUG_AEC_LUX_IDX,
9172 // DevCamDebug metadata result_keys AWB
9173 DEVCAMDEBUG_AWB_R_GAIN,
9174 DEVCAMDEBUG_AWB_G_GAIN,
9175 DEVCAMDEBUG_AWB_B_GAIN,
9176 DEVCAMDEBUG_AWB_CCT,
9177 DEVCAMDEBUG_AWB_DECISION,
9178 /* DevCamDebug metadata end */
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009179 };
9180
Thierry Strudel3d639192016-09-09 11:52:26 -07009181 size_t result_keys_cnt =
9182 sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
9183
9184 Vector<int32_t> available_result_keys;
9185 available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
9186 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
9187 available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
9188 }
9189 if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
9190 available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
9191 available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
9192 }
9193 if (supportedFaceDetectMode == 1) {
9194 available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
9195 available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
9196 } else if ((supportedFaceDetectMode == 2) ||
9197 (supportedFaceDetectMode == 3)) {
9198 available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
9199 available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
9200 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009201#ifndef USE_HAL_3_3
9202 if (hasBlackRegions) {
9203 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
9204 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL);
9205 }
9206#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009207 staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
9208 available_result_keys.array(), available_result_keys.size());
9209
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009210 int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
Thierry Strudel3d639192016-09-09 11:52:26 -07009211 ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
9212 ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
9213 ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
9214 ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
9215 ANDROID_SCALER_CROPPING_TYPE,
9216 ANDROID_SYNC_MAX_LATENCY,
9217 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
9218 ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
9219 ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
9220 ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
9221 ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
9222 ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
9223 ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
9224 ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
9225 ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
9226 ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
9227 ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
9228 ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
9229 ANDROID_LENS_FACING,
9230 ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
9231 ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
9232 ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
9233 ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
9234 ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
9235 ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
9236 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
9237 /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
9238 ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
9239 ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
9240 ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
9241 ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
9242 ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
9243 ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
9244 ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
9245 ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
9246 ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
9247 ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
9248 ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
9249 ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
9250 ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
9251 ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
9252 ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
9253 ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
9254 ANDROID_EDGE_AVAILABLE_EDGE_MODES,
9255 ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
9256 ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
9257 ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
9258 ANDROID_TONEMAP_MAX_CURVE_POINTS,
9259 ANDROID_CONTROL_AVAILABLE_MODES,
9260 ANDROID_CONTROL_AE_LOCK_AVAILABLE,
9261 ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
9262 ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
9263 ANDROID_SHADING_AVAILABLE_MODES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009264 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
9265#ifndef USE_HAL_3_3
9266 ANDROID_SENSOR_OPAQUE_RAW_SIZE,
9267 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
9268#endif
9269 };
9270
9271 Vector<int32_t> available_characteristics_keys;
9272 available_characteristics_keys.appendArray(characteristics_keys_basic,
9273 sizeof(characteristics_keys_basic)/sizeof(int32_t));
9274#ifndef USE_HAL_3_3
9275 if (hasBlackRegions) {
9276 available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
9277 }
9278#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009279 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009280 available_characteristics_keys.array(),
9281 available_characteristics_keys.size());
Thierry Strudel3d639192016-09-09 11:52:26 -07009282
9283 /*available stall durations depend on the hw + sw and will be different for different devices */
9284 /*have to add for raw after implementation*/
9285 int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
9286 size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
9287
9288 Vector<int64_t> available_stall_durations;
9289 for (uint32_t j = 0; j < stall_formats_count; j++) {
9290 if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
9291 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
9292 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9293 available_stall_durations.add(stall_formats[j]);
9294 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
9295 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
9296 available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
9297 }
9298 } else {
9299 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
9300 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9301 available_stall_durations.add(stall_formats[j]);
9302 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
9303 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
9304 available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
9305 }
9306 }
9307 }
9308 staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
9309 available_stall_durations.array(),
9310 available_stall_durations.size());
9311
9312 //QCAMERA3_OPAQUE_RAW
9313 uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
9314 cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
9315 switch (gCamCapability[cameraId]->opaque_raw_fmt) {
9316 case LEGACY_RAW:
9317 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
9318 fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
9319 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
9320 fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
9321 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
9322 fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
9323 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
9324 break;
9325 case MIPI_RAW:
9326 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
9327 fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
9328 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
9329 fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
9330 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
9331 fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
9332 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
9333 break;
9334 default:
9335 LOGE("unknown opaque_raw_format %d",
9336 gCamCapability[cameraId]->opaque_raw_fmt);
9337 break;
9338 }
9339 staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
9340
9341 Vector<int32_t> strides;
9342 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9343 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9344 cam_stream_buf_plane_info_t buf_planes;
9345 strides.add(gCamCapability[cameraId]->raw_dim[i].width);
9346 strides.add(gCamCapability[cameraId]->raw_dim[i].height);
9347 mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
9348 &gCamCapability[cameraId]->padding_info, &buf_planes);
9349 strides.add(buf_planes.plane_info.mp[0].stride);
9350 }
9351 staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
9352 strides.size());
9353
Mansoor Aftab58465fa2017-01-26 15:02:44 -08009354 //TBD: remove the following line once backend advertises zzHDR in feature mask
9355 gCamCapability[cameraId]->qcom_supported_feature_mask |= CAM_QCOM_FEATURE_ZIGZAG_HDR;
Thierry Strudel04e026f2016-10-10 11:27:36 -07009356 //Video HDR default
9357 if ((gCamCapability[cameraId]->qcom_supported_feature_mask) &
9358 (CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR |
Mansoor Aftab58465fa2017-01-26 15:02:44 -08009359 CAM_QCOM_FEATURE_ZIGZAG_HDR | CAM_QCOM_FEATURE_SENSOR_HDR)) {
Thierry Strudel04e026f2016-10-10 11:27:36 -07009360 int32_t vhdr_mode[] = {
9361 QCAMERA3_VIDEO_HDR_MODE_OFF,
9362 QCAMERA3_VIDEO_HDR_MODE_ON};
9363
9364 size_t vhdr_mode_count = sizeof(vhdr_mode) / sizeof(int32_t);
9365 staticInfo.update(QCAMERA3_AVAILABLE_VIDEO_HDR_MODES,
9366 vhdr_mode, vhdr_mode_count);
9367 }
9368
Thierry Strudel3d639192016-09-09 11:52:26 -07009369 staticInfo.update(QCAMERA3_DUALCAM_CALIB_META_DATA_BLOB,
9370 (const uint8_t*)&gCamCapability[cameraId]->related_cam_calibration,
9371 sizeof(gCamCapability[cameraId]->related_cam_calibration));
9372
9373 uint8_t isMonoOnly =
9374 (gCamCapability[cameraId]->color_arrangement == CAM_FILTER_ARRANGEMENT_Y);
9375 staticInfo.update(QCAMERA3_SENSOR_IS_MONO_ONLY,
9376 &isMonoOnly, 1);
9377
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009378#ifndef USE_HAL_3_3
9379 Vector<int32_t> opaque_size;
9380 for (size_t j = 0; j < scalar_formats_count; j++) {
9381 if (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE) {
9382 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9383 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9384 cam_stream_buf_plane_info_t buf_planes;
9385
9386 rc = mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
9387 &gCamCapability[cameraId]->padding_info, &buf_planes);
9388
9389 if (rc == 0) {
9390 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].width);
9391 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].height);
9392 opaque_size.add(buf_planes.plane_info.frame_len);
9393 }else {
9394 LOGE("raw frame calculation failed!");
9395 }
9396 }
9397 }
9398 }
9399
9400 if ((opaque_size.size() > 0) &&
9401 (opaque_size.size() % PER_CONFIGURATION_SIZE_3 == 0))
9402 staticInfo.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, opaque_size.array(), opaque_size.size());
9403 else
9404 LOGW("Warning: ANDROID_SENSOR_OPAQUE_RAW_SIZE is using rough estimation(2 bytes/pixel)");
9405#endif
9406
Thierry Strudel04e026f2016-10-10 11:27:36 -07009407 if (gCamCapability[cameraId]->supported_ir_mode_cnt > 0) {
9408 int32_t avail_ir_modes[CAM_IR_MODE_MAX];
9409 size = 0;
9410 count = CAM_IR_MODE_MAX;
9411 count = MIN(gCamCapability[cameraId]->supported_ir_mode_cnt, count);
9412 for (size_t i = 0; i < count; i++) {
9413 int val = lookupFwkName(IR_MODES_MAP, METADATA_MAP_SIZE(IR_MODES_MAP),
9414 gCamCapability[cameraId]->supported_ir_modes[i]);
9415 if (NAME_NOT_FOUND != val) {
9416 avail_ir_modes[size] = (int32_t)val;
9417 size++;
9418 }
9419 }
9420 staticInfo.update(QCAMERA3_IR_AVAILABLE_MODES,
9421 avail_ir_modes, size);
9422 }
9423
Thierry Strudel295a0ca2016-11-03 18:38:47 -07009424 if (gCamCapability[cameraId]->supported_instant_aec_modes_cnt > 0) {
9425 int32_t available_instant_aec_modes[CAM_AEC_CONVERGENCE_MAX];
9426 size = 0;
9427 count = CAM_AEC_CONVERGENCE_MAX;
9428 count = MIN(gCamCapability[cameraId]->supported_instant_aec_modes_cnt, count);
9429 for (size_t i = 0; i < count; i++) {
9430 int val = lookupFwkName(INSTANT_AEC_MODES_MAP, METADATA_MAP_SIZE(INSTANT_AEC_MODES_MAP),
9431 gCamCapability[cameraId]->supported_instant_aec_modes[i]);
9432 if (NAME_NOT_FOUND != val) {
9433 available_instant_aec_modes[size] = (int32_t)val;
9434 size++;
9435 }
9436 }
9437 staticInfo.update(QCAMERA3_INSTANT_AEC_AVAILABLE_MODES,
9438 available_instant_aec_modes, size);
9439 }
9440
Thierry Strudel3d639192016-09-09 11:52:26 -07009441 gStaticMetadata[cameraId] = staticInfo.release();
9442 return rc;
9443}
9444
9445/*===========================================================================
9446 * FUNCTION : makeTable
9447 *
9448 * DESCRIPTION: make a table of sizes
9449 *
9450 * PARAMETERS :
9451 *
9452 *
9453 *==========================================================================*/
9454void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
9455 size_t max_size, int32_t *sizeTable)
9456{
9457 size_t j = 0;
9458 if (size > max_size) {
9459 size = max_size;
9460 }
9461 for (size_t i = 0; i < size; i++) {
9462 sizeTable[j] = dimTable[i].width;
9463 sizeTable[j+1] = dimTable[i].height;
9464 j+=2;
9465 }
9466}
9467
9468/*===========================================================================
9469 * FUNCTION : makeFPSTable
9470 *
9471 * DESCRIPTION: make a table of fps ranges
9472 *
9473 * PARAMETERS :
9474 *
9475 *==========================================================================*/
9476void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
9477 size_t max_size, int32_t *fpsRangesTable)
9478{
9479 size_t j = 0;
9480 if (size > max_size) {
9481 size = max_size;
9482 }
9483 for (size_t i = 0; i < size; i++) {
9484 fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
9485 fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
9486 j+=2;
9487 }
9488}
9489
9490/*===========================================================================
9491 * FUNCTION : makeOverridesList
9492 *
9493 * DESCRIPTION: make a list of scene mode overrides
9494 *
9495 * PARAMETERS :
9496 *
9497 *
9498 *==========================================================================*/
9499void QCamera3HardwareInterface::makeOverridesList(
9500 cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
9501 uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
9502{
9503 /*daemon will give a list of overrides for all scene modes.
9504 However we should send the fwk only the overrides for the scene modes
9505 supported by the framework*/
9506 size_t j = 0;
9507 if (size > max_size) {
9508 size = max_size;
9509 }
9510 size_t focus_count = CAM_FOCUS_MODE_MAX;
9511 focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
9512 focus_count);
9513 for (size_t i = 0; i < size; i++) {
9514 bool supt = false;
9515 size_t index = supported_indexes[i];
9516 overridesList[j] = gCamCapability[camera_id]->flash_available ?
9517 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
9518 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
9519 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
9520 overridesTable[index].awb_mode);
9521 if (NAME_NOT_FOUND != val) {
9522 overridesList[j+1] = (uint8_t)val;
9523 }
9524 uint8_t focus_override = overridesTable[index].af_mode;
9525 for (size_t k = 0; k < focus_count; k++) {
9526 if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
9527 supt = true;
9528 break;
9529 }
9530 }
9531 if (supt) {
9532 val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
9533 focus_override);
9534 if (NAME_NOT_FOUND != val) {
9535 overridesList[j+2] = (uint8_t)val;
9536 }
9537 } else {
9538 overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
9539 }
9540 j+=3;
9541 }
9542}
9543
9544/*===========================================================================
9545 * FUNCTION : filterJpegSizes
9546 *
9547 * DESCRIPTION: Returns the supported jpeg sizes based on the max dimension that
9548 * could be downscaled to
9549 *
9550 * PARAMETERS :
9551 *
9552 * RETURN : length of jpegSizes array
9553 *==========================================================================*/
9554
9555size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
9556 size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
9557 uint8_t downscale_factor)
9558{
9559 if (0 == downscale_factor) {
9560 downscale_factor = 1;
9561 }
9562
9563 int32_t min_width = active_array_size.width / downscale_factor;
9564 int32_t min_height = active_array_size.height / downscale_factor;
9565 size_t jpegSizesCnt = 0;
9566 if (processedSizesCnt > maxCount) {
9567 processedSizesCnt = maxCount;
9568 }
9569 for (size_t i = 0; i < processedSizesCnt; i+=2) {
9570 if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
9571 jpegSizes[jpegSizesCnt] = processedSizes[i];
9572 jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
9573 jpegSizesCnt += 2;
9574 }
9575 }
9576 return jpegSizesCnt;
9577}
9578
9579/*===========================================================================
9580 * FUNCTION : computeNoiseModelEntryS
9581 *
9582 * DESCRIPTION: function to map a given sensitivity to the S noise
9583 * model parameters in the DNG noise model.
9584 *
9585 * PARAMETERS : sens : the sensor sensitivity
9586 *
9587 ** RETURN : S (sensor amplification) noise
9588 *
9589 *==========================================================================*/
9590double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
9591 double s = gCamCapability[mCameraId]->gradient_S * sens +
9592 gCamCapability[mCameraId]->offset_S;
9593 return ((s < 0.0) ? 0.0 : s);
9594}
9595
9596/*===========================================================================
9597 * FUNCTION : computeNoiseModelEntryO
9598 *
9599 * DESCRIPTION: function to map a given sensitivity to the O noise
9600 * model parameters in the DNG noise model.
9601 *
9602 * PARAMETERS : sens : the sensor sensitivity
9603 *
9604 ** RETURN : O (sensor readout) noise
9605 *
9606 *==========================================================================*/
9607double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
9608 int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
9609 double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
9610 1.0 : (1.0 * sens / max_analog_sens);
9611 double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
9612 gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
9613 return ((o < 0.0) ? 0.0 : o);
9614}
9615
9616/*===========================================================================
9617 * FUNCTION : getSensorSensitivity
9618 *
9619 * DESCRIPTION: convert iso_mode to an integer value
9620 *
9621 * PARAMETERS : iso_mode : the iso_mode supported by sensor
9622 *
9623 ** RETURN : sensitivity supported by sensor
9624 *
9625 *==========================================================================*/
9626int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
9627{
9628 int32_t sensitivity;
9629
9630 switch (iso_mode) {
9631 case CAM_ISO_MODE_100:
9632 sensitivity = 100;
9633 break;
9634 case CAM_ISO_MODE_200:
9635 sensitivity = 200;
9636 break;
9637 case CAM_ISO_MODE_400:
9638 sensitivity = 400;
9639 break;
9640 case CAM_ISO_MODE_800:
9641 sensitivity = 800;
9642 break;
9643 case CAM_ISO_MODE_1600:
9644 sensitivity = 1600;
9645 break;
9646 default:
9647 sensitivity = -1;
9648 break;
9649 }
9650 return sensitivity;
9651}
9652
9653/*===========================================================================
9654 * FUNCTION : getCamInfo
9655 *
9656 * DESCRIPTION: query camera capabilities
9657 *
9658 * PARAMETERS :
9659 * @cameraId : camera Id
9660 * @info : camera info struct to be filled in with camera capabilities
9661 *
9662 * RETURN : int type of status
9663 * NO_ERROR -- success
9664 * none-zero failure code
9665 *==========================================================================*/
9666int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
9667 struct camera_info *info)
9668{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08009669 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_GET_CAM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -07009670 int rc = 0;
9671
9672 pthread_mutex_lock(&gCamLock);
9673 if (NULL == gCamCapability[cameraId]) {
9674 rc = initCapabilities(cameraId);
9675 if (rc < 0) {
9676 pthread_mutex_unlock(&gCamLock);
9677 return rc;
9678 }
9679 }
9680
9681 if (NULL == gStaticMetadata[cameraId]) {
9682 rc = initStaticMetadata(cameraId);
9683 if (rc < 0) {
9684 pthread_mutex_unlock(&gCamLock);
9685 return rc;
9686 }
9687 }
9688
9689 switch(gCamCapability[cameraId]->position) {
9690 case CAM_POSITION_BACK:
9691 case CAM_POSITION_BACK_AUX:
9692 info->facing = CAMERA_FACING_BACK;
9693 break;
9694
9695 case CAM_POSITION_FRONT:
9696 case CAM_POSITION_FRONT_AUX:
9697 info->facing = CAMERA_FACING_FRONT;
9698 break;
9699
9700 default:
9701 LOGE("Unknown position type %d for camera id:%d",
9702 gCamCapability[cameraId]->position, cameraId);
9703 rc = -1;
9704 break;
9705 }
9706
9707
9708 info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009709#ifndef USE_HAL_3_3
9710 info->device_version = CAMERA_DEVICE_API_VERSION_3_4;
9711#else
Thierry Strudel3d639192016-09-09 11:52:26 -07009712 info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009713#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009714 info->static_camera_characteristics = gStaticMetadata[cameraId];
9715
9716 //For now assume both cameras can operate independently.
9717 info->conflicting_devices = NULL;
9718 info->conflicting_devices_length = 0;
9719
9720 //resource cost is 100 * MIN(1.0, m/M),
9721 //where m is throughput requirement with maximum stream configuration
9722 //and M is CPP maximum throughput.
9723 float max_fps = 0.0;
9724 for (uint32_t i = 0;
9725 i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
9726 if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
9727 max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
9728 }
9729 float ratio = 1.0 * MAX_PROCESSED_STREAMS *
9730 gCamCapability[cameraId]->active_array_size.width *
9731 gCamCapability[cameraId]->active_array_size.height * max_fps /
9732 gCamCapability[cameraId]->max_pixel_bandwidth;
9733 info->resource_cost = 100 * MIN(1.0, ratio);
9734 LOGI("camera %d resource cost is %d", cameraId,
9735 info->resource_cost);
9736
9737 pthread_mutex_unlock(&gCamLock);
9738 return rc;
9739}
9740
9741/*===========================================================================
9742 * FUNCTION : translateCapabilityToMetadata
9743 *
9744 * DESCRIPTION: translate the capability into camera_metadata_t
9745 *
9746 * PARAMETERS : type of the request
9747 *
9748 *
9749 * RETURN : success: camera_metadata_t*
9750 * failure: NULL
9751 *
9752 *==========================================================================*/
9753camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
9754{
9755 if (mDefaultMetadata[type] != NULL) {
9756 return mDefaultMetadata[type];
9757 }
9758 //first time we are handling this request
9759 //fill up the metadata structure using the wrapper class
9760 CameraMetadata settings;
9761 //translate from cam_capability_t to camera_metadata_tag_t
9762 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
9763 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
9764 int32_t defaultRequestID = 0;
9765 settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
9766
9767 /* OIS disable */
9768 char ois_prop[PROPERTY_VALUE_MAX];
9769 memset(ois_prop, 0, sizeof(ois_prop));
9770 property_get("persist.camera.ois.disable", ois_prop, "0");
9771 uint8_t ois_disable = (uint8_t)atoi(ois_prop);
9772
9773 /* Force video to use OIS */
9774 char videoOisProp[PROPERTY_VALUE_MAX];
9775 memset(videoOisProp, 0, sizeof(videoOisProp));
9776 property_get("persist.camera.ois.video", videoOisProp, "1");
9777 uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
Shuzhen Wang19463d72016-03-08 11:09:52 -08009778
9779 // Hybrid AE enable/disable
9780 char hybrid_ae_prop[PROPERTY_VALUE_MAX];
9781 memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
9782 property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
9783 const uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);
9784
Thierry Strudel3d639192016-09-09 11:52:26 -07009785 uint8_t controlIntent = 0;
9786 uint8_t focusMode;
9787 uint8_t vsMode;
9788 uint8_t optStabMode;
9789 uint8_t cacMode;
9790 uint8_t edge_mode;
9791 uint8_t noise_red_mode;
9792 uint8_t tonemap_mode;
9793 bool highQualityModeEntryAvailable = FALSE;
9794 bool fastModeEntryAvailable = FALSE;
9795 vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
9796 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Mansoor Aftabea39eba2017-01-26 14:58:25 -08009797
9798 char videoHdrProp[PROPERTY_VALUE_MAX];
9799 memset(videoHdrProp, 0, sizeof(videoHdrProp));
9800 property_get("persist.camera.hdr.video", videoHdrProp, "0");
9801 uint8_t hdr_mode = (uint8_t)atoi(videoHdrProp);
9802
Thierry Strudel3d639192016-09-09 11:52:26 -07009803 switch (type) {
9804 case CAMERA3_TEMPLATE_PREVIEW:
9805 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
9806 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
9807 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
9808 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
9809 edge_mode = ANDROID_EDGE_MODE_FAST;
9810 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
9811 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
9812 break;
9813 case CAMERA3_TEMPLATE_STILL_CAPTURE:
9814 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
9815 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
9816 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
9817 edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
9818 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
9819 tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
9820 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
9821 // Order of priority for default CAC is HIGH Quality -> FAST -> OFF
9822 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
9823 if (gCamCapability[mCameraId]->aberration_modes[i] ==
9824 CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
9825 highQualityModeEntryAvailable = TRUE;
9826 } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
9827 CAM_COLOR_CORRECTION_ABERRATION_FAST) {
9828 fastModeEntryAvailable = TRUE;
9829 }
9830 }
9831 if (highQualityModeEntryAvailable) {
9832 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
9833 } else if (fastModeEntryAvailable) {
9834 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
9835 }
9836 break;
9837 case CAMERA3_TEMPLATE_VIDEO_RECORD:
9838 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
9839 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
9840 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -07009841 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
9842 edge_mode = ANDROID_EDGE_MODE_FAST;
9843 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
9844 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
9845 if (forceVideoOis)
9846 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
Mansoor Aftabea39eba2017-01-26 14:58:25 -08009847 settings.update(QCAMERA3_VIDEO_HDR_MODE, &hdr_mode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07009848 break;
9849 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
9850 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
9851 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
9852 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -07009853 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
9854 edge_mode = ANDROID_EDGE_MODE_FAST;
9855 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
9856 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
9857 if (forceVideoOis)
9858 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
Mansoor Aftabea39eba2017-01-26 14:58:25 -08009859 settings.update(QCAMERA3_VIDEO_HDR_MODE, &hdr_mode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07009860 break;
9861 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
9862 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
9863 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
9864 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
9865 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
9866 edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
9867 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
9868 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
9869 break;
9870 case CAMERA3_TEMPLATE_MANUAL:
9871 edge_mode = ANDROID_EDGE_MODE_FAST;
9872 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
9873 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
9874 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
9875 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
9876 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
9877 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
9878 break;
9879 default:
9880 edge_mode = ANDROID_EDGE_MODE_FAST;
9881 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
9882 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
9883 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
9884 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
9885 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
9886 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
9887 break;
9888 }
Thierry Strudel04e026f2016-10-10 11:27:36 -07009889 // Set CAC to OFF if underlying device doesn't support
9890 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
9891 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
9892 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009893 settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
9894 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
9895 settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
9896 if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
9897 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
9898 }
9899 settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
9900
9901 if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
9902 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
9903 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
9904 else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
9905 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
9906 || ois_disable)
9907 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
9908 settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
9909
9910 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
9911 &gCamCapability[mCameraId]->exposure_compensation_default, 1);
9912
9913 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
9914 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
9915
9916 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
9917 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
9918
9919 static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
9920 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
9921
9922 static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
9923 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
9924
9925 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
9926 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
9927
9928 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
9929 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
9930
9931 static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
9932 settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
9933
9934 /*flash*/
9935 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
9936 settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
9937
9938 static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
9939 settings.update(ANDROID_FLASH_FIRING_POWER,
9940 &flashFiringLevel, 1);
9941
9942 /* lens */
9943 float default_aperture = gCamCapability[mCameraId]->apertures[0];
9944 settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
9945
9946 if (gCamCapability[mCameraId]->filter_densities_count) {
9947 float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
9948 settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
9949 gCamCapability[mCameraId]->filter_densities_count);
9950 }
9951
9952 float default_focal_length = gCamCapability[mCameraId]->focal_length;
9953 settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
9954
9955 if (focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
9956 float default_focus_distance = 0;
9957 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &default_focus_distance, 1);
9958 }
9959
9960 static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
9961 settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
9962
9963 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
9964 settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
9965
9966 static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
9967 settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
9968
9969 /* face detection (default to OFF) */
9970 static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
9971 settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
9972
9973 static const uint8_t histogramMode = ANDROID_STATISTICS_HISTOGRAM_MODE_OFF;
9974 settings.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &histogramMode, 1);
9975
9976 static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
9977 settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
9978
9979 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
9980 settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
9981
9982 static const uint8_t lensShadingMode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
9983 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &lensShadingMode, 1);
9984
9985 static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
9986 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
9987
9988 /* Exposure time(Update the Min Exposure Time)*/
9989 int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
9990 settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
9991
9992 /* frame duration */
9993 static const int64_t default_frame_duration = NSEC_PER_33MSEC;
9994 settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
9995
9996 /* sensitivity */
9997 static const int32_t default_sensitivity = 100;
9998 settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009999#ifndef USE_HAL_3_3
10000 static const int32_t default_isp_sensitivity =
10001 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
10002 settings.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &default_isp_sensitivity, 1);
10003#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070010004
10005 /*edge mode*/
10006 settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
10007
10008 /*noise reduction mode*/
10009 settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
10010
10011 /*color correction mode*/
10012 static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
10013 settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
10014
10015 /*transform matrix mode*/
10016 settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
10017
10018 int32_t scaler_crop_region[4];
10019 scaler_crop_region[0] = 0;
10020 scaler_crop_region[1] = 0;
10021 scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
10022 scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
10023 settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
10024
10025 static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
10026 settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
10027
10028 /*focus distance*/
10029 float focus_distance = 0.0;
10030 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
10031
10032 /*target fps range: use maximum range for picture, and maximum fixed range for video*/
Thierry Strudele80ad7c2016-12-06 10:16:27 -080010033 /* Restrict template max_fps to 30 */
Thierry Strudel3d639192016-09-09 11:52:26 -070010034 float max_range = 0.0;
10035 float max_fixed_fps = 0.0;
10036 int32_t fps_range[2] = {0, 0};
10037 for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
10038 i++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080010039 if (gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps >
10040 TEMPLATE_MAX_PREVIEW_FPS) {
10041 continue;
10042 }
Thierry Strudel3d639192016-09-09 11:52:26 -070010043 float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
10044 gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
10045 if (type == CAMERA3_TEMPLATE_PREVIEW ||
10046 type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
10047 type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
10048 if (range > max_range) {
10049 fps_range[0] =
10050 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
10051 fps_range[1] =
10052 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
10053 max_range = range;
10054 }
10055 } else {
10056 if (range < 0.01 && max_fixed_fps <
10057 gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
10058 fps_range[0] =
10059 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
10060 fps_range[1] =
10061 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
10062 max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
10063 }
10064 }
10065 }
10066 settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
10067
10068 /*precapture trigger*/
10069 uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
10070 settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
10071
10072 /*af trigger*/
10073 uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
10074 settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
10075
10076 /* ae & af regions */
10077 int32_t active_region[] = {
10078 gCamCapability[mCameraId]->active_array_size.left,
10079 gCamCapability[mCameraId]->active_array_size.top,
10080 gCamCapability[mCameraId]->active_array_size.left +
10081 gCamCapability[mCameraId]->active_array_size.width,
10082 gCamCapability[mCameraId]->active_array_size.top +
10083 gCamCapability[mCameraId]->active_array_size.height,
10084 0};
10085 settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
10086 sizeof(active_region) / sizeof(active_region[0]));
10087 settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
10088 sizeof(active_region) / sizeof(active_region[0]));
10089
10090 /* black level lock */
10091 uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
10092 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
10093
10094 /* lens shading map mode */
10095 uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
10096 if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
10097 shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
10098 }
10099 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
10100
10101 //special defaults for manual template
10102 if (type == CAMERA3_TEMPLATE_MANUAL) {
10103 static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
10104 settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
10105
10106 static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
10107 settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
10108
10109 static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
10110 settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
10111
10112 static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
10113 settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
10114
10115 static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
10116 settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
10117
10118 static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
10119 settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
10120 }
10121
10122
10123 /* TNR
10124 * We'll use this location to determine which modes TNR will be set.
10125 * We will enable TNR to be on if either of the Preview/Video stream requires TNR
10126 * This is not to be confused with linking on a per stream basis that decision
10127 * is still on per-session basis and will be handled as part of config stream
10128 */
10129 uint8_t tnr_enable = 0;
10130
10131 if (m_bTnrPreview || m_bTnrVideo) {
10132
10133 switch (type) {
10134 case CAMERA3_TEMPLATE_VIDEO_RECORD:
10135 tnr_enable = 1;
10136 break;
10137
10138 default:
10139 tnr_enable = 0;
10140 break;
10141 }
10142
10143 int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
10144 settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
10145 settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
10146
10147 LOGD("TNR:%d with process plate %d for template:%d",
10148 tnr_enable, tnr_process_type, type);
10149 }
10150
10151 //Update Link tags to default
10152 int32_t sync_type = CAM_TYPE_STANDALONE;
10153 settings.update(QCAMERA3_DUALCAM_LINK_ENABLE, &sync_type, 1);
10154
10155 int32_t is_main = 0; //this doesn't matter as app should overwrite
10156 settings.update(QCAMERA3_DUALCAM_LINK_IS_MAIN, &is_main, 1);
10157
10158 settings.update(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID, &is_main, 1);
10159
10160 /* CDS default */
10161 char prop[PROPERTY_VALUE_MAX];
10162 memset(prop, 0, sizeof(prop));
10163 property_get("persist.camera.CDS", prop, "Auto");
10164 cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
10165 cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
10166 if (CAM_CDS_MODE_MAX == cds_mode) {
10167 cds_mode = CAM_CDS_MODE_AUTO;
10168 }
10169
10170 /* Disabling CDS in templates which have TNR enabled*/
10171 if (tnr_enable)
10172 cds_mode = CAM_CDS_MODE_OFF;
10173
10174 int32_t mode = cds_mode;
10175 settings.update(QCAMERA3_CDS_MODE, &mode, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -070010176
Thierry Strudel04e026f2016-10-10 11:27:36 -070010177 /* IR Mode Default Off */
10178 int32_t ir_mode = (int32_t)QCAMERA3_IR_MODE_OFF;
10179 settings.update(QCAMERA3_IR_MODE, &ir_mode, 1);
10180
Thierry Strudel269c81a2016-10-12 12:13:59 -070010181 /* Manual Convergence AEC Speed is disabled by default*/
10182 float default_aec_speed = 0;
10183 settings.update(QCAMERA3_AEC_CONVERGENCE_SPEED, &default_aec_speed, 1);
10184
10185 /* Manual Convergence AWB Speed is disabled by default*/
10186 float default_awb_speed = 0;
10187 settings.update(QCAMERA3_AWB_CONVERGENCE_SPEED, &default_awb_speed, 1);
10188
Thierry Strudel295a0ca2016-11-03 18:38:47 -070010189 // Set instant AEC to normal convergence by default
10190 int32_t instant_aec_mode = (int32_t)QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE;
10191 settings.update(QCAMERA3_INSTANT_AEC_MODE, &instant_aec_mode, 1);
10192
Shuzhen Wang19463d72016-03-08 11:09:52 -080010193 /* hybrid ae */
10194 settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);
10195
Thierry Strudel3d639192016-09-09 11:52:26 -070010196 mDefaultMetadata[type] = settings.release();
10197
10198 return mDefaultMetadata[type];
10199}
10200
10201/*===========================================================================
10202 * FUNCTION : setFrameParameters
10203 *
10204 * DESCRIPTION: set parameters per frame as requested in the metadata from
10205 * framework
10206 *
10207 * PARAMETERS :
10208 * @request : request that needs to be serviced
Thierry Strudelc2ee3302016-11-17 12:33:12 -080010209 * @streamsArray : Stream ID of all the requested streams
Thierry Strudel3d639192016-09-09 11:52:26 -070010210 * @blob_request: Whether this request is a blob request or not
10211 *
10212 * RETURN : success: NO_ERROR
10213 * failure:
10214 *==========================================================================*/
10215int QCamera3HardwareInterface::setFrameParameters(
10216 camera3_capture_request_t *request,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080010217 cam_stream_ID_t streamsArray,
Thierry Strudel3d639192016-09-09 11:52:26 -070010218 int blob_request,
10219 uint32_t snapshotStreamId)
10220{
10221 /*translate from camera_metadata_t type to parm_type_t*/
10222 int rc = 0;
10223 int32_t hal_version = CAM_HAL_V3;
10224
10225 clear_metadata_buffer(mParameters);
10226 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
10227 LOGE("Failed to set hal version in the parameters");
10228 return BAD_VALUE;
10229 }
10230
10231 /*we need to update the frame number in the parameters*/
10232 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
10233 request->frame_number)) {
10234 LOGE("Failed to set the frame number in the parameters");
10235 return BAD_VALUE;
10236 }
10237
10238 /* Update stream id of all the requested buffers */
Thierry Strudelc2ee3302016-11-17 12:33:12 -080010239 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070010240 LOGE("Failed to set stream type mask in the parameters");
10241 return BAD_VALUE;
10242 }
10243
10244 if (mUpdateDebugLevel) {
10245 uint32_t dummyDebugLevel = 0;
10246 /* The value of dummyDebugLevel is irrelavent. On
10247 * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, read debug property */
10248 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
10249 dummyDebugLevel)) {
10250 LOGE("Failed to set UPDATE_DEBUG_LEVEL");
10251 return BAD_VALUE;
10252 }
10253 mUpdateDebugLevel = false;
10254 }
10255
10256 if(request->settings != NULL){
10257 rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
10258 if (blob_request)
10259 memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
10260 }
10261
10262 return rc;
10263}
10264
10265/*===========================================================================
10266 * FUNCTION : setReprocParameters
10267 *
10268 * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
10269 * return it.
10270 *
10271 * PARAMETERS :
10272 * @request : request that needs to be serviced
10273 *
10274 * RETURN : success: NO_ERROR
10275 * failure:
10276 *==========================================================================*/
10277int32_t QCamera3HardwareInterface::setReprocParameters(
10278 camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
10279 uint32_t snapshotStreamId)
10280{
10281 /*translate from camera_metadata_t type to parm_type_t*/
10282 int rc = 0;
10283
10284 if (NULL == request->settings){
10285 LOGE("Reprocess settings cannot be NULL");
10286 return BAD_VALUE;
10287 }
10288
10289 if (NULL == reprocParam) {
10290 LOGE("Invalid reprocessing metadata buffer");
10291 return BAD_VALUE;
10292 }
10293 clear_metadata_buffer(reprocParam);
10294
10295 /*we need to update the frame number in the parameters*/
10296 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
10297 request->frame_number)) {
10298 LOGE("Failed to set the frame number in the parameters");
10299 return BAD_VALUE;
10300 }
10301
10302 rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
10303 if (rc < 0) {
10304 LOGE("Failed to translate reproc request");
10305 return rc;
10306 }
10307
10308 CameraMetadata frame_settings;
10309 frame_settings = request->settings;
10310 if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
10311 frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
10312 int32_t *crop_count =
10313 frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
10314 int32_t *crop_data =
10315 frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
10316 int32_t *roi_map =
10317 frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
10318 if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
10319 cam_crop_data_t crop_meta;
10320 memset(&crop_meta, 0, sizeof(cam_crop_data_t));
10321 crop_meta.num_of_streams = 1;
10322 crop_meta.crop_info[0].crop.left = crop_data[0];
10323 crop_meta.crop_info[0].crop.top = crop_data[1];
10324 crop_meta.crop_info[0].crop.width = crop_data[2];
10325 crop_meta.crop_info[0].crop.height = crop_data[3];
10326
10327 crop_meta.crop_info[0].roi_map.left =
10328 roi_map[0];
10329 crop_meta.crop_info[0].roi_map.top =
10330 roi_map[1];
10331 crop_meta.crop_info[0].roi_map.width =
10332 roi_map[2];
10333 crop_meta.crop_info[0].roi_map.height =
10334 roi_map[3];
10335
10336 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
10337 rc = BAD_VALUE;
10338 }
10339 LOGD("Found reprocess crop data for stream %p %dx%d, %dx%d",
10340 request->input_buffer->stream,
10341 crop_meta.crop_info[0].crop.left,
10342 crop_meta.crop_info[0].crop.top,
10343 crop_meta.crop_info[0].crop.width,
10344 crop_meta.crop_info[0].crop.height);
10345 LOGD("Found reprocess roi map data for stream %p %dx%d, %dx%d",
10346 request->input_buffer->stream,
10347 crop_meta.crop_info[0].roi_map.left,
10348 crop_meta.crop_info[0].roi_map.top,
10349 crop_meta.crop_info[0].roi_map.width,
10350 crop_meta.crop_info[0].roi_map.height);
10351 } else {
10352 LOGE("Invalid reprocess crop count %d!", *crop_count);
10353 }
10354 } else {
10355 LOGE("No crop data from matching output stream");
10356 }
10357
10358 /* These settings are not needed for regular requests so handle them specially for
10359 reprocess requests; information needed for EXIF tags */
10360 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
10361 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
10362 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
10363 if (NAME_NOT_FOUND != val) {
10364 uint32_t flashMode = (uint32_t)val;
10365 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
10366 rc = BAD_VALUE;
10367 }
10368 } else {
10369 LOGE("Could not map fwk flash mode %d to correct hal flash mode",
10370 frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
10371 }
10372 } else {
10373 LOGH("No flash mode in reprocess settings");
10374 }
10375
10376 if (frame_settings.exists(ANDROID_FLASH_STATE)) {
10377 int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
10378 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
10379 rc = BAD_VALUE;
10380 }
10381 } else {
10382 LOGH("No flash state in reprocess settings");
10383 }
10384
10385 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS)) {
10386 uint8_t *reprocessFlags =
10387 frame_settings.find(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS).data.u8;
10388 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_REPROCESS_FLAGS,
10389 *reprocessFlags)) {
10390 rc = BAD_VALUE;
10391 }
10392 }
10393
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070010394 // Add metadata which reprocess needs
10395 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB)) {
10396 cam_reprocess_info_t *repro_info =
10397 (cam_reprocess_info_t *)frame_settings.find
10398 (QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB).data.u8;
Thierry Strudel3d639192016-09-09 11:52:26 -070010399 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_SENSOR,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070010400 repro_info->sensor_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070010401 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CAMIF,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070010402 repro_info->camif_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070010403 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_ISP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070010404 repro_info->isp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070010405 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CPP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070010406 repro_info->cpp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070010407 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_FOCAL_LENGTH_RATIO,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070010408 repro_info->af_focal_length_ratio);
Thierry Strudel3d639192016-09-09 11:52:26 -070010409 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_FLIP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070010410 repro_info->pipeline_flip);
10411 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_ROI,
10412 repro_info->af_roi);
10413 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_IMG_DYN_FEAT,
10414 repro_info->dyn_mask);
Thierry Strudel3d639192016-09-09 11:52:26 -070010415 /* If there is ANDROID_JPEG_ORIENTATION in frame setting,
10416 CAM_INTF_PARM_ROTATION metadata then has been added in
10417 translateToHalMetadata. HAL need to keep this new rotation
10418 metadata. Otherwise, the old rotation info saved in the vendor tag
10419 would be used */
10420 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
10421 CAM_INTF_PARM_ROTATION, reprocParam) {
10422 LOGD("CAM_INTF_PARM_ROTATION metadata is added in translateToHalMetadata");
10423 } else {
10424 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_ROTATION,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070010425 repro_info->rotation_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070010426 }
Thierry Strudel3d639192016-09-09 11:52:26 -070010427 }
10428
10429 /* Add additional JPEG cropping information. App add QCAMERA3_JPEG_ENCODE_CROP_RECT
10430 to ask for cropping and use ROI for downscale/upscale during HW JPEG encoding.
10431 roi.width and roi.height would be the final JPEG size.
10432 For now, HAL only checks this for reprocess request */
10433 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ENABLE) &&
10434 frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_RECT)) {
10435 uint8_t *enable =
10436 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ENABLE).data.u8;
10437 if (*enable == TRUE) {
10438 int32_t *crop_data =
10439 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_RECT).data.i32;
10440 cam_stream_crop_info_t crop_meta;
10441 memset(&crop_meta, 0, sizeof(cam_stream_crop_info_t));
10442 crop_meta.stream_id = 0;
10443 crop_meta.crop.left = crop_data[0];
10444 crop_meta.crop.top = crop_data[1];
10445 crop_meta.crop.width = crop_data[2];
10446 crop_meta.crop.height = crop_data[3];
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010447 // The JPEG crop roi should match cpp output size
10448 IF_META_AVAILABLE(cam_stream_crop_info_t, cpp_crop,
10449 CAM_INTF_META_SNAP_CROP_INFO_CPP, reprocParam) {
10450 crop_meta.roi_map.left = 0;
10451 crop_meta.roi_map.top = 0;
10452 crop_meta.roi_map.width = cpp_crop->crop.width;
10453 crop_meta.roi_map.height = cpp_crop->crop.height;
Thierry Strudel3d639192016-09-09 11:52:26 -070010454 }
10455 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_ENCODE_CROP,
10456 crop_meta);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010457 LOGH("Add JPEG encode crop left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070010458 crop_meta.crop.left, crop_meta.crop.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010459 crop_meta.crop.width, crop_meta.crop.height, mCameraId);
10460 LOGH("Add JPEG encode crop ROI left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070010461 crop_meta.roi_map.left, crop_meta.roi_map.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010462 crop_meta.roi_map.width, crop_meta.roi_map.height, mCameraId);
10463
10464 // Add JPEG scale information
10465 cam_dimension_t scale_dim;
10466 memset(&scale_dim, 0, sizeof(cam_dimension_t));
10467 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ROI)) {
10468 int32_t *roi =
10469 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ROI).data.i32;
10470 scale_dim.width = roi[2];
10471 scale_dim.height = roi[3];
10472 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_SCALE_DIMENSION,
10473 scale_dim);
10474 LOGH("Add JPEG encode scale width %d, height %d, mCameraId %d",
10475 scale_dim.width, scale_dim.height, mCameraId);
10476 }
Thierry Strudel3d639192016-09-09 11:52:26 -070010477 }
10478 }
10479
10480 return rc;
10481}
10482
10483/*===========================================================================
10484 * FUNCTION : saveRequestSettings
10485 *
10486 * DESCRIPTION: Add any settings that might have changed to the request settings
10487 * and save the settings to be applied on the frame
10488 *
10489 * PARAMETERS :
10490 * @jpegMetadata : the extracted and/or modified jpeg metadata
10491 * @request : request with initial settings
10492 *
10493 * RETURN :
10494 * camera_metadata_t* : pointer to the saved request settings
10495 *==========================================================================*/
10496camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
10497 const CameraMetadata &jpegMetadata,
10498 camera3_capture_request_t *request)
10499{
10500 camera_metadata_t *resultMetadata;
10501 CameraMetadata camMetadata;
10502 camMetadata = request->settings;
10503
10504 if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
10505 int32_t thumbnail_size[2];
10506 thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
10507 thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
10508 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
10509 jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
10510 }
10511
10512 if (request->input_buffer != NULL) {
10513 uint8_t reprocessFlags = 1;
10514 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
10515 (uint8_t*)&reprocessFlags,
10516 sizeof(reprocessFlags));
10517 }
10518
10519 resultMetadata = camMetadata.release();
10520 return resultMetadata;
10521}
10522
10523/*===========================================================================
10524 * FUNCTION : setHalFpsRange
10525 *
10526 * DESCRIPTION: set FPS range parameter
10527 *
10528 *
10529 * PARAMETERS :
10530 * @settings : Metadata from framework
10531 * @hal_metadata: Metadata buffer
10532 *
10533 *
10534 * RETURN : success: NO_ERROR
10535 * failure:
10536 *==========================================================================*/
10537int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
10538 metadata_buffer_t *hal_metadata)
10539{
10540 int32_t rc = NO_ERROR;
10541 cam_fps_range_t fps_range;
10542 fps_range.min_fps = (float)
10543 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
10544 fps_range.max_fps = (float)
10545 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
10546 fps_range.video_min_fps = fps_range.min_fps;
10547 fps_range.video_max_fps = fps_range.max_fps;
10548
10549 LOGD("aeTargetFpsRange fps: [%f %f]",
10550 fps_range.min_fps, fps_range.max_fps);
10551 /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
10552 * follows:
10553 * ---------------------------------------------------------------|
10554 * Video stream is absent in configure_streams |
10555 * (Camcorder preview before the first video record |
10556 * ---------------------------------------------------------------|
10557 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
10558 * | | | vid_min/max_fps|
10559 * ---------------------------------------------------------------|
10560 * NO | [ 30, 240] | 240 | [240, 240] |
10561 * |-------------|-------------|----------------|
10562 * | [240, 240] | 240 | [240, 240] |
10563 * ---------------------------------------------------------------|
10564 * Video stream is present in configure_streams |
10565 * ---------------------------------------------------------------|
10566 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
10567 * | | | vid_min/max_fps|
10568 * ---------------------------------------------------------------|
10569 * NO | [ 30, 240] | 240 | [240, 240] |
10570 * (camcorder prev |-------------|-------------|----------------|
10571 * after video rec | [240, 240] | 240 | [240, 240] |
10572 * is stopped) | | | |
10573 * ---------------------------------------------------------------|
10574 * YES | [ 30, 240] | 240 | [240, 240] |
10575 * |-------------|-------------|----------------|
10576 * | [240, 240] | 240 | [240, 240] |
10577 * ---------------------------------------------------------------|
10578 * When Video stream is absent in configure_streams,
10579 * preview fps = sensor_fps / batchsize
10580 * Eg: for 240fps at batchSize 4, preview = 60fps
10581 * for 120fps at batchSize 4, preview = 30fps
10582 *
10583 * When video stream is present in configure_streams, preview fps is as per
10584 * the ratio of preview buffers to video buffers requested in process
10585 * capture request
10586 */
10587 mBatchSize = 0;
10588 if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
10589 fps_range.min_fps = fps_range.video_max_fps;
10590 fps_range.video_min_fps = fps_range.video_max_fps;
10591 int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
10592 fps_range.max_fps);
10593 if (NAME_NOT_FOUND != val) {
10594 cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
10595 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
10596 return BAD_VALUE;
10597 }
10598
10599 if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
10600 /* If batchmode is currently in progress and the fps changes,
10601 * set the flag to restart the sensor */
10602 if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
10603 (mHFRVideoFps != fps_range.max_fps)) {
10604 mNeedSensorRestart = true;
10605 }
10606 mHFRVideoFps = fps_range.max_fps;
10607 mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
10608 if (mBatchSize > MAX_HFR_BATCH_SIZE) {
10609 mBatchSize = MAX_HFR_BATCH_SIZE;
10610 }
10611 }
10612 LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);
10613
10614 }
10615 } else {
10616 /* HFR mode is session param in backend/ISP. This should be reset when
10617 * in non-HFR mode */
10618 cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
10619 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
10620 return BAD_VALUE;
10621 }
10622 }
10623 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
10624 return BAD_VALUE;
10625 }
10626 LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
10627 fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
10628 return rc;
10629}
10630
10631/*===========================================================================
10632 * FUNCTION : translateToHalMetadata
10633 *
10634 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
10635 *
10636 *
10637 * PARAMETERS :
10638 * @request : request sent from framework
10639 *
10640 *
10641 * RETURN : success: NO_ERROR
10642 * failure:
10643 *==========================================================================*/
10644int QCamera3HardwareInterface::translateToHalMetadata
10645 (const camera3_capture_request_t *request,
10646 metadata_buffer_t *hal_metadata,
Chien-Yu Chen92724a82017-01-06 11:50:30 -080010647 uint32_t snapshotStreamId) {
10648 if (request == nullptr || hal_metadata == nullptr) {
10649 return BAD_VALUE;
10650 }
10651
10652 int64_t minFrameDuration = getMinFrameDuration(request);
10653
10654 return translateFwkMetadataToHalMetadata(request->settings, hal_metadata, snapshotStreamId,
10655 minFrameDuration);
10656}
10657
10658int QCamera3HardwareInterface::translateFwkMetadataToHalMetadata(
10659 const camera_metadata_t *frameworkMetadata, metadata_buffer_t *hal_metadata,
10660 uint32_t snapshotStreamId, int64_t minFrameDuration) {
10661
Thierry Strudel3d639192016-09-09 11:52:26 -070010662 int rc = 0;
10663 CameraMetadata frame_settings;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080010664 frame_settings = frameworkMetadata;
Thierry Strudel3d639192016-09-09 11:52:26 -070010665
10666 /* Do not change the order of the following list unless you know what you are
10667 * doing.
10668 * The order is laid out in such a way that parameters in the front of the table
10669 * may be used to override the parameters later in the table. Examples are:
10670 * 1. META_MODE should precede AEC/AWB/AF MODE
10671 * 2. AEC MODE should preced EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
10672 * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
10673 * 4. Any mode should precede it's corresponding settings
10674 */
10675 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
10676 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
10677 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
10678 rc = BAD_VALUE;
10679 }
10680 rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
10681 if (rc != NO_ERROR) {
10682 LOGE("extractSceneMode failed");
10683 }
10684 }
10685
10686 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
10687 uint8_t fwk_aeMode =
10688 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
10689 uint8_t aeMode;
10690 int32_t redeye;
10691
10692 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
10693 aeMode = CAM_AE_MODE_OFF;
10694 } else {
10695 aeMode = CAM_AE_MODE_ON;
10696 }
10697 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
10698 redeye = 1;
10699 } else {
10700 redeye = 0;
10701 }
10702
10703 int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
10704 fwk_aeMode);
10705 if (NAME_NOT_FOUND != val) {
10706 int32_t flashMode = (int32_t)val;
10707 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
10708 }
10709
10710 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
10711 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
10712 rc = BAD_VALUE;
10713 }
10714 }
10715
10716 if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
10717 uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
10718 int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
10719 fwk_whiteLevel);
10720 if (NAME_NOT_FOUND != val) {
10721 uint8_t whiteLevel = (uint8_t)val;
10722 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
10723 rc = BAD_VALUE;
10724 }
10725 }
10726 }
10727
10728 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
10729 uint8_t fwk_cacMode =
10730 frame_settings.find(
10731 ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
10732 int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
10733 fwk_cacMode);
10734 if (NAME_NOT_FOUND != val) {
10735 cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
10736 bool entryAvailable = FALSE;
10737 // Check whether Frameworks set CAC mode is supported in device or not
10738 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
10739 if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
10740 entryAvailable = TRUE;
10741 break;
10742 }
10743 }
10744 LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
10745 // If entry not found then set the device supported mode instead of frameworks mode i.e,
10746 // Only HW ISP CAC + NO SW CAC : Advertise all 3 with High doing same as fast by ISP
10747 // NO HW ISP CAC + Only SW CAC : Advertise all 3 with Fast doing the same as OFF
10748 if (entryAvailable == FALSE) {
10749 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
10750 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
10751 } else {
10752 if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
10753 // High is not supported and so set the FAST as spec say's underlying
10754 // device implementation can be the same for both modes.
10755 cacMode = CAM_COLOR_CORRECTION_ABERRATION_FAST;
10756 } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
10757 // Fast is not supported and so we cannot set HIGH or FAST but choose OFF
10758 // in order to avoid the fps drop due to high quality
10759 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
10760 } else {
10761 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
10762 }
10763 }
10764 }
10765 LOGD("Final cacMode is %d", cacMode);
10766 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
10767 rc = BAD_VALUE;
10768 }
10769 } else {
10770 LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
10771 }
10772 }
10773
10774 if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
10775 uint8_t fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
10776 int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
10777 fwk_focusMode);
10778 if (NAME_NOT_FOUND != val) {
10779 uint8_t focusMode = (uint8_t)val;
10780 LOGD("set focus mode %d", focusMode);
10781 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
10782 rc = BAD_VALUE;
10783 }
10784 }
10785 }
10786
10787 if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) {
10788 float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
10789 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
10790 focalDistance)) {
10791 rc = BAD_VALUE;
10792 }
10793 }
10794
10795 if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
10796 uint8_t fwk_antibandingMode =
10797 frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
10798 int val = lookupHalName(ANTIBANDING_MODES_MAP,
10799 METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
10800 if (NAME_NOT_FOUND != val) {
10801 uint32_t hal_antibandingMode = (uint32_t)val;
Shuzhen Wangf6890e02016-08-12 14:28:54 -070010802 if (hal_antibandingMode == CAM_ANTIBANDING_MODE_AUTO) {
10803 if (m60HzZone) {
10804 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_60HZ;
10805 } else {
10806 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_50HZ;
10807 }
10808 }
Thierry Strudel3d639192016-09-09 11:52:26 -070010809 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
10810 hal_antibandingMode)) {
10811 rc = BAD_VALUE;
10812 }
10813 }
10814 }
10815
10816 if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
10817 int32_t expCompensation = frame_settings.find(
10818 ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
10819 if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
10820 expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
10821 if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
10822 expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
Zhijun He426c4d92016-12-16 14:27:50 -080010823 ALOGV("CAM_DEBUG: Setting compensation:%d", expCompensation);
Thierry Strudel3d639192016-09-09 11:52:26 -070010824 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
10825 expCompensation)) {
10826 rc = BAD_VALUE;
10827 }
10828 }
10829
10830 if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
10831 uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
10832 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
10833 rc = BAD_VALUE;
10834 }
10835 }
10836 if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
10837 rc = setHalFpsRange(frame_settings, hal_metadata);
10838 if (rc != NO_ERROR) {
10839 LOGE("setHalFpsRange failed");
10840 }
10841 }
10842
10843 if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
10844 uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
10845 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
10846 rc = BAD_VALUE;
10847 }
10848 }
10849
10850 if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
10851 uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
10852 int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
10853 fwk_effectMode);
10854 if (NAME_NOT_FOUND != val) {
10855 uint8_t effectMode = (uint8_t)val;
10856 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
10857 rc = BAD_VALUE;
10858 }
10859 }
10860 }
10861
10862 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
10863 uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
10864 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
10865 colorCorrectMode)) {
10866 rc = BAD_VALUE;
10867 }
10868 }
10869
10870 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
10871 cam_color_correct_gains_t colorCorrectGains;
10872 for (size_t i = 0; i < CC_GAIN_MAX; i++) {
10873 colorCorrectGains.gains[i] =
10874 frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
10875 }
10876 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
10877 colorCorrectGains)) {
10878 rc = BAD_VALUE;
10879 }
10880 }
10881
10882 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
10883 cam_color_correct_matrix_t colorCorrectTransform;
10884 cam_rational_type_t transform_elem;
10885 size_t num = 0;
10886 for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
10887 for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
10888 transform_elem.numerator =
10889 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
10890 transform_elem.denominator =
10891 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
10892 colorCorrectTransform.transform_matrix[i][j] = transform_elem;
10893 num++;
10894 }
10895 }
10896 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
10897 colorCorrectTransform)) {
10898 rc = BAD_VALUE;
10899 }
10900 }
10901
10902 cam_trigger_t aecTrigger;
10903 aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
10904 aecTrigger.trigger_id = -1;
10905 if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
10906 frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
10907 aecTrigger.trigger =
10908 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
10909 aecTrigger.trigger_id =
10910 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
10911 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
10912 aecTrigger)) {
10913 rc = BAD_VALUE;
10914 }
10915 LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
10916 aecTrigger.trigger, aecTrigger.trigger_id);
10917 }
10918
10919 /*af_trigger must come with a trigger id*/
10920 if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
10921 frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
10922 cam_trigger_t af_trigger;
10923 af_trigger.trigger =
10924 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
10925 af_trigger.trigger_id =
10926 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
10927 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
10928 rc = BAD_VALUE;
10929 }
10930 LOGD("AfTrigger: %d AfTriggerID: %d",
10931 af_trigger.trigger, af_trigger.trigger_id);
10932 }
10933
10934 if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
10935 int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
10936 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
10937 rc = BAD_VALUE;
10938 }
10939 }
10940 if (frame_settings.exists(ANDROID_EDGE_MODE)) {
10941 cam_edge_application_t edge_application;
10942 edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
10943 if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
10944 edge_application.sharpness = 0;
10945 } else {
10946 edge_application.sharpness = gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
10947 }
10948 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
10949 rc = BAD_VALUE;
10950 }
10951 }
10952
10953 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
10954 int32_t respectFlashMode = 1;
10955 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
10956 uint8_t fwk_aeMode =
10957 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
10958 if (fwk_aeMode > ANDROID_CONTROL_AE_MODE_ON) {
10959 respectFlashMode = 0;
10960 LOGH("AE Mode controls flash, ignore android.flash.mode");
10961 }
10962 }
10963 if (respectFlashMode) {
10964 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
10965 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
10966 LOGH("flash mode after mapping %d", val);
10967 // To check: CAM_INTF_META_FLASH_MODE usage
10968 if (NAME_NOT_FOUND != val) {
10969 uint8_t flashMode = (uint8_t)val;
10970 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
10971 rc = BAD_VALUE;
10972 }
10973 }
10974 }
10975 }
10976
10977 if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
10978 uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
10979 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
10980 rc = BAD_VALUE;
10981 }
10982 }
10983
10984 if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
10985 int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
10986 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
10987 flashFiringTime)) {
10988 rc = BAD_VALUE;
10989 }
10990 }
10991
10992 if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
10993 uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
10994 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
10995 hotPixelMode)) {
10996 rc = BAD_VALUE;
10997 }
10998 }
10999
11000 if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
11001 float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
11002 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
11003 lensAperture)) {
11004 rc = BAD_VALUE;
11005 }
11006 }
11007
11008 if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
11009 float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
11010 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
11011 filterDensity)) {
11012 rc = BAD_VALUE;
11013 }
11014 }
11015
11016 if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
11017 float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
11018 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
11019 focalLength)) {
11020 rc = BAD_VALUE;
11021 }
11022 }
11023
11024 if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
11025 uint8_t optStabMode =
11026 frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
11027 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
11028 optStabMode)) {
11029 rc = BAD_VALUE;
11030 }
11031 }
11032
11033 if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
11034 uint8_t videoStabMode =
11035 frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
11036 LOGD("videoStabMode from APP = %d", videoStabMode);
11037 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_VIDEO_STAB_MODE,
11038 videoStabMode)) {
11039 rc = BAD_VALUE;
11040 }
11041 }
11042
11043
11044 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
11045 uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
11046 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
11047 noiseRedMode)) {
11048 rc = BAD_VALUE;
11049 }
11050 }
11051
11052 if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
11053 float reprocessEffectiveExposureFactor =
11054 frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
11055 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
11056 reprocessEffectiveExposureFactor)) {
11057 rc = BAD_VALUE;
11058 }
11059 }
11060
11061 cam_crop_region_t scalerCropRegion;
11062 bool scalerCropSet = false;
11063 if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
11064 scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
11065 scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
11066 scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
11067 scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
11068
11069 // Map coordinate system from active array to sensor output.
11070 mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
11071 scalerCropRegion.width, scalerCropRegion.height);
11072
11073 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
11074 scalerCropRegion)) {
11075 rc = BAD_VALUE;
11076 }
11077 scalerCropSet = true;
11078 }
11079
11080 if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
11081 int64_t sensorExpTime =
11082 frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
11083 LOGD("setting sensorExpTime %lld", sensorExpTime);
11084 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
11085 sensorExpTime)) {
11086 rc = BAD_VALUE;
11087 }
11088 }
11089
11090 if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
11091 int64_t sensorFrameDuration =
11092 frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070011093 sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
11094 if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
11095 sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
11096 LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
11097 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
11098 sensorFrameDuration)) {
11099 rc = BAD_VALUE;
11100 }
11101 }
11102
11103 if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
11104 int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
11105 if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
11106 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
11107 if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
11108 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
11109 LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
11110 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
11111 sensorSensitivity)) {
11112 rc = BAD_VALUE;
11113 }
11114 }
11115
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011116#ifndef USE_HAL_3_3
11117 if (frame_settings.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
11118 int32_t ispSensitivity =
11119 frame_settings.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST).data.i32[0];
11120 if (ispSensitivity <
11121 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity) {
11122 ispSensitivity =
11123 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
11124 LOGD("clamp ispSensitivity to %d", ispSensitivity);
11125 }
11126 if (ispSensitivity >
11127 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity) {
11128 ispSensitivity =
11129 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity;
11130 LOGD("clamp ispSensitivity to %d", ispSensitivity);
11131 }
11132 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_ISP_SENSITIVITY,
11133 ispSensitivity)) {
11134 rc = BAD_VALUE;
11135 }
11136 }
11137#endif
11138
Thierry Strudel3d639192016-09-09 11:52:26 -070011139 if (frame_settings.exists(ANDROID_SHADING_MODE)) {
11140 uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
11141 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
11142 rc = BAD_VALUE;
11143 }
11144 }
11145
11146 if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
11147 uint8_t fwk_facedetectMode =
11148 frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
11149
11150 int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
11151 fwk_facedetectMode);
11152
11153 if (NAME_NOT_FOUND != val) {
11154 uint8_t facedetectMode = (uint8_t)val;
11155 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
11156 facedetectMode)) {
11157 rc = BAD_VALUE;
11158 }
11159 }
11160 }
11161
11162 if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) {
11163 uint8_t histogramMode =
11164 frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0];
11165 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
11166 histogramMode)) {
11167 rc = BAD_VALUE;
11168 }
11169 }
11170
11171 if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
11172 uint8_t sharpnessMapMode =
11173 frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
11174 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
11175 sharpnessMapMode)) {
11176 rc = BAD_VALUE;
11177 }
11178 }
11179
11180 if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
11181 uint8_t tonemapMode =
11182 frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
11183 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
11184 rc = BAD_VALUE;
11185 }
11186 }
11187 /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
11188 /*All tonemap channels will have the same number of points*/
11189 if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
11190 frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
11191 frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
11192 cam_rgb_tonemap_curves tonemapCurves;
11193 tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
11194 if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
11195 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
11196 tonemapCurves.tonemap_points_cnt,
11197 CAM_MAX_TONEMAP_CURVE_SIZE);
11198 tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
11199 }
11200
11201 /* ch0 = G*/
11202 size_t point = 0;
11203 cam_tonemap_curve_t tonemapCurveGreen;
11204 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
11205 for (size_t j = 0; j < 2; j++) {
11206 tonemapCurveGreen.tonemap_points[i][j] =
11207 frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
11208 point++;
11209 }
11210 }
11211 tonemapCurves.curves[0] = tonemapCurveGreen;
11212
11213 /* ch 1 = B */
11214 point = 0;
11215 cam_tonemap_curve_t tonemapCurveBlue;
11216 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
11217 for (size_t j = 0; j < 2; j++) {
11218 tonemapCurveBlue.tonemap_points[i][j] =
11219 frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
11220 point++;
11221 }
11222 }
11223 tonemapCurves.curves[1] = tonemapCurveBlue;
11224
11225 /* ch 2 = R */
11226 point = 0;
11227 cam_tonemap_curve_t tonemapCurveRed;
11228 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
11229 for (size_t j = 0; j < 2; j++) {
11230 tonemapCurveRed.tonemap_points[i][j] =
11231 frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
11232 point++;
11233 }
11234 }
11235 tonemapCurves.curves[2] = tonemapCurveRed;
11236
11237 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
11238 tonemapCurves)) {
11239 rc = BAD_VALUE;
11240 }
11241 }
11242
11243 if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
11244 uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
11245 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
11246 captureIntent)) {
11247 rc = BAD_VALUE;
11248 }
11249 }
11250
11251 if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
11252 uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
11253 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
11254 blackLevelLock)) {
11255 rc = BAD_VALUE;
11256 }
11257 }
11258
11259 if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
11260 uint8_t lensShadingMapMode =
11261 frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
11262 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
11263 lensShadingMapMode)) {
11264 rc = BAD_VALUE;
11265 }
11266 }
11267
11268 if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
11269 cam_area_t roi;
11270 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011271 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AE_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070011272
11273 // Map coordinate system from active array to sensor output.
11274 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
11275 roi.rect.height);
11276
11277 if (scalerCropSet) {
11278 reset = resetIfNeededROI(&roi, &scalerCropRegion);
11279 }
11280 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
11281 rc = BAD_VALUE;
11282 }
11283 }
11284
11285 if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
11286 cam_area_t roi;
11287 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011288 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AF_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070011289
11290 // Map coordinate system from active array to sensor output.
11291 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
11292 roi.rect.height);
11293
11294 if (scalerCropSet) {
11295 reset = resetIfNeededROI(&roi, &scalerCropRegion);
11296 }
11297 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
11298 rc = BAD_VALUE;
11299 }
11300 }
11301
11302 // CDS for non-HFR non-video mode
11303 if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
11304 !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
11305 int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
11306 if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
11307 LOGE("Invalid CDS mode %d!", *fwk_cds);
11308 } else {
11309 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11310 CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
11311 rc = BAD_VALUE;
11312 }
11313 }
11314 }
11315
Thierry Strudel04e026f2016-10-10 11:27:36 -070011316 // Video HDR
11317 if (frame_settings.exists(QCAMERA3_VIDEO_HDR_MODE)) {
11318 cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
11319 frame_settings.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
11320 rc = setVideoHdrMode(mParameters, vhdr);
11321 if (rc != NO_ERROR) {
11322 LOGE("setVideoHDR is failed");
11323 }
11324 }
11325
11326 //IR
11327 if(frame_settings.exists(QCAMERA3_IR_MODE)) {
11328 cam_ir_mode_type_t fwk_ir = (cam_ir_mode_type_t)
11329 frame_settings.find(QCAMERA3_IR_MODE).data.i32[0];
11330 if ((CAM_IR_MODE_MAX <= fwk_ir) || (0 > fwk_ir)) {
11331 LOGE("Invalid IR mode %d!", fwk_ir);
11332 } else {
11333 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11334 CAM_INTF_META_IR_MODE, fwk_ir)) {
11335 rc = BAD_VALUE;
11336 }
11337 }
11338 }
11339
Thierry Strudel269c81a2016-10-12 12:13:59 -070011340 if (frame_settings.exists(QCAMERA3_AEC_CONVERGENCE_SPEED)) {
11341 float aec_speed;
11342 aec_speed = frame_settings.find(QCAMERA3_AEC_CONVERGENCE_SPEED).data.f[0];
11343 LOGD("AEC Speed :%f", aec_speed);
11344 if ( aec_speed < 0 ) {
11345 LOGE("Invalid AEC mode %f!", aec_speed);
11346 } else {
11347 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_CONVERGENCE_SPEED,
11348 aec_speed)) {
11349 rc = BAD_VALUE;
11350 }
11351 }
11352 }
11353
11354 if (frame_settings.exists(QCAMERA3_AWB_CONVERGENCE_SPEED)) {
11355 float awb_speed;
11356 awb_speed = frame_settings.find(QCAMERA3_AWB_CONVERGENCE_SPEED).data.f[0];
11357 LOGD("AWB Speed :%f", awb_speed);
11358 if ( awb_speed < 0 ) {
11359 LOGE("Invalid AWB mode %f!", awb_speed);
11360 } else {
11361 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AWB_CONVERGENCE_SPEED,
11362 awb_speed)) {
11363 rc = BAD_VALUE;
11364 }
11365 }
11366 }
11367
Thierry Strudel3d639192016-09-09 11:52:26 -070011368 // TNR
11369 if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
11370 frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
11371 uint8_t b_TnrRequested = 0;
11372 cam_denoise_param_t tnr;
11373 tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
11374 tnr.process_plates =
11375 (cam_denoise_process_type_t)frame_settings.find(
11376 QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
11377 b_TnrRequested = tnr.denoise_enable;
11378 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
11379 rc = BAD_VALUE;
11380 }
11381 }
11382
Thierry Strudel295a0ca2016-11-03 18:38:47 -070011383 if (frame_settings.exists(QCAMERA3_EXPOSURE_METERING_MODE)) {
11384 int32_t* exposure_metering_mode =
11385 frame_settings.find(QCAMERA3_EXPOSURE_METERING_MODE).data.i32;
11386 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_ALGO_TYPE,
11387 *exposure_metering_mode)) {
11388 rc = BAD_VALUE;
11389 }
11390 }
11391
Thierry Strudel3d639192016-09-09 11:52:26 -070011392 if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
11393 int32_t fwk_testPatternMode =
11394 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
11395 int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
11396 METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
11397
11398 if (NAME_NOT_FOUND != testPatternMode) {
11399 cam_test_pattern_data_t testPatternData;
11400 memset(&testPatternData, 0, sizeof(testPatternData));
11401 testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
11402 if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
11403 frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
11404 int32_t *fwk_testPatternData =
11405 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
11406 testPatternData.r = fwk_testPatternData[0];
11407 testPatternData.b = fwk_testPatternData[3];
11408 switch (gCamCapability[mCameraId]->color_arrangement) {
11409 case CAM_FILTER_ARRANGEMENT_RGGB:
11410 case CAM_FILTER_ARRANGEMENT_GRBG:
11411 testPatternData.gr = fwk_testPatternData[1];
11412 testPatternData.gb = fwk_testPatternData[2];
11413 break;
11414 case CAM_FILTER_ARRANGEMENT_GBRG:
11415 case CAM_FILTER_ARRANGEMENT_BGGR:
11416 testPatternData.gr = fwk_testPatternData[2];
11417 testPatternData.gb = fwk_testPatternData[1];
11418 break;
11419 default:
11420 LOGE("color arrangement %d is not supported",
11421 gCamCapability[mCameraId]->color_arrangement);
11422 break;
11423 }
11424 }
11425 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
11426 testPatternData)) {
11427 rc = BAD_VALUE;
11428 }
11429 } else {
11430 LOGE("Invalid framework sensor test pattern mode %d",
11431 fwk_testPatternMode);
11432 }
11433 }
11434
11435 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
11436 size_t count = 0;
11437 camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
11438 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
11439 gps_coords.data.d, gps_coords.count, count);
11440 if (gps_coords.count != count) {
11441 rc = BAD_VALUE;
11442 }
11443 }
11444
11445 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
11446 char gps_methods[GPS_PROCESSING_METHOD_SIZE];
11447 size_t count = 0;
11448 const char *gps_methods_src = (const char *)
11449 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
11450 memset(gps_methods, '\0', sizeof(gps_methods));
11451 strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
11452 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
11453 gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
11454 if (GPS_PROCESSING_METHOD_SIZE != count) {
11455 rc = BAD_VALUE;
11456 }
11457 }
11458
11459 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
11460 int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
11461 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
11462 gps_timestamp)) {
11463 rc = BAD_VALUE;
11464 }
11465 }
11466
11467 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
11468 int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
11469 cam_rotation_info_t rotation_info;
11470 if (orientation == 0) {
11471 rotation_info.rotation = ROTATE_0;
11472 } else if (orientation == 90) {
11473 rotation_info.rotation = ROTATE_90;
11474 } else if (orientation == 180) {
11475 rotation_info.rotation = ROTATE_180;
11476 } else if (orientation == 270) {
11477 rotation_info.rotation = ROTATE_270;
11478 }
Shuzhen Wang6ec8eac2016-07-28 23:09:23 -070011479 rotation_info.device_rotation = ROTATE_0;
Thierry Strudel3d639192016-09-09 11:52:26 -070011480 rotation_info.streamId = snapshotStreamId;
11481 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
11482 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
11483 rc = BAD_VALUE;
11484 }
11485 }
11486
11487 if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
11488 uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
11489 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
11490 rc = BAD_VALUE;
11491 }
11492 }
11493
11494 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
11495 uint32_t thumb_quality = (uint32_t)
11496 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
11497 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
11498 thumb_quality)) {
11499 rc = BAD_VALUE;
11500 }
11501 }
11502
11503 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
11504 cam_dimension_t dim;
11505 dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
11506 dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
11507 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
11508 rc = BAD_VALUE;
11509 }
11510 }
11511
11512 // Internal metadata
11513 if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
11514 size_t count = 0;
11515 camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
11516 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
11517 privatedata.data.i32, privatedata.count, count);
11518 if (privatedata.count != count) {
11519 rc = BAD_VALUE;
11520 }
11521 }
11522
Thierry Strudel295a0ca2016-11-03 18:38:47 -070011523 // ISO/Exposure Priority
11524 if (frame_settings.exists(QCAMERA3_USE_ISO_EXP_PRIORITY) &&
11525 frame_settings.exists(QCAMERA3_SELECT_PRIORITY)) {
11526 cam_priority_mode_t mode =
11527 (cam_priority_mode_t)frame_settings.find(QCAMERA3_SELECT_PRIORITY).data.i32[0];
11528 if((CAM_ISO_PRIORITY == mode) || (CAM_EXP_PRIORITY == mode)) {
11529 cam_intf_parm_manual_3a_t use_iso_exp_pty;
11530 use_iso_exp_pty.previewOnly = FALSE;
11531 uint64_t* ptr = (uint64_t*)frame_settings.find(QCAMERA3_USE_ISO_EXP_PRIORITY).data.i64;
11532 use_iso_exp_pty.value = *ptr;
11533
11534 if(CAM_ISO_PRIORITY == mode) {
11535 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ISO,
11536 use_iso_exp_pty)) {
11537 rc = BAD_VALUE;
11538 }
11539 }
11540 else {
11541 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_TIME,
11542 use_iso_exp_pty)) {
11543 rc = BAD_VALUE;
11544 }
11545 }
11546 }
11547 }
11548
11549 // Saturation
11550 if (frame_settings.exists(QCAMERA3_USE_SATURATION)) {
11551 int32_t* use_saturation =
11552 frame_settings.find(QCAMERA3_USE_SATURATION).data.i32;
11553 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_SATURATION, *use_saturation)) {
11554 rc = BAD_VALUE;
11555 }
11556 }
11557
Thierry Strudel3d639192016-09-09 11:52:26 -070011558 // EV step
11559 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
11560 gCamCapability[mCameraId]->exp_compensation_step)) {
11561 rc = BAD_VALUE;
11562 }
11563
11564 // CDS info
11565 if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
11566 cam_cds_data_t *cdsData = (cam_cds_data_t *)
11567 frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
11568
11569 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11570 CAM_INTF_META_CDS_DATA, *cdsData)) {
11571 rc = BAD_VALUE;
11572 }
11573 }
11574
Shuzhen Wang19463d72016-03-08 11:09:52 -080011575 // Hybrid AE
11576 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
11577 uint8_t *hybrid_ae = (uint8_t *)
11578 frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;
11579
11580 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11581 CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
11582 rc = BAD_VALUE;
11583 }
11584 }
11585
Thierry Strudel3d639192016-09-09 11:52:26 -070011586 return rc;
11587}
11588
11589/*===========================================================================
11590 * FUNCTION : captureResultCb
11591 *
11592 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
11593 *
11594 * PARAMETERS :
11595 * @frame : frame information from mm-camera-interface
11596 * @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
11597 * @userdata: userdata
11598 *
11599 * RETURN : NONE
11600 *==========================================================================*/
11601void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
11602 camera3_stream_buffer_t *buffer,
11603 uint32_t frame_number, bool isInputBuffer, void *userdata)
11604{
11605 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
11606 if (hw == NULL) {
11607 LOGE("Invalid hw %p", hw);
11608 return;
11609 }
11610
11611 hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
11612 return;
11613}
11614
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011615/*===========================================================================
11616 * FUNCTION : setBufferErrorStatus
11617 *
11618 * DESCRIPTION: Callback handler for channels to report any buffer errors
11619 *
11620 * PARAMETERS :
11621 * @ch : Channel on which buffer error is reported from
11622 * @frame_number : frame number on which buffer error is reported on
11623 * @buffer_status : buffer error status
11624 * @userdata: userdata
11625 *
11626 * RETURN : NONE
11627 *==========================================================================*/
11628void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
11629 uint32_t frame_number, camera3_buffer_status_t err, void *userdata)
11630{
11631 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
11632 if (hw == NULL) {
11633 LOGE("Invalid hw %p", hw);
11634 return;
11635 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011636
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011637 hw->setBufferErrorStatus(ch, frame_number, err);
11638 return;
11639}
11640
11641void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
11642 uint32_t frameNumber, camera3_buffer_status_t err)
11643{
11644 LOGD("channel: %p, frame# %d, buf err: %d", ch, frameNumber, err);
11645 pthread_mutex_lock(&mMutex);
11646
11647 for (auto& req : mPendingBuffersMap.mPendingBuffersInRequest) {
11648 if (req.frame_number != frameNumber)
11649 continue;
11650 for (auto& k : req.mPendingBufferList) {
11651 if(k.stream->priv == ch) {
11652 k.bufStatus = CAMERA3_BUFFER_STATUS_ERROR;
11653 }
11654 }
11655 }
11656
11657 pthread_mutex_unlock(&mMutex);
11658 return;
11659}
Thierry Strudel3d639192016-09-09 11:52:26 -070011660/*===========================================================================
11661 * FUNCTION : initialize
11662 *
11663 * DESCRIPTION: Pass framework callback pointers to HAL
11664 *
11665 * PARAMETERS :
11666 *
11667 *
11668 * RETURN : Success : 0
11669 * Failure: -ENODEV
11670 *==========================================================================*/
11671
11672int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
11673 const camera3_callback_ops_t *callback_ops)
11674{
11675 LOGD("E");
11676 QCamera3HardwareInterface *hw =
11677 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
11678 if (!hw) {
11679 LOGE("NULL camera device");
11680 return -ENODEV;
11681 }
11682
11683 int rc = hw->initialize(callback_ops);
11684 LOGD("X");
11685 return rc;
11686}
11687
11688/*===========================================================================
11689 * FUNCTION : configure_streams
11690 *
11691 * DESCRIPTION:
11692 *
11693 * PARAMETERS :
11694 *
11695 *
11696 * RETURN : Success: 0
11697 * Failure: -EINVAL (if stream configuration is invalid)
11698 * -ENODEV (fatal error)
11699 *==========================================================================*/
11700
11701int QCamera3HardwareInterface::configure_streams(
11702 const struct camera3_device *device,
11703 camera3_stream_configuration_t *stream_list)
11704{
11705 LOGD("E");
11706 QCamera3HardwareInterface *hw =
11707 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
11708 if (!hw) {
11709 LOGE("NULL camera device");
11710 return -ENODEV;
11711 }
11712 int rc = hw->configureStreams(stream_list);
11713 LOGD("X");
11714 return rc;
11715}
11716
11717/*===========================================================================
11718 * FUNCTION : construct_default_request_settings
11719 *
11720 * DESCRIPTION: Configure a settings buffer to meet the required use case
11721 *
11722 * PARAMETERS :
11723 *
11724 *
11725 * RETURN : Success: Return valid metadata
11726 * Failure: Return NULL
11727 *==========================================================================*/
11728const camera_metadata_t* QCamera3HardwareInterface::
11729 construct_default_request_settings(const struct camera3_device *device,
11730 int type)
11731{
11732
11733 LOGD("E");
11734 camera_metadata_t* fwk_metadata = NULL;
11735 QCamera3HardwareInterface *hw =
11736 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
11737 if (!hw) {
11738 LOGE("NULL camera device");
11739 return NULL;
11740 }
11741
11742 fwk_metadata = hw->translateCapabilityToMetadata(type);
11743
11744 LOGD("X");
11745 return fwk_metadata;
11746}
11747
11748/*===========================================================================
11749 * FUNCTION : process_capture_request
11750 *
11751 * DESCRIPTION:
11752 *
11753 * PARAMETERS :
11754 *
11755 *
11756 * RETURN :
11757 *==========================================================================*/
11758int QCamera3HardwareInterface::process_capture_request(
11759 const struct camera3_device *device,
11760 camera3_capture_request_t *request)
11761{
11762 LOGD("E");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011763 CAMSCOPE_UPDATE_FLAGS(CAMSCOPE_SECTION_HAL, kpi_camscope_flags);
Thierry Strudel3d639192016-09-09 11:52:26 -070011764 QCamera3HardwareInterface *hw =
11765 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
11766 if (!hw) {
11767 LOGE("NULL camera device");
11768 return -EINVAL;
11769 }
11770
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011771 int rc = hw->orchestrateRequest(request);
Thierry Strudel3d639192016-09-09 11:52:26 -070011772 LOGD("X");
11773 return rc;
11774}
11775
11776/*===========================================================================
11777 * FUNCTION : dump
11778 *
11779 * DESCRIPTION:
11780 *
11781 * PARAMETERS :
11782 *
11783 *
11784 * RETURN :
11785 *==========================================================================*/
11786
11787void QCamera3HardwareInterface::dump(
11788 const struct camera3_device *device, int fd)
11789{
11790 /* Log level property is read when "adb shell dumpsys media.camera" is
11791 called so that the log level can be controlled without restarting
11792 the media server */
11793 getLogLevel();
11794
11795 LOGD("E");
11796 QCamera3HardwareInterface *hw =
11797 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
11798 if (!hw) {
11799 LOGE("NULL camera device");
11800 return;
11801 }
11802
11803 hw->dump(fd);
11804 LOGD("X");
11805 return;
11806}
11807
11808/*===========================================================================
11809 * FUNCTION : flush
11810 *
11811 * DESCRIPTION:
11812 *
11813 * PARAMETERS :
11814 *
11815 *
11816 * RETURN :
11817 *==========================================================================*/
11818
11819int QCamera3HardwareInterface::flush(
11820 const struct camera3_device *device)
11821{
11822 int rc;
11823 LOGD("E");
11824 QCamera3HardwareInterface *hw =
11825 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
11826 if (!hw) {
11827 LOGE("NULL camera device");
11828 return -EINVAL;
11829 }
11830
11831 pthread_mutex_lock(&hw->mMutex);
11832 // Validate current state
11833 switch (hw->mState) {
11834 case STARTED:
11835 /* valid state */
11836 break;
11837
11838 case ERROR:
11839 pthread_mutex_unlock(&hw->mMutex);
11840 hw->handleCameraDeviceError();
11841 return -ENODEV;
11842
11843 default:
11844 LOGI("Flush returned during state %d", hw->mState);
11845 pthread_mutex_unlock(&hw->mMutex);
11846 return 0;
11847 }
11848 pthread_mutex_unlock(&hw->mMutex);
11849
11850 rc = hw->flush(true /* restart channels */ );
11851 LOGD("X");
11852 return rc;
11853}
11854
11855/*===========================================================================
11856 * FUNCTION : close_camera_device
11857 *
11858 * DESCRIPTION:
11859 *
11860 * PARAMETERS :
11861 *
11862 *
11863 * RETURN :
11864 *==========================================================================*/
11865int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
11866{
11867 int ret = NO_ERROR;
11868 QCamera3HardwareInterface *hw =
11869 reinterpret_cast<QCamera3HardwareInterface *>(
11870 reinterpret_cast<camera3_device_t *>(device)->priv);
11871 if (!hw) {
11872 LOGE("NULL camera device");
11873 return BAD_VALUE;
11874 }
11875
11876 LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
11877 delete hw;
11878 LOGI("[KPI Perf]: X");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011879 CAMSCOPE_DESTROY(CAMSCOPE_SECTION_HAL);
Thierry Strudel3d639192016-09-09 11:52:26 -070011880 return ret;
11881}
11882
11883/*===========================================================================
11884 * FUNCTION : getWaveletDenoiseProcessPlate
11885 *
11886 * DESCRIPTION: query wavelet denoise process plate
11887 *
11888 * PARAMETERS : None
11889 *
11890 * RETURN     : WNR process plate value
11891 *==========================================================================*/
11892cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
11893{
11894 char prop[PROPERTY_VALUE_MAX];
11895 memset(prop, 0, sizeof(prop));
11896 property_get("persist.denoise.process.plates", prop, "0");
11897 int processPlate = atoi(prop);
11898 switch(processPlate) {
11899 case 0:
11900 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
11901 case 1:
11902 return CAM_WAVELET_DENOISE_CBCR_ONLY;
11903 case 2:
11904 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
11905 case 3:
11906 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
11907 default:
11908 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
11909 }
11910}
11911
11912
11913/*===========================================================================
11914 * FUNCTION : getTemporalDenoiseProcessPlate
11915 *
11916 * DESCRIPTION: query temporal denoise process plate
11917 *
11918 * PARAMETERS : None
11919 *
11920 * RETURN     : TNR process plate value
11921 *==========================================================================*/
11922cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
11923{
11924 char prop[PROPERTY_VALUE_MAX];
11925 memset(prop, 0, sizeof(prop));
11926 property_get("persist.tnr.process.plates", prop, "0");
11927 int processPlate = atoi(prop);
11928 switch(processPlate) {
11929 case 0:
11930 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
11931 case 1:
11932 return CAM_WAVELET_DENOISE_CBCR_ONLY;
11933 case 2:
11934 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
11935 case 3:
11936 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
11937 default:
11938 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
11939 }
11940}
11941
11942
11943/*===========================================================================
11944 * FUNCTION : extractSceneMode
11945 *
11946 * DESCRIPTION: Extract scene mode from frameworks set metadata
11947 *
11948 * PARAMETERS :
11949 * @frame_settings: CameraMetadata reference
11950 *      @metaMode: ANDROID_CONTROL_MODE
11951 * @hal_metadata: hal metadata structure
11952 *
11953 * RETURN : None
11954 *==========================================================================*/
11955int32_t QCamera3HardwareInterface::extractSceneMode(
11956 const CameraMetadata &frame_settings, uint8_t metaMode,
11957 metadata_buffer_t *hal_metadata)
11958{
11959 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080011960 uint8_t sceneMode = CAM_SCENE_MODE_OFF;
11961
11962 if (ANDROID_CONTROL_MODE_OFF_KEEP_STATE == metaMode) {
11963 LOGD("Ignoring control mode OFF_KEEP_STATE");
11964 return NO_ERROR;
11965 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011966
11967 if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
11968 camera_metadata_ro_entry entry =
11969 frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
11970 if (0 == entry.count)
11971 return rc;
11972
11973 uint8_t fwk_sceneMode = entry.data.u8[0];
11974
11975 int val = lookupHalName(SCENE_MODES_MAP,
11976 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
11977 fwk_sceneMode);
11978 if (NAME_NOT_FOUND != val) {
Mansoor Aftab58465fa2017-01-26 15:02:44 -080011979 sceneMode = (uint8_t)val;
Thierry Strudel3d639192016-09-09 11:52:26 -070011980 LOGD("sceneMode: %d", sceneMode);
Thierry Strudel3d639192016-09-09 11:52:26 -070011981 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080011982 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011983
Mansoor Aftab58465fa2017-01-26 15:02:44 -080011984 if ((sceneMode == CAM_SCENE_MODE_HDR) || m_bSensorHDREnabled) {
11985 rc = setSensorHDR(hal_metadata, (sceneMode == CAM_SCENE_MODE_HDR));
11986 }
11987
11988 if ((rc == NO_ERROR) && !m_bSensorHDREnabled) {
11989 if (sceneMode == ANDROID_CONTROL_SCENE_MODE_HDR) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011990 cam_hdr_param_t hdr_params;
11991 hdr_params.hdr_enable = 1;
11992 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
11993 hdr_params.hdr_need_1x = false;
11994 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11995 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
11996 rc = BAD_VALUE;
11997 }
11998 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080011999
Thierry Strudel3d639192016-09-09 11:52:26 -070012000 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12001 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
12002 rc = BAD_VALUE;
12003 }
12004 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012005
12006 if (mForceHdrSnapshot) {
12007 cam_hdr_param_t hdr_params;
12008 hdr_params.hdr_enable = 1;
12009 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
12010 hdr_params.hdr_need_1x = false;
12011 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12012 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
12013 rc = BAD_VALUE;
12014 }
12015 }
12016
Thierry Strudel3d639192016-09-09 11:52:26 -070012017 return rc;
12018}
12019
12020/*===========================================================================
Thierry Strudel04e026f2016-10-10 11:27:36 -070012021 * FUNCTION : setVideoHdrMode
12022 *
12023 * DESCRIPTION: Set Video HDR mode from frameworks set metadata
12024 *
12025 * PARAMETERS :
12026 * @hal_metadata: hal metadata structure
12027 * @metaMode: QCAMERA3_VIDEO_HDR_MODE
12028 *
12029 * RETURN : None
12030 *==========================================================================*/
12031int32_t QCamera3HardwareInterface::setVideoHdrMode(
12032 metadata_buffer_t *hal_metadata, cam_video_hdr_mode_t vhdr)
12033{
Mansoor Aftab58465fa2017-01-26 15:02:44 -080012034 if ( (vhdr >= CAM_VIDEO_HDR_MODE_OFF) && (vhdr < CAM_VIDEO_HDR_MODE_MAX)) {
12035 return setSensorHDR(hal_metadata, (vhdr == CAM_VIDEO_HDR_MODE_ON), true);
12036 }
12037
12038 LOGE("Invalid Video HDR mode %d!", vhdr);
12039 return BAD_VALUE;
12040}
12041
12042/*===========================================================================
12043 * FUNCTION : setSensorHDR
12044 *
12045 * DESCRIPTION: Enable/disable sensor HDR.
12046 *
12047 * PARAMETERS :
12048 * @hal_metadata: hal metadata structure
12049 * @enable: boolean whether to enable/disable sensor HDR
12050 *
12051 * RETURN : None
12052 *==========================================================================*/
int32_t QCamera3HardwareInterface::setSensorHDR(
        metadata_buffer_t *hal_metadata, bool enable, bool isVideoHdrEnable)
{
    int32_t rc = NO_ERROR;
    // Disable is the default; the concrete HDR type only matters when enabling.
    cam_sensor_hdr_type_t sensor_hdr = CAM_SENSOR_HDR_OFF;

    if (enable) {
        // The sensor HDR flavor (in-sensor/zigzag/staggered) is chosen via a
        // persist property rather than by the caller.
        char sensor_hdr_prop[PROPERTY_VALUE_MAX];
        memset(sensor_hdr_prop, 0, sizeof(sensor_hdr_prop));
        #ifdef _LE_CAMERA_
        //Default to staggered HDR for IOT
        property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "3");
        #else
        property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "0");
        #endif
        sensor_hdr = (cam_sensor_hdr_type_t) atoi(sensor_hdr_prop);
    }

    // Each HDR flavor is applied only if the camera capability mask
    // advertises the matching feature; OFF is always supported.
    bool isSupported = false;
    switch (sensor_hdr) {
        case CAM_SENSOR_HDR_IN_SENSOR:
            if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
                    CAM_QCOM_FEATURE_SENSOR_HDR) {
                isSupported = true;
                LOGD("Setting HDR mode In Sensor");
            }
            break;
        case CAM_SENSOR_HDR_ZIGZAG:
            if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
                    CAM_QCOM_FEATURE_ZIGZAG_HDR) {
                isSupported = true;
                LOGD("Setting HDR mode Zigzag");
            }
            break;
        case CAM_SENSOR_HDR_STAGGERED:
            if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
                    CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
                isSupported = true;
                LOGD("Setting HDR mode Staggered");
            }
            break;
        case CAM_SENSOR_HDR_OFF:
            isSupported = true;
            LOGD("Turning off sensor HDR");
            break;
        default:
            // Property held a value outside the known enum range.
            LOGE("HDR mode %d not supported", sensor_hdr);
            rc = BAD_VALUE;
            break;
    }

    if(isSupported) {
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
                CAM_INTF_PARM_SENSOR_HDR, sensor_hdr)) {
            rc = BAD_VALUE;
        } else {
            // Track sensor-HDR state only for the scene-mode path; the
            // video-HDR path (isVideoHdrEnable) manages its own state.
            if(!isVideoHdrEnable)
                m_bSensorHDREnabled = (sensor_hdr != CAM_SENSOR_HDR_OFF);
        }
    }
    return rc;
}
12115
12116/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070012117 * FUNCTION : needRotationReprocess
12118 *
12119 * DESCRIPTION: if rotation needs to be done by reprocess in pp
12120 *
12121 * PARAMETERS : none
12122 *
12123 * RETURN : true: needed
12124 * false: no need
12125 *==========================================================================*/
12126bool QCamera3HardwareInterface::needRotationReprocess()
12127{
12128 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
12129 // current rotation is not zero, and pp has the capability to process rotation
12130 LOGH("need do reprocess for rotation");
12131 return true;
12132 }
12133
12134 return false;
12135}
12136
12137/*===========================================================================
12138 * FUNCTION : needReprocess
12139 *
12140 * DESCRIPTION: if reprocess in needed
12141 *
12142 * PARAMETERS : none
12143 *
12144 * RETURN : true: needed
12145 * false: no need
12146 *==========================================================================*/
12147bool QCamera3HardwareInterface::needReprocess(cam_feature_mask_t postprocess_mask)
12148{
12149 if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
12150 // TODO: add for ZSL HDR later
12151 // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
12152 if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
12153 LOGH("need do reprocess for ZSL WNR or min PP reprocess");
12154 return true;
12155 } else {
12156 LOGH("already post processed frame");
12157 return false;
12158 }
12159 }
12160 return needRotationReprocess();
12161}
12162
12163/*===========================================================================
12164 * FUNCTION : needJpegExifRotation
12165 *
12166 * DESCRIPTION: if rotation from jpeg is needed
12167 *
12168 * PARAMETERS : none
12169 *
12170 * RETURN : true: needed
12171 * false: no need
12172 *==========================================================================*/
12173bool QCamera3HardwareInterface::needJpegExifRotation()
12174{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012175 /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
Thierry Strudel3d639192016-09-09 11:52:26 -070012176 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
12177 LOGD("Need use Jpeg EXIF Rotation");
12178 return true;
12179 }
12180 return false;
12181}
12182
12183/*===========================================================================
12184 * FUNCTION : addOfflineReprocChannel
12185 *
12186 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
12187 * coming from input channel
12188 *
12189 * PARAMETERS :
12190 * @config : reprocess configuration
12191 * @inputChHandle : pointer to the input (source) channel
12192 *
12193 *
12194 * RETURN : Ptr to the newly created channel obj. NULL if failed.
12195 *==========================================================================*/
12196QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
12197 const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
12198{
12199 int32_t rc = NO_ERROR;
12200 QCamera3ReprocessChannel *pChannel = NULL;
12201
12202 pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012203 mChannelHandle, mCameraHandle->ops, captureResultCb, setBufferErrorStatus,
12204 config.padding, CAM_QCOM_FEATURE_NONE, this, inputChHandle);
Thierry Strudel3d639192016-09-09 11:52:26 -070012205 if (NULL == pChannel) {
12206 LOGE("no mem for reprocess channel");
12207 return NULL;
12208 }
12209
12210 rc = pChannel->initialize(IS_TYPE_NONE);
12211 if (rc != NO_ERROR) {
12212 LOGE("init reprocess channel failed, ret = %d", rc);
12213 delete pChannel;
12214 return NULL;
12215 }
12216
12217 // pp feature config
12218 cam_pp_feature_config_t pp_config;
12219 memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
12220
12221 pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
12222 if (gCamCapability[mCameraId]->qcom_supported_feature_mask
12223 & CAM_QCOM_FEATURE_DSDN) {
12224 //Use CPP CDS incase h/w supports it.
12225 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
12226 pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
12227 }
12228 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
12229 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
12230 }
12231
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012232 if (config.hdr_param.hdr_enable) {
12233 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
12234 pp_config.hdr_param = config.hdr_param;
12235 }
12236
12237 if (mForceHdrSnapshot) {
12238 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
12239 pp_config.hdr_param.hdr_enable = 1;
12240 pp_config.hdr_param.hdr_need_1x = 0;
12241 pp_config.hdr_param.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
12242 }
12243
Thierry Strudel3d639192016-09-09 11:52:26 -070012244 rc = pChannel->addReprocStreamsFromSource(pp_config,
12245 config,
12246 IS_TYPE_NONE,
12247 mMetadataChannel);
12248
12249 if (rc != NO_ERROR) {
12250 delete pChannel;
12251 return NULL;
12252 }
12253 return pChannel;
12254}
12255
12256/*===========================================================================
12257 * FUNCTION : getMobicatMask
12258 *
12259 * DESCRIPTION: returns mobicat mask
12260 *
12261 * PARAMETERS : none
12262 *
12263 * RETURN : mobicat mask
12264 *
12265 *==========================================================================*/
uint8_t QCamera3HardwareInterface::getMobicatMask()
{
    // Accessor for the Mobicat mask last set by setMobicat().
    return m_MobicatMask;
}
12270
12271/*===========================================================================
12272 * FUNCTION : setMobicat
12273 *
12274 * DESCRIPTION: set Mobicat on/off.
12275 *
12276 * PARAMETERS :
12277 * @params : none
12278 *
12279 * RETURN : int32_t type of status
12280 * NO_ERROR -- success
12281 * none-zero failure code
12282 *==========================================================================*/
12283int32_t QCamera3HardwareInterface::setMobicat()
12284{
12285 char value [PROPERTY_VALUE_MAX];
12286 property_get("persist.camera.mobicat", value, "0");
12287 int32_t ret = NO_ERROR;
12288 uint8_t enableMobi = (uint8_t)atoi(value);
12289
12290 if (enableMobi) {
12291 tune_cmd_t tune_cmd;
12292 tune_cmd.type = SET_RELOAD_CHROMATIX;
12293 tune_cmd.module = MODULE_ALL;
12294 tune_cmd.value = TRUE;
12295 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
12296 CAM_INTF_PARM_SET_VFE_COMMAND,
12297 tune_cmd);
12298
12299 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
12300 CAM_INTF_PARM_SET_PP_COMMAND,
12301 tune_cmd);
12302 }
12303 m_MobicatMask = enableMobi;
12304
12305 return ret;
12306}
12307
12308/*===========================================================================
12309* FUNCTION : getLogLevel
12310*
12311* DESCRIPTION: Reads the log level property into a variable
12312*
12313* PARAMETERS :
12314* None
12315*
12316* RETURN :
12317* None
12318*==========================================================================*/
12319void QCamera3HardwareInterface::getLogLevel()
12320{
12321 char prop[PROPERTY_VALUE_MAX];
12322 uint32_t globalLogLevel = 0;
12323
12324 property_get("persist.camera.hal.debug", prop, "0");
12325 int val = atoi(prop);
12326 if (0 <= val) {
12327 gCamHal3LogLevel = (uint32_t)val;
12328 }
12329
Thierry Strudel9ec39c62016-12-28 11:30:05 -080012330 property_get("persist.camera.kpi.debug", prop, "0");
Thierry Strudel3d639192016-09-09 11:52:26 -070012331 gKpiDebugLevel = atoi(prop);
12332
12333 property_get("persist.camera.global.debug", prop, "0");
12334 val = atoi(prop);
12335 if (0 <= val) {
12336 globalLogLevel = (uint32_t)val;
12337 }
12338
12339 /* Highest log level among hal.logs and global.logs is selected */
12340 if (gCamHal3LogLevel < globalLogLevel)
12341 gCamHal3LogLevel = globalLogLevel;
12342
12343 return;
12344}
12345
12346/*===========================================================================
12347 * FUNCTION : validateStreamRotations
12348 *
12349 * DESCRIPTION: Check if the rotations requested are supported
12350 *
12351 * PARAMETERS :
12352 * @stream_list : streams to be configured
12353 *
12354 * RETURN : NO_ERROR on success
12355 * -EINVAL on failure
12356 *
12357 *==========================================================================*/
12358int QCamera3HardwareInterface::validateStreamRotations(
12359 camera3_stream_configuration_t *streamList)
12360{
12361 int rc = NO_ERROR;
12362
12363 /*
12364 * Loop through all streams requested in configuration
12365 * Check if unsupported rotations have been requested on any of them
12366 */
12367 for (size_t j = 0; j < streamList->num_streams; j++){
12368 camera3_stream_t *newStream = streamList->streams[j];
12369
12370 bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
12371 bool isImplDef = (newStream->format ==
12372 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
12373 bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
12374 isImplDef);
12375
12376 if (isRotated && (!isImplDef || isZsl)) {
12377 LOGE("Error: Unsupported rotation of %d requested for stream"
12378 "type:%d and stream format:%d",
12379 newStream->rotation, newStream->stream_type,
12380 newStream->format);
12381 rc = -EINVAL;
12382 break;
12383 }
12384 }
12385
12386 return rc;
12387}
12388
12389/*===========================================================================
12390* FUNCTION : getFlashInfo
12391*
12392* DESCRIPTION: Retrieve information about whether the device has a flash.
12393*
12394* PARAMETERS :
12395* @cameraId : Camera id to query
12396* @hasFlash : Boolean indicating whether there is a flash device
12397* associated with given camera
12398* @flashNode : If a flash device exists, this will be its device node.
12399*
12400* RETURN :
12401* None
12402*==========================================================================*/
12403void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
12404 bool& hasFlash,
12405 char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
12406{
12407 cam_capability_t* camCapability = gCamCapability[cameraId];
12408 if (NULL == camCapability) {
12409 hasFlash = false;
12410 flashNode[0] = '\0';
12411 } else {
12412 hasFlash = camCapability->flash_available;
12413 strlcpy(flashNode,
12414 (char*)camCapability->flash_dev_name,
12415 QCAMERA_MAX_FILEPATH_LENGTH);
12416 }
12417}
12418
12419/*===========================================================================
12420* FUNCTION : getEepromVersionInfo
12421*
12422* DESCRIPTION: Retrieve version info of the sensor EEPROM data
12423*
12424* PARAMETERS : None
12425*
12426* RETURN : string describing EEPROM version
12427* "\0" if no such info available
12428*==========================================================================*/
const char *QCamera3HardwareInterface::getEepromVersionInfo()
{
    // Returns a pointer into the capability table owned by the HAL;
    // callers must not free it. The string is empty ("\0") when no
    // EEPROM version info was populated for this sensor.
    return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
}
12433
12434/*===========================================================================
12435* FUNCTION : getLdafCalib
12436*
12437* DESCRIPTION: Retrieve Laser AF calibration data
12438*
12439* PARAMETERS : None
12440*
12441* RETURN : Two uint32_t describing laser AF calibration data
12442* NULL if none is available.
12443*==========================================================================*/
12444const uint32_t *QCamera3HardwareInterface::getLdafCalib()
12445{
12446 if (mLdafCalibExist) {
12447 return &mLdafCalib[0];
12448 } else {
12449 return NULL;
12450 }
12451}
12452
12453/*===========================================================================
12454 * FUNCTION : dynamicUpdateMetaStreamInfo
12455 *
12456 * DESCRIPTION: This function:
12457 * (1) stops all the channels
12458 * (2) returns error on pending requests and buffers
12459 * (3) sends metastream_info in setparams
12460 * (4) starts all channels
12461 * This is useful when sensor has to be restarted to apply any
12462 * settings such as frame rate from a different sensor mode
12463 *
12464 * PARAMETERS : None
12465 *
12466 * RETURN : NO_ERROR on success
12467 * Error codes on failure
12468 *
12469 *==========================================================================*/
int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_DYN_UPDATE_META_STRM_INFO);
    int rc = NO_ERROR;

    LOGD("E");

    // Step 1: stream-off every channel before changing sensor settings.
    rc = stopAllChannels();
    if (rc < 0) {
        LOGE("stopAllChannels failed");
        return rc;
    }

    // Step 2: flush all in-flight requests/buffers back to the framework
    // as errors so nothing waits on results that will never arrive.
    rc = notifyErrorForPendingRequests();
    if (rc < 0) {
        LOGE("notifyErrorForPendingRequests failed");
        return rc;
    }

    // Debug dump of the stream configuration about to be re-sent.
    for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
        LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x"
                "Format:%d",
                mStreamConfigInfo.type[i],
                mStreamConfigInfo.stream_sizes[i].width,
                mStreamConfigInfo.stream_sizes[i].height,
                mStreamConfigInfo.postprocess_mask[i],
                mStreamConfigInfo.format[i]);
    }

    /* Send meta stream info once again so that ISP can start */
    // Step 3: re-issue the meta stream info. A failure here is logged but
    // deliberately not fatal -- the sensor simply keeps its previous mode.
    ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
            CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
    rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
            mParameters);
    if (rc < 0) {
        LOGE("set Metastreaminfo failed. Sensor mode does not change");
    }

    // Step 4: stream-on all channels again.
    rc = startAllChannels();
    if (rc < 0) {
        LOGE("startAllChannels failed");
        return rc;
    }

    LOGD("X");
    return rc;
}
12517
12518/*===========================================================================
12519 * FUNCTION : stopAllChannels
12520 *
12521 * DESCRIPTION: This function stops (equivalent to stream-off) all channels
12522 *
12523 * PARAMETERS : None
12524 *
12525 * RETURN : NO_ERROR on success
12526 * Error codes on failure
12527 *
12528 *==========================================================================*/
12529int32_t QCamera3HardwareInterface::stopAllChannels()
12530{
12531 int32_t rc = NO_ERROR;
12532
12533 LOGD("Stopping all channels");
12534 // Stop the Streams/Channels
12535 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
12536 it != mStreamInfo.end(); it++) {
12537 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
12538 if (channel) {
12539 channel->stop();
12540 }
12541 (*it)->status = INVALID;
12542 }
12543
12544 if (mSupportChannel) {
12545 mSupportChannel->stop();
12546 }
12547 if (mAnalysisChannel) {
12548 mAnalysisChannel->stop();
12549 }
12550 if (mRawDumpChannel) {
12551 mRawDumpChannel->stop();
12552 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070012553 if (mHdrPlusRawSrcChannel) {
12554 mHdrPlusRawSrcChannel->stop();
12555 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012556 if (mMetadataChannel) {
12557 /* If content of mStreamInfo is not 0, there is metadata stream */
12558 mMetadataChannel->stop();
12559 }
12560
12561 LOGD("All channels stopped");
12562 return rc;
12563}
12564
12565/*===========================================================================
12566 * FUNCTION : startAllChannels
12567 *
12568 * DESCRIPTION: This function starts (equivalent to stream-on) all channels
12569 *
12570 * PARAMETERS : None
12571 *
12572 * RETURN : NO_ERROR on success
12573 * Error codes on failure
12574 *
12575 *==========================================================================*/
int32_t QCamera3HardwareInterface::startAllChannels()
{
    int32_t rc = NO_ERROR;

    LOGD("Start all channels ");
    // Start the Streams/Channels
    // Metadata channel is started first so per-frame metadata is available
    // as soon as the image streams begin producing buffers.
    if (mMetadataChannel) {
        /* If content of mStreamInfo is not 0, there is metadata stream */
        rc = mMetadataChannel->start();
        if (rc < 0) {
            LOGE("META channel start failed");
            return rc;
        }
    }
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
        if (channel) {
            rc = channel->start();
            if (rc < 0) {
                LOGE("channel start failed");
                return rc;
            }
        }
    }
    // NOTE(review): unlike every other channel here, the analysis channel's
    // start() return value is ignored -- confirm this is intentional.
    if (mAnalysisChannel) {
        mAnalysisChannel->start();
    }
    if (mSupportChannel) {
        rc = mSupportChannel->start();
        if (rc < 0) {
            LOGE("Support channel start failed");
            return rc;
        }
    }
    if (mRawDumpChannel) {
        rc = mRawDumpChannel->start();
        if (rc < 0) {
            LOGE("RAW dump channel start failed");
            return rc;
        }
    }
    if (mHdrPlusRawSrcChannel) {
        rc = mHdrPlusRawSrcChannel->start();
        if (rc < 0) {
            LOGE("HDR+ RAW channel start failed");
            return rc;
        }
    }

    LOGD("All channels started");
    return rc;
}
12629
12630/*===========================================================================
12631 * FUNCTION : notifyErrorForPendingRequests
12632 *
12633 * DESCRIPTION: This function sends error for all the pending requests/buffers
12634 *
12635 * PARAMETERS : None
12636 *
12637 * RETURN : Error codes
12638 * NO_ERROR on success
12639 *
12640 *==========================================================================*/
int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
{
    int32_t rc = NO_ERROR;
    unsigned int frameNum = 0;
    camera3_capture_result_t result;
    camera3_stream_buffer_t *pStream_Buf = NULL;

    memset(&result, 0, sizeof(camera3_capture_result_t));

    // frameNum is the oldest frame number that still has a pending request
    // entry. Buffers older than it already had their metadata delivered and
    // only need ERROR_BUFFER; buffers at/after it get ERROR_REQUEST instead.
    if (mPendingRequestsList.size() > 0) {
        pendingRequestIterator i = mPendingRequestsList.begin();
        frameNum = i->frame_number;
    } else {
        /* There might still be pending buffers even though there are
         no pending requests. Setting the frameNum to MAX so that
         all the buffers with smaller frame numbers are returned */
        frameNum = UINT_MAX;
    }

    LOGH("Oldest frame num on mPendingRequestsList = %u",
            frameNum);

    for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
            req != mPendingBuffersMap.mPendingBuffersInRequest.end(); ) {

        if (req->frame_number < frameNum) {
            // Send Error notify to frameworks for each buffer for which
            // metadata buffer is already sent
            LOGH("Sending ERROR BUFFER for frame %d for %d buffer(s)",
                    req->frame_number, req->mPendingBufferList.size());

            pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
            if (NULL == pStream_Buf) {
                LOGE("No memory for pending buffers array");
                return NO_MEMORY;
            }
            memset(pStream_Buf, 0,
                    sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());
            result.result = NULL;
            result.frame_number = req->frame_number;
            result.num_output_buffers = req->mPendingBufferList.size();
            result.output_buffers = pStream_Buf;

            // One ERROR_BUFFER notify per buffer, and mark each returned
            // buffer with CAMERA3_BUFFER_STATUS_ERROR in the capture result.
            size_t index = 0;
            for (auto info = req->mPendingBufferList.begin();
                    info != req->mPendingBufferList.end(); ) {

                camera3_notify_msg_t notify_msg;
                memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
                notify_msg.type = CAMERA3_MSG_ERROR;
                notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
                notify_msg.message.error.error_stream = info->stream;
                notify_msg.message.error.frame_number = req->frame_number;
                pStream_Buf[index].acquire_fence = -1;
                pStream_Buf[index].release_fence = -1;
                pStream_Buf[index].buffer = info->buffer;
                pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
                pStream_Buf[index].stream = info->stream;
                orchestrateNotify(&notify_msg);
                index++;
                // Remove buffer from list
                info = req->mPendingBufferList.erase(info);
            }

            // Remove this request from Map
            LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
                    req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
            req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);

            orchestrateResult(&result);

            delete [] pStream_Buf;
        } else {

            // Go through the pending requests info and send error request to framework
            // NOTE(review): 'i' is begin() of mPendingRequestsList and is
            // dereferenced below without an end() check; it is also assumed
            // to correspond to the same frame as 'req'. Confirm both
            // invariants hold for every caller.
            pendingRequestIterator i = mPendingRequestsList.begin(); //make sure i is at the beginning

            LOGH("Sending ERROR REQUEST for frame %d", req->frame_number);

            // Send error notify to frameworks
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
            notify_msg.type = CAMERA3_MSG_ERROR;
            notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
            notify_msg.message.error.error_stream = NULL;
            notify_msg.message.error.frame_number = req->frame_number;
            orchestrateNotify(&notify_msg);

            pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
            if (NULL == pStream_Buf) {
                LOGE("No memory for pending buffers array");
                return NO_MEMORY;
            }
            memset(pStream_Buf, 0, sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());

            result.result = NULL;
            result.frame_number = req->frame_number;
            result.input_buffer = i->input_buffer;
            result.num_output_buffers = req->mPendingBufferList.size();
            result.output_buffers = pStream_Buf;

            // Return every buffer of this request with ERROR status; no
            // per-buffer notify is needed after ERROR_REQUEST.
            size_t index = 0;
            for (auto info = req->mPendingBufferList.begin();
                    info != req->mPendingBufferList.end(); ) {
                pStream_Buf[index].acquire_fence = -1;
                pStream_Buf[index].release_fence = -1;
                pStream_Buf[index].buffer = info->buffer;
                pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
                pStream_Buf[index].stream = info->stream;
                index++;
                // Remove buffer from list
                info = req->mPendingBufferList.erase(info);
            }

            // Remove this request from Map
            LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
                    req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
            req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);

            orchestrateResult(&result);
            delete [] pStream_Buf;
            i = erasePendingRequest(i);
        }
    }

    /* Reset pending frame Drop list and requests list */
    mPendingFrameDropList.clear();

    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        req.mPendingBufferList.clear();
    }
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    LOGH("Cleared all the pending buffers ");

    return rc;
}
12777
12778bool QCamera3HardwareInterface::isOnEncoder(
12779 const cam_dimension_t max_viewfinder_size,
12780 uint32_t width, uint32_t height)
12781{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012782 return ((width > (uint32_t)max_viewfinder_size.width) ||
12783 (height > (uint32_t)max_viewfinder_size.height) ||
12784 (width > (uint32_t)VIDEO_4K_WIDTH) ||
12785 (height > (uint32_t)VIDEO_4K_HEIGHT));
Thierry Strudel3d639192016-09-09 11:52:26 -070012786}
12787
12788/*===========================================================================
12789 * FUNCTION : setBundleInfo
12790 *
12791 * DESCRIPTION: Set bundle info for all streams that are bundle.
12792 *
12793 * PARAMETERS : None
12794 *
12795 * RETURN : NO_ERROR on success
12796 * Error codes on failure
12797 *==========================================================================*/
12798int32_t QCamera3HardwareInterface::setBundleInfo()
12799{
12800 int32_t rc = NO_ERROR;
12801
12802 if (mChannelHandle) {
12803 cam_bundle_config_t bundleInfo;
12804 memset(&bundleInfo, 0, sizeof(bundleInfo));
12805 rc = mCameraHandle->ops->get_bundle_info(
12806 mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
12807 if (rc != NO_ERROR) {
12808 LOGE("get_bundle_info failed");
12809 return rc;
12810 }
12811 if (mAnalysisChannel) {
12812 mAnalysisChannel->setBundleInfo(bundleInfo);
12813 }
12814 if (mSupportChannel) {
12815 mSupportChannel->setBundleInfo(bundleInfo);
12816 }
12817 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
12818 it != mStreamInfo.end(); it++) {
12819 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
12820 channel->setBundleInfo(bundleInfo);
12821 }
12822 if (mRawDumpChannel) {
12823 mRawDumpChannel->setBundleInfo(bundleInfo);
12824 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070012825 if (mHdrPlusRawSrcChannel) {
12826 mHdrPlusRawSrcChannel->setBundleInfo(bundleInfo);
12827 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012828 }
12829
12830 return rc;
12831}
12832
12833/*===========================================================================
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012834 * FUNCTION : setInstantAEC
12835 *
12836 * DESCRIPTION: Set Instant AEC related params.
12837 *
12838 * PARAMETERS :
12839 * @meta: CameraMetadata reference
12840 *
12841 * RETURN : NO_ERROR on success
12842 * Error codes on failure
12843 *==========================================================================*/
12844int32_t QCamera3HardwareInterface::setInstantAEC(const CameraMetadata &meta)
12845{
12846 int32_t rc = NO_ERROR;
12847 uint8_t val = 0;
12848 char prop[PROPERTY_VALUE_MAX];
12849
12850 // First try to configure instant AEC from framework metadata
12851 if (meta.exists(QCAMERA3_INSTANT_AEC_MODE)) {
12852 val = (uint8_t)meta.find(QCAMERA3_INSTANT_AEC_MODE).data.i32[0];
12853 }
12854
12855 // If framework did not set this value, try to read from set prop.
12856 if (val == 0) {
12857 memset(prop, 0, sizeof(prop));
12858 property_get("persist.camera.instant.aec", prop, "0");
12859 val = (uint8_t)atoi(prop);
12860 }
12861
12862 if ((val >= (uint8_t)CAM_AEC_NORMAL_CONVERGENCE) &&
12863 ( val < (uint8_t)CAM_AEC_CONVERGENCE_MAX)) {
12864 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_INSTANT_AEC, val);
12865 mInstantAEC = val;
12866 mInstantAECSettledFrameNumber = 0;
12867 mInstantAecFrameIdxCount = 0;
12868 LOGH("instantAEC value set %d",val);
12869 if (mInstantAEC) {
12870 memset(prop, 0, sizeof(prop));
12871 property_get("persist.camera.ae.instant.bound", prop, "10");
12872 int32_t aec_frame_skip_cnt = atoi(prop);
12873 if (aec_frame_skip_cnt >= 0) {
12874 mAecSkipDisplayFrameBound = (uint8_t)aec_frame_skip_cnt;
12875 } else {
12876 LOGE("Invalid prop for aec frame bound %d", aec_frame_skip_cnt);
12877 rc = BAD_VALUE;
12878 }
12879 }
12880 } else {
12881 LOGE("Bad instant aec value set %d", val);
12882 rc = BAD_VALUE;
12883 }
12884 return rc;
12885}
12886
12887/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070012888 * FUNCTION : get_num_overall_buffers
12889 *
12890 * DESCRIPTION: Estimate number of pending buffers across all requests.
12891 *
12892 * PARAMETERS : None
12893 *
12894 * RETURN : Number of overall pending buffers
12895 *
12896 *==========================================================================*/
12897uint32_t PendingBuffersMap::get_num_overall_buffers()
12898{
12899 uint32_t sum_buffers = 0;
12900 for (auto &req : mPendingBuffersInRequest) {
12901 sum_buffers += req.mPendingBufferList.size();
12902 }
12903 return sum_buffers;
12904}
12905
12906/*===========================================================================
12907 * FUNCTION : removeBuf
12908 *
12909 * DESCRIPTION: Remove a matching buffer from tracker.
12910 *
12911 * PARAMETERS : @buffer: image buffer for the callback
12912 *
12913 * RETURN : None
12914 *
12915 *==========================================================================*/
void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
{
    bool buffer_found = false;
    // Scan every pending request for the first matching buffer handle.
    for (auto req = mPendingBuffersInRequest.begin();
            req != mPendingBuffersInRequest.end(); req++) {
        for (auto k = req->mPendingBufferList.begin();
                k != req->mPendingBufferList.end(); k++ ) {
            if (k->buffer == buffer) {
                LOGD("Frame %d: Found Frame buffer %p, take it out from mPendingBufferList",
                        req->frame_number, buffer);
                k = req->mPendingBufferList.erase(k);
                if (req->mPendingBufferList.empty()) {
                    // Remove this request from Map
                    // (safe: both loops are exited right after via the
                    // breaks below, so the reassigned 'req' iterator is
                    // never advanced past a just-erased element)
                    req = mPendingBuffersInRequest.erase(req);
                }
                buffer_found = true;
                break;
            }
        }
        if (buffer_found) {
            break;
        }
    }
    LOGD("mPendingBuffersMap.num_overall_buffers = %d",
            get_num_overall_buffers());
}
12942
12943/*===========================================================================
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012944 * FUNCTION : getBufErrStatus
12945 *
12946 * DESCRIPTION: get buffer error status
12947 *
12948 * PARAMETERS : @buffer: buffer handle
12949 *
12950 * RETURN : Error status
12951 *
12952 *==========================================================================*/
12953int32_t PendingBuffersMap::getBufErrStatus(buffer_handle_t *buffer)
12954{
12955 for (auto& req : mPendingBuffersInRequest) {
12956 for (auto& k : req.mPendingBufferList) {
12957 if (k.buffer == buffer)
12958 return k.bufStatus;
12959 }
12960 }
12961 return CAMERA3_BUFFER_STATUS_OK;
12962}
12963
12964/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070012965 * FUNCTION : setPAAFSupport
12966 *
12967 * DESCRIPTION: Set the preview-assisted auto focus support bit in
12968 * feature mask according to stream type and filter
12969 * arrangement
12970 *
12971 * PARAMETERS : @feature_mask: current feature mask, which may be modified
12972 * @stream_type: stream type
12973 * @filter_arrangement: filter arrangement
12974 *
12975 * RETURN : None
12976 *==========================================================================*/
12977void QCamera3HardwareInterface::setPAAFSupport(
12978 cam_feature_mask_t& feature_mask,
12979 cam_stream_type_t stream_type,
12980 cam_color_filter_arrangement_t filter_arrangement)
12981{
12982 LOGD("feature_mask=0x%llx; stream_type=%d, filter_arrangement=%d",
12983 feature_mask, stream_type, filter_arrangement);
12984
12985 switch (filter_arrangement) {
12986 case CAM_FILTER_ARRANGEMENT_RGGB:
12987 case CAM_FILTER_ARRANGEMENT_GRBG:
12988 case CAM_FILTER_ARRANGEMENT_GBRG:
12989 case CAM_FILTER_ARRANGEMENT_BGGR:
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012990 if ((stream_type == CAM_STREAM_TYPE_PREVIEW) ||
12991 (stream_type == CAM_STREAM_TYPE_ANALYSIS) ||
Thierry Strudel3d639192016-09-09 11:52:26 -070012992 (stream_type == CAM_STREAM_TYPE_VIDEO)) {
12993 feature_mask |= CAM_QCOM_FEATURE_PAAF;
12994 }
12995 break;
12996 case CAM_FILTER_ARRANGEMENT_Y:
12997 if (stream_type == CAM_STREAM_TYPE_ANALYSIS) {
12998 feature_mask |= CAM_QCOM_FEATURE_PAAF;
12999 }
13000 break;
13001 default:
13002 break;
13003 }
13004}
13005
13006/*===========================================================================
13007* FUNCTION : getSensorMountAngle
13008*
13009* DESCRIPTION: Retrieve sensor mount angle
13010*
13011* PARAMETERS : None
13012*
13013* RETURN : sensor mount angle in uint32_t
13014*==========================================================================*/
uint32_t QCamera3HardwareInterface::getSensorMountAngle()
{
    // Mount angle comes straight from the capability table entry for this
    // camera id, populated when capabilities were queried.
    return gCamCapability[mCameraId]->sensor_mount_angle;
}
13019
13020/*===========================================================================
13021* FUNCTION : getRelatedCalibrationData
13022*
13023* DESCRIPTION: Retrieve related system calibration data
13024*
13025* PARAMETERS : None
13026*
13027* RETURN : Pointer of related system calibration data
13028*==========================================================================*/
const cam_related_system_calibration_data_t *QCamera3HardwareInterface::getRelatedCalibrationData()
{
    // Returns a pointer into the static capability table (related-camera
    // calibration block); owned by the HAL, callers must not free it.
    return (const cam_related_system_calibration_data_t *)
            &(gCamCapability[mCameraId]->related_cam_calibration);
}
Shuzhen Wangf6890e02016-08-12 14:28:54 -070013034
13035/*===========================================================================
13036 * FUNCTION : is60HzZone
13037 *
13038 * DESCRIPTION: Whether the phone is in zone with 60hz electricity frequency
13039 *
13040 * PARAMETERS : None
13041 *
13042 * RETURN : True if in 60Hz zone, False otherwise
13043 *==========================================================================*/
13044bool QCamera3HardwareInterface::is60HzZone()
13045{
13046 time_t t = time(NULL);
13047 struct tm lt;
13048
13049 struct tm* r = localtime_r(&t, &lt);
13050
13051 if (r == NULL || lt.tm_gmtoff <= -2*60*60 || lt.tm_gmtoff >= 8*60*60)
13052 return true;
13053 else
13054 return false;
13055}
Shuzhen Wanga5da1022016-07-13 20:18:42 -070013056
13057/*===========================================================================
13058 * FUNCTION : adjustBlackLevelForCFA
13059 *
13060 * DESCRIPTION: Adjust the black level pattern in the order of RGGB to the order
13061 * of bayer CFA (Color Filter Array).
13062 *
13063 * PARAMETERS : @input: black level pattern in the order of RGGB
13064 * @output: black level pattern in the order of CFA
13065 * @color_arrangement: CFA color arrangement
13066 *
13067 * RETURN : None
13068 *==========================================================================*/
13069template<typename T>
13070void QCamera3HardwareInterface::adjustBlackLevelForCFA(
13071 T input[BLACK_LEVEL_PATTERN_CNT],
13072 T output[BLACK_LEVEL_PATTERN_CNT],
13073 cam_color_filter_arrangement_t color_arrangement)
13074{
13075 switch (color_arrangement) {
13076 case CAM_FILTER_ARRANGEMENT_GRBG:
13077 output[0] = input[1];
13078 output[1] = input[0];
13079 output[2] = input[3];
13080 output[3] = input[2];
13081 break;
13082 case CAM_FILTER_ARRANGEMENT_GBRG:
13083 output[0] = input[2];
13084 output[1] = input[3];
13085 output[2] = input[0];
13086 output[3] = input[1];
13087 break;
13088 case CAM_FILTER_ARRANGEMENT_BGGR:
13089 output[0] = input[3];
13090 output[1] = input[2];
13091 output[2] = input[1];
13092 output[3] = input[0];
13093 break;
13094 case CAM_FILTER_ARRANGEMENT_RGGB:
13095 output[0] = input[0];
13096 output[1] = input[1];
13097 output[2] = input[2];
13098 output[3] = input[3];
13099 break;
13100 default:
13101 LOGE("Invalid color arrangement to derive dynamic blacklevel");
13102 break;
13103 }
13104}
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013105
Chien-Yu Chen92724a82017-01-06 11:50:30 -080013106void QCamera3HardwareInterface::updateHdrPlusResultMetadata(
13107 CameraMetadata &resultMetadata,
13108 std::shared_ptr<metadata_buffer_t> settings)
13109{
13110 if (settings == nullptr) {
13111 ALOGE("%s: settings is nullptr.", __FUNCTION__);
13112 return;
13113 }
13114
13115 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, settings) {
13116 resultMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
13117 }
13118
13119 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, settings) {
13120 String8 str((const char *)gps_methods);
13121 resultMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
13122 }
13123
13124 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, settings) {
13125 resultMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
13126 }
13127
13128 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, settings) {
13129 resultMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
13130 }
13131
13132 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, settings) {
13133 uint8_t fwk_jpeg_quality = static_cast<uint8_t>(*jpeg_quality);
13134 resultMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
13135 }
13136
13137 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, settings) {
13138 uint8_t fwk_thumb_quality = static_cast<uint8_t>(*thumb_quality);
13139 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
13140 }
13141
13142 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, settings) {
13143 int32_t fwk_thumb_size[2];
13144 fwk_thumb_size[0] = thumb_size->width;
13145 fwk_thumb_size[1] = thumb_size->height;
13146 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
13147 }
13148
13149 IF_META_AVAILABLE(uint32_t, intent, CAM_INTF_META_CAPTURE_INTENT, settings) {
13150 uint8_t fwk_intent = intent[0];
13151 resultMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &fwk_intent, 1);
13152 }
13153}
13154
Chien-Yu Chene687bd02016-12-07 18:30:26 -080013155void QCamera3HardwareInterface::onCaptureResult(pbcamera::CaptureResult *result,
13156 const camera_metadata_t &resultMetadata) {
13157 if (result != nullptr) {
13158 if (result->outputBuffers.size() != 1) {
13159 ALOGE("%s: Number of output buffers (%u) is not supported.", __FUNCTION__,
13160 result->outputBuffers.size());
13161 return;
13162 }
13163
13164 if (result->outputBuffers[0].streamId != kPbYuvOutputStreamId) {
13165 ALOGE("%s: Only YUV output stream is supported. (stream id %d).", __FUNCTION__,
13166 result->outputBuffers[0].streamId);
13167 return;
13168 }
13169
Chien-Yu Chen92724a82017-01-06 11:50:30 -080013170 // Find the pending HDR+ request.
Chien-Yu Chene687bd02016-12-07 18:30:26 -080013171 HdrPlusPendingRequest pendingRequest;
13172 {
13173 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
13174 auto req = mHdrPlusPendingRequests.find(result->requestId);
13175 pendingRequest = req->second;
13176 }
13177
Chien-Yu Chen92724a82017-01-06 11:50:30 -080013178 // Update the result metadata with the settings of the HDR+ still capture request because
13179 // the result metadata belongs to a ZSL buffer.
13180 CameraMetadata metadata;
13181 metadata = &resultMetadata;
13182 updateHdrPlusResultMetadata(metadata, pendingRequest.settings);
13183 camera_metadata_t* updatedResultMetadata = metadata.release();
13184
13185 QCamera3PicChannel *picChannel =
13186 (QCamera3PicChannel*)pendingRequest.frameworkOutputBuffers[0].stream->priv;
13187
Chien-Yu Chene687bd02016-12-07 18:30:26 -080013188 // Check if dumping HDR+ YUV output is enabled.
13189 char prop[PROPERTY_VALUE_MAX];
13190 property_get("persist.camera.hdrplus.dump_yuv", prop, "0");
13191 bool dumpYuvOutput = atoi(prop);
13192
13193 if (dumpYuvOutput) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -080013194 // Dump yuv buffer to a ppm file.
13195 pbcamera::StreamConfiguration outputConfig;
13196 status_t rc = fillPbStreamConfig(&outputConfig, kPbYuvOutputStreamId,
13197 HAL_PIXEL_FORMAT_YCrCb_420_SP, picChannel, /*stream index*/0);
13198 if (rc == OK) {
13199 char buf[FILENAME_MAX] = {};
13200 snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"s_%d_%d_%dx%d.ppm",
13201 result->requestId, result->outputBuffers[0].streamId,
13202 outputConfig.image.width, outputConfig.image.height);
13203
13204 hdrplus_client_utils::writePpm(buf, outputConfig, result->outputBuffers[0]);
13205 } else {
13206 LOGW("%s: Couldn't dump YUV buffer because getting stream config failed: %s (%d).",
13207 __FUNCTION__, strerror(-rc), rc);
13208 }
13209 }
13210
Chien-Yu Chen92724a82017-01-06 11:50:30 -080013211 uint32_t halStreamId = picChannel->getStreamID(picChannel->getStreamTypeMask());
13212 auto halMetadata = std::make_shared<metadata_buffer_t>();
13213 clear_metadata_buffer(halMetadata.get());
13214
13215 // Convert updated result metadata to HAL metadata and return the yuv buffer for Jpeg
13216 // encoding.
13217 status_t res = translateFwkMetadataToHalMetadata(updatedResultMetadata, halMetadata.get(),
13218 halStreamId, /*minFrameDuration*/0);
13219 if (res == OK) {
13220 // Return the buffer to pic channel for encoding.
13221 picChannel->returnYuvBufferAndEncode(pendingRequest.yuvBuffer.get(),
13222 pendingRequest.frameworkOutputBuffers[0].buffer, result->requestId,
13223 halMetadata);
13224 } else {
13225 // Return the buffer without encoding.
13226 // TODO: This should not happen but we may want to report an error buffer to camera
13227 // service.
13228 picChannel->returnYuvBuffer(pendingRequest.yuvBuffer.get());
13229 ALOGE("%s: Translate framework metadata to HAL metadata failed: %s (%d).", __FUNCTION__,
13230 strerror(-res), res);
13231 }
13232
13233 // Send HDR+ metadata to framework.
13234 {
13235 pthread_mutex_lock(&mMutex);
13236
13237 // updatedResultMetadata will be freed in handlePendingResultsWithLock.
13238 handlePendingResultsWithLock(result->requestId, updatedResultMetadata);
13239 pthread_mutex_unlock(&mMutex);
13240 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -080013241
13242 // Remove the HDR+ pending request.
13243 {
13244 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
13245 auto req = mHdrPlusPendingRequests.find(result->requestId);
13246 mHdrPlusPendingRequests.erase(req);
13247 }
13248 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013249}
13250
Chien-Yu Chene687bd02016-12-07 18:30:26 -080013251void QCamera3HardwareInterface::onFailedCaptureResult(pbcamera::CaptureResult *failedResult) {
13252 // TODO: Handle HDR+ capture failures and send the failure to framework.
13253 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
13254 auto pendingRequest = mHdrPlusPendingRequests.find(failedResult->requestId);
13255
13256 // Return the buffer to pic channel.
13257 QCamera3PicChannel *picChannel =
13258 (QCamera3PicChannel*)pendingRequest->second.frameworkOutputBuffers[0].stream->priv;
13259 picChannel->returnYuvBuffer(pendingRequest->second.yuvBuffer.get());
13260
13261 mHdrPlusPendingRequests.erase(pendingRequest);
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013262}
13263
Thierry Strudel3d639192016-09-09 11:52:26 -070013264}; //end namespace qcamera