/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*     * Redistributions of source code must retain the above copyright
*       notice, this list of conditions and the following disclaimer.
*     * Redistributions in binary form must reproduce the above
*       copyright notice, this list of conditions and the following
*       disclaimer in the documentation and/or other materials provided
*       with the distribution.
*     * Neither the name of The Linux Foundation nor the names of its
*       contributors may be used to endorse or promote products derived
*       from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/

#define LOG_TAG "QCamera3HWI"
//#define LOG_NDEBUG 0

#define __STDC_LIMIT_MACROS

// To remove
#include <cutils/properties.h>

// System dependencies
#include <dlfcn.h>
#include <fcntl.h>
#include <stdio.h>
#include <stdlib.h>
#include "utils/Timers.h"
#include "sys/ioctl.h"
#include <time.h>
#include <sync/sync.h>
#include "gralloc_priv.h"
#include <map>

// Display dependencies
#include "qdMetaData.h"

// Camera dependencies
#include "android/QCamera3External.h"
#include "util/QCameraFlash.h"
#include "QCamera3HWI.h"
#include "QCamera3VendorTags.h"
#include "QCameraTrace.h"

#include "HdrPlusClientUtils.h"

extern "C" {
#include "mm_camera_dbg.h"
}
#include "cam_cond.h"

using namespace android;

namespace qcamera {

#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )

#define EMPTY_PIPELINE_DELAY 2
#define PARTIAL_RESULT_COUNT 2
#define FRAME_SKIP_DELAY 0

#define MAX_VALUE_8BIT ((1<<8)-1)
#define MAX_VALUE_10BIT ((1<<10)-1)
#define MAX_VALUE_12BIT ((1<<12)-1)

#define VIDEO_4K_WIDTH  3840
#define VIDEO_4K_HEIGHT 2160

#define MAX_EIS_WIDTH 1920
#define MAX_EIS_HEIGHT 1080

#define MAX_RAW_STREAMS        1
#define MAX_STALLING_STREAMS   1
#define MAX_PROCESSED_STREAMS  3
/* Batch mode is enabled only if FPS set is equal to or greater than this */
#define MIN_FPS_FOR_BATCH_MODE (120)
#define PREVIEW_FPS_FOR_HFR    (30)
#define DEFAULT_VIDEO_FPS      (30.0)
#define TEMPLATE_MAX_PREVIEW_FPS (30.0)
#define MAX_HFR_BATCH_SIZE     (8)
#define REGIONS_TUPLE_COUNT    5
#define HDR_PLUS_PERF_TIME_OUT (7000) // milliseconds
// Threshold (in seconds) for detection of missing request buffers
#define MISSING_REQUEST_BUF_TIMEOUT 3
#define MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT 30
#define FLUSH_TIMEOUT 3
#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))

#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3   ( CAM_QCOM_FEATURE_DENOISE2D |\
                                              CAM_QCOM_FEATURE_CROP |\
                                              CAM_QCOM_FEATURE_ROTATION |\
                                              CAM_QCOM_FEATURE_SHARPNESS |\
                                              CAM_QCOM_FEATURE_SCALE |\
                                              CAM_QCOM_FEATURE_CAC |\
                                              CAM_QCOM_FEATURE_CDS )
/* Per configuration size for static metadata length */
#define PER_CONFIGURATION_SIZE_3 (3)

#define TIMEOUT_NEVER -1

/* Face landmarks indices */
#define LEFT_EYE_X   0
#define LEFT_EYE_Y   1
#define RIGHT_EYE_X  2
#define RIGHT_EYE_Y  3
#define MOUTH_X      4
#define MOUTH_Y      5
#define TOTAL_LANDMARK_INDICES 6

cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
extern pthread_mutex_t gCamLock;
volatile uint32_t gCamHal3LogLevel = 1;
extern uint8_t gNumCameraSessions;

const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
    {"On",  CAM_CDS_MODE_ON},
    {"Off", CAM_CDS_MODE_OFF},
    {"Auto",CAM_CDS_MODE_AUTO}
};
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_video_hdr_mode_t,
        cam_video_hdr_mode_t> QCamera3HardwareInterface::VIDEO_HDR_MODES_MAP[] = {
    { QCAMERA3_VIDEO_HDR_MODE_OFF, CAM_VIDEO_HDR_MODE_OFF },
    { QCAMERA3_VIDEO_HDR_MODE_ON,  CAM_VIDEO_HDR_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_binning_correction_mode_t,
        cam_binning_correction_mode_t> QCamera3HardwareInterface::BINNING_CORRECTION_MODES_MAP[] = {
    { QCAMERA3_BINNING_CORRECTION_MODE_OFF, CAM_BINNING_CORRECTION_MODE_OFF },
    { QCAMERA3_BINNING_CORRECTION_MODE_ON,  CAM_BINNING_CORRECTION_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_ir_mode_t,
        cam_ir_mode_type_t> QCamera3HardwareInterface::IR_MODES_MAP [] = {
    {QCAMERA3_IR_MODE_OFF,  CAM_IR_MODE_OFF},
    {QCAMERA3_IR_MODE_ON,   CAM_IR_MODE_ON},
    {QCAMERA3_IR_MODE_AUTO, CAM_IR_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_effect_mode_t,
        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,        CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_awb_mode_t,
        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_scene_mode_t,
        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS,      CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS,         CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE},
    { ANDROID_CONTROL_SCENE_MODE_HDR,            CAM_SCENE_MODE_HDR}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_af_mode_t,
        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_color_correction_aberration_mode_t,
        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_antibanding_mode_t,
        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_flash_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_statistics_face_detect_mode_t,
        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
            CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
            CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
            CAM_FOCUS_CALIBRATED }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_state_t,
        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
    { ANDROID_LENS_STATE_STATIONARY, CAM_AF_LENS_STATE_STATIONARY},
    { ANDROID_LENS_STATE_MOVING,     CAM_AF_LENS_STATE_MOVING}
};

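// JPEG thumbnail (width, height) pairs advertised to the framework; the leading
// (0, 0) entry follows the Android convention that allows clients to request
// that no thumbnail be generated.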
const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             240, 144,
                                             256, 144,
                                             240, 160,
                                             256, 154,
                                             240, 240,
                                             320, 240};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_test_pattern_mode_t,
        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,                     CAM_TEST_PATTERN_OFF },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR,             CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,              CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,                     CAM_TEST_PATTERN_PN9 },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1,                 CAM_TEST_PATTERN_CUSTOM1},
};

/* Not every Android enum value has a HAL mapping, so some are not listed here.
 * The order of this list also matters: when mapping from HAL to Android, the
 * lookup traverses from lower to higher index, so for HAL values that map to
 * several Android values the first entry found is the one selected.
 */
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_reference_illuminant1_t,
        cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT,            CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT,   CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A,             CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55,                    CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65,                    CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75,                    CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50,                    CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN,    CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT,               CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN,               CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER,           CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER,         CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE,                  CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT,  CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT,      CAM_AWB_COLD_FLO},
};

const QCamera3HardwareInterface::QCameraMap<
        int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
    { 60,  CAM_HFR_MODE_60FPS},
    { 90,  CAM_HFR_MODE_90FPS},
    { 120, CAM_HFR_MODE_120FPS},
    { 150, CAM_HFR_MODE_150FPS},
    { 180, CAM_HFR_MODE_180FPS},
    { 210, CAM_HFR_MODE_210FPS},
    { 240, CAM_HFR_MODE_240FPS},
    { 480, CAM_HFR_MODE_480FPS},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_instant_aec_mode_t,
        cam_aec_convergence_type> QCamera3HardwareInterface::INSTANT_AEC_MODES_MAP[] = {
    { QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE,     CAM_AEC_NORMAL_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_AGGRESSIVE_CONVERGENCE, CAM_AEC_AGGRESSIVE_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_FAST_CONVERGENCE,       CAM_AEC_FAST_CONVERGENCE},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_exposure_meter_mode_t,
        cam_auto_exposure_mode_type> QCamera3HardwareInterface::AEC_MODES_MAP[] = {
    { QCAMERA3_EXP_METER_MODE_FRAME_AVERAGE,       CAM_AEC_MODE_FRAME_AVERAGE },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED,     CAM_AEC_MODE_CENTER_WEIGHTED },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING,       CAM_AEC_MODE_SPOT_METERING },
    { QCAMERA3_EXP_METER_MODE_SMART_METERING,      CAM_AEC_MODE_SMART_METERING },
    { QCAMERA3_EXP_METER_MODE_USER_METERING,       CAM_AEC_MODE_USER_METERING },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING_ADV,   CAM_AEC_MODE_SPOT_METERING_ADV },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED_ADV, CAM_AEC_MODE_CENTER_WEIGHTED_ADV },
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_iso_mode_t,
        cam_iso_mode_type> QCamera3HardwareInterface::ISO_MODES_MAP[] = {
    { QCAMERA3_ISO_MODE_AUTO,   CAM_ISO_MODE_AUTO },
    { QCAMERA3_ISO_MODE_DEBLUR, CAM_ISO_MODE_DEBLUR },
    { QCAMERA3_ISO_MODE_100,    CAM_ISO_MODE_100 },
    { QCAMERA3_ISO_MODE_200,    CAM_ISO_MODE_200 },
    { QCAMERA3_ISO_MODE_400,    CAM_ISO_MODE_400 },
    { QCAMERA3_ISO_MODE_800,    CAM_ISO_MODE_800 },
    { QCAMERA3_ISO_MODE_1600,   CAM_ISO_MODE_1600 },
    { QCAMERA3_ISO_MODE_3200,   CAM_ISO_MODE_3200 },
};

camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    .initialize                         = QCamera3HardwareInterface::initialize,
    .configure_streams                  = QCamera3HardwareInterface::configure_streams,
    .register_stream_buffers            = NULL,
    .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
    .process_capture_request            = QCamera3HardwareInterface::process_capture_request,
    .get_metadata_vendor_tag_ops        = NULL,
    .dump                               = QCamera3HardwareInterface::dump,
    .flush                              = QCamera3HardwareInterface::flush,
    .reserved                           = {0},
};

// Initialise session ids to an invalid sentinel value; openCamera() fills in the
// real session id and closeCamera() resets it back to the sentinel.
uint32_t QCamera3HardwareInterface::sessionId[] = {0xDEADBEEF, 0xDEADBEEF, 0xDEADBEEF};

/*===========================================================================
 * FUNCTION   : QCamera3HardwareInterface
 *
 * DESCRIPTION: constructor of QCamera3HardwareInterface
 *
 * PARAMETERS :
 *   @cameraId  : camera ID
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
        const camera_module_callbacks_t *callbacks)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mSupportChannel(NULL),
      mAnalysisChannel(NULL),
      mRawDumpChannel(NULL),
      mHdrPlusRawSrcChannel(NULL),
      mDummyBatchChannel(NULL),
      mDepthChannel(NULL),
      mPerfLockMgr(),
      mChannelHandle(0),
      mFirstConfiguration(true),
      mFlush(false),
      mFlushPerf(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mPrevParameters(NULL),
      m_bIsVideo(false),
      m_bIs4KVideo(false),
      m_bEisSupportedSize(false),
      m_bEisEnable(false),
      m_bEis3PropertyEnabled(false),
      m_MobicatMask(0),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      mMetaFrameCount(0U),
      mUpdateDebugLevel(false),
      mCallbacks(callbacks),
      mCaptureIntent(0),
      mCacMode(0),
      mHybridAeEnable(0),
      /* DevCamDebug metadata internal m control */
      mDevCamDebugMetaEnable(0),
      /* DevCamDebug metadata end */
      mBatchSize(0),
      mToBeQueuedVidBufs(0),
      mHFRVideoFps(DEFAULT_VIDEO_FPS),
      mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
      mStreamConfig(false),
      mCommon(),
      mFirstFrameNumberInBatch(0),
      mNeedSensorRestart(false),
      mPreviewStarted(false),
      mMinInFlightRequests(MIN_INFLIGHT_REQUESTS),
      mMaxInFlightRequests(MAX_INFLIGHT_REQUESTS),
      mInstantAEC(false),
      mResetInstantAEC(false),
      mInstantAECSettledFrameNumber(0),
      mAecSkipDisplayFrameBound(0),
      mInstantAecFrameIdxCount(0),
      mCurrFeatureState(0),
      mLdafCalibExist(false),
      mLastCustIntentFrmNum(-1),
      mState(CLOSED),
      mIsDeviceLinked(false),
      mIsMainCamera(true),
      mLinkedCameraId(0),
      m_pDualCamCmdHeap(NULL),
      m_pDualCamCmdPtr(NULL),
      mHdrPlusModeEnabled(false),
      mIsApInputUsedForHdrPlus(false),
      mFirstPreviewIntentSeen(false),
      m_bSensorHDREnabled(false)
{
    getLogLevel();
    mCommon.init(gCamCapability[cameraId]);
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
#ifndef USE_HAL_3_3
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_4;
#else
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
#endif
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl adds support for min_num_pp_bufs
    // TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    PTHREAD_COND_INIT(&mBuffersCond);

    PTHREAD_COND_INIT(&mRequestCond);
    mPendingLiveRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

    // Getting system props of different kinds
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.raw.dump", prop, "0");
    mEnableRawDump = atoi(prop);
    property_get("persist.camera.hal3.force.hdr", prop, "0");
    mForceHdrSnapshot = atoi(prop);

    if (mEnableRawDump)
        LOGD("Raw dump from Camera HAL enabled");

    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
    memset(mLdafCalib, 0, sizeof(mLdafCalib));

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.preview", prop, "0");
    m_bTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.swtnr.preview", prop, "1");
    m_bSwTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.video", prop, "0");
    m_bTnrVideo = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.avtimer.debug", prop, "0");
    m_debug_avtimer = (uint8_t)atoi(prop);
    LOGI("AV timer enabled: %d", m_debug_avtimer);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.cacmode.disable", prop, "0");
    m_cacModeDisabled = (uint8_t)atoi(prop);

    // Load and read GPU library.
    lib_surface_utils = NULL;
    LINK_get_surface_pixel_alignment = NULL;
    mSurfaceStridePadding = CAM_PAD_TO_32;
    lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
    if (lib_surface_utils) {
        *(void **)&LINK_get_surface_pixel_alignment =
                dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
        if (LINK_get_surface_pixel_alignment) {
            mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
        }
        dlclose(lib_surface_utils);
    }

    m60HzZone = is60HzZone();
}

/*===========================================================================
 * FUNCTION   : ~QCamera3HardwareInterface
 *
 * DESCRIPTION: destructor of QCamera3HardwareInterface
 *
 * PARAMETERS : none
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    LOGD("E");

    int32_t rc = 0;

    // Disable power hint and enable the perf lock for close camera
    mPerfLockMgr.releasePerfLock(PERF_LOCK_POWERHINT_ENCODE);
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_CLOSE_CAMERA);

    // unlink of dualcam during close camera
    if (mIsDeviceLinked) {
        cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
                &m_pDualCamCmdPtr->bundle_info;
        m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
        m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
        pthread_mutex_lock(&gCamLock);

        if (mIsMainCamera == 1) {
            m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            // related session id should be session id of linked session
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        } else {
            m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        }
        m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
        pthread_mutex_unlock(&gCamLock);

        rc = mCameraHandle->ops->set_dual_cam_cmd(
                mCameraHandle->camera_handle);
        if (rc < 0) {
            LOGE("Dualcam: Unlink failed, but still proceed to close");
        }
    }

    /* We need to stop all streams before deleting any stream */
    if (mRawDumpChannel) {
        mRawDumpChannel->stop();
    }

    if (mHdrPlusRawSrcChannel) {
        mHdrPlusRawSrcChannel->stop();
    }

    // NOTE: 'camera3_stream_t *' objects are already freed at
    //       this stage by the framework
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel) {
            channel->stop();
        }
    }
    if (mSupportChannel)
        mSupportChannel->stop();

    if (mAnalysisChannel) {
        mAnalysisChannel->stop();
    }
    if (mMetadataChannel) {
        mMetadataChannel->stop();
    }
    if (mChannelHandle) {
        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGD("stopping channel %d", mChannelHandle);
    }

    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel)
            delete channel;
        free (*it);
    }
    if (mSupportChannel) {
        delete mSupportChannel;
        mSupportChannel = NULL;
    }

    if (mAnalysisChannel) {
        delete mAnalysisChannel;
        mAnalysisChannel = NULL;
    }
    if (mRawDumpChannel) {
        delete mRawDumpChannel;
        mRawDumpChannel = NULL;
    }
    if (mHdrPlusRawSrcChannel) {
        delete mHdrPlusRawSrcChannel;
        mHdrPlusRawSrcChannel = NULL;
    }
    if (mDummyBatchChannel) {
        delete mDummyBatchChannel;
        mDummyBatchChannel = NULL;
    }

    mPictureChannel = NULL;
    mDepthChannel = NULL;

    if (mMetadataChannel) {
        delete mMetadataChannel;
        mMetadataChannel = NULL;
    }

    /* Clean up all channels */
    if (mCameraInitialized) {
        if(!mFirstConfiguration){
            //send the last unconfigure
            cam_stream_size_info_t stream_config_info;
            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
            stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
            stream_config_info.buffer_info.max_buffers =
                    m_bIs4KVideo ? 0 :
                    m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
            clear_metadata_buffer(mParameters);
            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
                    stream_config_info);
            int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
            if (rc < 0) {
                LOGE("set_parms failed for unconfigure");
            }
        }
        deinitParameters();
    }

    if (mChannelHandle) {
        mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGH("deleting channel %d", mChannelHandle);
        mChannelHandle = 0;
    }

    if (mState != CLOSED)
        closeCamera();

    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        req.mPendingBufferList.clear();
    }
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end();) {
        i = erasePendingRequest(i);
    }
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    mPerfLockMgr.releasePerfLock(PERF_LOCK_CLOSE_CAMERA);

    pthread_cond_destroy(&mRequestCond);

    pthread_cond_destroy(&mBuffersCond);

    pthread_mutex_destroy(&mMutex);
    LOGD("X");
}

/*===========================================================================
 * FUNCTION   : erasePendingRequest
 *
 * DESCRIPTION: function to erase a desired pending request after freeing any
 *              allocated memory
 *
 * PARAMETERS :
 *   @i       : iterator pointing to pending request to be erased
 *
 * RETURN     : iterator pointing to the next request
 *==========================================================================*/
QCamera3HardwareInterface::pendingRequestIterator
        QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
{
    if (i->input_buffer != NULL) {
        free(i->input_buffer);
        i->input_buffer = NULL;
    }
    if (i->settings != NULL)
        free_camera_metadata((camera_metadata_t*)i->settings);
    return mPendingRequestsList.erase(i);
}

/*===========================================================================
 * FUNCTION   : camEvtHandle
 *
 * DESCRIPTION: Function registered to mm-camera-interface to handle events
 *
 * PARAMETERS :
 *   @camera_handle : interface layer camera handle
 *   @evt           : ptr to event
 *   @user_data     : user data ptr
 *
 * RETURN     : none
 *==========================================================================*/
void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
        mm_camera_event_t *evt,
        void *user_data)
{
    QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
    if (obj && evt) {
        switch(evt->server_event_type) {
            case CAM_EVENT_TYPE_DAEMON_DIED:
                pthread_mutex_lock(&obj->mMutex);
                obj->mState = ERROR;
                pthread_mutex_unlock(&obj->mMutex);
                LOGE("Fatal, camera daemon died");
                break;

            case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
                LOGD("HAL got request pull from Daemon");
                pthread_mutex_lock(&obj->mMutex);
                obj->mWokenUpByDaemon = true;
                obj->unblockRequestIfNecessary();
                pthread_mutex_unlock(&obj->mMutex);
                break;

            default:
                LOGW("Warning: Unhandled event %d",
                        evt->server_event_type);
                break;
        }
    } else {
        LOGE("NULL user_data/evt");
    }
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS :
 *   @hw_device  : double ptr for camera device struct
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              none-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
{
    int rc = 0;
    if (mState != CLOSED) {
        *hw_device = NULL;
        return PERMISSION_DENIED;
    }

    mPerfLockMgr.acquirePerfLock(PERF_LOCK_OPEN_CAMERA);
    LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
            mCameraId);

    rc = openCamera();
    if (rc == 0) {
        *hw_device = &mCameraDevice.common;
    } else {
        *hw_device = NULL;
    }

    LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    if (rc == NO_ERROR) {
        mState = OPENED;
    }
    return rc;
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              none-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera()
{
    int rc = 0;
    char value[PROPERTY_VALUE_MAX];

    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_OPENCAMERA);
    if (mCameraHandle) {
        LOGE("Failure: Camera already opened");
        return ALREADY_EXISTS;
    }

    rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
    if (rc < 0) {
        LOGE("Failed to reserve flash for camera id: %d",
                mCameraId);
        return UNKNOWN_ERROR;
    }

    rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
    if (rc) {
        LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
        return rc;
    }

    if (!mCameraHandle) {
        LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
        return -ENODEV;
    }

    rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
            camEvtHandle, (void *)this);

    if (rc < 0) {
        LOGE("Error, failed to register event callback");
        /* Not closing camera here since it is already handled in destructor */
        return FAILED_TRANSACTION;
    }

    mExifParams.debug_params =
            (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
    if (mExifParams.debug_params) {
        memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
    } else {
        LOGE("Out of Memory. Allocation failed for 3A debug exif params");
        return NO_MEMORY;
    }
    mFirstConfiguration = true;

    //Notify display HAL that a camera session is active.
    //But avoid calling the same during bootup because camera service might open/close
    //cameras at boot time during its initialization and display service will also internally
    //wait for camera service to initialize first while calling this display API, resulting in a
    //deadlock situation. Since boot time camera open/close calls are made only to fetch
    //capabilities, no need of this display bw optimization.
    //Use "service.bootanim.exit" property to know boot status.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (gNumCameraSessions++ == 0) {
            setCameraLaunchStatus(true);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    //fill the session id needed while linking dual cam
    pthread_mutex_lock(&gCamLock);
    rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
            &sessionId[mCameraId]);
    pthread_mutex_unlock(&gCamLock);

    if (rc < 0) {
        LOGE("Error, failed to get session id");
        return UNKNOWN_ERROR;
    } else {
        //Allocate related cam sync buffer
        //this is needed for the payload that goes along with bundling cmd for related
        //camera use cases
        m_pDualCamCmdHeap = new QCamera3HeapMemory(1);
        rc = m_pDualCamCmdHeap->allocate(sizeof(cam_dual_camera_cmd_info_t));
        if(rc != OK) {
            rc = NO_MEMORY;
            LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
            return NO_MEMORY;
        }

        //Map memory for related cam sync buffer
        rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
                CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF,
                m_pDualCamCmdHeap->getFd(0),
                sizeof(cam_dual_camera_cmd_info_t),
                m_pDualCamCmdHeap->getPtr(0));
        if(rc < 0) {
            LOGE("Dualcam: failed to map Related cam sync buffer");
            rc = FAILED_TRANSACTION;
            return NO_MEMORY;
        }
        m_pDualCamCmdPtr =
                (cam_dual_camera_cmd_info_t*) DATA_PTR(m_pDualCamCmdHeap,0);
    }

    LOGH("mCameraId=%d",mCameraId);

    // Create an HDR+ client instance.
    // TODO: detect if Easel exists instead of property.
    bool enableHdrPlus = property_get_bool("persist.camera.hdrplus.enable",
            false);
    ALOGD("%s: HDR+ in Camera HAL %s.", __FUNCTION__, enableHdrPlus ?
            "enabled" : "disabled");
    if (enableHdrPlus) {
        mHdrPlusClient = std::make_shared<HdrPlusClient>();
        mIsApInputUsedForHdrPlus =
                property_get_bool("persist.camera.hdrplus.apinput", false);
        ALOGD("%s: HDR+ input is provided by %s.", __FUNCTION__,
                mIsApInputUsedForHdrPlus ? "AP" : "Easel");
    }

    return NO_ERROR;
}

/*===========================================================================
 * FUNCTION   : closeCamera
 *
 * DESCRIPTION: close camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              none-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::closeCamera()
{
    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CLOSECAMERA);
    int rc = NO_ERROR;
    char value[PROPERTY_VALUE_MAX];

    LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
            mCameraId);

    // unmap memory for related cam sync buffer
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF);
    if (NULL != m_pDualCamCmdHeap) {
        m_pDualCamCmdHeap->deallocate();
        delete m_pDualCamCmdHeap;
        m_pDualCamCmdHeap = NULL;
        m_pDualCamCmdPtr = NULL;
    }

    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
    mCameraHandle = NULL;

    mHdrPlusClient = nullptr;

    //reset session id to some invalid id
    pthread_mutex_lock(&gCamLock);
    sessionId[mCameraId] = 0xDEADBEEF;
    pthread_mutex_unlock(&gCamLock);

    //Notify display HAL that there is no active camera session
    //but avoid calling the same during bootup. Refer to openCamera
    //for more details.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (--gNumCameraSessions == 0) {
            setCameraLaunchStatus(false);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    if (mExifParams.debug_params) {
        free(mExifParams.debug_params);
        mExifParams.debug_params = NULL;
    }
    if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
        LOGW("Failed to release flash for camera id: %d",
                mCameraId);
    }
    mState = CLOSED;
    LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
            mCameraId, rc);
    return rc;
}

/*===========================================================================
 * FUNCTION   : initialize
 *
 * DESCRIPTION: Initialize frameworks callback functions
 *
 * PARAMETERS :
 *   @callback_ops : callback function to frameworks
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::initialize(
        const struct camera3_callback_ops *callback_ops)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_INIT);
    int rc;

    LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
    pthread_mutex_lock(&mMutex);

    // Validate current state
    switch (mState) {
        case OPENED:
            /* valid state */
            break;
        default:
            LOGE("Invalid state %d", mState);
            rc = -ENODEV;
            goto err1;
    }

    rc = initParameters();
    if (rc < 0) {
        LOGE("initParameters failed %d", rc);
        goto err1;
    }
    mCallbackOps = callback_ops;

    mChannelHandle = mCameraHandle->ops->add_channel(
            mCameraHandle->camera_handle, NULL, NULL, this);
    if (mChannelHandle == 0) {
        LOGE("add_channel failed");
        rc = -ENOMEM;
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    pthread_mutex_unlock(&mMutex);
    mCameraInitialized = true;
    mState = INITIALIZED;
    LOGI("X");
    return 0;

err1:
    pthread_mutex_unlock(&mMutex);
    return rc;
}

/*===========================================================================
 * FUNCTION   : validateStreamDimensions
 *
 * DESCRIPTION: Check if the configuration requested are those advertised
 *
 * PARAMETERS :
 *   @stream_list : streams to be configured
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::validateStreamDimensions(
        camera3_stream_configuration_t *streamList)
{
    int rc = NO_ERROR;
    size_t count = 0;
    uint32_t depthWidth =
            gCamCapability[mCameraId]->active_array_size.width;
    uint32_t depthHeight =
            gCamCapability[mCameraId]->active_array_size.height;

    camera3_stream_t *inputStream = NULL;
    /*
     * Loop through all streams to find input stream if it exists.
     */
    for (size_t i = 0; i< streamList->num_streams; i++) {
        if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
            if (inputStream != NULL) {
                LOGE("Error, Multiple input streams requested");
                return -EINVAL;
            }
            inputStream = streamList->streams[i];
        }
    }
    /*
     * Loop through all streams requested in configuration.
     * Check if unsupported sizes have been requested on any of them.
     */
    for (size_t j = 0; j < streamList->num_streams; j++) {
        bool sizeFound = false;
        camera3_stream_t *newStream = streamList->streams[j];

        uint32_t rotatedHeight = newStream->height;
        uint32_t rotatedWidth = newStream->width;
        if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
                (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
            rotatedHeight = newStream->width;
            rotatedWidth = newStream->height;
        }

        /*
         * Sizes are different for each type of stream format; check against
         * the appropriate table.
         */
        switch (newStream->format) {
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
        case HAL_PIXEL_FORMAT_RAW10:
            count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
                        (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_BLOB:
            if (newStream->data_space == HAL_DATASPACE_DEPTH) {
                //As per spec. depth cloud should be sample count / 16
                uint32_t depthSamplesCount = depthWidth * depthHeight / 16;
                if ((depthSamplesCount == newStream->width) &&
                        (1 == newStream->height)) {
                    sizeFound = true;
                }
                break;
            }
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
            /* Verify set size against generated sizes table */
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_YCbCr_420_888:
        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
        default:
            if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
                    || newStream->stream_type == CAMERA3_STREAM_INPUT
                    || IS_USAGE_ZSL(newStream->usage)) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->active_array_size.width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->active_array_size.height)) {
                    sizeFound = true;
                    break;
                }
                /* We could potentially break here to enforce that a ZSL stream
                 * set from the framework is always full active array size,
                 * but it is not clear from the spec if the framework will always
                 * follow that. We also have logic to override to full array
                 * size, so keeping the logic lenient at the moment.
                 */
            }
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
                    MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        } /* End of switch(newStream->format) */

        /* We error out even if a single stream has an unsupported size set */
        if (!sizeFound) {
            LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
                    rotatedWidth, rotatedHeight, newStream->format,
                    gCamCapability[mCameraId]->active_array_size.width,
                    gCamCapability[mCameraId]->active_array_size.height);
            rc = -EINVAL;
            break;
        }
    } /* End of for each stream */
    return rc;
}

/*==============================================================================
 * FUNCTION   : isSupportChannelNeeded
 *
 * DESCRIPTION: Simple heuristic func to determine if support channels is needed
 *
 * PARAMETERS :
 *   @stream_list        : streams to be configured
 *   @stream_config_info : the config info for streams to be configured
 *
 * RETURN     : Boolean true/false decision
 *
 *==========================================================================*/
bool QCamera3HardwareInterface::isSupportChannelNeeded(
        camera3_stream_configuration_t *streamList,
        cam_stream_size_info_t stream_config_info)
{
    uint32_t i;
    bool pprocRequested = false;
    /* Check for conditions where PProc pipeline does not have any streams */
    for (i = 0; i < stream_config_info.num_streams; i++) {
        if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
                stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
            pprocRequested = true;
            break;
        }
    }

    if (pprocRequested == false )
        return true;

    /* Dummy stream needed if only raw or jpeg streams present */
    for (i = 0; i < streamList->num_streams; i++) {
        switch(streamList->streams[i]->format) {
            case HAL_PIXEL_FORMAT_RAW_OPAQUE:
            case HAL_PIXEL_FORMAT_RAW10:
            case HAL_PIXEL_FORMAT_RAW16:
            case HAL_PIXEL_FORMAT_BLOB:
                break;
            default:
                return false;
        }
    }
    return true;
}

/*==============================================================================
 * FUNCTION   : getSensorModeInfo
 *
 * DESCRIPTION: Get sensor mode information based on current stream configuration
 *
 * PARAMETERS :
 *   @sensor_mode_info : sensor mode information (output)
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              none-zero failure code
 *
 *==========================================================================*/
int32_t QCamera3HardwareInterface::getSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
{
    int32_t rc = NO_ERROR;

    cam_dimension_t max_dim = {0, 0};
    for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
        if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
            max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
        if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
            max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
    }

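    // Report the largest requested stream dimension to the backend so it can
    // select a matching sensor mode, then read back the parameters of the mode
    // it chose.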
    clear_metadata_buffer(mParameters);

    rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
            max_dim);
    if (rc != NO_ERROR) {
        LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
        return rc;
    }

    rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
    if (rc != NO_ERROR) {
        LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
        return rc;
    }

    clear_metadata_buffer(mParameters);
    ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO);

    rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
            mParameters);
    if (rc != NO_ERROR) {
        LOGE("Failed to get CAM_INTF_PARM_SENSOR_MODE_INFO");
        return rc;
    }

    READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO, sensorModeInfo);
    LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
            "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
            sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
            sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
            sensorModeInfo.num_raw_bits);

    return rc;
}

/*==============================================================================
 * FUNCTION   : addToPPFeatureMask
 *
 * DESCRIPTION: add additional features to pp feature mask based on
 *              stream type and usecase
 *
 * PARAMETERS :
 *   @stream_format : stream type for feature mask
 *   @stream_idx    : stream idx within postprocess_mask list to change
 *
 * RETURN     : NULL
 *
 *==========================================================================*/
void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
        uint32_t stream_idx)
{
    char feature_mask_value[PROPERTY_VALUE_MAX];
    cam_feature_mask_t feature_mask;
    int args_converted;
    int property_len;

    /* Get feature mask from property */
#ifdef _LE_CAMERA_
    char swtnr_feature_mask_value[PROPERTY_VALUE_MAX];
    snprintf(swtnr_feature_mask_value, PROPERTY_VALUE_MAX, "%lld", CAM_QTI_FEATURE_SW_TNR);
    property_len = property_get("persist.camera.hal3.feature",
            feature_mask_value, swtnr_feature_mask_value);
#else
    property_len = property_get("persist.camera.hal3.feature",
            feature_mask_value, "0");
#endif
    if ((property_len > 2) && (feature_mask_value[0] == '0') &&
            (feature_mask_value[1] == 'x')) {
        args_converted = sscanf(feature_mask_value, "0x%llx", &feature_mask);
    } else {
        args_converted = sscanf(feature_mask_value, "%lld", &feature_mask);
    }
    if (1 != args_converted) {
        feature_mask = 0;
        LOGE("Wrong feature mask %s", feature_mask_value);
        return;
    }

    switch (stream_format) {
    case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
        /* Add LLVD to pp feature mask only if video hint is enabled */
        if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
            mStreamConfigInfo.postprocess_mask[stream_idx]
                    |= CAM_QTI_FEATURE_SW_TNR;
            LOGH("Added SW TNR to pp feature mask");
        } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
            mStreamConfigInfo.postprocess_mask[stream_idx]
                    |= CAM_QCOM_FEATURE_LLVD;
            LOGH("Added LLVD SeeMore to pp feature mask");
        }
        if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
                CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
            mStreamConfigInfo.postprocess_mask[stream_idx] |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
        }
        if ((m_bIsVideo) && (gCamCapability[mCameraId]->qcom_supported_feature_mask &
                CAM_QTI_FEATURE_BINNING_CORRECTION)) {
            mStreamConfigInfo.postprocess_mask[stream_idx] |=
                    CAM_QTI_FEATURE_BINNING_CORRECTION;
        }
        break;
    }
    default:
        break;
    }
    LOGD("PP feature mask %llx",
            mStreamConfigInfo.postprocess_mask[stream_idx]);
}

1396/*==============================================================================
1397 * FUNCTION : updateFpsInPreviewBuffer
1398 *
1399 * DESCRIPTION: update FPS information in preview buffer.
1400 *
1401 * PARAMETERS :
1402 * @metadata : pointer to metadata buffer
1403 * @frame_number: frame_number to look for in pending buffer list
1404 *
1405 * RETURN : None
1406 *
1407 *==========================================================================*/
1408void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
1409 uint32_t frame_number)
1410{
1411 // Mark all pending buffers for this particular request
1412 // with corresponding framerate information
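    // Only buffers whose request matches frame_number and whose channel carries
    // a PREVIEW stream are updated; max_fps from the metadata FPS range is
    // written into the gralloc handle's display metadata as the refresh rate.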
1413 for (List<PendingBuffersInRequest>::iterator req =
1414 mPendingBuffersMap.mPendingBuffersInRequest.begin();
1415 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1416 for(List<PendingBufferInfo>::iterator j =
1417 req->mPendingBufferList.begin();
1418 j != req->mPendingBufferList.end(); j++) {
1419 QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
1420 if ((req->frame_number == frame_number) &&
1421 (channel->getStreamTypeMask() &
1422 (1U << CAM_STREAM_TYPE_PREVIEW))) {
1423 IF_META_AVAILABLE(cam_fps_range_t, float_range,
1424 CAM_INTF_PARM_FPS_RANGE, metadata) {
1425 typeof (MetaData_t::refreshrate) cameraFps = float_range->max_fps;
1426 struct private_handle_t *priv_handle =
1427 (struct private_handle_t *)(*(j->buffer));
1428 setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
1429 }
1430 }
1431 }
1432 }
1433}
1434
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001435/*==============================================================================
1436 * FUNCTION : updateTimeStampInPendingBuffers
1437 *
1438 * DESCRIPTION: update timestamp in display metadata for all pending buffers
1439 * of a frame number
1440 *
1441 * PARAMETERS :
    1442  * @frameNumber : frame number whose pending buffers will receive the timestamp
1443 * @timestamp : timestamp to be set
1444 *
1445 * RETURN : None
1446 *
1447 *==========================================================================*/
1448void QCamera3HardwareInterface::updateTimeStampInPendingBuffers(
1449 uint32_t frameNumber, nsecs_t timestamp)
1450{
1451 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
1452 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1453 if (req->frame_number != frameNumber)
1454 continue;
1455
1456 for (auto k = req->mPendingBufferList.begin();
1457 k != req->mPendingBufferList.end(); k++ ) {
1458 struct private_handle_t *priv_handle =
1459 (struct private_handle_t *) (*(k->buffer));
1460 setMetaData(priv_handle, SET_VT_TIMESTAMP, &timestamp);
1461 }
1462 }
1463 return;
1464}
1465
Thierry Strudel3d639192016-09-09 11:52:26 -07001466/*===========================================================================
1467 * FUNCTION : configureStreams
1468 *
1469 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1470 * and output streams.
1471 *
1472 * PARAMETERS :
1473 * @stream_list : streams to be configured
1474 *
    1475  * RETURN     : int32_t type of status
    1476  *              NO_ERROR -- success, non-zero failure code otherwise
1477 *==========================================================================*/
1478int QCamera3HardwareInterface::configureStreams(
1479 camera3_stream_configuration_t *streamList)
1480{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001481 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS);
Thierry Strudel3d639192016-09-09 11:52:26 -07001482 int rc = 0;
1483
1484 // Acquire perfLock before configure streams
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001485 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001486 rc = configureStreamsPerfLocked(streamList);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001487 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001488
1489 return rc;
1490}
1491
1492/*===========================================================================
1493 * FUNCTION : configureStreamsPerfLocked
1494 *
1495 * DESCRIPTION: configureStreams while perfLock is held.
1496 *
1497 * PARAMETERS :
1498 * @stream_list : streams to be configured
1499 *
1500 * RETURN : int32_t type of status
1501 * NO_ERROR -- success
    1502  *              non-zero failure code
1503 *==========================================================================*/
1504int QCamera3HardwareInterface::configureStreamsPerfLocked(
1505 camera3_stream_configuration_t *streamList)
1506{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001507 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS_PERF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07001508 int rc = 0;
1509
1510 // Sanity check stream_list
1511 if (streamList == NULL) {
1512 LOGE("NULL stream configuration");
1513 return BAD_VALUE;
1514 }
1515 if (streamList->streams == NULL) {
1516 LOGE("NULL stream list");
1517 return BAD_VALUE;
1518 }
1519
1520 if (streamList->num_streams < 1) {
1521 LOGE("Bad number of streams requested: %d",
1522 streamList->num_streams);
1523 return BAD_VALUE;
1524 }
1525
1526 if (streamList->num_streams >= MAX_NUM_STREAMS) {
1527 LOGE("Maximum number of streams %d exceeded: %d",
1528 MAX_NUM_STREAMS, streamList->num_streams);
1529 return BAD_VALUE;
1530 }
1531
1532 mOpMode = streamList->operation_mode;
1533 LOGD("mOpMode: %d", mOpMode);
1534
    1535     /* first invalidate all the streams in mStreamInfo
1536 * if they appear again, they will be validated */
1537 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1538 it != mStreamInfo.end(); it++) {
1539 QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1540 if (channel) {
1541 channel->stop();
1542 }
1543 (*it)->status = INVALID;
1544 }
1545
1546 if (mRawDumpChannel) {
1547 mRawDumpChannel->stop();
1548 delete mRawDumpChannel;
1549 mRawDumpChannel = NULL;
1550 }
1551
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001552 if (mHdrPlusRawSrcChannel) {
1553 mHdrPlusRawSrcChannel->stop();
1554 delete mHdrPlusRawSrcChannel;
1555 mHdrPlusRawSrcChannel = NULL;
1556 }
1557
Thierry Strudel3d639192016-09-09 11:52:26 -07001558 if (mSupportChannel)
1559 mSupportChannel->stop();
1560
1561 if (mAnalysisChannel) {
1562 mAnalysisChannel->stop();
1563 }
1564 if (mMetadataChannel) {
    1565         /* If mStreamInfo is not empty, there is a metadata stream to stop */
1566 mMetadataChannel->stop();
1567 }
1568 if (mChannelHandle) {
1569 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
1570 mChannelHandle);
1571 LOGD("stopping channel %d", mChannelHandle);
1572 }
1573
1574 pthread_mutex_lock(&mMutex);
1575
1576 // Check state
1577 switch (mState) {
1578 case INITIALIZED:
1579 case CONFIGURED:
1580 case STARTED:
1581 /* valid state */
1582 break;
1583 default:
1584 LOGE("Invalid state %d", mState);
1585 pthread_mutex_unlock(&mMutex);
1586 return -ENODEV;
1587 }
1588
1589 /* Check whether we have video stream */
1590 m_bIs4KVideo = false;
1591 m_bIsVideo = false;
1592 m_bEisSupportedSize = false;
1593 m_bTnrEnabled = false;
Mansoor Aftab93a66e52017-01-26 14:58:25 -08001594 m_bVideoHdrEnabled = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001595 bool isZsl = false;
Emilian Peev7650c122017-01-19 08:24:33 -08001596 bool depthPresent = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001597 uint32_t videoWidth = 0U;
1598 uint32_t videoHeight = 0U;
1599 size_t rawStreamCnt = 0;
1600 size_t stallStreamCnt = 0;
1601 size_t processedStreamCnt = 0;
1602 // Number of streams on ISP encoder path
1603 size_t numStreamsOnEncoder = 0;
1604 size_t numYuv888OnEncoder = 0;
1605 bool bYuv888OverrideJpeg = false;
1606 cam_dimension_t largeYuv888Size = {0, 0};
1607 cam_dimension_t maxViewfinderSize = {0, 0};
1608 bool bJpegExceeds4K = false;
1609 bool bJpegOnEncoder = false;
1610 bool bUseCommonFeatureMask = false;
1611 cam_feature_mask_t commonFeatureMask = 0;
1612 bool bSmallJpegSize = false;
1613 uint32_t width_ratio;
1614 uint32_t height_ratio;
1615 maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1616 camera3_stream_t *inputStream = NULL;
1617 bool isJpeg = false;
1618 cam_dimension_t jpegSize = {0, 0};
Thierry Strudel9ec39c62016-12-28 11:30:05 -08001619 cam_dimension_t previewSize = {0, 0};
Thierry Strudel3d639192016-09-09 11:52:26 -07001620
1621 cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
1622
1623 /*EIS configuration*/
Thierry Strudel3d639192016-09-09 11:52:26 -07001624 bool oisSupported = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001625 uint8_t eis_prop_set;
1626 uint32_t maxEisWidth = 0;
1627 uint32_t maxEisHeight = 0;
1628
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001629 // Initialize all instant AEC related variables
1630 mInstantAEC = false;
1631 mResetInstantAEC = false;
1632 mInstantAECSettledFrameNumber = 0;
1633 mAecSkipDisplayFrameBound = 0;
1634 mInstantAecFrameIdxCount = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08001635 mCurrFeatureState = 0;
1636 mStreamConfig = true;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001637
Thierry Strudel3d639192016-09-09 11:52:26 -07001638 memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1639
1640 size_t count = IS_TYPE_MAX;
1641 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1642 for (size_t i = 0; i < count; i++) {
1643 if ((gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001644 (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
1645 m_bEisSupported = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07001646 break;
1647 }
1648 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001649 count = CAM_OPT_STAB_MAX;
1650 count = MIN(gCamCapability[mCameraId]->optical_stab_modes_count, count);
1651 for (size_t i = 0; i < count; i++) {
1652 if (gCamCapability[mCameraId]->optical_stab_modes[i] == CAM_OPT_STAB_ON) {
1653 oisSupported = true;
1654 break;
1655 }
1656 }
1657
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001658 if (m_bEisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001659 maxEisWidth = MAX_EIS_WIDTH;
1660 maxEisHeight = MAX_EIS_HEIGHT;
1661 }
1662
1663 /* EIS setprop control */
1664 char eis_prop[PROPERTY_VALUE_MAX];
1665 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001666 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07001667 eis_prop_set = (uint8_t)atoi(eis_prop);
1668
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001669 m_bEisEnable = eis_prop_set && (!oisSupported && m_bEisSupported) &&
Thierry Strudel3d639192016-09-09 11:52:26 -07001670 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
1671
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001672 LOGD("m_bEisEnable: %d, eis_prop_set: %d, m_bEisSupported: %d, oisSupported:%d ",
1673 m_bEisEnable, eis_prop_set, m_bEisSupported, oisSupported);
1674
Thierry Strudel3d639192016-09-09 11:52:26 -07001675 /* stream configurations */
1676 for (size_t i = 0; i < streamList->num_streams; i++) {
1677 camera3_stream_t *newStream = streamList->streams[i];
1678 LOGI("stream[%d] type = %d, format = %d, width = %d, "
1679 "height = %d, rotation = %d, usage = 0x%x",
1680 i, newStream->stream_type, newStream->format,
1681 newStream->width, newStream->height, newStream->rotation,
1682 newStream->usage);
1683 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1684 newStream->stream_type == CAMERA3_STREAM_INPUT){
1685 isZsl = true;
1686 }
1687 if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1688 inputStream = newStream;
1689 }
1690
Emilian Peev7650c122017-01-19 08:24:33 -08001691 if ((newStream->format == HAL_PIXEL_FORMAT_BLOB) &&
1692 (newStream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001693 isJpeg = true;
1694 jpegSize.width = newStream->width;
1695 jpegSize.height = newStream->height;
1696 if (newStream->width > VIDEO_4K_WIDTH ||
1697 newStream->height > VIDEO_4K_HEIGHT)
1698 bJpegExceeds4K = true;
1699 }
1700
1701 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1702 (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1703 m_bIsVideo = true;
Thierry Strudel2896d122017-02-23 19:18:03 -08001704 // In HAL3 we can have multiple different video streams.
1705 // The variables video width and height are used below as
1706 // dimensions of the biggest of them
1707 if (videoWidth < newStream->width ||
1708 videoHeight < newStream->height) {
1709 videoWidth = newStream->width;
1710 videoHeight = newStream->height;
1711 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001712 if ((VIDEO_4K_WIDTH <= newStream->width) &&
1713 (VIDEO_4K_HEIGHT <= newStream->height)) {
1714 m_bIs4KVideo = true;
1715 }
1716 m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
1717 (newStream->height <= maxEisHeight);
1718 }
1719 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1720 newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
1721 switch (newStream->format) {
1722 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08001723 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
1724 depthPresent = true;
1725 break;
1726 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001727 stallStreamCnt++;
1728 if (isOnEncoder(maxViewfinderSize, newStream->width,
1729 newStream->height)) {
1730 numStreamsOnEncoder++;
1731 bJpegOnEncoder = true;
1732 }
1733 width_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.width,
1734 newStream->width);
1735 height_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.height,
    1736                         newStream->height);
1737 FATAL_IF(gCamCapability[mCameraId]->max_downscale_factor == 0,
    1738                 "FATAL: max_downscale_factor cannot be zero");
1739 if ( (width_ratio > gCamCapability[mCameraId]->max_downscale_factor) ||
1740 (height_ratio > gCamCapability[mCameraId]->max_downscale_factor)) {
1741 LOGH("Setting small jpeg size flag to true");
1742 bSmallJpegSize = true;
1743 }
1744 break;
1745 case HAL_PIXEL_FORMAT_RAW10:
1746 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1747 case HAL_PIXEL_FORMAT_RAW16:
1748 rawStreamCnt++;
1749 break;
1750 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1751 processedStreamCnt++;
1752 if (isOnEncoder(maxViewfinderSize, newStream->width,
1753 newStream->height)) {
1754 if (newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL &&
1755 !IS_USAGE_ZSL(newStream->usage)) {
1756 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1757 }
1758 numStreamsOnEncoder++;
1759 }
1760 break;
1761 case HAL_PIXEL_FORMAT_YCbCr_420_888:
1762 processedStreamCnt++;
1763 if (isOnEncoder(maxViewfinderSize, newStream->width,
1764 newStream->height)) {
1765 // If Yuv888 size is not greater than 4K, set feature mask
    1766                 // to SUPERSET so that it supports concurrent requests on
1767 // YUV and JPEG.
1768 if (newStream->width <= VIDEO_4K_WIDTH &&
1769 newStream->height <= VIDEO_4K_HEIGHT) {
1770 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1771 }
1772 numStreamsOnEncoder++;
1773 numYuv888OnEncoder++;
1774 largeYuv888Size.width = newStream->width;
1775 largeYuv888Size.height = newStream->height;
1776 }
1777 break;
1778 default:
1779 processedStreamCnt++;
1780 if (isOnEncoder(maxViewfinderSize, newStream->width,
1781 newStream->height)) {
1782 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1783 numStreamsOnEncoder++;
1784 }
1785 break;
1786 }
1787
1788 }
1789 }
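    /* The loop above only classifies the requested streams: it counts raw,
     * stalling (JPEG) and processed streams, counts outputs larger than the max
     * viewfinder size (numStreamsOnEncoder, i.e. streams on the ISP encoder
     * path), and records the largest video and YUV888 dimensions used by the
     * feature-mask and sanity checks below. Channels are created later. */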
1790
1791 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
1792 gCamCapability[mCameraId]->position == CAM_POSITION_FRONT_AUX ||
1793 !m_bIsVideo) {
1794 m_bEisEnable = false;
1795 }
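    /* m_bEisEnable is now final: EIS is used only when the eis.enable property
     * is set, the sensor supports EIS 2.0/3.0, OIS is not available, the
     * session is not constrained high-speed, the camera is not front-facing,
     * and at least one video stream is present. */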
1796
Thierry Strudel54dc9782017-02-15 12:12:10 -08001797 uint8_t forceEnableTnr = 0;
1798 char tnr_prop[PROPERTY_VALUE_MAX];
1799 memset(tnr_prop, 0, sizeof(tnr_prop));
1800 property_get("debug.camera.tnr.forceenable", tnr_prop, "0");
1801 forceEnableTnr = (uint8_t)atoi(tnr_prop);
1802
Thierry Strudel3d639192016-09-09 11:52:26 -07001803 /* Logic to enable/disable TNR based on specific config size/etc.*/
1804 if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
1805 ((videoWidth == 1920 && videoHeight == 1080) ||
1806 (videoWidth == 1280 && videoHeight == 720)) &&
1807 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
1808 m_bTnrEnabled = true;
Thierry Strudel54dc9782017-02-15 12:12:10 -08001809 else if (forceEnableTnr)
1810 m_bTnrEnabled = true;
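    /* Net effect: TNR is enabled only when the TNR preview/video properties
     * request it, the session has a 1920x1080 or 1280x720 video stream and is
     * not constrained high-speed, unless debug.camera.tnr.forceenable bypasses
     * those checks. */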
Thierry Strudel3d639192016-09-09 11:52:26 -07001811
Mansoor Aftab93a66e52017-01-26 14:58:25 -08001812 char videoHdrProp[PROPERTY_VALUE_MAX];
1813 memset(videoHdrProp, 0, sizeof(videoHdrProp));
1814 property_get("persist.camera.hdr.video", videoHdrProp, "0");
1815 uint8_t hdr_mode_prop = (uint8_t)atoi(videoHdrProp);
1816
1817 if (hdr_mode_prop == 1 && m_bIsVideo &&
1818 mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
1819 m_bVideoHdrEnabled = true;
1820 else
1821 m_bVideoHdrEnabled = false;
1822
1823
Thierry Strudel3d639192016-09-09 11:52:26 -07001824 /* Check if num_streams is sane */
1825 if (stallStreamCnt > MAX_STALLING_STREAMS ||
1826 rawStreamCnt > MAX_RAW_STREAMS ||
1827 processedStreamCnt > MAX_PROCESSED_STREAMS) {
    1828         LOGE("Invalid stream config: stall: %d, raw: %d, processed: %d",
1829 stallStreamCnt, rawStreamCnt, processedStreamCnt);
1830 pthread_mutex_unlock(&mMutex);
1831 return -EINVAL;
1832 }
1833 /* Check whether we have zsl stream or 4k video case */
Thierry Strudel9ec39c62016-12-28 11:30:05 -08001834 if (isZsl && m_bIs4KVideo) {
1835 LOGE("Currently invalid configuration ZSL & 4K Video!");
Thierry Strudel3d639192016-09-09 11:52:26 -07001836 pthread_mutex_unlock(&mMutex);
1837 return -EINVAL;
1838 }
1839 /* Check if stream sizes are sane */
1840 if (numStreamsOnEncoder > 2) {
1841 LOGE("Number of streams on ISP encoder path exceeds limits of 2");
1842 pthread_mutex_unlock(&mMutex);
1843 return -EINVAL;
1844 } else if (1 < numStreamsOnEncoder){
1845 bUseCommonFeatureMask = true;
1846 LOGH("Multiple streams above max viewfinder size, common mask needed");
1847 }
1848
1849 /* Check if BLOB size is greater than 4k in 4k recording case */
1850 if (m_bIs4KVideo && bJpegExceeds4K) {
1851 LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
1852 pthread_mutex_unlock(&mMutex);
1853 return -EINVAL;
1854 }
1855
Emilian Peev7650c122017-01-19 08:24:33 -08001856 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
1857 depthPresent) {
1858 LOGE("HAL doesn't support depth streams in HFR mode!");
1859 pthread_mutex_unlock(&mMutex);
1860 return -EINVAL;
1861 }
1862
Thierry Strudel3d639192016-09-09 11:52:26 -07001863 // When JPEG and preview streams share VFE output, CPP will not apply CAC2
1864 // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
1865 // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
1866 // is not true. Otherwise testMandatoryOutputCombinations will fail with following
1867 // configurations:
1868 // {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
1869 // {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
1870 // (These two configurations will not have CAC2 enabled even in HQ modes.)
1871 if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
1872 ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
1873 __func__);
1874 pthread_mutex_unlock(&mMutex);
1875 return -EINVAL;
1876 }
1877
1878 // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
1879 // the YUV stream's size is greater or equal to the JPEG size, set common
1880 // postprocess mask to NONE, so that we can take advantage of postproc bypass.
1881 if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
1882 jpegSize.width, jpegSize.height) &&
1883 largeYuv888Size.width > jpegSize.width &&
1884 largeYuv888Size.height > jpegSize.height) {
1885 bYuv888OverrideJpeg = true;
1886 } else if (!isJpeg && numStreamsOnEncoder > 1) {
1887 commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1888 }
1889
1890 LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %llx",
1891 maxViewfinderSize.width, maxViewfinderSize.height, isZsl, bUseCommonFeatureMask,
1892 commonFeatureMask);
1893 LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
1894 numStreamsOnEncoder, processedStreamCnt, stallStreamCnt, bSmallJpegSize);
1895
1896 rc = validateStreamDimensions(streamList);
1897 if (rc == NO_ERROR) {
1898 rc = validateStreamRotations(streamList);
1899 }
1900 if (rc != NO_ERROR) {
1901 LOGE("Invalid stream configuration requested!");
1902 pthread_mutex_unlock(&mMutex);
1903 return rc;
1904 }
1905
1906 camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
1907 for (size_t i = 0; i < streamList->num_streams; i++) {
1908 camera3_stream_t *newStream = streamList->streams[i];
1909 LOGH("newStream type = %d, stream format = %d "
1910 "stream size : %d x %d, stream rotation = %d",
1911 newStream->stream_type, newStream->format,
1912 newStream->width, newStream->height, newStream->rotation);
1913 //if the stream is in the mStreamList validate it
1914 bool stream_exists = false;
1915 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
1916 it != mStreamInfo.end(); it++) {
1917 if ((*it)->stream == newStream) {
1918 QCamera3ProcessingChannel *channel =
1919 (QCamera3ProcessingChannel*)(*it)->stream->priv;
1920 stream_exists = true;
1921 if (channel)
1922 delete channel;
1923 (*it)->status = VALID;
1924 (*it)->stream->priv = NULL;
1925 (*it)->channel = NULL;
1926 }
1927 }
1928 if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
1929 //new stream
1930 stream_info_t* stream_info;
1931 stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
1932 if (!stream_info) {
1933 LOGE("Could not allocate stream info");
1934 rc = -ENOMEM;
1935 pthread_mutex_unlock(&mMutex);
1936 return rc;
1937 }
1938 stream_info->stream = newStream;
1939 stream_info->status = VALID;
1940 stream_info->channel = NULL;
1941 mStreamInfo.push_back(stream_info);
1942 }
1943 /* Covers Opaque ZSL and API1 F/W ZSL */
1944 if (IS_USAGE_ZSL(newStream->usage)
1945 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
1946 if (zslStream != NULL) {
1947 LOGE("Multiple input/reprocess streams requested!");
1948 pthread_mutex_unlock(&mMutex);
1949 return BAD_VALUE;
1950 }
1951 zslStream = newStream;
1952 }
1953 /* Covers YUV reprocess */
1954 if (inputStream != NULL) {
1955 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
1956 && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
1957 && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
1958 && inputStream->width == newStream->width
1959 && inputStream->height == newStream->height) {
1960 if (zslStream != NULL) {
    1961                 /* This scenario indicates that multiple YUV streams with the same
    1962                  * size as the input stream have been requested. Since the zsl
    1963                  * stream handle is used solely to override the size of streams
    1964                  * that share h/w streams, we just guess here which stream is the
    1965                  * ZSL stream; this will be refactored once there is generic logic
    1966                  * for streams sharing encoder output.
1967 */
1968 LOGH("Warning, Multiple ip/reprocess streams requested!");
1969 }
1970 zslStream = newStream;
1971 }
1972 }
1973 }
1974
1975 /* If a zsl stream is set, we know that we have configured at least one input or
1976 bidirectional stream */
1977 if (NULL != zslStream) {
1978 mInputStreamInfo.dim.width = (int32_t)zslStream->width;
1979 mInputStreamInfo.dim.height = (int32_t)zslStream->height;
1980 mInputStreamInfo.format = zslStream->format;
1981 mInputStreamInfo.usage = zslStream->usage;
1982 LOGD("Input stream configured! %d x %d, format %d, usage %d",
1983 mInputStreamInfo.dim.width,
1984 mInputStreamInfo.dim.height,
1985 mInputStreamInfo.format, mInputStreamInfo.usage);
1986 }
1987
1988 cleanAndSortStreamInfo();
1989 if (mMetadataChannel) {
1990 delete mMetadataChannel;
1991 mMetadataChannel = NULL;
1992 }
1993 if (mSupportChannel) {
1994 delete mSupportChannel;
1995 mSupportChannel = NULL;
1996 }
1997
1998 if (mAnalysisChannel) {
1999 delete mAnalysisChannel;
2000 mAnalysisChannel = NULL;
2001 }
2002
2003 if (mDummyBatchChannel) {
2004 delete mDummyBatchChannel;
2005 mDummyBatchChannel = NULL;
2006 }
2007
Emilian Peev7650c122017-01-19 08:24:33 -08002008 if (mDepthChannel) {
2009 mDepthChannel = NULL;
2010 }
2011
Thierry Strudel2896d122017-02-23 19:18:03 -08002012 char is_type_value[PROPERTY_VALUE_MAX];
2013 property_get("persist.camera.is_type", is_type_value, "4");
2014 m_bEis3PropertyEnabled = (atoi(is_type_value) == IS_TYPE_EIS_3_0);
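    /* Assumption: the default "4" for persist.camera.is_type maps to
     * IS_TYPE_EIS_3_0. When EIS 3.0 is selected this way, video channels and
     * the stream config below are sized with MAX_VIDEO_BUFFERS instead of
     * MAX_INFLIGHT_REQUESTS. */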
2015
Thierry Strudel3d639192016-09-09 11:52:26 -07002016 //Create metadata channel and initialize it
2017 cam_feature_mask_t metadataFeatureMask = CAM_QCOM_FEATURE_NONE;
2018 setPAAFSupport(metadataFeatureMask, CAM_STREAM_TYPE_METADATA,
2019 gCamCapability[mCameraId]->color_arrangement);
2020 mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
2021 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002022 setBufferErrorStatus, &padding_info, metadataFeatureMask, this);
Thierry Strudel3d639192016-09-09 11:52:26 -07002023 if (mMetadataChannel == NULL) {
2024 LOGE("failed to allocate metadata channel");
2025 rc = -ENOMEM;
2026 pthread_mutex_unlock(&mMutex);
2027 return rc;
2028 }
2029 rc = mMetadataChannel->initialize(IS_TYPE_NONE);
2030 if (rc < 0) {
2031 LOGE("metadata channel initialization failed");
2032 delete mMetadataChannel;
2033 mMetadataChannel = NULL;
2034 pthread_mutex_unlock(&mMutex);
2035 return rc;
2036 }
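    /* The metadata channel is (re)created on every stream configuration,
     * independent of the requested output streams, since capture results are
     * assembled from its per-frame metadata. */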
2037
Thierry Strudel2896d122017-02-23 19:18:03 -08002038 cam_feature_mask_t zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002039 bool isRawStreamRequested = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002040 bool onlyRaw = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002041 memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
2042 /* Allocate channel objects for the requested streams */
2043 for (size_t i = 0; i < streamList->num_streams; i++) {
2044 camera3_stream_t *newStream = streamList->streams[i];
2045 uint32_t stream_usage = newStream->usage;
2046 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
2047 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
2048 struct camera_info *p_info = NULL;
2049 pthread_mutex_lock(&gCamLock);
2050 p_info = get_cam_info(mCameraId, &mStreamConfigInfo.sync_type);
2051 pthread_mutex_unlock(&gCamLock);
2052 if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
2053 || IS_USAGE_ZSL(newStream->usage)) &&
2054 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
Thierry Strudel2896d122017-02-23 19:18:03 -08002055 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002056 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
Thierry Strudel2896d122017-02-23 19:18:03 -08002057 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2058 if (bUseCommonFeatureMask)
2059 zsl_ppmask = commonFeatureMask;
2060 else
2061 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002062 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08002063 if (numStreamsOnEncoder > 0)
2064 zsl_ppmask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2065 else
2066 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002067 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002068 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002069 } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
Thierry Strudel2896d122017-02-23 19:18:03 -08002070 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002071 LOGH("Input stream configured, reprocess config");
2072 } else {
2073 //for non zsl streams find out the format
2074 switch (newStream->format) {
2075 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
2076 {
Thierry Strudel2896d122017-02-23 19:18:03 -08002077 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002078 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2079 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2080 /* add additional features to pp feature mask */
2081 addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
2082 mStreamConfigInfo.num_streams);
2083
2084 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
2085 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2086 CAM_STREAM_TYPE_VIDEO;
2087 if (m_bTnrEnabled && m_bTnrVideo) {
2088 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2089 CAM_QCOM_FEATURE_CPP_TNR;
2090 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2091 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2092 ~CAM_QCOM_FEATURE_CDS;
2093 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002094 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
2095 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2096 CAM_QTI_FEATURE_PPEISCORE;
2097 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002098 } else {
2099 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2100 CAM_STREAM_TYPE_PREVIEW;
2101 if (m_bTnrEnabled && m_bTnrPreview) {
2102 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2103 CAM_QCOM_FEATURE_CPP_TNR;
2104 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2105 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2106 ~CAM_QCOM_FEATURE_CDS;
2107 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002108 if(!m_bSwTnrPreview) {
2109 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2110 ~CAM_QTI_FEATURE_SW_TNR;
2111 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002112 padding_info.width_padding = mSurfaceStridePadding;
2113 padding_info.height_padding = CAM_PAD_TO_2;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002114 previewSize.width = (int32_t)newStream->width;
2115 previewSize.height = (int32_t)newStream->height;
Thierry Strudel3d639192016-09-09 11:52:26 -07002116 }
2117 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
2118 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
2119 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2120 newStream->height;
2121 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2122 newStream->width;
2123 }
2124 }
2125 break;
2126 case HAL_PIXEL_FORMAT_YCbCr_420_888:
Thierry Strudel2896d122017-02-23 19:18:03 -08002127 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002128 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
2129 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2130 if (bUseCommonFeatureMask)
2131 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2132 commonFeatureMask;
2133 else
2134 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2135 CAM_QCOM_FEATURE_NONE;
2136 } else {
2137 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2138 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2139 }
2140 break;
2141 case HAL_PIXEL_FORMAT_BLOB:
Thierry Strudel2896d122017-02-23 19:18:03 -08002142 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002143 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
2144 // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
2145 if ((m_bIs4KVideo && !isZsl) || (bSmallJpegSize && !isZsl)) {
2146 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2147 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel2896d122017-02-23 19:18:03 -08002148 /* Remove rotation if it is not supported
2149 for 4K LiveVideo snapshot case (online processing) */
2150 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask &
2151 CAM_QCOM_FEATURE_ROTATION)) {
2152 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams]
2153 &= ~CAM_QCOM_FEATURE_ROTATION;
2154 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002155 } else {
2156 if (bUseCommonFeatureMask &&
2157 isOnEncoder(maxViewfinderSize, newStream->width,
2158 newStream->height)) {
2159 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
2160 } else {
2161 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2162 }
2163 }
2164 if (isZsl) {
2165 if (zslStream) {
2166 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2167 (int32_t)zslStream->width;
2168 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2169 (int32_t)zslStream->height;
Thierry Strudel2896d122017-02-23 19:18:03 -08002170 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2171 zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002172 } else {
2173 LOGE("Error, No ZSL stream identified");
2174 pthread_mutex_unlock(&mMutex);
2175 return -EINVAL;
2176 }
2177 } else if (m_bIs4KVideo) {
2178 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)videoWidth;
2179 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)videoHeight;
2180 } else if (bYuv888OverrideJpeg) {
2181 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2182 (int32_t)largeYuv888Size.width;
2183 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2184 (int32_t)largeYuv888Size.height;
2185 }
2186 break;
2187 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2188 case HAL_PIXEL_FORMAT_RAW16:
2189 case HAL_PIXEL_FORMAT_RAW10:
2190 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2191 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2192 isRawStreamRequested = true;
2193 break;
2194 default:
Thierry Strudel2896d122017-02-23 19:18:03 -08002195 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002196 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
2197 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2198 break;
2199 }
2200 }
2201
2202 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2203 (cam_stream_type_t) mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2204 gCamCapability[mCameraId]->color_arrangement);
2205
2206 if (newStream->priv == NULL) {
2207 //New stream, construct channel
2208 switch (newStream->stream_type) {
2209 case CAMERA3_STREAM_INPUT:
2210 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
2211 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
2212 break;
2213 case CAMERA3_STREAM_BIDIRECTIONAL:
2214 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
2215 GRALLOC_USAGE_HW_CAMERA_WRITE;
2216 break;
2217 case CAMERA3_STREAM_OUTPUT:
    2218                 /* For video encoding streams, set the read/write rarely
    2219                  * flags so that the buffers may be allocated un-cached */
2220 if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
2221 newStream->usage |=
2222 (GRALLOC_USAGE_SW_READ_RARELY |
2223 GRALLOC_USAGE_SW_WRITE_RARELY |
2224 GRALLOC_USAGE_HW_CAMERA_WRITE);
2225 else if (IS_USAGE_ZSL(newStream->usage))
2226 {
2227 LOGD("ZSL usage flag skipping");
2228 }
2229 else if (newStream == zslStream
2230 || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
2231 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
2232 } else
2233 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
2234 break;
2235 default:
2236 LOGE("Invalid stream_type %d", newStream->stream_type);
2237 break;
2238 }
2239
2240 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
2241 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
2242 QCamera3ProcessingChannel *channel = NULL;
2243 switch (newStream->format) {
2244 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2245 if ((newStream->usage &
2246 private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
2247 (streamList->operation_mode ==
2248 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2249 ) {
2250 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2251 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002252 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002253 this,
2254 newStream,
2255 (cam_stream_type_t)
2256 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2257 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2258 mMetadataChannel,
2259 0); //heap buffers are not required for HFR video channel
2260 if (channel == NULL) {
2261 LOGE("allocation of channel failed");
2262 pthread_mutex_unlock(&mMutex);
2263 return -ENOMEM;
2264 }
2265 //channel->getNumBuffers() will return 0 here so use
    2266                         //MAX_INFLIGHT_HFR_REQUESTS
2267 newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
2268 newStream->priv = channel;
2269 LOGI("num video buffers in HFR mode: %d",
2270 MAX_INFLIGHT_HFR_REQUESTS);
2271 } else {
2272 /* Copy stream contents in HFR preview only case to create
2273 * dummy batch channel so that sensor streaming is in
2274 * HFR mode */
2275 if (!m_bIsVideo && (streamList->operation_mode ==
2276 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
2277 mDummyBatchStream = *newStream;
2278 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002279 int bufferCount = MAX_INFLIGHT_REQUESTS;
2280 if (mStreamConfigInfo.type[mStreamConfigInfo.num_streams] ==
2281 CAM_STREAM_TYPE_VIDEO) {
2282 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */)
2283 bufferCount = MAX_VIDEO_BUFFERS;
2284 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002285 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2286 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002287 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002288 this,
2289 newStream,
2290 (cam_stream_type_t)
2291 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2292 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2293 mMetadataChannel,
Thierry Strudel2896d122017-02-23 19:18:03 -08002294 bufferCount);
Thierry Strudel3d639192016-09-09 11:52:26 -07002295 if (channel == NULL) {
2296 LOGE("allocation of channel failed");
2297 pthread_mutex_unlock(&mMutex);
2298 return -ENOMEM;
2299 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002300 /* disable UBWC for preview, though supported,
2301 * to take advantage of CPP duplication */
2302 if (m_bIsVideo && (!mCommon.isVideoUBWCEnabled()) &&
2303 (previewSize.width == (int32_t)videoWidth)&&
2304 (previewSize.height == (int32_t)videoHeight)){
2305 channel->setUBWCEnabled(false);
2306 }else {
2307 channel->setUBWCEnabled(true);
2308 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002309 newStream->max_buffers = channel->getNumBuffers();
2310 newStream->priv = channel;
2311 }
2312 break;
2313 case HAL_PIXEL_FORMAT_YCbCr_420_888: {
2314 channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
2315 mChannelHandle,
2316 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002317 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002318 this,
2319 newStream,
2320 (cam_stream_type_t)
2321 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2322 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2323 mMetadataChannel);
2324 if (channel == NULL) {
2325 LOGE("allocation of YUV channel failed");
2326 pthread_mutex_unlock(&mMutex);
2327 return -ENOMEM;
2328 }
2329 newStream->max_buffers = channel->getNumBuffers();
2330 newStream->priv = channel;
2331 break;
2332 }
2333 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2334 case HAL_PIXEL_FORMAT_RAW16:
2335 case HAL_PIXEL_FORMAT_RAW10:
2336 mRawChannel = new QCamera3RawChannel(
2337 mCameraHandle->camera_handle, mChannelHandle,
2338 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002339 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002340 this, newStream,
2341 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2342 mMetadataChannel,
2343 (newStream->format == HAL_PIXEL_FORMAT_RAW16));
2344 if (mRawChannel == NULL) {
2345 LOGE("allocation of raw channel failed");
2346 pthread_mutex_unlock(&mMutex);
2347 return -ENOMEM;
2348 }
2349 newStream->max_buffers = mRawChannel->getNumBuffers();
2350 newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
2351 break;
2352 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08002353 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
2354 mDepthChannel = new QCamera3DepthChannel(
2355 mCameraHandle->camera_handle, mChannelHandle,
2356 mCameraHandle->ops, NULL, NULL, &padding_info,
2357 0, this, MAX_INFLIGHT_REQUESTS, newStream,
2358 mMetadataChannel);
2359 if (NULL == mDepthChannel) {
2360 LOGE("Allocation of depth channel failed");
2361 pthread_mutex_unlock(&mMutex);
2362 return NO_MEMORY;
2363 }
2364 newStream->priv = mDepthChannel;
2365 newStream->max_buffers = MAX_INFLIGHT_REQUESTS;
2366 } else {
2367 // Max live snapshot inflight buffer is 1. This is to mitigate
2368 // frame drop issues for video snapshot. The more buffers being
2369 // allocated, the more frame drops there are.
2370 mPictureChannel = new QCamera3PicChannel(
2371 mCameraHandle->camera_handle, mChannelHandle,
2372 mCameraHandle->ops, captureResultCb,
2373 setBufferErrorStatus, &padding_info, this, newStream,
2374 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2375 m_bIs4KVideo, isZsl, mMetadataChannel,
2376 (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
2377 if (mPictureChannel == NULL) {
2378 LOGE("allocation of channel failed");
2379 pthread_mutex_unlock(&mMutex);
2380 return -ENOMEM;
2381 }
2382 newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
2383 newStream->max_buffers = mPictureChannel->getNumBuffers();
2384 mPictureChannel->overrideYuvSize(
2385 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
2386 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
Thierry Strudel3d639192016-09-09 11:52:26 -07002387 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002388 break;
2389
2390 default:
2391 LOGE("not a supported format 0x%x", newStream->format);
2392 break;
2393 }
2394 } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
2395 newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
2396 } else {
2397 LOGE("Error, Unknown stream type");
2398 pthread_mutex_unlock(&mMutex);
2399 return -EINVAL;
2400 }
2401
2402 QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
2403 if (channel != NULL && channel->isUBWCEnabled()) {
2404 cam_format_t fmt = channel->getStreamDefaultFormat(
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07002405 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2406 newStream->width, newStream->height);
Thierry Strudel3d639192016-09-09 11:52:26 -07002407 if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
2408 newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
2409 }
2410 }
2411
2412 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2413 it != mStreamInfo.end(); it++) {
2414 if ((*it)->stream == newStream) {
2415 (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
2416 break;
2417 }
2418 }
2419 } else {
2420 // Channel already exists for this stream
2421 // Do nothing for now
2422 }
2423 padding_info = gCamCapability[mCameraId]->padding_info;
2424
Emilian Peev7650c122017-01-19 08:24:33 -08002425         /* Do not add entries for input & depth streams in metastream info
Thierry Strudel3d639192016-09-09 11:52:26 -07002426          * since there is no real stream associated with them
2427 */
Emilian Peev7650c122017-01-19 08:24:33 -08002428 if ((newStream->stream_type != CAMERA3_STREAM_INPUT) &&
2429 (newStream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002430 mStreamConfigInfo.num_streams++;
Emilian Peev7650c122017-01-19 08:24:33 -08002431 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002432 }
2433
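    /* End of the per-stream loop: every output stream now has its channel in
     * newStream->priv; input and depth streams are excluded from
     * mStreamConfigInfo. The internal streams set up below (analysis, callback
     * support, raw dump, HDR+ raw source, HFR dummy batch) are appended to
     * mStreamConfigInfo after this point. */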
Thierry Strudel2896d122017-02-23 19:18:03 -08002434 if (mOpMode != QCAMERA3_VENDOR_STREAM_CONFIGURATION_RAW_ONLY_MODE) {
2435 onlyRaw = false;
2436 }
2437
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002438 // Create analysis stream all the time, even when h/w support is not available
Thierry Strudel2896d122017-02-23 19:18:03 -08002439 if (!onlyRaw) {
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002440 cam_feature_mask_t analysisFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2441 setPAAFSupport(analysisFeatureMask, CAM_STREAM_TYPE_ANALYSIS,
2442 gCamCapability[mCameraId]->color_arrangement);
2443 cam_analysis_info_t analysisInfo;
2444 int32_t ret = NO_ERROR;
2445 ret = mCommon.getAnalysisInfo(
2446 FALSE,
2447 analysisFeatureMask,
2448 &analysisInfo);
2449 if (ret == NO_ERROR) {
2450 cam_dimension_t analysisDim;
2451 analysisDim = mCommon.getMatchingDimension(previewSize,
2452 analysisInfo.analysis_recommended_res);
2453
2454 mAnalysisChannel = new QCamera3SupportChannel(
2455 mCameraHandle->camera_handle,
2456 mChannelHandle,
2457 mCameraHandle->ops,
2458 &analysisInfo.analysis_padding_info,
2459 analysisFeatureMask,
2460 CAM_STREAM_TYPE_ANALYSIS,
2461 &analysisDim,
2462 (analysisInfo.analysis_format
2463 == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
2464 : CAM_FORMAT_YUV_420_NV21),
2465 analysisInfo.hw_analysis_supported,
2466 gCamCapability[mCameraId]->color_arrangement,
2467 this,
2468 0); // force buffer count to 0
2469 } else {
2470 LOGW("getAnalysisInfo failed, ret = %d", ret);
2471 }
2472 if (!mAnalysisChannel) {
2473 LOGW("Analysis channel cannot be created");
2474 }
2475 }
2476
Thierry Strudel3d639192016-09-09 11:52:26 -07002477 //RAW DUMP channel
2478 if (mEnableRawDump && isRawStreamRequested == false){
2479 cam_dimension_t rawDumpSize;
2480 rawDumpSize = getMaxRawSize(mCameraId);
2481 cam_feature_mask_t rawDumpFeatureMask = CAM_QCOM_FEATURE_NONE;
2482 setPAAFSupport(rawDumpFeatureMask,
2483 CAM_STREAM_TYPE_RAW,
2484 gCamCapability[mCameraId]->color_arrangement);
2485 mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
2486 mChannelHandle,
2487 mCameraHandle->ops,
2488 rawDumpSize,
2489 &padding_info,
2490 this, rawDumpFeatureMask);
2491 if (!mRawDumpChannel) {
2492 LOGE("Raw Dump channel cannot be created");
2493 pthread_mutex_unlock(&mMutex);
2494 return -ENOMEM;
2495 }
2496 }
2497
Chien-Yu Chenee335912017-02-09 17:53:20 -08002498 // Initialize HDR+ Raw Source channel if AP is providing RAW input to Easel.
2499 if (mHdrPlusClient != nullptr && mIsApInputUsedForHdrPlus) {
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002500 if (isRawStreamRequested || mRawDumpChannel) {
Chien-Yu Chenee335912017-02-09 17:53:20 -08002501 ALOGE("%s: Enabling HDR+ while RAW output stream is configured is not supported. "
2502 "HDR+ RAW source channel is not created.",
2503 __FUNCTION__);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002504 } else {
2505 cam_dimension_t rawSize = getMaxRawSize(mCameraId);
2506 cam_feature_mask_t hdrPlusRawFeatureMask = CAM_QCOM_FEATURE_NONE;
2507 setPAAFSupport(hdrPlusRawFeatureMask,
2508 CAM_STREAM_TYPE_RAW,
2509 gCamCapability[mCameraId]->color_arrangement);
2510 mHdrPlusRawSrcChannel = new QCamera3HdrPlusRawSrcChannel(mCameraHandle->camera_handle,
2511 mChannelHandle,
2512 mCameraHandle->ops,
2513 rawSize,
2514 &padding_info,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08002515 this, hdrPlusRawFeatureMask,
2516 mHdrPlusClient,
2517 kPbRaw10InputStreamId);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002518 if (!mHdrPlusRawSrcChannel) {
2519 LOGE("HDR+ Raw Source channel cannot be created");
2520 pthread_mutex_unlock(&mMutex);
2521 return -ENOMEM;
2522 }
2523 }
2524 }
2525
Thierry Strudel3d639192016-09-09 11:52:26 -07002526 if (mAnalysisChannel) {
2527 cam_analysis_info_t analysisInfo;
2528 memset(&analysisInfo, 0, sizeof(cam_analysis_info_t));
2529 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2530 CAM_STREAM_TYPE_ANALYSIS;
2531 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2532 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2533 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2534 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2535 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002536 rc = mCommon.getAnalysisInfo(FALSE,
Thierry Strudel3d639192016-09-09 11:52:26 -07002537 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2538 &analysisInfo);
2539 if (rc != NO_ERROR) {
2540 LOGE("getAnalysisInfo failed, ret = %d", rc);
2541 pthread_mutex_unlock(&mMutex);
2542 return rc;
2543 }
2544 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002545 mCommon.getMatchingDimension(previewSize,
2546 analysisInfo.analysis_recommended_res);
Thierry Strudel3d639192016-09-09 11:52:26 -07002547 mStreamConfigInfo.num_streams++;
2548 }
2549
Thierry Strudel2896d122017-02-23 19:18:03 -08002550 if (!onlyRaw && isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002551 cam_analysis_info_t supportInfo;
2552 memset(&supportInfo, 0, sizeof(cam_analysis_info_t));
2553 cam_feature_mask_t callbackFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2554 setPAAFSupport(callbackFeatureMask,
2555 CAM_STREAM_TYPE_CALLBACK,
2556 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002557 int32_t ret = NO_ERROR;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002558 ret = mCommon.getAnalysisInfo(FALSE, callbackFeatureMask, &supportInfo);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002559 if (ret != NO_ERROR) {
2560 /* Ignore the error for Mono camera
2561 * because the PAAF bit mask is only set
2562 * for CAM_STREAM_TYPE_ANALYSIS stream type
2563 */
2564 if (gCamCapability[mCameraId]->color_arrangement != CAM_FILTER_ARRANGEMENT_Y) {
2565 LOGW("getAnalysisInfo failed, ret = %d", ret);
2566 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002567 }
2568 mSupportChannel = new QCamera3SupportChannel(
2569 mCameraHandle->camera_handle,
2570 mChannelHandle,
2571 mCameraHandle->ops,
2572 &gCamCapability[mCameraId]->padding_info,
2573 callbackFeatureMask,
2574 CAM_STREAM_TYPE_CALLBACK,
2575 &QCamera3SupportChannel::kDim,
2576 CAM_FORMAT_YUV_420_NV21,
2577 supportInfo.hw_analysis_supported,
2578 gCamCapability[mCameraId]->color_arrangement,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002579 this, 0);
Thierry Strudel3d639192016-09-09 11:52:26 -07002580 if (!mSupportChannel) {
2581 LOGE("dummy channel cannot be created");
2582 pthread_mutex_unlock(&mMutex);
2583 return -ENOMEM;
2584 }
2585 }
2586
2587 if (mSupportChannel) {
2588 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2589 QCamera3SupportChannel::kDim;
2590 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2591 CAM_STREAM_TYPE_CALLBACK;
2592 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2593 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2594 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2595 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2596 gCamCapability[mCameraId]->color_arrangement);
2597 mStreamConfigInfo.num_streams++;
2598 }
2599
2600 if (mRawDumpChannel) {
2601 cam_dimension_t rawSize;
2602 rawSize = getMaxRawSize(mCameraId);
2603 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2604 rawSize;
2605 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2606 CAM_STREAM_TYPE_RAW;
2607 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2608 CAM_QCOM_FEATURE_NONE;
2609 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2610 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2611 gCamCapability[mCameraId]->color_arrangement);
2612 mStreamConfigInfo.num_streams++;
2613 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002614
2615 if (mHdrPlusRawSrcChannel) {
2616 cam_dimension_t rawSize;
2617 rawSize = getMaxRawSize(mCameraId);
2618 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] = rawSize;
2619 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2620 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2621 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2622 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2623 gCamCapability[mCameraId]->color_arrangement);
2624 mStreamConfigInfo.num_streams++;
2625 }
2626
Thierry Strudel3d639192016-09-09 11:52:26 -07002627 /* In HFR mode, if video stream is not added, create a dummy channel so that
2628 * ISP can create a batch mode even for preview only case. This channel is
2629 * never 'start'ed (no stream-on), it is only 'initialized' */
2630 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2631 !m_bIsVideo) {
2632 cam_feature_mask_t dummyFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2633 setPAAFSupport(dummyFeatureMask,
2634 CAM_STREAM_TYPE_VIDEO,
2635 gCamCapability[mCameraId]->color_arrangement);
2636 mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2637 mChannelHandle,
2638 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002639 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002640 this,
2641 &mDummyBatchStream,
2642 CAM_STREAM_TYPE_VIDEO,
2643 dummyFeatureMask,
2644 mMetadataChannel);
2645 if (NULL == mDummyBatchChannel) {
    2646             LOGE("creation of mDummyBatchChannel failed. "
2647 "Preview will use non-hfr sensor mode ");
2648 }
2649 }
2650 if (mDummyBatchChannel) {
2651 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2652 mDummyBatchStream.width;
2653 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2654 mDummyBatchStream.height;
2655 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2656 CAM_STREAM_TYPE_VIDEO;
2657 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2658 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2659 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2660 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2661 gCamCapability[mCameraId]->color_arrangement);
2662 mStreamConfigInfo.num_streams++;
2663 }
2664
2665 mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
2666 mStreamConfigInfo.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08002667 m_bIs4KVideo ? 0 :
2668 m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07002669
2670 /* Initialize mPendingRequestInfo and mPendingBuffersMap */
2671 for (pendingRequestIterator i = mPendingRequestsList.begin();
2672 i != mPendingRequestsList.end();) {
2673 i = erasePendingRequest(i);
2674 }
2675 mPendingFrameDropList.clear();
2676 // Initialize/Reset the pending buffers list
2677 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
2678 req.mPendingBufferList.clear();
2679 }
2680 mPendingBuffersMap.mPendingBuffersInRequest.clear();
2681
Thierry Strudel3d639192016-09-09 11:52:26 -07002682 mCurJpegMeta.clear();
2683 //Get min frame duration for this streams configuration
2684 deriveMinFrameDuration();
2685
Chien-Yu Chenee335912017-02-09 17:53:20 -08002686 mFirstPreviewIntentSeen = false;
2687
    2688     // Disable HDR+ if it's enabled
2689 disableHdrPlusModeLocked();
2690
Thierry Strudel3d639192016-09-09 11:52:26 -07002691 // Update state
2692 mState = CONFIGURED;
2693
2694 pthread_mutex_unlock(&mMutex);
2695
2696 return rc;
2697}
2698
2699/*===========================================================================
2700 * FUNCTION : validateCaptureRequest
2701 *
2702 * DESCRIPTION: validate a capture request from camera service
2703 *
2704 * PARAMETERS :
2705 * @request : request from framework to process
2706 *
2707 * RETURN :
2708 *
2709 *==========================================================================*/
2710int QCamera3HardwareInterface::validateCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002711 camera3_capture_request_t *request,
2712 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07002713{
2714 ssize_t idx = 0;
2715 const camera3_stream_buffer_t *b;
2716 CameraMetadata meta;
2717
2718 /* Sanity check the request */
2719 if (request == NULL) {
2720 LOGE("NULL capture request");
2721 return BAD_VALUE;
2722 }
2723
2724 if ((request->settings == NULL) && (mState == CONFIGURED)) {
2725 /*settings cannot be null for the first request*/
2726 return BAD_VALUE;
2727 }
2728
2729 uint32_t frameNumber = request->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002730 if ((request->num_output_buffers < 1 || request->output_buffers == NULL)
2731 && (internallyRequestedStreams.size() == 0)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002732 LOGE("Request %d: No output buffers provided!",
2733 frameNumber);
2734 return BAD_VALUE;
2735 }
2736 if (request->num_output_buffers >= MAX_NUM_STREAMS) {
2737 LOGE("Number of buffers %d equals or is greater than maximum number of streams!",
2738 request->num_output_buffers, MAX_NUM_STREAMS);
2739 return BAD_VALUE;
2740 }
2741 if (request->input_buffer != NULL) {
2742 b = request->input_buffer;
2743 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2744 LOGE("Request %d: Buffer %ld: Status not OK!",
2745 frameNumber, (long)idx);
2746 return BAD_VALUE;
2747 }
2748 if (b->release_fence != -1) {
2749 LOGE("Request %d: Buffer %ld: Has a release fence!",
2750 frameNumber, (long)idx);
2751 return BAD_VALUE;
2752 }
2753 if (b->buffer == NULL) {
2754 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
2755 frameNumber, (long)idx);
2756 return BAD_VALUE;
2757 }
2758 }
2759
2760 // Validate all buffers
2761 b = request->output_buffers;
Thierry Strudel54dc9782017-02-15 12:12:10 -08002762 if (b == NULL) {
2763 return BAD_VALUE;
2764 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002765 while (idx < (ssize_t)request->num_output_buffers) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002766 QCamera3ProcessingChannel *channel =
2767 static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
2768 if (channel == NULL) {
2769 LOGE("Request %d: Buffer %ld: Unconfigured stream!",
2770 frameNumber, (long)idx);
2771 return BAD_VALUE;
2772 }
2773 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2774 LOGE("Request %d: Buffer %ld: Status not OK!",
2775 frameNumber, (long)idx);
2776 return BAD_VALUE;
2777 }
2778 if (b->release_fence != -1) {
2779 LOGE("Request %d: Buffer %ld: Has a release fence!",
2780 frameNumber, (long)idx);
2781 return BAD_VALUE;
2782 }
2783 if (b->buffer == NULL) {
2784 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
2785 frameNumber, (long)idx);
2786 return BAD_VALUE;
2787 }
2788 if (*(b->buffer) == NULL) {
2789 LOGE("Request %d: Buffer %ld: NULL private handle!",
2790 frameNumber, (long)idx);
2791 return BAD_VALUE;
2792 }
2793 idx++;
2794 b = request->output_buffers + idx;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002795 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002796 return NO_ERROR;
2797}
2798
2799/*===========================================================================
2800 * FUNCTION : deriveMinFrameDuration
2801 *
2802 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
2803 * on currently configured streams.
2804 *
2805 * PARAMETERS : NONE
2806 *
2807 * RETURN : NONE
2808 *
2809 *==========================================================================*/
2810void QCamera3HardwareInterface::deriveMinFrameDuration()
2811{
2812 int32_t maxJpegDim, maxProcessedDim, maxRawDim;
2813
2814 maxJpegDim = 0;
2815 maxProcessedDim = 0;
2816 maxRawDim = 0;
2817
2818 // Figure out maximum jpeg, processed, and raw dimensions
2819 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
2820 it != mStreamInfo.end(); it++) {
2821
2822 // Skip input streams; they are not relevant for min frame duration
2823 if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
2824 continue;
2825
2826 int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
2827 if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
2828 if (dimension > maxJpegDim)
2829 maxJpegDim = dimension;
2830 } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
2831 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
2832 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
2833 if (dimension > maxRawDim)
2834 maxRawDim = dimension;
2835 } else {
2836 if (dimension > maxProcessedDim)
2837 maxProcessedDim = dimension;
2838 }
2839 }
2840
2841 size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
2842 MAX_SIZES_CNT);
2843
2844 //Assume all jpeg dimensions are in processed dimensions.
2845 if (maxJpegDim > maxProcessedDim)
2846 maxProcessedDim = maxJpegDim;
2847 //Find the smallest raw dimension that is greater or equal to jpeg dimension
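    //If no configured raw stream is at least as large as the largest processed
    //stream, fall back to the smallest supported sensor raw size that covers it,
    //so the raw_min_duration lookup below still yields a meaningful bound.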
2848 if (maxProcessedDim > maxRawDim) {
2849 maxRawDim = INT32_MAX;
2850
2851 for (size_t i = 0; i < count; i++) {
2852 int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
2853 gCamCapability[mCameraId]->raw_dim[i].height;
2854 if (dimension >= maxProcessedDim && dimension < maxRawDim)
2855 maxRawDim = dimension;
2856 }
2857 }
2858
2859 //Find minimum durations for processed, jpeg, and raw
2860 for (size_t i = 0; i < count; i++) {
2861 if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
2862 gCamCapability[mCameraId]->raw_dim[i].height) {
2863 mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
2864 break;
2865 }
2866 }
2867 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
2868 for (size_t i = 0; i < count; i++) {
2869 if (maxProcessedDim ==
2870 gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
2871 gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
2872 mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
2873 mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
2874 break;
2875 }
2876 }
2877}
2878
2879/*===========================================================================
2880 * FUNCTION : getMinFrameDuration
2881 *
2882 * DESCRIPTION: get minimum frame duration based on the current per-stream-class
2883 * minimum frame durations and the current request configuration.
2884 *
2885 * PARAMETERS : @request: request sent by the frameworks
2886 *
2887 * RETURN : min frame duration for a particular request
2888 *
2889 *==========================================================================*/
2890int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
2891{
2892 bool hasJpegStream = false;
2893 bool hasRawStream = false;
2894 for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
2895 const camera3_stream_t *stream = request->output_buffers[i].stream;
2896 if (stream->format == HAL_PIXEL_FORMAT_BLOB)
2897 hasJpegStream = true;
2898 else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
2899 stream->format == HAL_PIXEL_FORMAT_RAW10 ||
2900 stream->format == HAL_PIXEL_FORMAT_RAW16)
2901 hasRawStream = true;
2902 }
2903
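    // The request's floor is the largest of the applicable per-class minimums:
    // raw and processed always apply, and the jpeg minimum applies only when the
    // request contains a BLOB (jpeg) stream.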
2904 if (!hasJpegStream)
2905 return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
2906 else
2907 return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
2908}
2909
2910/*===========================================================================
2911 * FUNCTION : handleBuffersDuringFlushLock
2912 *
2913 * DESCRIPTION: Account for buffers returned from back-end during flush
2914 * This function is executed while mMutex is held by the caller.
2915 *
2916 * PARAMETERS :
2917 * @buffer: image buffer for the callback
2918 *
2919 * RETURN :
2920 *==========================================================================*/
2921void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
2922{
2923 bool buffer_found = false;
2924 for (List<PendingBuffersInRequest>::iterator req =
2925 mPendingBuffersMap.mPendingBuffersInRequest.begin();
2926 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
2927 for (List<PendingBufferInfo>::iterator i =
2928 req->mPendingBufferList.begin();
2929 i != req->mPendingBufferList.end(); i++) {
2930 if (i->buffer == buffer->buffer) {
2931 mPendingBuffersMap.numPendingBufsAtFlush--;
2932 LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
2933 buffer->buffer, req->frame_number,
2934 mPendingBuffersMap.numPendingBufsAtFlush);
2935 buffer_found = true;
2936 break;
2937 }
2938 }
2939 if (buffer_found) {
2940 break;
2941 }
2942 }
2943 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
2944 //signal the flush()
2945 LOGD("All buffers returned to HAL. Continue flush");
2946 pthread_cond_signal(&mBuffersCond);
2947 }
2948}
2949
Thierry Strudel3d639192016-09-09 11:52:26 -07002950/*===========================================================================
2951 * FUNCTION : handleBatchMetadata
2952 *
2953 * DESCRIPTION: Handles metadata buffer callback in batch mode
2954 *
2955 * PARAMETERS : @metadata_buf: metadata buffer
2956 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
2957 * the meta buf in this method
2958 *
2959 * RETURN :
2960 *
2961 *==========================================================================*/
2962void QCamera3HardwareInterface::handleBatchMetadata(
2963 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
2964{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002965 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BATCH_METADATA);
Thierry Strudel3d639192016-09-09 11:52:26 -07002966
2967 if (NULL == metadata_buf) {
2968 LOGE("metadata_buf is NULL");
2969 return;
2970 }
2971 /* In batch mode, the metdata will contain the frame number and timestamp of
2972 * the last frame in the batch. Eg: a batch containing buffers from request
2973 * 5,6,7 and 8 will have frame number and timestamp corresponding to 8.
2974 * multiple process_capture_requests => 1 set_param => 1 handleBatchMetadata =>
2975 * multiple process_capture_results */
2976 metadata_buffer_t *metadata =
2977 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
2978 int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
2979 uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
2980 uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
2981 uint32_t frame_number = 0, urgent_frame_number = 0;
2982 int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
2983 bool invalid_metadata = false;
2984 size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
2985 size_t loopCount = 1;
Thierry Strudel54dc9782017-02-15 12:12:10 -08002986 bool is_metabuf_queued = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07002987
2988 int32_t *p_frame_number_valid =
2989 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
2990 uint32_t *p_frame_number =
2991 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
2992 int64_t *p_capture_time =
2993 POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
2994 int32_t *p_urgent_frame_number_valid =
2995 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
2996 uint32_t *p_urgent_frame_number =
2997 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
2998
2999 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
3000 (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
3001 (NULL == p_urgent_frame_number)) {
3002 LOGE("Invalid metadata");
3003 invalid_metadata = true;
3004 } else {
3005 frame_number_valid = *p_frame_number_valid;
3006 last_frame_number = *p_frame_number;
3007 last_frame_capture_time = *p_capture_time;
3008 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3009 last_urgent_frame_number = *p_urgent_frame_number;
3010 }
3011
3012 /* In batchmode, when no video buffers are requested, set_parms are sent
3013 * for every capture_request. The difference between consecutive urgent
3014 * frame numbers and frame numbers should be used to interpolate the
3015 * corresponding frame numbers and time stamps */
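    /* For example, if mPendingBatchMap maps the batch's last frame number 8 back
     * to its first frame number 5, the diff below is 4 and the loop further down
     * re-emits metadata for frames 5, 6, 7 and 8 with interpolated timestamps. */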
3016 pthread_mutex_lock(&mMutex);
3017 if (urgent_frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003018 ssize_t idx = mPendingBatchMap.indexOfKey(last_urgent_frame_number);
3019 if(idx < 0) {
3020 LOGE("Invalid urgent frame number received: %d. Irrecoverable error",
3021 last_urgent_frame_number);
3022 mState = ERROR;
3023 pthread_mutex_unlock(&mMutex);
3024 return;
3025 }
3026 first_urgent_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003027 urgentFrameNumDiff = last_urgent_frame_number + 1 -
3028 first_urgent_frame_number;
3029
3030 LOGD("urgent_frm: valid: %d frm_num: %d - %d",
3031 urgent_frame_number_valid,
3032 first_urgent_frame_number, last_urgent_frame_number);
3033 }
3034
3035 if (frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003036 ssize_t idx = mPendingBatchMap.indexOfKey(last_frame_number);
3037 if(idx < 0) {
3038 LOGE("Invalid frame number received: %d. Irrecoverable error",
3039 last_frame_number);
3040 mState = ERROR;
3041 pthread_mutex_unlock(&mMutex);
3042 return;
3043 }
3044 first_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003045 frameNumDiff = last_frame_number + 1 -
3046 first_frame_number;
3047 mPendingBatchMap.removeItem(last_frame_number);
3048
3049 LOGD("frm: valid: %d frm_num: %d - %d",
3050 frame_number_valid,
3051 first_frame_number, last_frame_number);
3052
3053 }
3054 pthread_mutex_unlock(&mMutex);
3055
3056 if (urgent_frame_number_valid || frame_number_valid) {
3057 loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
3058 if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
3059 LOGE("urgentFrameNumDiff: %d urgentFrameNum: %d",
3060 urgentFrameNumDiff, last_urgent_frame_number);
3061 if (frameNumDiff > MAX_HFR_BATCH_SIZE)
3062 LOGE("frameNumDiff: %d frameNum: %d",
3063 frameNumDiff, last_frame_number);
3064 }
3065
3066 for (size_t i = 0; i < loopCount; i++) {
3067 /* handleMetadataWithLock is called even for invalid_metadata for
3068 * pipeline depth calculation */
3069 if (!invalid_metadata) {
3070 /* Infer frame number. Batch metadata contains frame number of the
3071 * last frame */
3072 if (urgent_frame_number_valid) {
3073 if (i < urgentFrameNumDiff) {
3074 urgent_frame_number =
3075 first_urgent_frame_number + i;
3076 LOGD("inferred urgent frame_number: %d",
3077 urgent_frame_number);
3078 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3079 CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
3080 } else {
3081 /* This is to handle when urgentFrameNumDiff < frameNumDiff */
3082 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3083 CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
3084 }
3085 }
3086
3087 /* Infer frame number. Batch metadata contains frame number of the
3088 * last frame */
3089 if (frame_number_valid) {
3090 if (i < frameNumDiff) {
3091 frame_number = first_frame_number + i;
3092 LOGD("inferred frame_number: %d", frame_number);
3093 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3094 CAM_INTF_META_FRAME_NUMBER, frame_number);
3095 } else {
3096 /* This is to handle when urgentFrameNumDiff > frameNumDiff */
3097 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3098 CAM_INTF_META_FRAME_NUMBER_VALID, 0);
3099 }
3100 }
3101
3102 if (last_frame_capture_time) {
3103 //Infer timestamp
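                // Example: for a 4-frame batch at 120 fps, the first frame's timestamp
                // is the batch timestamp minus 3 * (NSEC_PER_SEC / 120), and frame i is
                // offset from it by i * (NSEC_PER_SEC / 120).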
3104 first_frame_capture_time = last_frame_capture_time -
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003105 (((loopCount - 1) * NSEC_PER_SEC) / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003106 capture_time =
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003107 first_frame_capture_time + (i * NSEC_PER_SEC / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003108 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3109 CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
3110 LOGD("batch capture_time: %lld, capture_time: %lld",
3111 last_frame_capture_time, capture_time);
3112 }
3113 }
3114 pthread_mutex_lock(&mMutex);
3115 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003116 false /* free_and_bufdone_meta_buf */,
Thierry Strudel54dc9782017-02-15 12:12:10 -08003117 (i == 0) /* first metadata in the batch metadata */,
3118 &is_metabuf_queued /* if metabuf isqueued or not */);
Thierry Strudel3d639192016-09-09 11:52:26 -07003119 pthread_mutex_unlock(&mMutex);
3120 }
3121
3122 /* BufDone metadata buffer */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003123 if (free_and_bufdone_meta_buf && !is_metabuf_queued) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003124 mMetadataChannel->bufDone(metadata_buf);
3125 free(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003126 metadata_buf = NULL;
Thierry Strudel3d639192016-09-09 11:52:26 -07003127 }
3128}
3129
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003130void QCamera3HardwareInterface::notifyError(uint32_t frameNumber,
3131 camera3_error_msg_code_t errorCode)
3132{
3133 camera3_notify_msg_t notify_msg;
3134 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3135 notify_msg.type = CAMERA3_MSG_ERROR;
3136 notify_msg.message.error.error_code = errorCode;
3137 notify_msg.message.error.error_stream = NULL;
3138 notify_msg.message.error.frame_number = frameNumber;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003139 orchestrateNotify(&notify_msg);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003140
3141 return;
3142}
Thierry Strudel3d639192016-09-09 11:52:26 -07003143/*===========================================================================
3144 * FUNCTION : handleMetadataWithLock
3145 *
3146 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
3147 *
3148 * PARAMETERS : @metadata_buf: metadata buffer
3149 * @free_and_bufdone_meta_buf: if true, do buf-done on the meta buf and
3150 * free it in this method
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003151 * @firstMetadataInBatch: Boolean to indicate whether this is the
3152 * first metadata in a batch. Valid only for batch mode
Thierry Strudel54dc9782017-02-15 12:12:10 -08003153 * @p_is_metabuf_queued: Pointer to a Boolean, set to true if the metadata
3154 * buffer gets queued for internal reprocessing.
Thierry Strudel3d639192016-09-09 11:52:26 -07003155 *
3156 * RETURN :
3157 *
3158 *==========================================================================*/
3159void QCamera3HardwareInterface::handleMetadataWithLock(
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003160 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
Thierry Strudel54dc9782017-02-15 12:12:10 -08003161 bool firstMetadataInBatch, bool *p_is_metabuf_queued)
Thierry Strudel3d639192016-09-09 11:52:26 -07003162{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003163 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_METADATA_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003164 if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
3165 //during flush do not send metadata from this thread
3166 LOGD("not sending metadata during flush or when mState is error");
3167 if (free_and_bufdone_meta_buf) {
3168 mMetadataChannel->bufDone(metadata_buf);
3169 free(metadata_buf);
3170 }
3171 return;
3172 }
3173
3174 //not in flush
3175 metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3176 int32_t frame_number_valid, urgent_frame_number_valid;
3177 uint32_t frame_number, urgent_frame_number;
3178 int64_t capture_time;
3179 nsecs_t currentSysTime;
3180
3181 int32_t *p_frame_number_valid =
3182 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3183 uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3184 int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3185 int32_t *p_urgent_frame_number_valid =
3186 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3187 uint32_t *p_urgent_frame_number =
3188 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3189 IF_META_AVAILABLE(cam_stream_ID_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
3190 metadata) {
3191 LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
3192 *p_frame_number_valid, *p_frame_number);
3193 }
3194
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003195 camera_metadata_t *resultMetadata = nullptr;
3196
Thierry Strudel3d639192016-09-09 11:52:26 -07003197 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
3198 (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
3199 LOGE("Invalid metadata");
3200 if (free_and_bufdone_meta_buf) {
3201 mMetadataChannel->bufDone(metadata_buf);
3202 free(metadata_buf);
3203 }
3204 goto done_metadata;
3205 }
3206 frame_number_valid = *p_frame_number_valid;
3207 frame_number = *p_frame_number;
3208 capture_time = *p_capture_time;
3209 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3210 urgent_frame_number = *p_urgent_frame_number;
3211 currentSysTime = systemTime(CLOCK_MONOTONIC);
3212
3213 // Detect if buffers from any requests are overdue
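    // Requests pending longer than the timeout (extended while an HDR+ request is
    // outstanding) get their streams' buffers cancelled via timeoutFrame(), so the
    // framework is not left waiting on frames the back-end may never deliver.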
3214 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003215 int64_t timeout;
3216 {
3217 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
3218 // If there is a pending HDR+ request, the following requests may be blocked until the
3219 // HDR+ request is done. So allow a longer timeout.
3220 timeout = (mHdrPlusPendingRequests.size() > 0) ?
3221 MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT : MISSING_REQUEST_BUF_TIMEOUT;
3222 }
3223
3224 if ( (currentSysTime - req.timestamp) > s2ns(timeout) ) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003225 for (auto &missed : req.mPendingBufferList) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003226 assert(missed.stream->priv);
3227 if (missed.stream->priv) {
3228 QCamera3Channel *ch = (QCamera3Channel *)(missed.stream->priv);
3229 assert(ch->mStreams[0]);
3230 if (ch->mStreams[0]) {
3231 LOGE("Cancel missing frame = %d, buffer = %p,"
3232 "stream type = %d, stream format = %d",
3233 req.frame_number, missed.buffer,
3234 ch->mStreams[0]->getMyType(), missed.stream->format);
3235 ch->timeoutFrame(req.frame_number);
3236 }
3237 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003238 }
3239 }
3240 }
3241 //Partial result on process_capture_result for timestamp
3242 if (urgent_frame_number_valid) {
3243 LOGD("valid urgent frame_number = %u, capture_time = %lld",
3244 urgent_frame_number, capture_time);
3245
3246 //Received an urgent Frame Number, handle it
3247 //using partial results
3248 for (pendingRequestIterator i =
3249 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3250 LOGD("Iterator Frame = %d urgent frame = %d",
3251 i->frame_number, urgent_frame_number);
3252
3253 if ((!i->input_buffer) && (i->frame_number < urgent_frame_number) &&
3254 (i->partial_result_cnt == 0)) {
3255 LOGE("Error: HAL missed urgent metadata for frame number %d",
3256 i->frame_number);
3257 }
3258
3259 if (i->frame_number == urgent_frame_number &&
3260 i->bUrgentReceived == 0) {
3261
3262 camera3_capture_result_t result;
3263 memset(&result, 0, sizeof(camera3_capture_result_t));
3264
3265 i->partial_result_cnt++;
3266 i->bUrgentReceived = 1;
3267 // Extract 3A metadata
3268 result.result =
3269 translateCbUrgentMetadataToResultMetadata(metadata);
3270 // Populate metadata result
3271 result.frame_number = urgent_frame_number;
3272 result.num_output_buffers = 0;
3273 result.output_buffers = NULL;
3274 result.partial_result = i->partial_result_cnt;
3275
Chien-Yu Chenee335912017-02-09 17:53:20 -08003276 if (mHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003277 // Notify HDR+ client about the partial metadata.
3278 mHdrPlusClient->notifyFrameMetadata(result.frame_number, *result.result,
3279 result.partial_result == PARTIAL_RESULT_COUNT);
3280 }
3281
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003282 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07003283 LOGD("urgent frame_number = %u, capture_time = %lld",
3284 result.frame_number, capture_time);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003285 if (mResetInstantAEC && mInstantAECSettledFrameNumber == 0) {
3286 // Instant AEC settled for this frame.
3287 LOGH("instant AEC settled for frame number %d", urgent_frame_number);
3288 mInstantAECSettledFrameNumber = urgent_frame_number;
3289 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003290 free_camera_metadata((camera_metadata_t *)result.result);
3291 break;
3292 }
3293 }
3294 }
3295
3296 if (!frame_number_valid) {
3297 LOGD("Not a valid normal frame number, used as SOF only");
3298 if (free_and_bufdone_meta_buf) {
3299 mMetadataChannel->bufDone(metadata_buf);
3300 free(metadata_buf);
3301 }
3302 goto done_metadata;
3303 }
3304 LOGH("valid frame_number = %u, capture_time = %lld",
3305 frame_number, capture_time);
3306
Emilian Peev7650c122017-01-19 08:24:33 -08003307 if (metadata->is_depth_data_valid) {
3308 handleDepthDataLocked(metadata->depth_data, frame_number);
3309 }
3310
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003311 // Check whether any stream buffer corresponding to this frame was dropped or not.
3312 // If dropped, then send the ERROR_BUFFER for the corresponding stream.
3313 // OR, if instant AEC is enabled, frames need to be dropped until AEC is settled.
3314 for (auto & pendingRequest : mPendingRequestsList) {
3315 if (p_cam_frame_drop || (mInstantAEC || pendingRequest.frame_number <
3316 mInstantAECSettledFrameNumber)) {
3317 camera3_notify_msg_t notify_msg = {};
3318 for (auto & buffer : pendingRequest.buffers) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003319 bool dropFrame = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003320 QCamera3ProcessingChannel *channel =
3321 (QCamera3ProcessingChannel *)buffer.stream->priv;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003322 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003323 if (p_cam_frame_drop) {
3324 for (uint32_t k = 0; k < p_cam_frame_drop->num_streams; k++) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003325 if (streamID == p_cam_frame_drop->stream_request[k].streamID) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003326 // Got the stream ID for drop frame.
3327 dropFrame = true;
3328 break;
3329 }
3330 }
3331 } else {
3332 // This is instant AEC case.
3333 // For instant AEC, drop the stream until AEC is settled.
3334 dropFrame = true;
3335 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003336
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003337 if (dropFrame) {
3338 // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
3339 if (p_cam_frame_drop) {
3340 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003341 LOGE("Start of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003342 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003343 } else {
3344 // For instant AEC, inform frame drop and frame number
3345 LOGH("Start of reporting error frame#=%u for instant AEC, streamID=%u, "
3346 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003347 pendingRequest.frame_number, streamID,
3348 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003349 }
3350 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003351 notify_msg.message.error.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003352 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003353 notify_msg.message.error.error_stream = buffer.stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003354 orchestrateNotify(&notify_msg);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003355 if (p_cam_frame_drop) {
3356 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003357 LOGE("End of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003358 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003359 } else {
3360 // For instant AEC, inform frame drop and frame number
3361 LOGH("End of reporting error frame#=%u for instant AEC, streamID=%u, "
3362 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003363 pendingRequest.frame_number, streamID,
3364 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003365 }
3366 PendingFrameDropInfo PendingFrameDrop;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003367 PendingFrameDrop.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003368 PendingFrameDrop.stream_ID = streamID;
3369 // Add the Frame drop info to mPendingFrameDropList
3370 mPendingFrameDropList.push_back(PendingFrameDrop);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003371 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003372 }
3373 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003374 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003375
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003376 for (auto & pendingRequest : mPendingRequestsList) {
3377 // Find the pending request with the frame number.
3378 if (pendingRequest.frame_number == frame_number) {
3379 // Update the sensor timestamp.
3380 pendingRequest.timestamp = capture_time;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003381
Thierry Strudel3d639192016-09-09 11:52:26 -07003382
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003383 /* Set the timestamp in display metadata so that clients aware of
3384 private_handle, such as VT, can use these unmodified timestamps.
3385 The camera framework is unaware of this timestamp and cannot change it */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003386 updateTimeStampInPendingBuffers(pendingRequest.frame_number, pendingRequest.timestamp);
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003387
Thierry Strudel3d639192016-09-09 11:52:26 -07003388 // Find channel requiring metadata, meaning internal offline postprocess
3389 // is needed.
3390 //TODO: for now, we don't support two streams requiring metadata at the same time
3391 // (because we are not making copies, and the metadata buffer is not reference counted).
3392 bool internalPproc = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003393 for (pendingBufferIterator iter = pendingRequest.buffers.begin();
3394 iter != pendingRequest.buffers.end(); iter++) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003395 if (iter->need_metadata) {
3396 internalPproc = true;
3397 QCamera3ProcessingChannel *channel =
3398 (QCamera3ProcessingChannel *)iter->stream->priv;
3399 channel->queueReprocMetadata(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003400 if(p_is_metabuf_queued != NULL) {
3401 *p_is_metabuf_queued = true;
3402 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003403 break;
3404 }
3405 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003406 for (auto itr = pendingRequest.internalRequestList.begin();
3407 itr != pendingRequest.internalRequestList.end(); itr++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003408 if (itr->need_metadata) {
3409 internalPproc = true;
3410 QCamera3ProcessingChannel *channel =
3411 (QCamera3ProcessingChannel *)itr->stream->priv;
3412 channel->queueReprocMetadata(metadata_buf);
3413 break;
3414 }
3415 }
3416
Thierry Strudel54dc9782017-02-15 12:12:10 -08003417 saveExifParams(metadata);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003418 resultMetadata = translateFromHalMetadata(metadata,
3419 pendingRequest.timestamp, pendingRequest.request_id,
3420 pendingRequest.jpegMetadata, pendingRequest.pipeline_depth,
3421 pendingRequest.capture_intent,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07003422 pendingRequest.hybrid_ae_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08003423 /* DevCamDebug metadata translateFromHalMetadata function call*/
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003424 pendingRequest.DevCamDebug_meta_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08003425 /* DevCamDebug metadata end */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003426 internalPproc, pendingRequest.fwkCacMode,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003427 firstMetadataInBatch);
Thierry Strudel3d639192016-09-09 11:52:26 -07003428
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003429 updateFpsInPreviewBuffer(metadata, pendingRequest.frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003430
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003431 if (pendingRequest.blob_request) {
3432 //Dump tuning metadata if enabled and available
3433 char prop[PROPERTY_VALUE_MAX];
3434 memset(prop, 0, sizeof(prop));
3435 property_get("persist.camera.dumpmetadata", prop, "0");
3436 int32_t enabled = atoi(prop);
3437 if (enabled && metadata->is_tuning_params_valid) {
3438 dumpMetadataToFile(metadata->tuning_params,
3439 mMetaFrameCount,
3440 enabled,
3441 "Snapshot",
3442 frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003443 }
3444 }
3445
3446 if (!internalPproc) {
3447 LOGD("couldn't find need_metadata for this metadata");
3448 // Return metadata buffer
3449 if (free_and_bufdone_meta_buf) {
3450 mMetadataChannel->bufDone(metadata_buf);
3451 free(metadata_buf);
3452 }
3453 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003454
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003455 break;
Thierry Strudel3d639192016-09-09 11:52:26 -07003456 }
3457 }
3458
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003459 // Try to send out shutter callbacks and capture results.
3460 handlePendingResultsWithLock(frame_number, resultMetadata);
3461 return;
3462
Thierry Strudel3d639192016-09-09 11:52:26 -07003463done_metadata:
3464 for (pendingRequestIterator i = mPendingRequestsList.begin();
3465 i != mPendingRequestsList.end() ;i++) {
3466 i->pipeline_depth++;
3467 }
3468 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
3469 unblockRequestIfNecessary();
3470}
3471
3472/*===========================================================================
Emilian Peev7650c122017-01-19 08:24:33 -08003473 * FUNCTION : handleDepthDataLocked
3474 *
3475 * DESCRIPTION: Handles incoming depth data
3476 *
3477 * PARAMETERS : @depthData : Depth data
3478 * @frameNumber: Frame number of the incoming depth data
3479 *
3480 * RETURN :
3481 *
3482 *==========================================================================*/
3483void QCamera3HardwareInterface::handleDepthDataLocked(
3484 const cam_depth_data_t &depthData, uint32_t frameNumber) {
3485 uint32_t currentFrameNumber;
3486 buffer_handle_t *depthBuffer;
3487
3488 if (nullptr == mDepthChannel) {
3489 LOGE("Depth channel not present!");
3490 return;
3491 }
3492
3493 camera3_stream_buffer_t resultBuffer =
3494 {.acquire_fence = -1,
3495 .release_fence = -1,
3496 .status = CAMERA3_BUFFER_STATUS_OK,
3497 .buffer = nullptr,
3498 .stream = mDepthChannel->getStream()};
3499 camera3_capture_result_t result =
3500 {.result = nullptr,
3501 .num_output_buffers = 1,
3502 .output_buffers = &resultBuffer,
3503 .partial_result = 0,
3504 .frame_number = 0};
3505
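    // Drain mapped depth buffers in order: frames older than the incoming depth
    // data are returned as buffer errors, the matching frame is populated with
    // the depth payload, and frames newer than it remain pending.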
3506 do {
3507 depthBuffer = mDepthChannel->getOldestFrame(currentFrameNumber);
3508 if (nullptr == depthBuffer) {
3509 break;
3510 }
3511
3512 result.frame_number = currentFrameNumber;
3513 resultBuffer.buffer = depthBuffer;
3514 if (currentFrameNumber == frameNumber) {
3515 int32_t rc = mDepthChannel->populateDepthData(depthData,
3516 frameNumber);
3517 if (NO_ERROR != rc) {
3518 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3519 } else {
3520 resultBuffer.status = CAMERA3_BUFFER_STATUS_OK;
3521 }
3522 } else if (currentFrameNumber > frameNumber) {
3523 break;
3524 } else {
3525 camera3_notify_msg_t notify_msg = {.type = CAMERA3_MSG_ERROR,
3526 {{currentFrameNumber, mDepthChannel->getStream(),
3527 CAMERA3_MSG_ERROR_BUFFER}}};
3528 orchestrateNotify(&notify_msg);
3529
3530 LOGE("Depth buffer for frame number: %d is missing "
3531 "returning back!", currentFrameNumber);
3532 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3533 }
3534 mDepthChannel->unmapBuffer(currentFrameNumber);
3535
3536 orchestrateResult(&result);
3537 } while (currentFrameNumber < frameNumber);
3538}
3539
3540/*===========================================================================
3541 * FUNCTION : notifyErrorFoPendingDepthData
3542 *
3543 * DESCRIPTION: Returns error for any pending depth buffers
3544 *
3545 * PARAMETERS : depthCh - depth channel that needs to get flushed
3546 *
3547 * RETURN :
3548 *
3549 *==========================================================================*/
3550void QCamera3HardwareInterface::notifyErrorFoPendingDepthData(
3551 QCamera3DepthChannel *depthCh) {
3552 uint32_t currentFrameNumber;
3553 buffer_handle_t *depthBuffer;
3554
3555 if (nullptr == depthCh) {
3556 return;
3557 }
3558
3559 camera3_notify_msg_t notify_msg =
3560 {.type = CAMERA3_MSG_ERROR,
3561 {{0, depthCh->getStream(), CAMERA3_MSG_ERROR_BUFFER}}};
3562 camera3_stream_buffer_t resultBuffer =
3563 {.acquire_fence = -1,
3564 .release_fence = -1,
3565 .buffer = nullptr,
3566 .stream = depthCh->getStream(),
3567 .status = CAMERA3_BUFFER_STATUS_ERROR};
3568 camera3_capture_result_t result =
3569 {.result = nullptr,
3570 .frame_number = 0,
3571 .num_output_buffers = 1,
3572 .partial_result = 0,
3573 .output_buffers = &resultBuffer};
3574
3575 while (nullptr !=
3576 (depthBuffer = depthCh->getOldestFrame(currentFrameNumber))) {
3577 depthCh->unmapBuffer(currentFrameNumber);
3578
3579 notify_msg.message.error.frame_number = currentFrameNumber;
3580 orchestrateNotify(&notify_msg);
3581
3582 resultBuffer.buffer = depthBuffer;
3583 result.frame_number = currentFrameNumber;
3584 orchestrateResult(&result);
3585 };
3586}
3587
3588/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07003589 * FUNCTION : hdrPlusPerfLock
3590 *
3591 * DESCRIPTION: perf lock for HDR+ using custom intent
3592 *
3593 * PARAMETERS : @metadata_buf: Metadata super_buf pointer
3594 *
3595 * RETURN : None
3596 *
3597 *==========================================================================*/
3598void QCamera3HardwareInterface::hdrPlusPerfLock(
3599 mm_camera_super_buf_t *metadata_buf)
3600{
3601 if (NULL == metadata_buf) {
3602 LOGE("metadata_buf is NULL");
3603 return;
3604 }
3605 metadata_buffer_t *metadata =
3606 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3607 int32_t *p_frame_number_valid =
3608 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3609 uint32_t *p_frame_number =
3610 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3611
3612 if (p_frame_number_valid == NULL || p_frame_number == NULL) {
3613 LOGE("%s: Invalid metadata", __func__);
3614 return;
3615 }
3616
3617 //acquire perf lock for 5 sec after the last HDR frame is captured
3618 if ((p_frame_number_valid != NULL) && *p_frame_number_valid) {
3619 if ((p_frame_number != NULL) &&
3620 (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003621 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT, HDR_PLUS_PERF_TIME_OUT);
Thierry Strudel3d639192016-09-09 11:52:26 -07003622 }
3623 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003624}
3625
3626/*===========================================================================
3627 * FUNCTION : handleInputBufferWithLock
3628 *
3629 * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
3630 *
3631 * PARAMETERS : @frame_number: frame number of the input buffer
3632 *
3633 * RETURN :
3634 *
3635 *==========================================================================*/
3636void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
3637{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003638 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_IN_BUF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003639 pendingRequestIterator i = mPendingRequestsList.begin();
3640 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
3641 i++;
3642 }
3643 if (i != mPendingRequestsList.end() && i->input_buffer) {
3644 //found the right request
3645 if (!i->shutter_notified) {
3646 CameraMetadata settings;
3647 camera3_notify_msg_t notify_msg;
3648 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3649 nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
3650 if(i->settings) {
3651 settings = i->settings;
3652 if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
3653 capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
3654 } else {
3655 LOGE("No timestamp in input settings! Using current one.");
3656 }
3657 } else {
3658 LOGE("Input settings missing!");
3659 }
3660
3661 notify_msg.type = CAMERA3_MSG_SHUTTER;
3662 notify_msg.message.shutter.frame_number = frame_number;
3663 notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003664 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07003665 i->shutter_notified = true;
3666 LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
3667 i->frame_number, notify_msg.message.shutter.timestamp);
3668 }
3669
3670 if (i->input_buffer->release_fence != -1) {
3671 int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
3672 close(i->input_buffer->release_fence);
3673 if (rc != OK) {
3674 LOGE("input buffer sync wait failed %d", rc);
3675 }
3676 }
3677
3678 camera3_capture_result result;
3679 memset(&result, 0, sizeof(camera3_capture_result));
3680 result.frame_number = frame_number;
3681 result.result = i->settings;
3682 result.input_buffer = i->input_buffer;
3683 result.partial_result = PARTIAL_RESULT_COUNT;
3684
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003685 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07003686 LOGD("Input request metadata and input buffer frame_number = %u",
3687 i->frame_number);
3688 i = erasePendingRequest(i);
3689 } else {
3690 LOGE("Could not find input request for frame number %d", frame_number);
3691 }
3692}
3693
3694/*===========================================================================
3695 * FUNCTION : handleBufferWithLock
3696 *
3697 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
3698 *
3699 * PARAMETERS : @buffer: image buffer for the callback
3700 * @frame_number: frame number of the image buffer
3701 *
3702 * RETURN :
3703 *
3704 *==========================================================================*/
3705void QCamera3HardwareInterface::handleBufferWithLock(
3706 camera3_stream_buffer_t *buffer, uint32_t frame_number)
3707{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003708 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BUF_LKD);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003709
3710 if (buffer->stream->format == HAL_PIXEL_FORMAT_BLOB) {
3711 mPerfLockMgr.releasePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
3712 }
3713
Thierry Strudel3d639192016-09-09 11:52:26 -07003714 /* Nothing to be done during error state */
3715 if ((ERROR == mState) || (DEINIT == mState)) {
3716 return;
3717 }
3718 if (mFlushPerf) {
3719 handleBuffersDuringFlushLock(buffer);
3720 return;
3721 }
3722 //not in flush
3723 // If the frame number doesn't exist in the pending request list,
3724 // directly send the buffer to the frameworks, and update pending buffers map
3725 // Otherwise, book-keep the buffer.
3726 pendingRequestIterator i = mPendingRequestsList.begin();
3727 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
3728 i++;
3729 }
3730 if (i == mPendingRequestsList.end()) {
3731 // Verify all pending requests frame_numbers are greater
3732 for (pendingRequestIterator j = mPendingRequestsList.begin();
3733 j != mPendingRequestsList.end(); j++) {
3734 if ((j->frame_number < frame_number) && !(j->input_buffer)) {
3735 LOGW("Error: pending live frame number %d is smaller than %d",
3736 j->frame_number, frame_number);
3737 }
3738 }
3739 camera3_capture_result_t result;
3740 memset(&result, 0, sizeof(camera3_capture_result_t));
3741 result.result = NULL;
3742 result.frame_number = frame_number;
3743 result.num_output_buffers = 1;
3744 result.partial_result = 0;
3745 for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
3746 m != mPendingFrameDropList.end(); m++) {
3747 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
3748 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
3749 if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
3750 buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
3751 LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
3752 frame_number, streamID);
3753 m = mPendingFrameDropList.erase(m);
3754 break;
3755 }
3756 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003757 buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -07003758 result.output_buffers = buffer;
3759 LOGH("result frame_number = %d, buffer = %p",
3760 frame_number, buffer->buffer);
3761
3762 mPendingBuffersMap.removeBuf(buffer->buffer);
3763
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003764 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07003765 } else {
3766 if (i->input_buffer) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003767 if (i->input_buffer->release_fence != -1) {
3768 int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
3769 close(i->input_buffer->release_fence);
3770 if (rc != OK) {
3771 LOGE("input buffer sync wait failed %d", rc);
3772 }
3773 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003774 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003775
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003776 // Put buffer into the pending request
3777 for (auto &requestedBuffer : i->buffers) {
3778 if (requestedBuffer.stream == buffer->stream) {
3779 if (requestedBuffer.buffer != nullptr) {
3780 LOGE("Error: buffer is already set");
3781 } else {
3782 requestedBuffer.buffer = (camera3_stream_buffer_t *)malloc(
3783 sizeof(camera3_stream_buffer_t));
3784 *(requestedBuffer.buffer) = *buffer;
3785 LOGH("cache buffer %p at result frame_number %u",
3786 buffer->buffer, frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003787 }
3788 }
3789 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003790
3791 if (i->input_buffer) {
3792 // For a reprocessing request, try to send out shutter callback and result metadata.
3793 handlePendingResultsWithLock(frame_number, nullptr);
3794 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003795 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003796
3797 if (mPreviewStarted == false) {
3798 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
3799 if ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask()) {
3800 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
3801 mPerfLockMgr.releasePerfLock(PERF_LOCK_OPEN_CAMERA);
3802 mPreviewStarted = true;
3803
3804 // Set power hint for preview
3805 mPerfLockMgr.acquirePerfLock(PERF_LOCK_POWERHINT_ENCODE, 0);
3806 }
3807 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003808}
3809
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003810void QCamera3HardwareInterface::handlePendingResultsWithLock(uint32_t frameNumber,
3811 const camera_metadata_t *resultMetadata)
3812{
3813 // Find the pending request for this result metadata.
3814 auto requestIter = mPendingRequestsList.begin();
3815 while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != frameNumber) {
3816 requestIter++;
3817 }
3818
3819 if (requestIter == mPendingRequestsList.end()) {
3820 ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, frameNumber);
3821 return;
3822 }
3823
3824 // Update the result metadata
3825 requestIter->resultMetadata = resultMetadata;
3826
3827 // Check what type of request this is.
3828 bool liveRequest = false;
3829 if (requestIter->hdrplus) {
3830 // HDR+ request doesn't have partial results.
3831 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
3832 } else if (requestIter->input_buffer != nullptr) {
3833 // Reprocessing request result is the same as settings.
3834 requestIter->resultMetadata = requestIter->settings;
3835 // Reprocessing request doesn't have partial results.
3836 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
3837 } else {
3838 liveRequest = true;
3839 requestIter->partial_result_cnt++;
3840 mPendingLiveRequest--;
3841
3842 // For a live request, send the metadata to HDR+ client.
Chien-Yu Chenee335912017-02-09 17:53:20 -08003843 if (mHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003844 mHdrPlusClient->notifyFrameMetadata(frameNumber, *resultMetadata,
3845 requestIter->partial_result_cnt == PARTIAL_RESULT_COUNT);
3846 }
3847 }
3848
3849 // The pending requests are ordered by increasing frame numbers. The shutter callback and
3850 // result metadata are ready to be sent if all previous pending requests are ready to be sent.
3851 bool readyToSend = true;
3852
3853 // Iterate through the pending requests to send out shutter callbacks and results that are
3854 // ready. Also if this result metadata belongs to a live request, notify errors for previous
3855 // live requests that don't have result metadata yet.
3856 auto iter = mPendingRequestsList.begin();
3857 while (iter != mPendingRequestsList.end()) {
3858 // Check if current pending request is ready. If it's not ready, the following pending
3859 // requests are also not ready.
3860 if (readyToSend && iter->resultMetadata == nullptr) {
3861 readyToSend = false;
3862 }
3863
3864 bool thisLiveRequest = iter->hdrplus == false && iter->input_buffer == nullptr;
3865
3866 std::vector<camera3_stream_buffer_t> outputBuffers;
3867
3868 camera3_capture_result_t result = {};
3869 result.frame_number = iter->frame_number;
3870 result.result = iter->resultMetadata;
3871 result.partial_result = iter->partial_result_cnt;
3872
3873 // If this pending buffer has result metadata, we may be able to send out shutter callback
3874 // and result metadata.
3875 if (iter->resultMetadata != nullptr) {
3876 if (!readyToSend) {
3877 // If any of the previous pending request is not ready, this pending request is
3878 // also not ready to send in order to keep shutter callbacks and result metadata
3879 // in order.
3880 iter++;
3881 continue;
3882 }
3883
3884 // Invoke shutter callback if not yet.
3885 if (!iter->shutter_notified) {
3886 int64_t timestamp = systemTime(CLOCK_MONOTONIC);
3887
3888 // Find the timestamp in HDR+ result metadata
3889 camera_metadata_ro_entry_t entry;
3890 status_t res = find_camera_metadata_ro_entry(iter->resultMetadata,
3891 ANDROID_SENSOR_TIMESTAMP, &entry);
3892 if (res != OK) {
3893 ALOGE("%s: Cannot find sensor timestamp for frame number %d: %s (%d)",
3894 __FUNCTION__, iter->frame_number, strerror(-res), res);
3895 } else {
3896 timestamp = entry.data.i64[0];
3897 }
3898
3899 camera3_notify_msg_t notify_msg = {};
3900 notify_msg.type = CAMERA3_MSG_SHUTTER;
3901 notify_msg.message.shutter.frame_number = iter->frame_number;
3902 notify_msg.message.shutter.timestamp = timestamp;
3903 orchestrateNotify(&notify_msg);
3904 iter->shutter_notified = true;
3905 }
3906
3907 result.input_buffer = iter->input_buffer;
3908
3909 // Prepare output buffer array
3910 for (auto bufferInfoIter = iter->buffers.begin();
3911 bufferInfoIter != iter->buffers.end(); bufferInfoIter++) {
3912 if (bufferInfoIter->buffer != nullptr) {
3913
3914 QCamera3Channel *channel =
3915 (QCamera3Channel *)bufferInfoIter->buffer->stream->priv;
3916 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
3917
3918 // Check if this buffer is a dropped frame.
3919 auto frameDropIter = mPendingFrameDropList.begin();
3920 while (frameDropIter != mPendingFrameDropList.end()) {
3921 if((frameDropIter->stream_ID == streamID) &&
3922 (frameDropIter->frame_number == frameNumber)) {
3923 bufferInfoIter->buffer->status = CAMERA3_BUFFER_STATUS_ERROR;
3924 LOGE("Stream STATUS_ERROR frame_number=%u, streamID=%u", frameNumber,
3925 streamID);
3926 mPendingFrameDropList.erase(frameDropIter);
3927 break;
3928 } else {
3929 frameDropIter++;
3930 }
3931 }
3932
3933 // Check buffer error status
3934 bufferInfoIter->buffer->status |= mPendingBuffersMap.getBufErrStatus(
3935 bufferInfoIter->buffer->buffer);
3936 mPendingBuffersMap.removeBuf(bufferInfoIter->buffer->buffer);
3937
3938 outputBuffers.push_back(*(bufferInfoIter->buffer));
3939 free(bufferInfoIter->buffer);
3940 bufferInfoIter->buffer = NULL;
3941 }
3942 }
3943
3944 result.output_buffers = outputBuffers.size() > 0 ? &outputBuffers[0] : nullptr;
3945 result.num_output_buffers = outputBuffers.size();
3946 } else if (iter->frame_number < frameNumber && liveRequest && thisLiveRequest) {
3947 // If the result metadata belongs to a live request, notify errors for previous pending
3948 // live requests.
3949 mPendingLiveRequest--;
3950
3951 CameraMetadata dummyMetadata;
3952 dummyMetadata.update(ANDROID_REQUEST_ID, &(iter->request_id), 1);
3953 result.result = dummyMetadata.release();
3954
3955 notifyError(iter->frame_number, CAMERA3_MSG_ERROR_RESULT);
3956 } else {
3957 iter++;
3958 continue;
3959 }
3960
3961 orchestrateResult(&result);
3962
3963 // For reprocessing, result metadata is the same as settings so do not free it here to
3964 // avoid double free.
3965 if (result.result != iter->settings) {
3966 free_camera_metadata((camera_metadata_t *)result.result);
3967 }
3968 iter->resultMetadata = nullptr;
3969 iter = erasePendingRequest(iter);
3970 }
3971
3972 if (liveRequest) {
3973 for (auto &iter : mPendingRequestsList) {
3974 // Increment pipeline depth for the following pending requests.
3975 if (iter.frame_number > frameNumber) {
3976 iter.pipeline_depth++;
3977 }
3978 }
3979 }
3980
3981 unblockRequestIfNecessary();
3982}
3983
Thierry Strudel3d639192016-09-09 11:52:26 -07003984/*===========================================================================
3985 * FUNCTION : unblockRequestIfNecessary
3986 *
3987 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
3988 * that mMutex is held when this function is called.
3989 *
3990 * PARAMETERS :
3991 *
3992 * RETURN :
3993 *
3994 *==========================================================================*/
3995void QCamera3HardwareInterface::unblockRequestIfNecessary()
3996{
3997 // Unblock process_capture_request
3998 pthread_cond_signal(&mRequestCond);
3999}
4000
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004001/*===========================================================================
4002 * FUNCTION : isHdrSnapshotRequest
4003 *
4004 * DESCRIPTION: Function to determine if the request is for a HDR snapshot
4005 *
4006 * PARAMETERS : camera3 request structure
4007 *
4008 * RETURN : boolean decision variable
4009 *
4010 *==========================================================================*/
4011bool QCamera3HardwareInterface::isHdrSnapshotRequest(camera3_capture_request *request)
4012{
4013 if (request == NULL) {
4014 LOGE("Invalid request handle");
4015 assert(0);
4016 return false;
4017 }
4018
4019 if (!mForceHdrSnapshot) {
4020 CameraMetadata frame_settings;
4021 frame_settings = request->settings;
4022
4023 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
4024 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
4025 if (metaMode != ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
4026 return false;
4027 }
4028 } else {
4029 return false;
4030 }
4031
4032 if (frame_settings.exists(ANDROID_CONTROL_SCENE_MODE)) {
4033 uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
4034 if (fwk_sceneMode != ANDROID_CONTROL_SCENE_MODE_HDR) {
4035 return false;
4036 }
4037 } else {
4038 return false;
4039 }
4040 }
4041
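    // Regardless of how the HDR intent was established above, the request must
    // also contain at least one BLOB (jpeg) output stream to qualify as an HDR
    // snapshot.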
4042 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4043 if (request->output_buffers[i].stream->format
4044 == HAL_PIXEL_FORMAT_BLOB) {
4045 return true;
4046 }
4047 }
4048
4049 return false;
4050}
4051/*===========================================================================
4052 * FUNCTION : orchestrateRequest
4053 *
4054 * DESCRIPTION: Orchestrates a capture request from camera service
4055 *
4056 * PARAMETERS :
4057 * @request : request from framework to process
4058 *
4059 * RETURN : Error status codes
4060 *
4061 *==========================================================================*/
4062int32_t QCamera3HardwareInterface::orchestrateRequest(
4063 camera3_capture_request_t *request)
4064{
4065
4066 uint32_t originalFrameNumber = request->frame_number;
4067 uint32_t originalOutputCount = request->num_output_buffers;
4068 const camera_metadata_t *original_settings = request->settings;
4069 List<InternalRequest> internallyRequestedStreams;
4070 List<InternalRequest> emptyInternalList;
4071
4072 if (isHdrSnapshotRequest(request) && request->input_buffer == NULL) {
4073 LOGD("Framework requested:%d buffers in HDR snapshot", request->num_output_buffers);
4074 uint32_t internalFrameNumber;
4075 CameraMetadata modified_meta;
4076
4077
4078 /* Add Blob channel to list of internally requested streams */
4079 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4080 if (request->output_buffers[i].stream->format
4081 == HAL_PIXEL_FORMAT_BLOB) {
4082 InternalRequest streamRequested;
4083 streamRequested.meteringOnly = 1;
4084 streamRequested.need_metadata = 0;
4085 streamRequested.stream = request->output_buffers[i].stream;
4086 internallyRequestedStreams.push_back(streamRequested);
4087 }
4088 }
4089 request->num_output_buffers = 0;
4090 auto itr = internallyRequestedStreams.begin();
4091
4092 /* Modify setting to set compensation */
4093 modified_meta = request->settings;
4094 int32_t expCompensation = GB_HDR_HALF_STEP_EV;
4095 uint8_t aeLock = 1;
4096 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4097 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4098 camera_metadata_t *modified_settings = modified_meta.release();
4099 request->settings = modified_settings;
4100
4101 /* Capture Settling & -2x frame */
4102 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4103 request->frame_number = internalFrameNumber;
4104 processCaptureRequest(request, internallyRequestedStreams);
4105
4106 request->num_output_buffers = originalOutputCount;
4107 _orchestrationDb.allocStoreInternalFrameNumber(originalFrameNumber, internalFrameNumber);
4108 request->frame_number = internalFrameNumber;
4109 processCaptureRequest(request, emptyInternalList);
4110 request->num_output_buffers = 0;
4111
4112 modified_meta = modified_settings;
4113 expCompensation = 0;
4114 aeLock = 1;
4115 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4116 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4117 modified_settings = modified_meta.release();
4118 request->settings = modified_settings;
4119
4120 /* Capture Settling & 0X frame */
4121
4122 itr = internallyRequestedStreams.begin();
4123 if (itr == internallyRequestedStreams.end()) {
4124 LOGE("Error Internally Requested Stream list is empty");
4125 assert(0);
4126 } else {
4127 itr->need_metadata = 0;
4128 itr->meteringOnly = 1;
4129 }
4130
4131 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4132 request->frame_number = internalFrameNumber;
4133 processCaptureRequest(request, internallyRequestedStreams);
4134
4135 itr = internallyRequestedStreams.begin();
4136 if (itr == internallyRequestedStreams.end()) {
4137 ALOGE("Error Internally Requested Stream list is empty");
4138 assert(0);
4139 } else {
4140 itr->need_metadata = 1;
4141 itr->meteringOnly = 0;
4142 }
4143
4144 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4145 request->frame_number = internalFrameNumber;
4146 processCaptureRequest(request, internallyRequestedStreams);
4147
4148 /* Capture 2X frame*/
4149 modified_meta = modified_settings;
4150 expCompensation = GB_HDR_2X_STEP_EV;
4151 aeLock = 1;
4152 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4153 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4154 modified_settings = modified_meta.release();
4155 request->settings = modified_settings;
4156
4157 itr = internallyRequestedStreams.begin();
4158 if (itr == internallyRequestedStreams.end()) {
4159 ALOGE("Error Internally Requested Stream list is empty");
4160 assert(0);
4161 } else {
4162 itr->need_metadata = 0;
4163 itr->meteringOnly = 1;
4164 }
4165 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4166 request->frame_number = internalFrameNumber;
4167 processCaptureRequest(request, internallyRequestedStreams);
4168
4169 itr = internallyRequestedStreams.begin();
4170 if (itr == internallyRequestedStreams.end()) {
4171 ALOGE("Error Internally Requested Stream list is empty");
4172 assert(0);
4173 } else {
4174 itr->need_metadata = 1;
4175 itr->meteringOnly = 0;
4176 }
4177
4178 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4179 request->frame_number = internalFrameNumber;
4180 processCaptureRequest(request, internallyRequestedStreams);
4181
4182
4183 /* Capture 2X on original streaming config*/
4184 internallyRequestedStreams.clear();
4185
4186 /* Restore original settings pointer */
4187 request->settings = original_settings;
4188 } else {
4189 uint32_t internalFrameNumber;
4190 _orchestrationDb.allocStoreInternalFrameNumber(request->frame_number, internalFrameNumber);
4191 request->frame_number = internalFrameNumber;
4192 return processCaptureRequest(request, internallyRequestedStreams);
4193 }
4194
4195 return NO_ERROR;
4196}
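
/* Rough summary of the HDR bracketing sequence generated by orchestrateRequest()
 * above (a reading aid only, no additional behavior). Each bracket step first issues
 * a metering-only internal request on the BLOB stream so AE can settle at the new
 * exposure compensation, and is then followed by the actual capture:
 *
 *   1. EV = GB_HDR_HALF_STEP_EV, AE locked : metering-only settle (internal frame#)
 *   2. EV = GB_HDR_HALF_STEP_EV            : capture on the original output buffers,
 *                                            mapped back to the framework frame number
 *   3. EV = 0, AE locked                   : metering-only settle (internal frame#)
 *   4. EV = 0                              : internal capture with metadata
 *   5. EV = GB_HDR_2X_STEP_EV, AE locked   : metering-only settle (internal frame#)
 *   6. EV = GB_HDR_2X_STEP_EV              : internal capture with metadata
 *
 * Only step 2 carries a framework-visible frame number; the other steps use
 * internally generated numbers that orchestrateResult()/orchestrateNotify() drop.
 */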
4197
4198/*===========================================================================
4199 * FUNCTION : orchestrateResult
4200 *
4201 * DESCRIPTION: Orchestrates a capture result to camera service
4202 *
4203 * PARAMETERS :
4204 * @result : capture result to be sent to the framework
4205 *
4206 * RETURN :
4207 *
4208 *==========================================================================*/
4209void QCamera3HardwareInterface::orchestrateResult(
4210 camera3_capture_result_t *result)
4211{
4212 uint32_t frameworkFrameNumber;
4213 int32_t rc = _orchestrationDb.getFrameworkFrameNumber(result->frame_number,
4214 frameworkFrameNumber);
4215 if (rc != NO_ERROR) {
4216 LOGE("Cannot find translated frameworkFrameNumber");
4217 assert(0);
4218 } else {
4219 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004220            LOGD("Internal request: dropping the result");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004221 } else {
4222 result->frame_number = frameworkFrameNumber;
4223 mCallbackOps->process_capture_result(mCallbackOps, result);
4224 }
4225 }
4226}
4227
4228/*===========================================================================
4229 * FUNCTION : orchestrateNotify
4230 *
4231 * DESCRIPTION: Orchestrates a notify to camera service
4232 *
4233 * PARAMETERS :
4234 * @notify_msg : notify message to be sent to the framework
4235 *
4236 * RETURN :
4237 *
4238 *==========================================================================*/
4239void QCamera3HardwareInterface::orchestrateNotify(camera3_notify_msg_t *notify_msg)
4240{
4241 uint32_t frameworkFrameNumber;
4242 uint32_t internalFrameNumber = notify_msg->message.shutter.frame_number;
Thierry Strudel2896d122017-02-23 19:18:03 -08004243 int32_t rc = NO_ERROR;
4244
4245 rc = _orchestrationDb.getFrameworkFrameNumber(internalFrameNumber,
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004246 frameworkFrameNumber);
Thierry Strudel2896d122017-02-23 19:18:03 -08004247
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004248 if (rc != NO_ERROR) {
Thierry Strudel2896d122017-02-23 19:18:03 -08004249 if (notify_msg->message.error.error_code == CAMERA3_MSG_ERROR_DEVICE) {
4250 LOGD("Sending CAMERA3_MSG_ERROR_DEVICE to framework");
4251 frameworkFrameNumber = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004252 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08004253 LOGE("Cannot find translated frameworkFrameNumber");
4254 assert(0);
4255 return;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004256 }
4257 }
Thierry Strudel2896d122017-02-23 19:18:03 -08004258
4259 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
4260        LOGD("Internal request: dropping the notifyCb");
4261 } else {
4262 notify_msg->message.shutter.frame_number = frameworkFrameNumber;
4263 mCallbackOps->notify(mCallbackOps, notify_msg);
4264 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004265}
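
/* Note on the error path above: if the internal frame number cannot be translated
 * but the message is CAMERA3_MSG_ERROR_DEVICE, the notification is still forwarded
 * with frame number 0 so the framework learns about the fatal device error; any
 * other untranslatable notification is treated as a bug and dropped.
 */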
4266
4267/*===========================================================================
4268 * FUNCTION : FrameNumberRegistry
4269 *
4270 * DESCRIPTION: Constructor
4271 *
4272 * PARAMETERS :
4273 *
4274 * RETURN :
4275 *
4276 *==========================================================================*/
4277FrameNumberRegistry::FrameNumberRegistry()
4278{
4279 _nextFreeInternalNumber = INTERNAL_FRAME_STARTING_NUMBER;
4280}
4281
4282/*===========================================================================
4283 * FUNCTION : ~FrameNumberRegistry
4284 *
4285 * DESCRIPTION: Destructor
4286 *
4287 * PARAMETERS :
4288 *
4289 * RETURN :
4290 *
4291 *==========================================================================*/
4292FrameNumberRegistry::~FrameNumberRegistry()
4293{
4294}
4295
4296/*===========================================================================
4297 * FUNCTION : PurgeOldEntriesLocked
4298 *
4299 * DESCRIPTION: Maintenance function that triggers the LRU cleanup mechanism
4300 *
4301 * PARAMETERS :
4302 *
4303 * RETURN : NONE
4304 *
4305 *==========================================================================*/
4306void FrameNumberRegistry::purgeOldEntriesLocked()
4307{
4308 while (_register.begin() != _register.end()) {
4309 auto itr = _register.begin();
4310 if (itr->first < (_nextFreeInternalNumber - FRAME_REGISTER_LRU_SIZE)) {
4311 _register.erase(itr);
4312 } else {
4313 return;
4314 }
4315 }
4316}
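
/* Worked example of the purge above (numbers are hypothetical): with
 * FRAME_REGISTER_LRU_SIZE == 256 and _nextFreeInternalNumber == 1000, every map
 * entry whose internal number is below 1000 - 256 = 744 is erased, so the map never
 * holds much more than the last FRAME_REGISTER_LRU_SIZE internal frame numbers.
 */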
4317
4318/*===========================================================================
4319 * FUNCTION : allocStoreInternalFrameNumber
4320 *
4321 * DESCRIPTION: Method to note down a framework request and associate a new
4322 * internal request number against it
4323 *
4324 * PARAMETERS :
4325 * @fFrameNumber: Identifier given by framework
4326 * @internalFN : Output parameter which will have the newly generated internal
4327 * entry
4328 *
4329 * RETURN : Error code
4330 *
4331 *==========================================================================*/
4332int32_t FrameNumberRegistry::allocStoreInternalFrameNumber(uint32_t frameworkFrameNumber,
4333 uint32_t &internalFrameNumber)
4334{
4335 Mutex::Autolock lock(mRegistryLock);
4336 internalFrameNumber = _nextFreeInternalNumber++;
4337 LOGD("Storing ff#:%d, with internal:%d", frameworkFrameNumber, internalFrameNumber);
4338 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, frameworkFrameNumber));
4339 purgeOldEntriesLocked();
4340 return NO_ERROR;
4341}
4342
4343/*===========================================================================
4344 * FUNCTION : generateStoreInternalFrameNumber
4345 *
4346 * DESCRIPTION: Method to generate a new internal request number that is not
4347 * associated with any framework request
4348 *
4349 * PARAMETERS :
4350 * @internalFrame#: Output parameter which will hold the newly generated internal
4351 *                  frame number
4352 *
4353 * RETURN : Error code
4354 *
4355 *==========================================================================*/
4356int32_t FrameNumberRegistry::generateStoreInternalFrameNumber(uint32_t &internalFrameNumber)
4357{
4358 Mutex::Autolock lock(mRegistryLock);
4359 internalFrameNumber = _nextFreeInternalNumber++;
4360 LOGD("Generated internal framenumber:%d", internalFrameNumber);
4361 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, EMPTY_FRAMEWORK_FRAME_NUMBER));
4362 purgeOldEntriesLocked();
4363 return NO_ERROR;
4364}
4365
4366/*===========================================================================
4367 * FUNCTION : getFrameworkFrameNumber
4368 *
4369 * DESCRIPTION: Method to query the framework frame number given an internal one
4370 *
4371 * PARAMETERS :
4372 * @internalFrame#: Internal reference
4373 * @frameworkframenumber: Output parameter holding framework frame entry
4374 *
4375 * RETURN : Error code
4376 *
4377 *==========================================================================*/
4378int32_t FrameNumberRegistry::getFrameworkFrameNumber(uint32_t internalFrameNumber,
4379 uint32_t &frameworkFrameNumber)
4380{
4381 Mutex::Autolock lock(mRegistryLock);
4382 auto itr = _register.find(internalFrameNumber);
4383 if (itr == _register.end()) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004384 LOGE("Cannot find internal#: %d", internalFrameNumber);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004385 return -ENOENT;
4386 }
4387
4388 frameworkFrameNumber = itr->second;
4389 purgeOldEntriesLocked();
4390 return NO_ERROR;
4391}
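
/* Minimal usage sketch for FrameNumberRegistry (illustrative only; the frame numbers
 * below are hypothetical). Framework-driven requests use allocStore..., HAL-internal
 * requests use generateStore..., and results are translated back on the way out:
 *
 *     FrameNumberRegistry db;
 *     uint32_t fwkBacked, internalOnly, fwkFn;
 *     db.allocStoreInternalFrameNumber(42, fwkBacked);     // maps to framework frame 42
 *     db.generateStoreInternalFrameNumber(internalOnly);   // HAL-internal only
 *     if (db.getFrameworkFrameNumber(internalOnly, fwkFn) == NO_ERROR &&
 *             fwkFn == EMPTY_FRAMEWORK_FRAME_NUMBER) {
 *         // internal-only request: the corresponding result/notify is dropped
 *     }
 *     if (db.getFrameworkFrameNumber(fwkBacked, fwkFn) == NO_ERROR) {
 *         // fwkFn == 42: deliver the result to the framework under this number
 *     }
 */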
Thierry Strudel3d639192016-09-09 11:52:26 -07004392
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004393status_t QCamera3HardwareInterface::fillPbStreamConfig(
4394 pbcamera::StreamConfiguration *config, uint32_t pbStreamId, int pbStreamFormat,
4395 QCamera3Channel *channel, uint32_t streamIndex) {
4396 if (config == nullptr) {
4397 LOGE("%s: config is null", __FUNCTION__);
4398 return BAD_VALUE;
4399 }
4400
4401 if (channel == nullptr) {
4402 LOGE("%s: channel is null", __FUNCTION__);
4403 return BAD_VALUE;
4404 }
4405
4406 QCamera3Stream *stream = channel->getStreamByIndex(streamIndex);
4407 if (stream == nullptr) {
4408 LOGE("%s: Failed to get stream %d in channel.", __FUNCTION__, streamIndex);
4409 return NAME_NOT_FOUND;
4410 }
4411
4412 const cam_stream_info_t* streamInfo = stream->getStreamInfo();
4413 if (streamInfo == nullptr) {
4414 LOGE("%s: Failed to get stream info for stream %d in channel.", __FUNCTION__, streamIndex);
4415 return NAME_NOT_FOUND;
4416 }
4417
4418 config->id = pbStreamId;
4419 config->image.width = streamInfo->dim.width;
4420 config->image.height = streamInfo->dim.height;
4421 config->image.padding = 0;
4422 config->image.format = pbStreamFormat;
4423
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004424 uint32_t totalPlaneSize = 0;
4425
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004426 // Fill plane information.
4427 for (uint32_t i = 0; i < streamInfo->buf_planes.plane_info.num_planes; i++) {
4428 pbcamera::PlaneConfiguration plane;
4429 plane.stride = streamInfo->buf_planes.plane_info.mp[i].stride_in_bytes;
4430 plane.scanline = streamInfo->buf_planes.plane_info.mp[i].scanline;
4431 config->image.planes.push_back(plane);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004432
4433 totalPlaneSize += (plane.stride * plane.scanline);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004434 }
4435
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004436 config->image.padding = streamInfo->buf_planes.plane_info.frame_len - totalPlaneSize;
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004437 return OK;
4438}
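
/* Worked example of the padding computation above (all sizes hypothetical): for a
 * two-plane stream with plane sizes 100x80 and 100x40 bytes,
 *     total = 100*80 + 100*40 = 12000 bytes,
 * and if the backend reports frame_len = 12288 (e.g. rounded up for alignment), then
 *     config->image.padding = 12288 - 12000 = 288 bytes
 * of trailing padding after the last plane.
 */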
4439
Thierry Strudel3d639192016-09-09 11:52:26 -07004440/*===========================================================================
4441 * FUNCTION : processCaptureRequest
4442 *
4443 * DESCRIPTION: process a capture request from camera service
4444 *
4445 * PARAMETERS :
4446 * @request : request from framework to process
4447 *
4448 * RETURN :
4449 *
4450 *==========================================================================*/
4451int QCamera3HardwareInterface::processCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004452 camera3_capture_request_t *request,
4453 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07004454{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004455 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PROC_CAP_REQ);
Thierry Strudel3d639192016-09-09 11:52:26 -07004456 int rc = NO_ERROR;
4457 int32_t request_id;
4458 CameraMetadata meta;
Thierry Strudel3d639192016-09-09 11:52:26 -07004459 bool isVidBufRequested = false;
4460 camera3_stream_buffer_t *pInputBuffer = NULL;
Thierry Strudel54dc9782017-02-15 12:12:10 -08004461 char prop[PROPERTY_VALUE_MAX];
Thierry Strudel3d639192016-09-09 11:52:26 -07004462
4463 pthread_mutex_lock(&mMutex);
4464
4465 // Validate current state
4466 switch (mState) {
4467 case CONFIGURED:
4468 case STARTED:
4469 /* valid state */
4470 break;
4471
4472 case ERROR:
4473 pthread_mutex_unlock(&mMutex);
4474 handleCameraDeviceError();
4475 return -ENODEV;
4476
4477 default:
4478 LOGE("Invalid state %d", mState);
4479 pthread_mutex_unlock(&mMutex);
4480 return -ENODEV;
4481 }
4482
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004483 rc = validateCaptureRequest(request, internallyRequestedStreams);
Thierry Strudel3d639192016-09-09 11:52:26 -07004484 if (rc != NO_ERROR) {
4485 LOGE("incoming request is not valid");
4486 pthread_mutex_unlock(&mMutex);
4487 return rc;
4488 }
4489
4490 meta = request->settings;
4491
4492 // For first capture request, send capture intent, and
4493 // stream on all streams
4494 if (mState == CONFIGURED) {
4495 // send an unconfigure to the backend so that the isp
4496 // resources are deallocated
4497 if (!mFirstConfiguration) {
4498 cam_stream_size_info_t stream_config_info;
4499 int32_t hal_version = CAM_HAL_V3;
4500 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
4501 stream_config_info.buffer_info.min_buffers =
4502 MIN_INFLIGHT_REQUESTS;
4503 stream_config_info.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08004504 m_bIs4KVideo ? 0 :
4505 m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004506 clear_metadata_buffer(mParameters);
4507 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4508 CAM_INTF_PARM_HAL_VERSION, hal_version);
4509 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4510 CAM_INTF_META_STREAM_INFO, stream_config_info);
4511 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4512 mParameters);
4513 if (rc < 0) {
4514 LOGE("set_parms for unconfigure failed");
4515 pthread_mutex_unlock(&mMutex);
4516 return rc;
4517 }
4518 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004519 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07004520 /* get eis information for stream configuration */
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004521 cam_is_type_t isTypeVideo, isTypePreview, is_type=IS_TYPE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07004522 char is_type_value[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004523 property_get("persist.camera.is_type", is_type_value, "4");
4524 isTypeVideo = static_cast<cam_is_type_t>(atoi(is_type_value));
4525 // Make default value for preview IS_TYPE as IS_TYPE_EIS_2_0
4526 property_get("persist.camera.is_type_preview", is_type_value, "4");
4527 isTypePreview = static_cast<cam_is_type_t>(atoi(is_type_value));
4528 LOGD("isTypeVideo: %d isTypePreview: %d", isTypeVideo, isTypePreview);
Thierry Strudel3d639192016-09-09 11:52:26 -07004529
4530 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4531 int32_t hal_version = CAM_HAL_V3;
4532 uint8_t captureIntent =
4533 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4534 mCaptureIntent = captureIntent;
4535 clear_metadata_buffer(mParameters);
4536 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
4537 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
4538 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004539 if (mFirstConfiguration) {
4540 // configure instant AEC
4541 // Instant AEC is a session based parameter and it is needed only
4542 // once per complete session after open camera.
4543 // i.e. This is set only once for the first capture request, after open camera.
4544 setInstantAEC(meta);
4545 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004546 uint8_t fwkVideoStabMode=0;
4547 if (meta.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
4548 fwkVideoStabMode = meta.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
4549 }
4550
4551 // If EIS setprop is enabled & if first capture setting has EIS enabled then only
4552 // turn it on for video/preview
4553 bool setEis = m_bEisEnable && fwkVideoStabMode && m_bEisSupportedSize &&
4554 (isTypeVideo >= IS_TYPE_EIS_2_0);
Thierry Strudel3d639192016-09-09 11:52:26 -07004555 int32_t vsMode;
4556 vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
4557 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
4558 rc = BAD_VALUE;
4559 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004560 LOGD("setEis %d", setEis);
4561 bool eis3Supported = false;
4562 size_t count = IS_TYPE_MAX;
4563 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
4564 for (size_t i = 0; i < count; i++) {
4565 if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0) {
4566 eis3Supported = true;
4567 break;
4568 }
4569 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004570
4571 //IS type will be 0 unless EIS is supported. If EIS is supported
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004572 //it could either be 4 or 5 depending on the stream and video size
Thierry Strudel3d639192016-09-09 11:52:26 -07004573 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4574 if (setEis) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004575 if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_PREVIEW) {
4576 is_type = isTypePreview;
4577 } else if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_VIDEO ) {
4578 if ( (isTypeVideo == IS_TYPE_EIS_3_0) && (eis3Supported == FALSE) ) {
4579 LOGW(" EIS_3.0 is not supported and so setting EIS_2.0");
Thierry Strudel3d639192016-09-09 11:52:26 -07004580 is_type = IS_TYPE_EIS_2_0;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004581 } else {
4582 is_type = isTypeVideo;
Thierry Strudel3d639192016-09-09 11:52:26 -07004583 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004584 } else {
4585 is_type = IS_TYPE_NONE;
4586 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004587 mStreamConfigInfo.is_type[i] = is_type;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004588 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004589 mStreamConfigInfo.is_type[i] = IS_TYPE_NONE;
4590 }
4591 }
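
        // Summary of the IS-type selection above (per the cam_is_type_t numbering
        // implied by the comments/properties here, EIS 2.0 == 4 and EIS 3.0 == 5):
        //   setEis == false  -> IS_TYPE_NONE for every stream
        //   PREVIEW stream   -> isTypePreview (persist.camera.is_type_preview)
        //   VIDEO stream     -> isTypeVideo (persist.camera.is_type), except that a
        //                       request for EIS 3.0 falls back to IS_TYPE_EIS_2_0 when
        //                       the sensor does not advertise EIS 3.0 support
        //   other streams    -> IS_TYPE_NONE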
4592
4593 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4594 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
4595
Thierry Strudel54dc9782017-02-15 12:12:10 -08004596 //Disable tintless only if the property is set to 0
4597 memset(prop, 0, sizeof(prop));
4598 property_get("persist.camera.tintless.enable", prop, "1");
4599 int32_t tintless_value = atoi(prop);
4600
Thierry Strudel3d639192016-09-09 11:52:26 -07004601 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4602 CAM_INTF_PARM_TINTLESS, tintless_value);
Thierry Strudel54dc9782017-02-15 12:12:10 -08004603
Thierry Strudel3d639192016-09-09 11:52:26 -07004604 //Disable CDS for HFR mode or if DIS/EIS is on.
4605 //CDS is a session parameter in the backend/ISP, so need to be set/reset
4606 //after every configure_stream
4607 if ((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
4608 (m_bIsVideo)) {
4609 int32_t cds = CAM_CDS_MODE_OFF;
4610 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4611 CAM_INTF_PARM_CDS_MODE, cds))
4612 LOGE("Failed to disable CDS for HFR mode");
4613
4614 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004615
4616 if (m_debug_avtimer || meta.exists(QCAMERA3_USE_AV_TIMER)) {
4617 uint8_t* use_av_timer = NULL;
4618
4619 if (m_debug_avtimer){
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004620 LOGI(" Enabling AV timer through setprop");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004621 use_av_timer = &m_debug_avtimer;
4622 }
4623 else{
4624 use_av_timer =
4625 meta.find(QCAMERA3_USE_AV_TIMER).data.u8;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004626 if (use_av_timer) {
4627 LOGI("Enabling AV timer through Metadata: use_av_timer: %d", *use_av_timer);
4628 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004629 }
4630
4631 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
4632 rc = BAD_VALUE;
4633 }
4634 }
4635
Thierry Strudel3d639192016-09-09 11:52:26 -07004636 setMobicat();
4637
4638 /* Set fps and hfr mode while sending meta stream info so that sensor
4639 * can configure appropriate streaming mode */
4640 mHFRVideoFps = DEFAULT_VIDEO_FPS;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004641 mMinInFlightRequests = MIN_INFLIGHT_REQUESTS;
4642 mMaxInFlightRequests = MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004643 if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
4644 rc = setHalFpsRange(meta, mParameters);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004645 if (rc == NO_ERROR) {
4646 int32_t max_fps =
4647 (int32_t) meta.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
Zhijun He21b864a2016-06-24 13:41:19 -07004648 if (max_fps == 60 || mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004649 mMinInFlightRequests = MIN_INFLIGHT_60FPS_REQUESTS;
4650 }
4651 /* For HFR, more buffers are dequeued upfront to improve the performance */
4652 if (mBatchSize) {
4653 mMinInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
4654 mMaxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
4655 }
4656 }
4657 else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004658 LOGE("setHalFpsRange failed");
4659 }
4660 }
4661 if (meta.exists(ANDROID_CONTROL_MODE)) {
4662 uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
4663 rc = extractSceneMode(meta, metaMode, mParameters);
4664 if (rc != NO_ERROR) {
4665 LOGE("extractSceneMode failed");
4666 }
4667 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004668 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07004669
Thierry Strudel04e026f2016-10-10 11:27:36 -07004670 if (meta.exists(QCAMERA3_VIDEO_HDR_MODE)) {
4671 cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
4672 meta.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
4673 rc = setVideoHdrMode(mParameters, vhdr);
4674 if (rc != NO_ERROR) {
4675                LOGE("setVideoHdrMode failed");
4676 }
4677 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004678
Thierry Strudel3d639192016-09-09 11:52:26 -07004679 //TODO: validate the arguments, HSV scenemode should have only the
4680 //advertised fps ranges
4681
4682        /* Set the capture intent, HAL version, tintless, stream info,
4683         * and DIS enable parameters to the backend */
4684 LOGD("set_parms META_STREAM_INFO " );
4685 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
Thierry Strudel2896d122017-02-23 19:18:03 -08004686 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%" PRIx64
4687 ", Format:%d is_type: %d",
Thierry Strudel3d639192016-09-09 11:52:26 -07004688 mStreamConfigInfo.type[i],
4689 mStreamConfigInfo.stream_sizes[i].width,
4690 mStreamConfigInfo.stream_sizes[i].height,
4691 mStreamConfigInfo.postprocess_mask[i],
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004692 mStreamConfigInfo.format[i],
4693 mStreamConfigInfo.is_type[i]);
Thierry Strudel3d639192016-09-09 11:52:26 -07004694 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004695
Thierry Strudel3d639192016-09-09 11:52:26 -07004696 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4697 mParameters);
4698 if (rc < 0) {
4699 LOGE("set_parms failed for hal version, stream info");
4700 }
4701
Chien-Yu Chenee335912017-02-09 17:53:20 -08004702 memset(&mSensorModeInfo, 0, sizeof(mSensorModeInfo));
4703 rc = getSensorModeInfo(mSensorModeInfo);
Thierry Strudel3d639192016-09-09 11:52:26 -07004704 if (rc != NO_ERROR) {
4705 LOGE("Failed to get sensor output size");
4706 pthread_mutex_unlock(&mMutex);
4707 goto error_exit;
4708 }
4709
4710 mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
4711 gCamCapability[mCameraId]->active_array_size.height,
Chien-Yu Chenee335912017-02-09 17:53:20 -08004712 mSensorModeInfo.active_array_size.width,
4713 mSensorModeInfo.active_array_size.height);
Thierry Strudel3d639192016-09-09 11:52:26 -07004714
4715 /* Set batchmode before initializing channel. Since registerBuffer
4716 * internally initializes some of the channels, better set batchmode
4717 * even before first register buffer */
4718 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4719 it != mStreamInfo.end(); it++) {
4720 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
4721 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
4722 && mBatchSize) {
4723 rc = channel->setBatchSize(mBatchSize);
4724 //Disable per frame map unmap for HFR/batchmode case
4725 rc |= channel->setPerFrameMapUnmap(false);
4726 if (NO_ERROR != rc) {
4727 LOGE("Channel init failed %d", rc);
4728 pthread_mutex_unlock(&mMutex);
4729 goto error_exit;
4730 }
4731 }
4732 }
4733
4734 //First initialize all streams
4735 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4736 it != mStreamInfo.end(); it++) {
4737 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
4738 if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
4739 ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004740 setEis) {
4741 for (size_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4742 if ( (1U << mStreamConfigInfo.type[i]) == channel->getStreamTypeMask() ) {
4743 is_type = mStreamConfigInfo.is_type[i];
4744 break;
4745 }
4746 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004747 rc = channel->initialize(is_type);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004748 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004749 rc = channel->initialize(IS_TYPE_NONE);
4750 }
4751 if (NO_ERROR != rc) {
4752 LOGE("Channel initialization failed %d", rc);
4753 pthread_mutex_unlock(&mMutex);
4754 goto error_exit;
4755 }
4756 }
4757
4758 if (mRawDumpChannel) {
4759 rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
4760 if (rc != NO_ERROR) {
4761 LOGE("Error: Raw Dump Channel init failed");
4762 pthread_mutex_unlock(&mMutex);
4763 goto error_exit;
4764 }
4765 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004766 if (mHdrPlusRawSrcChannel) {
4767 rc = mHdrPlusRawSrcChannel->initialize(IS_TYPE_NONE);
4768 if (rc != NO_ERROR) {
4769 LOGE("Error: HDR+ RAW Source Channel init failed");
4770 pthread_mutex_unlock(&mMutex);
4771 goto error_exit;
4772 }
4773 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004774 if (mSupportChannel) {
4775 rc = mSupportChannel->initialize(IS_TYPE_NONE);
4776 if (rc < 0) {
4777 LOGE("Support channel initialization failed");
4778 pthread_mutex_unlock(&mMutex);
4779 goto error_exit;
4780 }
4781 }
4782 if (mAnalysisChannel) {
4783 rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
4784 if (rc < 0) {
4785 LOGE("Analysis channel initialization failed");
4786 pthread_mutex_unlock(&mMutex);
4787 goto error_exit;
4788 }
4789 }
4790 if (mDummyBatchChannel) {
4791 rc = mDummyBatchChannel->setBatchSize(mBatchSize);
4792 if (rc < 0) {
4793 LOGE("mDummyBatchChannel setBatchSize failed");
4794 pthread_mutex_unlock(&mMutex);
4795 goto error_exit;
4796 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004797 rc = mDummyBatchChannel->initialize(IS_TYPE_NONE);
Thierry Strudel3d639192016-09-09 11:52:26 -07004798 if (rc < 0) {
4799 LOGE("mDummyBatchChannel initialization failed");
4800 pthread_mutex_unlock(&mMutex);
4801 goto error_exit;
4802 }
4803 }
4804
4805 // Set bundle info
4806 rc = setBundleInfo();
4807 if (rc < 0) {
4808 LOGE("setBundleInfo failed %d", rc);
4809 pthread_mutex_unlock(&mMutex);
4810 goto error_exit;
4811 }
4812
4813 //update settings from app here
4814 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
4815 mIsDeviceLinked = meta.find(QCAMERA3_DUALCAM_LINK_ENABLE).data.u8[0];
4816 LOGH("Dualcam: setting On=%d id =%d", mIsDeviceLinked, mCameraId);
4817 }
4818 if (meta.exists(QCAMERA3_DUALCAM_LINK_IS_MAIN)) {
4819 mIsMainCamera = meta.find(QCAMERA3_DUALCAM_LINK_IS_MAIN).data.u8[0];
4820 LOGH("Dualcam: Is this main camera = %d id =%d", mIsMainCamera, mCameraId);
4821 }
4822 if (meta.exists(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID)) {
4823 mLinkedCameraId = meta.find(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID).data.u8[0];
4824 LOGH("Dualcam: Linked camera Id %d id =%d", mLinkedCameraId, mCameraId);
4825
4826 if ( (mLinkedCameraId >= MM_CAMERA_MAX_NUM_SENSORS) &&
4827 (mLinkedCameraId != mCameraId) ) {
4828 LOGE("Dualcam: mLinkedCameraId %d is invalid, current cam id = %d",
4829 mLinkedCameraId, mCameraId);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004830 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07004831 goto error_exit;
4832 }
4833 }
4834
4835 // add bundle related cameras
4836 LOGH("%s: Dualcam: id =%d, mIsDeviceLinked=%d", __func__,mCameraId, mIsDeviceLinked);
4837 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004838 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
4839 &m_pDualCamCmdPtr->bundle_info;
4840 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07004841 if (mIsDeviceLinked)
4842 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_ON;
4843 else
4844 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
4845
4846 pthread_mutex_lock(&gCamLock);
4847
4848 if (sessionId[mLinkedCameraId] == 0xDEADBEEF) {
4849 LOGE("Dualcam: Invalid Session Id ");
4850 pthread_mutex_unlock(&gCamLock);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004851 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07004852 goto error_exit;
4853 }
4854
4855 if (mIsMainCamera == 1) {
4856 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
4857 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07004858 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004859 m_pRelCamSyncBuf->cam_role = CAM_ROLE_BAYER;
Thierry Strudel3d639192016-09-09 11:52:26 -07004860 // related session id should be session id of linked session
4861 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
4862 } else {
4863 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
4864 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07004865 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004866 m_pRelCamSyncBuf->cam_role = CAM_ROLE_MONO;
Thierry Strudel3d639192016-09-09 11:52:26 -07004867 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
4868 }
Thierry Strudel2896d122017-02-23 19:18:03 -08004869 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07004870 pthread_mutex_unlock(&gCamLock);
4871
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004872 rc = mCameraHandle->ops->set_dual_cam_cmd(
4873 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07004874 if (rc < 0) {
4875 LOGE("Dualcam: link failed");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004876 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07004877 goto error_exit;
4878 }
4879 }
4880
4881 //Then start them.
4882 LOGH("Start META Channel");
4883 rc = mMetadataChannel->start();
4884 if (rc < 0) {
4885 LOGE("META channel start failed");
4886 pthread_mutex_unlock(&mMutex);
4887 goto error_exit;
4888 }
4889
4890 if (mAnalysisChannel) {
4891 rc = mAnalysisChannel->start();
4892 if (rc < 0) {
4893 LOGE("Analysis channel start failed");
4894 mMetadataChannel->stop();
4895 pthread_mutex_unlock(&mMutex);
4896 goto error_exit;
4897 }
4898 }
4899
4900 if (mSupportChannel) {
4901 rc = mSupportChannel->start();
4902 if (rc < 0) {
4903 LOGE("Support channel start failed");
4904 mMetadataChannel->stop();
4905 /* Although support and analysis are mutually exclusive today
4906               adding it in any case for future proofing */
4907 if (mAnalysisChannel) {
4908 mAnalysisChannel->stop();
4909 }
4910 pthread_mutex_unlock(&mMutex);
4911 goto error_exit;
4912 }
4913 }
4914 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4915 it != mStreamInfo.end(); it++) {
4916 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
4917 LOGH("Start Processing Channel mask=%d",
4918 channel->getStreamTypeMask());
4919 rc = channel->start();
4920 if (rc < 0) {
4921 LOGE("channel start failed");
4922 pthread_mutex_unlock(&mMutex);
4923 goto error_exit;
4924 }
4925 }
4926
4927 if (mRawDumpChannel) {
4928 LOGD("Starting raw dump stream");
4929 rc = mRawDumpChannel->start();
4930 if (rc != NO_ERROR) {
4931 LOGE("Error Starting Raw Dump Channel");
4932 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4933 it != mStreamInfo.end(); it++) {
4934 QCamera3Channel *channel =
4935 (QCamera3Channel *)(*it)->stream->priv;
4936 LOGH("Stopping Processing Channel mask=%d",
4937 channel->getStreamTypeMask());
4938 channel->stop();
4939 }
4940 if (mSupportChannel)
4941 mSupportChannel->stop();
4942 if (mAnalysisChannel) {
4943 mAnalysisChannel->stop();
4944 }
4945 mMetadataChannel->stop();
4946 pthread_mutex_unlock(&mMutex);
4947 goto error_exit;
4948 }
4949 }
4950
4951 if (mChannelHandle) {
4952
4953 rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
4954 mChannelHandle);
4955 if (rc != NO_ERROR) {
4956 LOGE("start_channel failed %d", rc);
4957 pthread_mutex_unlock(&mMutex);
4958 goto error_exit;
4959 }
4960 }
4961
4962 goto no_error;
4963error_exit:
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004964 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07004965 return rc;
4966no_error:
Thierry Strudel3d639192016-09-09 11:52:26 -07004967 mWokenUpByDaemon = false;
4968 mPendingLiveRequest = 0;
4969 mFirstConfiguration = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07004970 }
4971
Chien-Yu Chenee335912017-02-09 17:53:20 -08004972 // Enable HDR+ mode for the first PREVIEW_INTENT request.
4973 if (mHdrPlusClient != nullptr && !mFirstPreviewIntentSeen &&
4974 meta.exists(ANDROID_CONTROL_CAPTURE_INTENT) &&
4975 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0] ==
4976 ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW) {
4977 rc = enableHdrPlusModeLocked();
4978 if (rc != OK) {
4979 LOGE("%s: Failed to configure HDR+ streams.", __FUNCTION__);
4980 pthread_mutex_unlock(&mMutex);
4981 return rc;
4982 }
4983
4984 // Start HDR+ RAW source channel if AP provides RAW input buffers.
4985 if (mHdrPlusRawSrcChannel) {
4986 rc = mHdrPlusRawSrcChannel->start();
4987 if (rc != OK) {
4988 LOGE("Error Starting HDR+ RAW Channel");
4989 pthread_mutex_unlock(&mMutex);
4990 return rc;
4991 }
4992 }
4993 mFirstPreviewIntentSeen = true;
4994 }
4995
Thierry Strudel3d639192016-09-09 11:52:26 -07004996 uint32_t frameNumber = request->frame_number;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004997 cam_stream_ID_t streamsArray;
Thierry Strudel3d639192016-09-09 11:52:26 -07004998
4999 if (mFlushPerf) {
5000 //we cannot accept any requests during flush
5001 LOGE("process_capture_request cannot proceed during flush");
5002 pthread_mutex_unlock(&mMutex);
5003 return NO_ERROR; //should return an error
5004 }
5005
5006 if (meta.exists(ANDROID_REQUEST_ID)) {
5007 request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
5008 mCurrentRequestId = request_id;
5009 LOGD("Received request with id: %d", request_id);
5010 } else if (mState == CONFIGURED || mCurrentRequestId == -1){
5011        LOGE("Unable to find request id field, "
5012                "& no previous id available");
5013 pthread_mutex_unlock(&mMutex);
5014 return NAME_NOT_FOUND;
5015 } else {
5016 LOGD("Re-using old request id");
5017 request_id = mCurrentRequestId;
5018 }
5019
5020 LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
5021 request->num_output_buffers,
5022 request->input_buffer,
5023 frameNumber);
5024 // Acquire all request buffers first
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005025 streamsArray.num_streams = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07005026 int blob_request = 0;
Emilian Peev7650c122017-01-19 08:24:33 -08005027 bool depthRequestPresent = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005028 uint32_t snapshotStreamId = 0;
5029 for (size_t i = 0; i < request->num_output_buffers; i++) {
5030 const camera3_stream_buffer_t& output = request->output_buffers[i];
5031 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5032
Emilian Peev7650c122017-01-19 08:24:33 -08005033 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5034 (output.stream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005035 //FIXME??:Call function to store local copy of jpeg data for encode params.
Thierry Strudel3d639192016-09-09 11:52:26 -07005036 blob_request = 1;
5037 snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
5038 }
5039
5040 if (output.acquire_fence != -1) {
5041 rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
5042 close(output.acquire_fence);
5043 if (rc != OK) {
5044 LOGE("sync wait failed %d", rc);
5045 pthread_mutex_unlock(&mMutex);
5046 return rc;
5047 }
5048 }
5049
Emilian Peev7650c122017-01-19 08:24:33 -08005050 if (output.stream->data_space == HAL_DATASPACE_DEPTH) {
5051 depthRequestPresent = true;
5052 continue;
5053 }
5054
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005055 streamsArray.stream_request[streamsArray.num_streams++].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005056 channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel3d639192016-09-09 11:52:26 -07005057
5058 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5059 isVidBufRequested = true;
5060 }
5061 }
5062
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005063 //FIXME: Add checks to ensure to dups in validateCaptureRequest
5064 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5065 itr++) {
5066 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5067 streamsArray.stream_request[streamsArray.num_streams++].streamID =
5068 channel->getStreamID(channel->getStreamTypeMask());
5069
5070 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5071 isVidBufRequested = true;
5072 }
5073 }
5074
Thierry Strudel3d639192016-09-09 11:52:26 -07005075 if (blob_request) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005076 KPI_ATRACE_CAMSCOPE_INT("SNAPSHOT", CAMSCOPE_HAL3_SNAPSHOT, 1);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005077 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
Thierry Strudel3d639192016-09-09 11:52:26 -07005078 }
5079 if (blob_request && mRawDumpChannel) {
5080 LOGD("Trigger Raw based on blob request if Raw dump is enabled");
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005081 streamsArray.stream_request[streamsArray.num_streams].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005082 mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005083 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
Thierry Strudel3d639192016-09-09 11:52:26 -07005084 }
5085
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005086 {
5087 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5088 // Request a RAW buffer if
5089 // 1. mHdrPlusRawSrcChannel is valid.
5090 // 2. frameNumber is multiples of kHdrPlusRawPeriod (in order to limit RAW capture rate.)
5091 // 3. There is no pending HDR+ request.
5092 if (mHdrPlusRawSrcChannel && frameNumber % kHdrPlusRawPeriod == 0 &&
5093 mHdrPlusPendingRequests.size() == 0) {
5094 streamsArray.stream_request[streamsArray.num_streams].streamID =
5095 mHdrPlusRawSrcChannel->getStreamID(mHdrPlusRawSrcChannel->getStreamTypeMask());
5096 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
5097 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005098 }
5099
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005100 //extract capture intent
5101 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
5102 mCaptureIntent =
5103 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
5104 }
5105
5106 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5107 mCacMode =
5108 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5109 }
5110
5111 bool hdrPlusRequest = false;
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005112 HdrPlusPendingRequest pendingHdrPlusRequest = {};
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005113
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -08005114 // If this request has a still capture intent, try to submit an HDR+ request.
Chien-Yu Chenee335912017-02-09 17:53:20 -08005115 if (mHdrPlusClient != nullptr && mHdrPlusModeEnabled &&
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -08005116 mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE) {
5117 hdrPlusRequest = trySubmittingHdrPlusRequest(&pendingHdrPlusRequest, *request, meta);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005118 }
5119
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005120 if (hdrPlusRequest) {
5121 // For a HDR+ request, just set the frame parameters.
5122 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
5123 if (rc < 0) {
5124 LOGE("fail to set frame parameters");
5125 pthread_mutex_unlock(&mMutex);
5126 return rc;
5127 }
5128 } else if(request->input_buffer == NULL) {
Thierry Strudel3d639192016-09-09 11:52:26 -07005129 /* Parse the settings:
5130 * - For every request in NORMAL MODE
5131 * - For every request in HFR mode during preview only case
5132 * - For first request of every batch in HFR mode during video
5133 * recording. In batchmode the same settings except frame number is
5134 * repeated in each request of the batch.
5135 */
5136 if (!mBatchSize ||
5137 (mBatchSize && !isVidBufRequested) ||
5138 (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005139 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
Thierry Strudel3d639192016-09-09 11:52:26 -07005140 if (rc < 0) {
5141 LOGE("fail to set frame parameters");
5142 pthread_mutex_unlock(&mMutex);
5143 return rc;
5144 }
5145 }
5146 /* For batchMode HFR, setFrameParameters is not called for every
5147 * request. But only frame number of the latest request is parsed.
5148 * Keep track of first and last frame numbers in a batch so that
5149 * metadata for the frame numbers of batch can be duplicated in
5150         * handleBatchMetadata */
5151 if (mBatchSize) {
5152 if (!mToBeQueuedVidBufs) {
5153 //start of the batch
5154 mFirstFrameNumberInBatch = request->frame_number;
5155 }
5156 if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5157 CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
5158 LOGE("Failed to set the frame number in the parameters");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005159 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005160 return BAD_VALUE;
5161 }
5162 }
5163 if (mNeedSensorRestart) {
5164 /* Unlock the mutex as restartSensor waits on the channels to be
5165 * stopped, which in turn calls stream callback functions -
5166 * handleBufferWithLock and handleMetadataWithLock */
5167 pthread_mutex_unlock(&mMutex);
5168 rc = dynamicUpdateMetaStreamInfo();
5169 if (rc != NO_ERROR) {
5170 LOGE("Restarting the sensor failed");
5171 return BAD_VALUE;
5172 }
5173 mNeedSensorRestart = false;
5174 pthread_mutex_lock(&mMutex);
5175 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005176 if(mResetInstantAEC) {
5177 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5178 CAM_INTF_PARM_INSTANT_AEC, (uint8_t)CAM_AEC_NORMAL_CONVERGENCE);
5179 mResetInstantAEC = false;
5180 }
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005181 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005182 if (request->input_buffer->acquire_fence != -1) {
5183 rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
5184 close(request->input_buffer->acquire_fence);
5185 if (rc != OK) {
5186 LOGE("input buffer sync wait failed %d", rc);
5187 pthread_mutex_unlock(&mMutex);
5188 return rc;
5189 }
5190 }
5191 }
5192
5193 if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
5194 mLastCustIntentFrmNum = frameNumber;
5195 }
5196 /* Update pending request list and pending buffers map */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005197 PendingRequestInfo pendingRequest = {};
Thierry Strudel3d639192016-09-09 11:52:26 -07005198 pendingRequestIterator latestRequest;
5199 pendingRequest.frame_number = frameNumber;
Emilian Peev7650c122017-01-19 08:24:33 -08005200 pendingRequest.num_buffers = depthRequestPresent ?
5201 (request->num_output_buffers - 1 ) : request->num_output_buffers;
Thierry Strudel3d639192016-09-09 11:52:26 -07005202 pendingRequest.request_id = request_id;
5203 pendingRequest.blob_request = blob_request;
5204 pendingRequest.timestamp = 0;
5205 pendingRequest.bUrgentReceived = 0;
5206 if (request->input_buffer) {
5207 pendingRequest.input_buffer =
5208 (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
5209 *(pendingRequest.input_buffer) = *(request->input_buffer);
5210 pInputBuffer = pendingRequest.input_buffer;
5211 } else {
5212 pendingRequest.input_buffer = NULL;
5213 pInputBuffer = NULL;
5214 }
5215
5216 pendingRequest.pipeline_depth = 0;
5217 pendingRequest.partial_result_cnt = 0;
5218 extractJpegMetadata(mCurJpegMeta, request);
5219 pendingRequest.jpegMetadata = mCurJpegMeta;
5220 pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
5221 pendingRequest.shutter_notified = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005222 pendingRequest.capture_intent = mCaptureIntent;
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07005223 if (meta.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
5224 mHybridAeEnable =
5225 meta.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8[0];
5226 }
5227 pendingRequest.hybrid_ae_enable = mHybridAeEnable;
Samuel Ha68ba5172016-12-15 18:41:12 -08005228 /* DevCamDebug metadata processCaptureRequest */
5229 if (meta.exists(DEVCAMDEBUG_META_ENABLE)) {
5230 mDevCamDebugMetaEnable =
5231 meta.find(DEVCAMDEBUG_META_ENABLE).data.u8[0];
5232 }
5233 pendingRequest.DevCamDebug_meta_enable = mDevCamDebugMetaEnable;
5234 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07005235
5236 //extract CAC info
5237 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5238 mCacMode =
5239 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5240 }
5241 pendingRequest.fwkCacMode = mCacMode;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005242 pendingRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005243
5244 PendingBuffersInRequest bufsForCurRequest;
5245 bufsForCurRequest.frame_number = frameNumber;
5246 // Mark current timestamp for the new request
5247 bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005248 bufsForCurRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005249
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005250 if (hdrPlusRequest) {
5251 // Save settings for this request.
5252 pendingHdrPlusRequest.settings = std::make_shared<metadata_buffer_t>();
5253 memcpy(pendingHdrPlusRequest.settings.get(), mParameters, sizeof(metadata_buffer_t));
5254
5255 // Add to pending HDR+ request queue.
5256 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5257 mHdrPlusPendingRequests.emplace(frameNumber, pendingHdrPlusRequest);
5258
5259 ALOGD("%s: frame number %u is an HDR+ request.", __FUNCTION__, frameNumber);
5260 }
5261
Thierry Strudel3d639192016-09-09 11:52:26 -07005262 for (size_t i = 0; i < request->num_output_buffers; i++) {
Emilian Peev7650c122017-01-19 08:24:33 -08005263 if (request->output_buffers[i].stream->data_space ==
5264 HAL_DATASPACE_DEPTH) {
5265 continue;
5266 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005267 RequestedBufferInfo requestedBuf;
5268 memset(&requestedBuf, 0, sizeof(requestedBuf));
5269 requestedBuf.stream = request->output_buffers[i].stream;
5270 requestedBuf.buffer = NULL;
5271 pendingRequest.buffers.push_back(requestedBuf);
5272
5273 // Add to buffer handle the pending buffers list
5274 PendingBufferInfo bufferInfo;
5275 bufferInfo.buffer = request->output_buffers[i].buffer;
5276 bufferInfo.stream = request->output_buffers[i].stream;
5277 bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
5278 QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
5279 LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
5280 frameNumber, bufferInfo.buffer,
5281 channel->getStreamTypeMask(), bufferInfo.stream->format);
5282 }
5283 // Add this request packet into mPendingBuffersMap
5284 mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
5285 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
5286 mPendingBuffersMap.get_num_overall_buffers());
5287
5288 latestRequest = mPendingRequestsList.insert(
5289 mPendingRequestsList.end(), pendingRequest);
5290 if(mFlush) {
5291 LOGI("mFlush is true");
5292 pthread_mutex_unlock(&mMutex);
5293 return NO_ERROR;
5294 }
5295
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005296 // If this is not an HDR+ request, send the request to metadata and each output buffer's
5297 // channel.
5298 if (!hdrPlusRequest) {
5299 int indexUsed;
5300 // Notify metadata channel we receive a request
5301 mMetadataChannel->request(NULL, frameNumber, indexUsed);
Thierry Strudel3d639192016-09-09 11:52:26 -07005302
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005303 if(request->input_buffer != NULL){
5304 LOGD("Input request, frame_number %d", frameNumber);
5305 rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
5306 if (NO_ERROR != rc) {
5307 LOGE("fail to set reproc parameters");
5308 pthread_mutex_unlock(&mMutex);
5309 return rc;
5310 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005311 }
5312
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005313 // Call request on other streams
5314 uint32_t streams_need_metadata = 0;
5315 pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
5316 for (size_t i = 0; i < request->num_output_buffers; i++) {
5317 const camera3_stream_buffer_t& output = request->output_buffers[i];
5318 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5319
5320 if (channel == NULL) {
5321 LOGW("invalid channel pointer for stream");
5322 continue;
5323 }
5324
5325 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
5326 LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
5327 output.buffer, request->input_buffer, frameNumber);
5328 if(request->input_buffer != NULL){
Thierry Strudel3d639192016-09-09 11:52:26 -07005329 rc = channel->request(output.buffer, frameNumber,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005330 pInputBuffer, &mReprocMeta, indexUsed, false, false);
5331 if (rc < 0) {
5332 LOGE("Fail to request on picture channel");
5333 pthread_mutex_unlock(&mMutex);
5334 return rc;
5335 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005336 } else {
Emilian Peev7650c122017-01-19 08:24:33 -08005337 if (HAL_DATASPACE_DEPTH == output.stream->data_space) {
5338 assert(NULL != mDepthChannel);
5339 assert(mDepthChannel == output.stream->priv);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005340
Emilian Peev7650c122017-01-19 08:24:33 -08005341 rc = mDepthChannel->mapBuffer(output.buffer, request->frame_number);
5342 if (rc < 0) {
5343 LOGE("Fail to map on depth buffer");
5344 pthread_mutex_unlock(&mMutex);
5345 return rc;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005346 }
Emilian Peev7650c122017-01-19 08:24:33 -08005347 } else {
5348 LOGD("snapshot request with buffer %p, frame_number %d",
5349 output.buffer, frameNumber);
5350 if (!request->settings) {
5351 rc = channel->request(output.buffer, frameNumber,
5352 NULL, mPrevParameters, indexUsed);
5353 } else {
5354 rc = channel->request(output.buffer, frameNumber,
5355 NULL, mParameters, indexUsed);
5356 }
5357 if (rc < 0) {
5358 LOGE("Fail to request on picture channel");
5359 pthread_mutex_unlock(&mMutex);
5360 return rc;
5361 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005362
Emilian Peev7650c122017-01-19 08:24:33 -08005363 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5364 uint32_t j = 0;
5365 for (j = 0; j < streamsArray.num_streams; j++) {
5366 if (streamsArray.stream_request[j].streamID == streamId) {
5367 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5368 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5369 else
5370 streamsArray.stream_request[j].buf_index = indexUsed;
5371 break;
5372 }
5373 }
5374 if (j == streamsArray.num_streams) {
5375 LOGE("Did not find matching stream to update index");
5376 assert(0);
5377 }
5378
5379 pendingBufferIter->need_metadata = true;
5380 streams_need_metadata++;
5381 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005382 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005383 } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
5384 bool needMetadata = false;
5385 QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
5386 rc = yuvChannel->request(output.buffer, frameNumber,
5387 pInputBuffer, (pInputBuffer ? &mReprocMeta : mParameters),
5388 needMetadata, indexUsed, false, false);
Thierry Strudel3d639192016-09-09 11:52:26 -07005389 if (rc < 0) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005390 LOGE("Fail to request on YUV channel");
Thierry Strudel3d639192016-09-09 11:52:26 -07005391 pthread_mutex_unlock(&mMutex);
5392 return rc;
5393 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005394
5395 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5396 uint32_t j = 0;
5397 for (j = 0; j < streamsArray.num_streams; j++) {
5398 if (streamsArray.stream_request[j].streamID == streamId) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005399 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5400 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5401 else
5402 streamsArray.stream_request[j].buf_index = indexUsed;
5403 break;
5404 }
5405 }
5406 if (j == streamsArray.num_streams) {
5407 LOGE("Did not find matching stream to update index");
5408 assert(0);
5409 }
5410
5411 pendingBufferIter->need_metadata = needMetadata;
5412 if (needMetadata)
5413 streams_need_metadata += 1;
5414 LOGD("calling YUV channel request, need_metadata is %d",
5415 needMetadata);
5416 } else {
5417 LOGD("request with buffer %p, frame_number %d",
5418 output.buffer, frameNumber);
5419
5420 rc = channel->request(output.buffer, frameNumber, indexUsed);
5421
5422 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5423 uint32_t j = 0;
5424 for (j = 0; j < streamsArray.num_streams; j++) {
5425 if (streamsArray.stream_request[j].streamID == streamId) {
5426 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5427 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5428 else
5429 streamsArray.stream_request[j].buf_index = indexUsed;
5430 break;
5431 }
5432 }
5433 if (j == streamsArray.num_streams) {
5434 LOGE("Did not find matching stream to update index");
5435 assert(0);
5436 }
5437
5438 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5439 && mBatchSize) {
5440 mToBeQueuedVidBufs++;
5441 if (mToBeQueuedVidBufs == mBatchSize) {
5442 channel->queueBatchBuf();
5443 }
5444 }
5445 if (rc < 0) {
5446 LOGE("request failed");
5447 pthread_mutex_unlock(&mMutex);
5448 return rc;
5449 }
5450 }
5451 pendingBufferIter++;
5452 }
5453
5454 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5455 itr++) {
5456 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5457
5458 if (channel == NULL) {
5459 LOGE("invalid channel pointer for stream");
5460 assert(0);
5461 return BAD_VALUE;
5462 }
5463
5464 InternalRequest requestedStream;
5465 requestedStream = (*itr);
5466
5467
5468 if ((*itr).stream->format == HAL_PIXEL_FORMAT_BLOB) {
5469 LOGD("snapshot request internally input buffer %p, frame_number %d",
5470 request->input_buffer, frameNumber);
5471 if(request->input_buffer != NULL){
5472 rc = channel->request(NULL, frameNumber,
5473 pInputBuffer, &mReprocMeta, indexUsed, true,
5474 requestedStream.meteringOnly);
5475 if (rc < 0) {
5476 LOGE("Fail to request on picture channel");
5477 pthread_mutex_unlock(&mMutex);
5478 return rc;
5479 }
5480 } else {
5481 LOGD("snapshot request with frame_number %d", frameNumber);
5482 if (!request->settings) {
5483 rc = channel->request(NULL, frameNumber,
5484 NULL, mPrevParameters, indexUsed, true,
5485 requestedStream.meteringOnly);
5486 } else {
5487 rc = channel->request(NULL, frameNumber,
5488 NULL, mParameters, indexUsed, true, requestedStream.meteringOnly);
5489 }
5490 if (rc < 0) {
5491 LOGE("Fail to request on picture channel");
5492 pthread_mutex_unlock(&mMutex);
5493 return rc;
5494 }
5495
5496 if ((*itr).meteringOnly != 1) {
5497 requestedStream.need_metadata = 1;
5498 streams_need_metadata++;
5499 }
5500 }
5501
5502 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5503 uint32_t j = 0;
5504 for (j = 0; j < streamsArray.num_streams; j++) {
5505 if (streamsArray.stream_request[j].streamID == streamId) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005506 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5507 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5508 else
5509 streamsArray.stream_request[j].buf_index = indexUsed;
5510 break;
5511 }
5512 }
5513 if (j == streamsArray.num_streams) {
5514 LOGE("Did not find matching stream to update index");
5515 assert(0);
5516 }
5517
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005518 } else {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005519 LOGE("Internal requests not supported on this stream type");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005520 assert(0);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005521 return INVALID_OPERATION;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005522 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005523 latestRequest->internalRequestList.push_back(requestedStream);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005524 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005525
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005526 //If 2 streams have need_metadata set to true, fail the request, unless
5527 //we copy/reference count the metadata buffer
5528 if (streams_need_metadata > 1) {
5529 LOGE("not supporting request in which two streams requires"
5530 " 2 HAL metadata for reprocessing");
5531 pthread_mutex_unlock(&mMutex);
5532 return -EINVAL;
5533 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005534
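// Ask the backend to deliver PDAF data only when this request actually
// contains a depth output buffer (depthRequestPresent).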
Emilian Peev7650c122017-01-19 08:24:33 -08005535 int32_t pdafEnable = depthRequestPresent ? 1 : 0;
5536 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5537 CAM_INTF_META_PDAF_DATA_ENABLE, pdafEnable)) {
5538 LOGE("%s: Failed to enable PDAF data in parameters!", __func__);
5539 pthread_mutex_unlock(&mMutex);
5540 return BAD_VALUE;
5541 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005542 if (request->input_buffer == NULL) {
5543 /* Set the parameters to backend:
5544 * - For every request in NORMAL MODE
5545 * - For every request in HFR mode during preview only case
5546 * - Once every batch in HFR mode during video recording
5547 */
5548 if (!mBatchSize ||
5549 (mBatchSize && !isVidBufRequested) ||
5550 (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
5551 LOGD("set_parms batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
5552 mBatchSize, isVidBufRequested,
5553 mToBeQueuedVidBufs);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005554
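// When the video batch is full, merge the stream requests accumulated over the
// batch (mBatchedStreamsArray) into this request's stream list so that the
// single set_parms call below covers every stream touched during the batch.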
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005555 if(mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize)) {
5556 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5557 uint32_t m = 0;
5558 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5559 if (streamsArray.stream_request[k].streamID ==
5560 mBatchedStreamsArray.stream_request[m].streamID)
5561 break;
5562 }
5563 if (m == mBatchedStreamsArray.num_streams) {
5564 mBatchedStreamsArray.stream_request\
5565 [mBatchedStreamsArray.num_streams].streamID =
5566 streamsArray.stream_request[k].streamID;
5567 mBatchedStreamsArray.stream_request\
5568 [mBatchedStreamsArray.num_streams].buf_index =
5569 streamsArray.stream_request[k].buf_index;
5570 mBatchedStreamsArray.num_streams =
5571 mBatchedStreamsArray.num_streams + 1;
5572 }
5573 }
5574 streamsArray = mBatchedStreamsArray;
5575 }
5576 /* Update stream id of all the requested buffers */
5577 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID,
5578 streamsArray)) {
5579 LOGE("Failed to set stream type mask in the parameters");
5580 return BAD_VALUE;
5581 }
5582
5583 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5584 mParameters);
5585 if (rc < 0) {
5586 LOGE("set_parms failed");
5587 }
5588 /* reset to zero because the batch is queued */
5589 mToBeQueuedVidBufs = 0;
5590 mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
5591 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
5592 } else if (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs != mBatchSize)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005593 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5594 uint32_t m = 0;
5595 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5596 if (streamsArray.stream_request[k].streamID ==
5597 mBatchedStreamsArray.stream_request[m].streamID)
5598 break;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005599 }
5600 if (m == mBatchedStreamsArray.num_streams) {
5601 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5602 streamID = streamsArray.stream_request[k].streamID;
5603 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5604 buf_index = streamsArray.stream_request[k].buf_index;
5605 mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
5606 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005607 }
5608 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005609 mPendingLiveRequest++;
Thierry Strudel3d639192016-09-09 11:52:26 -07005610 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005611 }
5612
5613 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
5614
5615 mState = STARTED;
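// Flow control: block this process_capture_request call until the number of
// in-flight requests drops below mMinInFlightRequests. Reprocess requests
// (pInputBuffer != NULL) are not throttled, and the wait is bounded by a
// timeout so a stalled pipeline surfaces as -ENODEV instead of hanging.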
5616 // Set up a timed condition wait
5617 struct timespec ts;
5618 uint8_t isValidTimeout = 1;
Shuzhen Wangfb961e52016-11-28 11:48:02 -08005619 rc = clock_gettime(CLOCK_MONOTONIC, &ts);
Thierry Strudel3d639192016-09-09 11:52:26 -07005620 if (rc < 0) {
5621 isValidTimeout = 0;
5622 LOGE("Error reading the real time clock!!");
5623 }
5624 else {
5625 // Use a 5 second timeout for the request to be honored
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005626 int64_t timeout = 5;
5627 {
5628 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5629 // If there is a pending HDR+ request, the following requests may be blocked until the
5630 // HDR+ request is done. So allow a longer timeout.
5631 if (mHdrPlusPendingRequests.size() > 0) {
5632 timeout = MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT;
5633 }
5634 }
5635 ts.tv_sec += timeout;
Thierry Strudel3d639192016-09-09 11:52:26 -07005636 }
5637 //Block on conditional variable
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005638 while ((mPendingLiveRequest >= mMinInFlightRequests) && !pInputBuffer &&
Thierry Strudel3d639192016-09-09 11:52:26 -07005639 (mState != ERROR) && (mState != DEINIT)) {
5640 if (!isValidTimeout) {
5641 LOGD("Blocking on conditional wait");
5642 pthread_cond_wait(&mRequestCond, &mMutex);
5643 }
5644 else {
5645 LOGD("Blocking on timed conditional wait");
5646 rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
5647 if (rc == ETIMEDOUT) {
5648 rc = -ENODEV;
5649 LOGE("Unblocked on timeout!!!!");
5650 break;
5651 }
5652 }
5653 LOGD("Unblocked");
5654 if (mWokenUpByDaemon) {
5655 mWokenUpByDaemon = false;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005656 if (mPendingLiveRequest < mMaxInFlightRequests)
Thierry Strudel3d639192016-09-09 11:52:26 -07005657 break;
5658 }
5659 }
5660 pthread_mutex_unlock(&mMutex);
5661
5662 return rc;
5663}
5664
5665/*===========================================================================
5666 * FUNCTION : dump
5667 *
5668 * DESCRIPTION: Dump pending request, pending buffer and pending frame drop
5669 * information to the given file descriptor (used by dumpsys)
5670 * PARAMETERS :
5671 * @fd : file descriptor to write the dump to
5672 *
5673 * RETURN : None
5674 *==========================================================================*/
5675void QCamera3HardwareInterface::dump(int fd)
5676{
5677 pthread_mutex_lock(&mMutex);
5678 dprintf(fd, "\n Camera HAL3 information Begin \n");
5679
5680 dprintf(fd, "\nNumber of pending requests: %zu \n",
5681 mPendingRequestsList.size());
5682 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
5683 dprintf(fd, " Frame | Number of Buffers | Req Id: | Blob Req | Input buffer present\n");
5684 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
5685 for(pendingRequestIterator i = mPendingRequestsList.begin();
5686 i != mPendingRequestsList.end(); i++) {
5687 dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
5688 i->frame_number, i->num_buffers, i->request_id, i->blob_request,
5689 i->input_buffer);
5690 }
5691 dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
5692 mPendingBuffersMap.get_num_overall_buffers());
5693 dprintf(fd, "-------+------------------\n");
5694 dprintf(fd, " Frame | Stream type mask \n");
5695 dprintf(fd, "-------+------------------\n");
5696 for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
5697 for(auto &j : req.mPendingBufferList) {
5698 QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
5699 dprintf(fd, " %5d | %11d \n",
5700 req.frame_number, channel->getStreamTypeMask());
5701 }
5702 }
5703 dprintf(fd, "-------+------------------\n");
5704
5705 dprintf(fd, "\nPending frame drop list: %zu\n",
5706 mPendingFrameDropList.size());
5707 dprintf(fd, "-------+-----------\n");
5708 dprintf(fd, " Frame | Stream ID \n");
5709 dprintf(fd, "-------+-----------\n");
5710 for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
5711 i != mPendingFrameDropList.end(); i++) {
5712 dprintf(fd, " %5d | %9d \n",
5713 i->frame_number, i->stream_ID);
5714 }
5715 dprintf(fd, "-------+-----------\n");
5716
5717 dprintf(fd, "\n Camera HAL3 information End \n");
5718
5719 /* use dumpsys media.camera as trigger to send update debug level event */
5720 mUpdateDebugLevel = true;
5721 pthread_mutex_unlock(&mMutex);
5722 return;
5723}
5724
5725/*===========================================================================
5726 * FUNCTION : flush
5727 *
5728 * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
5729 * conditionally restarts channels
5730 *
5731 * PARAMETERS :
5732 * @ restartChannels: re-start all channels
5733 *
5734 *
5735 * RETURN :
5736 * 0 on success
5737 * Error code on failure
5738 *==========================================================================*/
5739int QCamera3HardwareInterface::flush(bool restartChannels)
5740{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08005741 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07005742 int32_t rc = NO_ERROR;
5743
5744 LOGD("Unblocking Process Capture Request");
5745 pthread_mutex_lock(&mMutex);
5746 mFlush = true;
5747 pthread_mutex_unlock(&mMutex);
5748
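// Flush sequence: stop all channels, unlink a linked dual-camera session if
// present, stop the bundled channel, reset bundle info, error out all pending
// requests, and optionally restart the channels.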
5749 rc = stopAllChannels();
5750 // unlink of dualcam
5751 if (mIsDeviceLinked) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005752 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
5753 &m_pDualCamCmdPtr->bundle_info;
5754 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005755 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
5756 pthread_mutex_lock(&gCamLock);
5757
5758 if (mIsMainCamera == 1) {
5759 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
5760 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005761 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07005762 // related session id should be session id of linked session
5763 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5764 } else {
5765 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
5766 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005767 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07005768 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5769 }
Thierry Strudel2896d122017-02-23 19:18:03 -08005770 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07005771 pthread_mutex_unlock(&gCamLock);
5772
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005773 rc = mCameraHandle->ops->set_dual_cam_cmd(
5774 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07005775 if (rc < 0) {
5776 LOGE("Dualcam: Unlink failed, but still proceed to close");
5777 }
5778 }
5779
5780 if (rc < 0) {
5781 LOGE("stopAllChannels failed");
5782 return rc;
5783 }
5784 if (mChannelHandle) {
5785 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
5786 mChannelHandle);
5787 }
5788
5789 // Reset bundle info
5790 rc = setBundleInfo();
5791 if (rc < 0) {
5792 LOGE("setBundleInfo failed %d", rc);
5793 return rc;
5794 }
5795
5796 // Mutex Lock
5797 pthread_mutex_lock(&mMutex);
5798
5799 // Unblock process_capture_request
5800 mPendingLiveRequest = 0;
5801 pthread_cond_signal(&mRequestCond);
5802
5803 rc = notifyErrorForPendingRequests();
5804 if (rc < 0) {
5805 LOGE("notifyErrorForPendingRequests failed");
5806 pthread_mutex_unlock(&mMutex);
5807 return rc;
5808 }
5809
5810 mFlush = false;
5811
5812 // Start the Streams/Channels
5813 if (restartChannels) {
5814 rc = startAllChannels();
5815 if (rc < 0) {
5816 LOGE("startAllChannels failed");
5817 pthread_mutex_unlock(&mMutex);
5818 return rc;
5819 }
Thierry Strudel2896d122017-02-23 19:18:03 -08005820 if (mChannelHandle) {
5821 mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
5822 mChannelHandle);
5823 if (rc < 0) {
5824 LOGE("start_channel failed");
5825 pthread_mutex_unlock(&mMutex);
5826 return rc;
5827 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005828 }
5829 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005830 pthread_mutex_unlock(&mMutex);
5831
5832 return 0;
5833}
5834
5835/*===========================================================================
5836 * FUNCTION : flushPerf
5837 *
5838 * DESCRIPTION: Performance-optimized version of flush that does not stream
5839 * off; instead it flushes the backend and waits for all pending
5840 * buffers to return
5841 * PARAMETERS : None
5842 *
5843 *
5844 * RETURN : 0 : success
5845 * -EINVAL: input is malformed (device is not valid)
5846 * -ENODEV: if the device has encountered a serious error
5847 *==========================================================================*/
5848int QCamera3HardwareInterface::flushPerf()
5849{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08005850 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07005851 int32_t rc = 0;
5852 struct timespec timeout;
5853 bool timed_wait = false;
5854
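// Performance flush: instead of streaming off, issue the backend flush ioctl
// and then wait (bounded by FLUSH_TIMEOUT) on mBuffersCond until every pending
// buffer has been returned, before erroring out the pending requests.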
5855 pthread_mutex_lock(&mMutex);
5856 mFlushPerf = true;
5857 mPendingBuffersMap.numPendingBufsAtFlush =
5858 mPendingBuffersMap.get_num_overall_buffers();
5859 LOGD("Calling flush. Wait for %d buffers to return",
5860 mPendingBuffersMap.numPendingBufsAtFlush);
5861
5862 /* send the flush event to the backend */
5863 rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
5864 if (rc < 0) {
5865 LOGE("Error in flush: IOCTL failure");
5866 mFlushPerf = false;
5867 pthread_mutex_unlock(&mMutex);
5868 return -ENODEV;
5869 }
5870
5871 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
5872 LOGD("No pending buffers in HAL, return flush");
5873 mFlushPerf = false;
5874 pthread_mutex_unlock(&mMutex);
5875 return rc;
5876 }
5877
5878 /* wait on a signal that buffers were received */
Shuzhen Wangfb961e52016-11-28 11:48:02 -08005879 rc = clock_gettime(CLOCK_MONOTONIC, &timeout);
Thierry Strudel3d639192016-09-09 11:52:26 -07005880 if (rc < 0) {
5881 LOGE("Error reading the real time clock, cannot use timed wait");
5882 } else {
5883 timeout.tv_sec += FLUSH_TIMEOUT;
5884 timed_wait = true;
5885 }
5886
5887 //Block on conditional variable
5888 while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
5889 LOGD("Waiting on mBuffersCond");
5890 if (!timed_wait) {
5891 rc = pthread_cond_wait(&mBuffersCond, &mMutex);
5892 if (rc != 0) {
5893 LOGE("pthread_cond_wait failed due to rc = %s",
5894 strerror(rc));
5895 break;
5896 }
5897 } else {
5898 rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
5899 if (rc != 0) {
5900 LOGE("pthread_cond_timedwait failed due to rc = %s",
5901 strerror(rc));
5902 break;
5903 }
5904 }
5905 }
5906 if (rc != 0) {
5907 mFlushPerf = false;
5908 pthread_mutex_unlock(&mMutex);
5909 return -ENODEV;
5910 }
5911
5912 LOGD("Received buffers, now safe to return them");
5913
5914 //make sure the channels handle flush
5915 //currently only required for the picture channel to release snapshot resources
5916 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5917 it != mStreamInfo.end(); it++) {
5918 QCamera3Channel *channel = (*it)->channel;
5919 if (channel) {
5920 rc = channel->flush();
5921 if (rc) {
5922 LOGE("Flushing the channels failed with error %d", rc);
5923 // Even though the channel flush failed, we need to continue and
5924 // return the buffers we have to the framework; however, the return
5925 // value will be an error
5926 rc = -ENODEV;
5927 }
5928 }
5929 }
5930
5931 /* notify the frameworks and send errored results */
5932 rc = notifyErrorForPendingRequests();
5933 if (rc < 0) {
5934 LOGE("notifyErrorForPendingRequests failed");
5935 pthread_mutex_unlock(&mMutex);
5936 return rc;
5937 }
5938
5939 //unblock process_capture_request
5940 mPendingLiveRequest = 0;
5941 unblockRequestIfNecessary();
5942
5943 mFlushPerf = false;
5944 pthread_mutex_unlock(&mMutex);
5945 LOGD ("Flush Operation complete. rc = %d", rc);
5946 return rc;
5947}
5948
5949/*===========================================================================
5950 * FUNCTION : handleCameraDeviceError
5951 *
5952 * DESCRIPTION: This function calls internal flush and notifies the error to
5953 * framework and updates the state variable.
5954 *
5955 * PARAMETERS : None
5956 *
5957 * RETURN : NO_ERROR on Success
5958 * Error code on failure
5959 *==========================================================================*/
5960int32_t QCamera3HardwareInterface::handleCameraDeviceError()
5961{
5962 int32_t rc = NO_ERROR;
5963
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005964 {
5965 Mutex::Autolock lock(mFlushLock);
5966 pthread_mutex_lock(&mMutex);
5967 if (mState != ERROR) {
5968 //if mState != ERROR, nothing to be done
5969 pthread_mutex_unlock(&mMutex);
5970 return NO_ERROR;
5971 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005972 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005973
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005974 rc = flush(false /* restart channels */);
5975 if (NO_ERROR != rc) {
5976 LOGE("internal flush to handle mState = ERROR failed");
5977 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005978
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005979 pthread_mutex_lock(&mMutex);
5980 mState = DEINIT;
5981 pthread_mutex_unlock(&mMutex);
5982 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005983
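// Notify the framework of the fatal condition: a CAMERA3_MSG_ERROR_DEVICE
// message carries no stream and uses frame_number 0.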
5984 camera3_notify_msg_t notify_msg;
5985 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
5986 notify_msg.type = CAMERA3_MSG_ERROR;
5987 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
5988 notify_msg.message.error.error_stream = NULL;
5989 notify_msg.message.error.frame_number = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005990 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07005991
5992 return rc;
5993}
5994
5995/*===========================================================================
5996 * FUNCTION : captureResultCb
5997 *
5998 * DESCRIPTION: Callback handler for all capture result
5999 * (streams, as well as metadata)
6000 *
6001 * PARAMETERS :
6002 * @metadata : metadata information
6003 * @buffer : actual gralloc buffer to be returned to frameworks.
6004 * NULL if metadata.
6005 *
6006 * RETURN : NONE
6007 *==========================================================================*/
6008void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
6009 camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
6010{
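// Dispatch based on what the callback carries: batched HFR metadata goes to
// handleBatchMetadata, regular metadata to handleMetadataWithLock, input
// buffer completions to handleInputBufferWithLock, and output buffers to
// handleBufferWithLock.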
6011 if (metadata_buf) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006012 pthread_mutex_lock(&mMutex);
6013 uint8_t batchSize = mBatchSize;
6014 pthread_mutex_unlock(&mMutex);
6015 if (batchSize) {
Thierry Strudel3d639192016-09-09 11:52:26 -07006016 handleBatchMetadata(metadata_buf,
6017 true /* free_and_bufdone_meta_buf */);
6018 } else { /* mBatchSize = 0 */
6019 hdrPlusPerfLock(metadata_buf);
6020 pthread_mutex_lock(&mMutex);
6021 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006022 true /* free_and_bufdone_meta_buf */,
Thierry Strudel54dc9782017-02-15 12:12:10 -08006023 false /* first frame of batch metadata */ ,
6024 NULL);
Thierry Strudel3d639192016-09-09 11:52:26 -07006025 pthread_mutex_unlock(&mMutex);
6026 }
6027 } else if (isInputBuffer) {
6028 pthread_mutex_lock(&mMutex);
6029 handleInputBufferWithLock(frame_number);
6030 pthread_mutex_unlock(&mMutex);
6031 } else {
6032 pthread_mutex_lock(&mMutex);
6033 handleBufferWithLock(buffer, frame_number);
6034 pthread_mutex_unlock(&mMutex);
6035 }
6036 return;
6037}
6038
6039/*===========================================================================
6040 * FUNCTION : getReprocessibleOutputStreamId
6041 *
6042 * DESCRIPTION: Get source output stream id for the input reprocess stream
6043 * based on size and format, which would be the largest
6044 * output stream if an input stream exists.
6045 *
6046 * PARAMETERS :
6047 * @id : return the stream id if found
6048 *
6049 * RETURN : int32_t type of status
6050 * NO_ERROR -- success
6051 * non-zero failure code
6052 *==========================================================================*/
6053int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
6054{
6055 /* Check whether any output or bidirectional stream with the same size and
6056 format exists, and return that stream's id */
6057 if ((mInputStreamInfo.dim.width > 0) &&
6058 (mInputStreamInfo.dim.height > 0)) {
6059 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6060 it != mStreamInfo.end(); it++) {
6061
6062 camera3_stream_t *stream = (*it)->stream;
6063 if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
6064 (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
6065 (stream->format == mInputStreamInfo.format)) {
6066 // Usage flag for an input stream and the source output stream
6067 // may be different.
6068 LOGD("Found reprocessible output stream! %p", *it);
6069 LOGD("input stream usage 0x%x, current stream usage 0x%x",
6070 stream->usage, mInputStreamInfo.usage);
6071
6072 QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
6073 if (channel != NULL && channel->mStreams[0]) {
6074 id = channel->mStreams[0]->getMyServerID();
6075 return NO_ERROR;
6076 }
6077 }
6078 }
6079 } else {
6080 LOGD("No input stream, so no reprocessible output stream");
6081 }
6082 return NAME_NOT_FOUND;
6083}
6084
6085/*===========================================================================
6086 * FUNCTION : lookupFwkName
6087 *
6088 * DESCRIPTION: In case the enum is not the same in the framework and backend,
6089 * make sure the parameter is correctly propagated
6090 *
6091 * PARAMETERS :
6092 * @arr : map between the two enums
6093 * @len : len of the map
6094 * @hal_name : name of the hal_parm to map
6095 *
6096 * RETURN : int type of status
6097 * fwk_name -- success
6098 * non-zero failure code
6099 *==========================================================================*/
6100template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
6101 size_t len, halType hal_name)
6102{
6103
6104 for (size_t i = 0; i < len; i++) {
6105 if (arr[i].hal_name == hal_name) {
6106 return arr[i].fwk_name;
6107 }
6108 }
6109
6110 /* Not able to find matching framework type is not necessarily
6111 * an error case. This happens when mm-camera supports more attributes
6112 * than the frameworks do */
6113 LOGH("Cannot find matching framework type");
6114 return NAME_NOT_FOUND;
6115}
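// Typical usage, mirroring the flash mode translation later in this function:
// int val = lookupFwkName(FLASH_MODES_MAP,
// METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
// if (NAME_NOT_FOUND != val) { uint8_t fwk_flashMode = (uint8_t)val; ... }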
6116
6117/*===========================================================================
6118 * FUNCTION : lookupHalName
6119 *
6120 * DESCRIPTION: In case the enum is not the same in the framework and backend,
6121 * make sure the parameter is correctly propagated
6122 *
6123 * PARAMETERS :
6124 * @arr : map between the two enums
6125 * @len : len of the map
6126 * @fwk_name : name of the framework parameter to map
6127 *
6128 * RETURN : int32_t type of status
6129 * hal_name -- success
6130 * non-zero failure code
6131 *==========================================================================*/
6132template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
6133 size_t len, fwkType fwk_name)
6134{
6135 for (size_t i = 0; i < len; i++) {
6136 if (arr[i].fwk_name == fwk_name) {
6137 return arr[i].hal_name;
6138 }
6139 }
6140
6141 LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
6142 return NAME_NOT_FOUND;
6143}
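// Mirror of lookupFwkName for the opposite direction; a hedged usage sketch:
// int hal_val = lookupHalName(FLASH_MODES_MAP,
// METADATA_MAP_SIZE(FLASH_MODES_MAP), fwk_flashMode);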
6144
6145/*===========================================================================
6146 * FUNCTION : lookupProp
6147 *
6148 * DESCRIPTION: lookup a value by its name
6149 *
6150 * PARAMETERS :
6151 * @arr : map between the two enums
6152 * @len : size of the map
6153 * @name : name to be looked up
6154 *
6155 * RETURN : Value if found
6156 * CAM_CDS_MODE_MAX if not found
6157 *==========================================================================*/
6158template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
6159 size_t len, const char *name)
6160{
6161 if (name) {
6162 for (size_t i = 0; i < len; i++) {
6163 if (!strcmp(arr[i].desc, name)) {
6164 return arr[i].val;
6165 }
6166 }
6167 }
6168 return CAM_CDS_MODE_MAX;
6169}
6170
6171/*===========================================================================
6172 * FUNCTION : translateFromHalMetadata
6173 * DESCRIPTION: Translate the metadata buffer returned by the backend into
 * the framework camera_metadata_t format
6174 *
6175 * PARAMETERS :
6176 * @metadata : metadata information from callback
6177 * @timestamp: metadata buffer timestamp
6178 * @request_id: request id
6179 * @jpegMetadata: additional jpeg metadata
 * @pipeline_depth: pipeline depth of this capture result
 * @capture_intent: capture intent of this capture result
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006180 * @hybrid_ae_enable: whether hybrid ae is enabled
Samuel Ha68ba5172016-12-15 18:41:12 -08006181 * @DevCamDebug_meta_enable: enable DevCamDebug meta
6182 * // DevCamDebug metadata end
Thierry Strudel3d639192016-09-09 11:52:26 -07006183 * @pprocDone: whether internal offline postprocessing is done
 * @fwk_cacMode: framework color aberration correction (CAC) mode
 * @firstMetadataInBatch: whether this is the first metadata buffer in an HFR batch
6184 *
6185 * RETURN : camera_metadata_t*
6186 * metadata in a format specified by fwk
6187 *==========================================================================*/
6188camera_metadata_t*
6189QCamera3HardwareInterface::translateFromHalMetadata(
6190 metadata_buffer_t *metadata,
6191 nsecs_t timestamp,
6192 int32_t request_id,
6193 const CameraMetadata& jpegMetadata,
6194 uint8_t pipeline_depth,
6195 uint8_t capture_intent,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006196 uint8_t hybrid_ae_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08006197 /* DevCamDebug metadata translateFromHalMetadata argument */
6198 uint8_t DevCamDebug_meta_enable,
6199 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07006200 bool pprocDone,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006201 uint8_t fwk_cacMode,
6202 bool firstMetadataInBatch)
Thierry Strudel3d639192016-09-09 11:52:26 -07006203{
6204 CameraMetadata camMetadata;
6205 camera_metadata_t *resultMetadata;
6206
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006207 if (mBatchSize && !firstMetadataInBatch) {
6208 /* In batch mode, use cached metadata from the first metadata
6209 in the batch */
6210 camMetadata.clear();
6211 camMetadata = mCachedMetadata;
6212 }
6213
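// Note: even when cached batch metadata is reused, the per-frame tags below
// (timestamp, request id, pipeline depth, capture intent, hybrid AE) are
// refreshed for every result before the cached-metadata early return further below.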
Thierry Strudel3d639192016-09-09 11:52:26 -07006214 if (jpegMetadata.entryCount())
6215 camMetadata.append(jpegMetadata);
6216
6217 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
6218 camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
6219 camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pipeline_depth, 1);
6220 camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &capture_intent, 1);
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006221 camMetadata.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae_enable, 1);
Samuel Ha68ba5172016-12-15 18:41:12 -08006222 if (mBatchSize == 0) {
6223 // DevCamDebug metadata translateFromHalMetadata. Only update this one for non-HFR mode
6224 camMetadata.update(DEVCAMDEBUG_META_ENABLE, &DevCamDebug_meta_enable, 1);
6225 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006226
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006227 if (mBatchSize && !firstMetadataInBatch) {
6228 /* In batch mode, use cached metadata instead of parsing metadata buffer again */
6229 resultMetadata = camMetadata.release();
6230 return resultMetadata;
6231 }
6232
Samuel Ha68ba5172016-12-15 18:41:12 -08006233 // atrace_begin(ATRACE_TAG_ALWAYS, "DevCamDebugInfo");
6234 // Only update DevCamDebug metadata conditionally: in non-HFR mode and when it is enabled.
6235 if (mBatchSize == 0 && DevCamDebug_meta_enable != 0) {
6236 // DevCamDebug metadata translateFromHalMetadata AF
6237 IF_META_AVAILABLE(int32_t, DevCamDebug_af_lens_position,
6238 CAM_INTF_META_DEV_CAM_AF_LENS_POSITION, metadata) {
6239 int32_t fwk_DevCamDebug_af_lens_position = *DevCamDebug_af_lens_position;
6240 camMetadata.update(DEVCAMDEBUG_AF_LENS_POSITION, &fwk_DevCamDebug_af_lens_position, 1);
6241 }
6242 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_confidence,
6243 CAM_INTF_META_DEV_CAM_AF_TOF_CONFIDENCE, metadata) {
6244 int32_t fwk_DevCamDebug_af_tof_confidence = *DevCamDebug_af_tof_confidence;
6245 camMetadata.update(DEVCAMDEBUG_AF_TOF_CONFIDENCE, &fwk_DevCamDebug_af_tof_confidence, 1);
6246 }
6247 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_distance,
6248 CAM_INTF_META_DEV_CAM_AF_TOF_DISTANCE, metadata) {
6249 int32_t fwk_DevCamDebug_af_tof_distance = *DevCamDebug_af_tof_distance;
6250 camMetadata.update(DEVCAMDEBUG_AF_TOF_DISTANCE, &fwk_DevCamDebug_af_tof_distance, 1);
6251 }
6252 IF_META_AVAILABLE(int32_t, DevCamDebug_af_luma,
6253 CAM_INTF_META_DEV_CAM_AF_LUMA, metadata) {
6254 int32_t fwk_DevCamDebug_af_luma = *DevCamDebug_af_luma;
6255 camMetadata.update(DEVCAMDEBUG_AF_LUMA, &fwk_DevCamDebug_af_luma, 1);
6256 }
6257 IF_META_AVAILABLE(int32_t, DevCamDebug_af_haf_state,
6258 CAM_INTF_META_DEV_CAM_AF_HAF_STATE, metadata) {
6259 int32_t fwk_DevCamDebug_af_haf_state = *DevCamDebug_af_haf_state;
6260 camMetadata.update(DEVCAMDEBUG_AF_HAF_STATE, &fwk_DevCamDebug_af_haf_state, 1);
6261 }
6262 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_target_pos,
6263 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_TARGET_POS, metadata) {
6264 int32_t fwk_DevCamDebug_af_monitor_pdaf_target_pos =
6265 *DevCamDebug_af_monitor_pdaf_target_pos;
6266 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
6267 &fwk_DevCamDebug_af_monitor_pdaf_target_pos, 1);
6268 }
6269 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_confidence,
6270 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_CONFIDENCE, metadata) {
6271 int32_t fwk_DevCamDebug_af_monitor_pdaf_confidence =
6272 *DevCamDebug_af_monitor_pdaf_confidence;
6273 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
6274 &fwk_DevCamDebug_af_monitor_pdaf_confidence, 1);
6275 }
6276 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_refocus,
6277 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_REFOCUS, metadata) {
6278 int32_t fwk_DevCamDebug_af_monitor_pdaf_refocus = *DevCamDebug_af_monitor_pdaf_refocus;
6279 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
6280 &fwk_DevCamDebug_af_monitor_pdaf_refocus, 1);
6281 }
6282 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_target_pos,
6283 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_TARGET_POS, metadata) {
6284 int32_t fwk_DevCamDebug_af_monitor_tof_target_pos =
6285 *DevCamDebug_af_monitor_tof_target_pos;
6286 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
6287 &fwk_DevCamDebug_af_monitor_tof_target_pos, 1);
6288 }
6289 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_confidence,
6290 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_CONFIDENCE, metadata) {
6291 int32_t fwk_DevCamDebug_af_monitor_tof_confidence =
6292 *DevCamDebug_af_monitor_tof_confidence;
6293 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
6294 &fwk_DevCamDebug_af_monitor_tof_confidence, 1);
6295 }
6296 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_refocus,
6297 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_REFOCUS, metadata) {
6298 int32_t fwk_DevCamDebug_af_monitor_tof_refocus = *DevCamDebug_af_monitor_tof_refocus;
6299 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
6300 &fwk_DevCamDebug_af_monitor_tof_refocus, 1);
6301 }
6302 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_type_select,
6303 CAM_INTF_META_DEV_CAM_AF_MONITOR_TYPE_SELECT, metadata) {
6304 int32_t fwk_DevCamDebug_af_monitor_type_select = *DevCamDebug_af_monitor_type_select;
6305 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
6306 &fwk_DevCamDebug_af_monitor_type_select, 1);
6307 }
6308 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_refocus,
6309 CAM_INTF_META_DEV_CAM_AF_MONITOR_REFOCUS, metadata) {
6310 int32_t fwk_DevCamDebug_af_monitor_refocus = *DevCamDebug_af_monitor_refocus;
6311 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_REFOCUS,
6312 &fwk_DevCamDebug_af_monitor_refocus, 1);
6313 }
6314 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_target_pos,
6315 CAM_INTF_META_DEV_CAM_AF_MONITOR_TARGET_POS, metadata) {
6316 int32_t fwk_DevCamDebug_af_monitor_target_pos = *DevCamDebug_af_monitor_target_pos;
6317 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
6318 &fwk_DevCamDebug_af_monitor_target_pos, 1);
6319 }
6320 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_target_pos,
6321 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_TARGET_POS, metadata) {
6322 int32_t fwk_DevCamDebug_af_search_pdaf_target_pos =
6323 *DevCamDebug_af_search_pdaf_target_pos;
6324 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
6325 &fwk_DevCamDebug_af_search_pdaf_target_pos, 1);
6326 }
6327 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_next_pos,
6328 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEXT_POS, metadata) {
6329 int32_t fwk_DevCamDebug_af_search_pdaf_next_pos = *DevCamDebug_af_search_pdaf_next_pos;
6330 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
6331 &fwk_DevCamDebug_af_search_pdaf_next_pos, 1);
6332 }
6333 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_near_pos,
6334 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEAR_POS, metadata) {
6335 int32_t fwk_DevCamDebug_af_search_pdaf_near_pos = *DevCamDebug_af_search_pdaf_near_pos;
6336 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
6337 &fwk_DevCamDebug_af_search_pdaf_near_pos, 1);
6338 }
6339 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_far_pos,
6340 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_FAR_POS, metadata) {
6341 int32_t fwk_DevCamDebug_af_search_pdaf_far_pos = *DevCamDebug_af_search_pdaf_far_pos;
6342 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
6343 &fwk_DevCamDebug_af_search_pdaf_far_pos, 1);
6344 }
6345 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_confidence,
6346 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_CONFIDENCE, metadata) {
6347 int32_t fwk_DevCamDebug_af_search_pdaf_confidence = *DevCamDebug_af_search_pdaf_confidence;
6348 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
6349 &fwk_DevCamDebug_af_search_pdaf_confidence, 1);
6350 }
6351 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_target_pos,
6352 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_TARGET_POS, metadata) {
6353 int32_t fwk_DevCamDebug_af_search_tof_target_pos =
6354 *DevCamDebug_af_search_tof_target_pos;
6355 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
6356 &fwk_DevCamDebug_af_search_tof_target_pos, 1);
6357 }
6358 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_next_pos,
6359 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEXT_POS, metadata) {
6360 int32_t fwk_DevCamDebug_af_search_tof_next_pos = *DevCamDebug_af_search_tof_next_pos;
6361 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
6362 &fwk_DevCamDebug_af_search_tof_next_pos, 1);
6363 }
6364 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_near_pos,
6365 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEAR_POS, metadata) {
6366 int32_t fwk_DevCamDebug_af_search_tof_near_pos = *DevCamDebug_af_search_tof_near_pos;
6367 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
6368 &fwk_DevCamDebug_af_search_tof_near_pos, 1);
6369 }
6370 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_far_pos,
6371 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_FAR_POS, metadata) {
6372 int32_t fwk_DevCamDebug_af_search_tof_far_pos = *DevCamDebug_af_search_tof_far_pos;
6373 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
6374 &fwk_DevCamDebug_af_search_tof_far_pos, 1);
6375 }
6376 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_confidence,
6377 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_CONFIDENCE, metadata) {
6378 int32_t fwk_DevCamDebug_af_search_tof_confidence = *DevCamDebug_af_search_tof_confidence;
6379 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
6380 &fwk_DevCamDebug_af_search_tof_confidence, 1);
6381 }
6382 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_type_select,
6383 CAM_INTF_META_DEV_CAM_AF_SEARCH_TYPE_SELECT, metadata) {
6384 int32_t fwk_DevCamDebug_af_search_type_select = *DevCamDebug_af_search_type_select;
6385 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
6386 &fwk_DevCamDebug_af_search_type_select, 1);
6387 }
6388 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_next_pos,
6389 CAM_INTF_META_DEV_CAM_AF_SEARCH_NEXT_POS, metadata) {
6390 int32_t fwk_DevCamDebug_af_search_next_pos = *DevCamDebug_af_search_next_pos;
6391 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
6392 &fwk_DevCamDebug_af_search_next_pos, 1);
6393 }
6394 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_target_pos,
6395 CAM_INTF_META_DEV_CAM_AF_SEARCH_TARGET_POS, metadata) {
6396 int32_t fwk_DevCamDebug_af_search_target_pos = *DevCamDebug_af_search_target_pos;
6397 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
6398 &fwk_DevCamDebug_af_search_target_pos, 1);
6399 }
6400 // DevCamDebug metadata translateFromHalMetadata AEC
6401 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_target_luma,
6402 CAM_INTF_META_DEV_CAM_AEC_TARGET_LUMA, metadata) {
6403 int32_t fwk_DevCamDebug_aec_target_luma = *DevCamDebug_aec_target_luma;
6404 camMetadata.update(DEVCAMDEBUG_AEC_TARGET_LUMA, &fwk_DevCamDebug_aec_target_luma, 1);
6405 }
6406 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_comp_luma,
6407 CAM_INTF_META_DEV_CAM_AEC_COMP_LUMA, metadata) {
6408 int32_t fwk_DevCamDebug_aec_comp_luma = *DevCamDebug_aec_comp_luma;
6409 camMetadata.update(DEVCAMDEBUG_AEC_COMP_LUMA, &fwk_DevCamDebug_aec_comp_luma, 1);
6410 }
6411 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_avg_luma,
6412 CAM_INTF_META_DEV_CAM_AEC_AVG_LUMA, metadata) {
6413 int32_t fwk_DevCamDebug_aec_avg_luma = *DevCamDebug_aec_avg_luma;
6414 camMetadata.update(DEVCAMDEBUG_AEC_AVG_LUMA, &fwk_DevCamDebug_aec_avg_luma, 1);
6415 }
6416 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_cur_luma,
6417 CAM_INTF_META_DEV_CAM_AEC_CUR_LUMA, metadata) {
6418 int32_t fwk_DevCamDebug_aec_cur_luma = *DevCamDebug_aec_cur_luma;
6419 camMetadata.update(DEVCAMDEBUG_AEC_CUR_LUMA, &fwk_DevCamDebug_aec_cur_luma, 1);
6420 }
6421 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_linecount,
6422 CAM_INTF_META_DEV_CAM_AEC_LINECOUNT, metadata) {
6423 int32_t fwk_DevCamDebug_aec_linecount = *DevCamDebug_aec_linecount;
6424 camMetadata.update(DEVCAMDEBUG_AEC_LINECOUNT, &fwk_DevCamDebug_aec_linecount, 1);
6425 }
6426 IF_META_AVAILABLE(float, DevCamDebug_aec_real_gain,
6427 CAM_INTF_META_DEV_CAM_AEC_REAL_GAIN, metadata) {
6428 float fwk_DevCamDebug_aec_real_gain = *DevCamDebug_aec_real_gain;
6429 camMetadata.update(DEVCAMDEBUG_AEC_REAL_GAIN, &fwk_DevCamDebug_aec_real_gain, 1);
6430 }
6431 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_exp_index,
6432 CAM_INTF_META_DEV_CAM_AEC_EXP_INDEX, metadata) {
6433 int32_t fwk_DevCamDebug_aec_exp_index = *DevCamDebug_aec_exp_index;
6434 camMetadata.update(DEVCAMDEBUG_AEC_EXP_INDEX, &fwk_DevCamDebug_aec_exp_index, 1);
6435 }
6436 IF_META_AVAILABLE(float, DevCamDebug_aec_lux_idx,
6437 CAM_INTF_META_DEV_CAM_AEC_LUX_IDX, metadata) {
6438 float fwk_DevCamDebug_aec_lux_idx = *DevCamDebug_aec_lux_idx;
6439 camMetadata.update(DEVCAMDEBUG_AEC_LUX_IDX, &fwk_DevCamDebug_aec_lux_idx, 1);
6440 }
Samuel Ha34229982017-02-17 13:51:11 -08006441 // DevCamDebug metadata translateFromHalMetadata zzHDR
6442 IF_META_AVAILABLE(float, DevCamDebug_aec_l_real_gain,
6443 CAM_INTF_META_DEV_CAM_AEC_L_REAL_GAIN, metadata) {
6444 float fwk_DevCamDebug_aec_l_real_gain = *DevCamDebug_aec_l_real_gain;
6445 camMetadata.update(DEVCAMDEBUG_AEC_L_REAL_GAIN, &fwk_DevCamDebug_aec_l_real_gain, 1);
6446 }
6447 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_l_linecount,
6448 CAM_INTF_META_DEV_CAM_AEC_L_LINECOUNT, metadata) {
6449 float fwk_DevCamDebug_aec_l_linecount = *DevCamDebug_aec_l_linecount;
6450 camMetadata.update(DEVCAMDEBUG_AEC_L_LINECOUNT, &fwk_DevCamDebug_aec_l_linecount, 1);
6451 }
6452 IF_META_AVAILABLE(float, DevCamDebug_aec_s_real_gain,
6453 CAM_INTF_META_DEV_CAM_AEC_S_REAL_GAIN, metadata) {
6454 float fwk_DevCamDebug_aec_s_real_gain = *DevCamDebug_aec_s_real_gain;
6455 camMetadata.update(DEVCAMDEBUG_AEC_S_REAL_GAIN, &fwk_DevCamDebug_aec_s_real_gain, 1);
6456 }
6457 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_s_linecount,
6458 CAM_INTF_META_DEV_CAM_AEC_S_LINECOUNT, metadata) {
6459 float fwk_DevCamDebug_aec_s_linecount = *DevCamDebug_aec_s_linecount;
6460 camMetadata.update(DEVCAMDEBUG_AEC_S_LINECOUNT, &fwk_DevCamDebug_aec_s_linecount, 1);
6461 }
6462 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_sensitivity_ratio,
6463 CAM_INTF_META_DEV_CAM_AEC_HDR_SENSITIVITY_RATIO, metadata) {
6464 float fwk_DevCamDebug_aec_hdr_sensitivity_ratio =
6465 *DevCamDebug_aec_hdr_sensitivity_ratio;
6466 camMetadata.update(DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
6467 &fwk_DevCamDebug_aec_hdr_sensitivity_ratio, 1);
6468 }
6469 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_exp_time_ratio,
6470 CAM_INTF_META_DEV_CAM_AEC_HDR_EXP_TIME_RATIO, metadata) {
6471 float fwk_DevCamDebug_aec_hdr_exp_time_ratio = *DevCamDebug_aec_hdr_exp_time_ratio;
6472 camMetadata.update(DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
6473 &fwk_DevCamDebug_aec_hdr_exp_time_ratio, 1);
6474 }
6475 // DevCamDebug metadata translateFromHalMetadata ADRC
6476 IF_META_AVAILABLE(float, DevCamDebug_aec_total_drc_gain,
6477 CAM_INTF_META_DEV_CAM_AEC_TOTAL_DRC_GAIN, metadata) {
6478 float fwk_DevCamDebug_aec_total_drc_gain = *DevCamDebug_aec_total_drc_gain;
6479 camMetadata.update(DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
6480 &fwk_DevCamDebug_aec_total_drc_gain, 1);
6481 }
6482 IF_META_AVAILABLE(float, DevCamDebug_aec_color_drc_gain,
6483 CAM_INTF_META_DEV_CAM_AEC_COLOR_DRC_GAIN, metadata) {
6484 float fwk_DevCamDebug_aec_color_drc_gain = *DevCamDebug_aec_color_drc_gain;
6485 camMetadata.update(DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
6486 &fwk_DevCamDebug_aec_color_drc_gain, 1);
6487 }
6488 IF_META_AVAILABLE(float, DevCamDebug_aec_gtm_ratio,
6489 CAM_INTF_META_DEV_CAM_AEC_GTM_RATIO, metadata) {
6490 float fwk_DevCamDebug_aec_gtm_ratio = *DevCamDebug_aec_gtm_ratio;
6491 camMetadata.update(DEVCAMDEBUG_AEC_GTM_RATIO, &fwk_DevCamDebug_aec_gtm_ratio, 1);
6492 }
6493 IF_META_AVAILABLE(float, DevCamDebug_aec_ltm_ratio,
6494 CAM_INTF_META_DEV_CAM_AEC_LTM_RATIO, metadata) {
6495 float fwk_DevCamDebug_aec_ltm_ratio = *DevCamDebug_aec_ltm_ratio;
6496 camMetadata.update(DEVCAMDEBUG_AEC_LTM_RATIO, &fwk_DevCamDebug_aec_ltm_ratio, 1);
6497 }
6498 IF_META_AVAILABLE(float, DevCamDebug_aec_la_ratio,
6499 CAM_INTF_META_DEV_CAM_AEC_LA_RATIO, metadata) {
6500 float fwk_DevCamDebug_aec_la_ratio = *DevCamDebug_aec_la_ratio;
6501 camMetadata.update(DEVCAMDEBUG_AEC_LA_RATIO, &fwk_DevCamDebug_aec_la_ratio, 1);
6502 }
6503 IF_META_AVAILABLE(float, DevCamDebug_aec_gamma_ratio,
6504 CAM_INTF_META_DEV_CAM_AEC_GAMMA_RATIO, metadata) {
6505 float fwk_DevCamDebug_aec_gamma_ratio = *DevCamDebug_aec_gamma_ratio;
6506 camMetadata.update(DEVCAMDEBUG_AEC_GAMMA_RATIO, &fwk_DevCamDebug_aec_gamma_ratio, 1);
6507 }
Samuel Ha68ba5172016-12-15 18:41:12 -08006508 // DevCamDebug metadata translateFromHalMetadata AWB
6509 IF_META_AVAILABLE(float, DevCamDebug_awb_r_gain,
6510 CAM_INTF_META_DEV_CAM_AWB_R_GAIN, metadata) {
6511 float fwk_DevCamDebug_awb_r_gain = *DevCamDebug_awb_r_gain;
6512 camMetadata.update(DEVCAMDEBUG_AWB_R_GAIN, &fwk_DevCamDebug_awb_r_gain, 1);
6513 }
6514 IF_META_AVAILABLE(float, DevCamDebug_awb_g_gain,
6515 CAM_INTF_META_DEV_CAM_AWB_G_GAIN, metadata) {
6516 float fwk_DevCamDebug_awb_g_gain = *DevCamDebug_awb_g_gain;
6517 camMetadata.update(DEVCAMDEBUG_AWB_G_GAIN, &fwk_DevCamDebug_awb_g_gain, 1);
6518 }
6519 IF_META_AVAILABLE(float, DevCamDebug_awb_b_gain,
6520 CAM_INTF_META_DEV_CAM_AWB_B_GAIN, metadata) {
6521 float fwk_DevCamDebug_awb_b_gain = *DevCamDebug_awb_b_gain;
6522 camMetadata.update(DEVCAMDEBUG_AWB_B_GAIN, &fwk_DevCamDebug_awb_b_gain, 1);
6523 }
6524 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_cct,
6525 CAM_INTF_META_DEV_CAM_AWB_CCT, metadata) {
6526 int32_t fwk_DevCamDebug_awb_cct = *DevCamDebug_awb_cct;
6527 camMetadata.update(DEVCAMDEBUG_AWB_CCT, &fwk_DevCamDebug_awb_cct, 1);
6528 }
6529 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_decision,
6530 CAM_INTF_META_DEV_CAM_AWB_DECISION, metadata) {
6531 int32_t fwk_DevCamDebug_awb_decision = *DevCamDebug_awb_decision;
6532 camMetadata.update(DEVCAMDEBUG_AWB_DECISION, &fwk_DevCamDebug_awb_decision, 1);
6533 }
6534 }
6535 // atrace_end(ATRACE_TAG_ALWAYS);
6536
Thierry Strudel3d639192016-09-09 11:52:26 -07006537 IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
6538 int64_t fwk_frame_number = *frame_number;
6539 camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
6540 }
6541
6542 IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
6543 int32_t fps_range[2];
6544 fps_range[0] = (int32_t)float_range->min_fps;
6545 fps_range[1] = (int32_t)float_range->max_fps;
6546 camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
6547 fps_range, 2);
6548 LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
6549 fps_range[0], fps_range[1]);
6550 }
6551
6552 IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
6553 camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
6554 }
6555
6556 IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
6557 int val = lookupFwkName(SCENE_MODES_MAP,
6558 METADATA_MAP_SIZE(SCENE_MODES_MAP),
6559 *sceneMode);
6560 if (NAME_NOT_FOUND != val) {
6561 uint8_t fwkSceneMode = (uint8_t)val;
6562 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
6563 LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
6564 fwkSceneMode);
6565 }
6566 }
6567
6568 IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
6569 uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
6570 camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
6571 }
6572
6573 IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
6574 uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
6575 camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
6576 }
6577
6578 IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
6579 uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
6580 camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
6581 }
6582
6583 IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
6584 CAM_INTF_META_EDGE_MODE, metadata) {
6585 camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
6586 }
6587
6588 IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
6589 uint8_t fwk_flashPower = (uint8_t) *flashPower;
6590 camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
6591 }
6592
6593 IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
6594 camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
6595 }
6596
6597 IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
6598 if (0 <= *flashState) {
6599 uint8_t fwk_flashState = (uint8_t) *flashState;
6600 if (!gCamCapability[mCameraId]->flash_available) {
6601 fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
6602 }
6603 camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
6604 }
6605 }
6606
6607 IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
6608 int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
6609 if (NAME_NOT_FOUND != val) {
6610 uint8_t fwk_flashMode = (uint8_t)val;
6611 camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
6612 }
6613 }
6614
6615 IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
6616 uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
6617 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
6618 }
6619
6620 IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
6621 camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
6622 }
6623
6624 IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
6625 camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
6626 }
6627
6628 IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
6629 camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
6630 }
6631
6632 IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
6633 uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
6634 camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
6635 }
6636
6637 IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
6638 uint8_t fwk_videoStab = (uint8_t) *videoStab;
6639 LOGD("fwk_videoStab = %d", fwk_videoStab);
6640 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
6641 } else {
6642 // Regardless of whether video stabilization is supported, CTS expects the EIS result
6643 // to be non-NULL, so hardcode the video stabilization result to OFF mode.
6644 uint8_t fwkVideoStabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
6645 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006646 LOGD("EIS result default to OFF mode");
Thierry Strudel3d639192016-09-09 11:52:26 -07006647 }
6648
6649 IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
6650 uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
6651 camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
6652 }
6653
6654 IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
6655 camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
6656 }
6657
Thierry Strudel3d639192016-09-09 11:52:26 -07006658 IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
6659 CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006660 float fwk_blackLevelInd[BLACK_LEVEL_PATTERN_CNT];
Thierry Strudel3d639192016-09-09 11:52:26 -07006661
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006662 adjustBlackLevelForCFA(blackLevelAppliedPattern->cam_black_level, fwk_blackLevelInd,
6663 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07006664
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006665 LOGD("applied dynamicblackLevel in RGGB order = %f %f %f %f",
Thierry Strudel3d639192016-09-09 11:52:26 -07006666 blackLevelAppliedPattern->cam_black_level[0],
6667 blackLevelAppliedPattern->cam_black_level[1],
6668 blackLevelAppliedPattern->cam_black_level[2],
6669 blackLevelAppliedPattern->cam_black_level[3]);
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006670 camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd,
6671 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006672
6673#ifndef USE_HAL_3_3
6674 // Update the ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
Zhijun Heb753c672016-06-15 14:50:48 -07006675 // Need to convert the internal 12-bit depth to the sensor's 10-bit raw
6676 // depth space.
6677 fwk_blackLevelInd[0] /= 4.0;
6678 fwk_blackLevelInd[1] /= 4.0;
6679 fwk_blackLevelInd[2] /= 4.0;
6680 fwk_blackLevelInd[3] /= 4.0;
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006681 camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd,
6682 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006683#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07006684 }
6685
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006686#ifndef USE_HAL_3_3
6687 // Fixed whitelevel is used by ISP/Sensor
6688 camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
6689 &gCamCapability[mCameraId]->white_level, 1);
6690#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07006691
6692 IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
6693 CAM_INTF_META_SCALER_CROP_REGION, metadata) {
6694 int32_t scalerCropRegion[4];
6695 scalerCropRegion[0] = hScalerCropRegion->left;
6696 scalerCropRegion[1] = hScalerCropRegion->top;
6697 scalerCropRegion[2] = hScalerCropRegion->width;
6698 scalerCropRegion[3] = hScalerCropRegion->height;
6699
6700 // Adjust crop region from sensor output coordinate system to active
6701 // array coordinate system.
6702 mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
6703 scalerCropRegion[2], scalerCropRegion[3]);
6704
6705 camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
6706 }
6707
6708 IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
6709 LOGD("sensorExpTime = %lld", *sensorExpTime);
6710 camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
6711 }
6712
6713    IF_META_AVAILABLE(int64_t, sensorFrameDuration,
6714            CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
6715        LOGD("sensorFrameDuration = %lld", *sensorFrameDuration);
6716        camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFrameDuration, 1);
6717 }
6718
6719 IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
6720 CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
6721 LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
6722 camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
6723 sensorRollingShutterSkew, 1);
6724 }
6725
6726 IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
6727 LOGD("sensorSensitivity = %d", *sensorSensitivity);
6728 camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
6729
6730        // Calculate the noise profile based on sensitivity
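        // The framework's noise model is N(x) = sqrt(S * x + O) per color channel
        // (see ANDROID_SENSOR_NOISE_PROFILE), so the profile below is packed as
        // interleaved (S, O) pairs, one pair per color channel.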
6731 double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
6732 double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
6733 double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
6734 for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
6735 noise_profile[i] = noise_profile_S;
6736 noise_profile[i+1] = noise_profile_O;
6737 }
6738 LOGD("noise model entry (S, O) is (%f, %f)",
6739 noise_profile_S, noise_profile_O);
6740 camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
6741 (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
6742 }
6743
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006744#ifndef USE_HAL_3_3
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07006745 int32_t fwk_ispSensitivity = 100;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006746 IF_META_AVAILABLE(int32_t, ispSensitivity, CAM_INTF_META_ISP_SENSITIVITY, metadata) {
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07006747 fwk_ispSensitivity = (int32_t) *ispSensitivity;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006748 }
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07006749 IF_META_AVAILABLE(float, postStatsSensitivity, CAM_INTF_META_ISP_POST_STATS_SENSITIVITY, metadata) {
6750 fwk_ispSensitivity = (int32_t) (*postStatsSensitivity * fwk_ispSensitivity);
6751 }
6752 camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &fwk_ispSensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006753#endif
6754
Thierry Strudel3d639192016-09-09 11:52:26 -07006755 IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
6756 uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
6757 camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
6758 }
6759
6760 IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
6761 int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
6762 *faceDetectMode);
6763 if (NAME_NOT_FOUND != val) {
6764 uint8_t fwk_faceDetectMode = (uint8_t)val;
6765 camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
6766
6767 if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
6768 IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
6769 CAM_INTF_META_FACE_DETECTION, metadata) {
6770 uint8_t numFaces = MIN(
6771 faceDetectionInfo->num_faces_detected, MAX_ROI);
6772 int32_t faceIds[MAX_ROI];
6773 uint8_t faceScores[MAX_ROI];
6774 int32_t faceRectangles[MAX_ROI * 4];
6775 int32_t faceLandmarks[MAX_ROI * 6];
6776 size_t j = 0, k = 0;
6777
6778 for (size_t i = 0; i < numFaces; i++) {
6779 faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
6780                    // Map the face boundary from the sensor output coordinate system to the
6781                    // active array coordinate system.
6782 cam_rect_t& rect = faceDetectionInfo->faces[i].face_boundary;
6783 mCropRegionMapper.toActiveArray(rect.left, rect.top,
6784 rect.width, rect.height);
6785
6786 convertToRegions(faceDetectionInfo->faces[i].face_boundary,
6787 faceRectangles+j, -1);
6788
6789 j+= 4;
6790 }
6791 if (numFaces <= 0) {
6792 memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
6793 memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
6794 memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
6795 memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
6796 }
6797
6798 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
6799 numFaces);
6800 camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
6801 faceRectangles, numFaces * 4U);
6802 if (fwk_faceDetectMode ==
6803 ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
6804 IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
6805 CAM_INTF_META_FACE_LANDMARK, metadata) {
6806
6807 for (size_t i = 0; i < numFaces; i++) {
6808                            // Map the landmark coordinates from the sensor output coordinate
6809                            // system to the active array coordinate system.
6810 mCropRegionMapper.toActiveArray(
6811 landmarks->face_landmarks[i].left_eye_center.x,
6812 landmarks->face_landmarks[i].left_eye_center.y);
6813 mCropRegionMapper.toActiveArray(
6814 landmarks->face_landmarks[i].right_eye_center.x,
6815 landmarks->face_landmarks[i].right_eye_center.y);
6816 mCropRegionMapper.toActiveArray(
6817 landmarks->face_landmarks[i].mouth_center.x,
6818 landmarks->face_landmarks[i].mouth_center.y);
6819
6820 convertLandmarks(landmarks->face_landmarks[i], faceLandmarks+k);
Thierry Strudel04e026f2016-10-10 11:27:36 -07006821 k+= TOTAL_LANDMARK_INDICES;
6822 }
6823 } else {
6824 for (size_t i = 0; i < numFaces; i++) {
6825 setInvalidLandmarks(faceLandmarks+k);
6826 k+= TOTAL_LANDMARK_INDICES;
Thierry Strudel3d639192016-09-09 11:52:26 -07006827 }
6828 }
6829
6830 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
6831 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
6832 faceLandmarks, numFaces * 6U);
6833 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08006834 IF_META_AVAILABLE(cam_face_blink_data_t, blinks,
6835 CAM_INTF_META_FACE_BLINK, metadata) {
6836 uint8_t detected[MAX_ROI];
6837 uint8_t degree[MAX_ROI * 2];
6838 for (size_t i = 0; i < numFaces; i++) {
6839 detected[i] = blinks->blink[i].blink_detected;
6840 degree[2 * i] = blinks->blink[i].left_blink;
6841 degree[2 * i + 1] = blinks->blink[i].right_blink;
6842 }
6843 camMetadata.update(QCAMERA3_STATS_BLINK_DETECTED,
6844 detected, numFaces);
6845 camMetadata.update(QCAMERA3_STATS_BLINK_DEGREE,
6846 degree, numFaces * 2);
6847 }
6848 IF_META_AVAILABLE(cam_face_smile_data_t, smiles,
6849 CAM_INTF_META_FACE_SMILE, metadata) {
6850 uint8_t degree[MAX_ROI];
6851 uint8_t confidence[MAX_ROI];
6852 for (size_t i = 0; i < numFaces; i++) {
6853 degree[i] = smiles->smile[i].smile_degree;
6854 confidence[i] = smiles->smile[i].smile_confidence;
6855 }
6856 camMetadata.update(QCAMERA3_STATS_SMILE_DEGREE,
6857 degree, numFaces);
6858 camMetadata.update(QCAMERA3_STATS_SMILE_CONFIDENCE,
6859 confidence, numFaces);
6860 }
6861 IF_META_AVAILABLE(cam_face_gaze_data_t, gazes,
6862 CAM_INTF_META_FACE_GAZE, metadata) {
6863 int8_t angle[MAX_ROI];
6864 int32_t direction[MAX_ROI * 3];
6865 int8_t degree[MAX_ROI * 2];
6866 for (size_t i = 0; i < numFaces; i++) {
6867 angle[i] = gazes->gaze[i].gaze_angle;
6868 direction[3 * i] = gazes->gaze[i].updown_dir;
6869 direction[3 * i + 1] = gazes->gaze[i].leftright_dir;
6870 direction[3 * i + 2] = gazes->gaze[i].roll_dir;
6871 degree[2 * i] = gazes->gaze[i].left_right_gaze;
6872 degree[2 * i + 1] = gazes->gaze[i].top_bottom_gaze;
6873 }
6874 camMetadata.update(QCAMERA3_STATS_GAZE_ANGLE,
6875 (uint8_t *)angle, numFaces);
6876 camMetadata.update(QCAMERA3_STATS_GAZE_DIRECTION,
6877 direction, numFaces * 3);
6878 camMetadata.update(QCAMERA3_STATS_GAZE_DEGREE,
6879 (uint8_t *)degree, numFaces * 2);
6880 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006881 }
6882 }
6883 }
6884 }
6885
6886 IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
6887 uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
Thierry Strudel54dc9782017-02-15 12:12:10 -08006888 camMetadata.update(QCAMERA3_HISTOGRAM_MODE, &fwk_histogramMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006889
Thierry Strudel54dc9782017-02-15 12:12:10 -08006890 if (fwk_histogramMode == QCAMERA3_HISTOGRAM_MODE_ON) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006891 IF_META_AVAILABLE(cam_hist_stats_t, stats_data, CAM_INTF_META_HISTOGRAM, metadata) {
6892 // process histogram statistics info
Thierry Strudel54dc9782017-02-15 12:12:10 -08006893 uint32_t hist_buf[4][CAM_HISTOGRAM_STATS_SIZE];
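                // hist_buf holds four histogram planes that are filled below in the
                // order R, GB, GR, B before being published via QCAMERA3_HISTOGRAM_STATS.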
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006894 uint32_t hist_size = sizeof(cam_histogram_data_t::hist_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08006895 cam_histogram_data_t rHistData, grHistData, gbHistData, bHistData;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006896 memset(&rHistData, 0, sizeof(rHistData));
Thierry Strudel54dc9782017-02-15 12:12:10 -08006897 memset(&grHistData, 0, sizeof(grHistData));
6898 memset(&gbHistData, 0, sizeof(gbHistData));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006899 memset(&bHistData, 0, sizeof(bHistData));
6900
6901 switch (stats_data->type) {
6902 case CAM_HISTOGRAM_TYPE_BAYER:
6903 switch (stats_data->bayer_stats.data_type) {
6904 case CAM_STATS_CHANNEL_GR:
Thierry Strudel54dc9782017-02-15 12:12:10 -08006905 rHistData = grHistData = gbHistData = bHistData =
6906 stats_data->bayer_stats.gr_stats;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006907 break;
6908 case CAM_STATS_CHANNEL_GB:
Thierry Strudel54dc9782017-02-15 12:12:10 -08006909 rHistData = grHistData = gbHistData = bHistData =
6910 stats_data->bayer_stats.gb_stats;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006911 break;
6912 case CAM_STATS_CHANNEL_B:
Thierry Strudel54dc9782017-02-15 12:12:10 -08006913 rHistData = grHistData = gbHistData = bHistData =
6914 stats_data->bayer_stats.b_stats;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006915 break;
6916 case CAM_STATS_CHANNEL_ALL:
6917 rHistData = stats_data->bayer_stats.r_stats;
Thierry Strudel54dc9782017-02-15 12:12:10 -08006918 gbHistData = stats_data->bayer_stats.gb_stats;
6919 grHistData = stats_data->bayer_stats.gr_stats;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006920 bHistData = stats_data->bayer_stats.b_stats;
6921 break;
6922 case CAM_STATS_CHANNEL_Y:
6923 case CAM_STATS_CHANNEL_R:
6924 default:
Thierry Strudel54dc9782017-02-15 12:12:10 -08006925 rHistData = grHistData = gbHistData = bHistData =
6926 stats_data->bayer_stats.r_stats;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006927 break;
6928 }
6929 break;
6930 case CAM_HISTOGRAM_TYPE_YUV:
Thierry Strudel54dc9782017-02-15 12:12:10 -08006931 rHistData = grHistData = gbHistData = bHistData =
6932 stats_data->yuv_stats;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006933 break;
6934 }
6935
6936 memcpy(hist_buf, rHistData.hist_buf, hist_size);
Thierry Strudel54dc9782017-02-15 12:12:10 -08006937 memcpy(hist_buf[1], gbHistData.hist_buf, hist_size);
6938 memcpy(hist_buf[2], grHistData.hist_buf, hist_size);
6939 memcpy(hist_buf[3], bHistData.hist_buf, hist_size);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006940
Thierry Strudel54dc9782017-02-15 12:12:10 -08006941 camMetadata.update(QCAMERA3_HISTOGRAM_STATS, (int32_t*)hist_buf, hist_size*4);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006942 }
6943 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006944 }
6945
6946 IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
6947 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
6948 uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
6949 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
6950 }
6951
6952 IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
6953 CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
6954 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
6955 CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
6956 }
6957
6958 IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
6959 CAM_INTF_META_LENS_SHADING_MAP, metadata) {
6960 size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
6961 CAM_MAX_SHADING_MAP_HEIGHT);
6962 size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
6963 CAM_MAX_SHADING_MAP_WIDTH);
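        // The framework shading map carries four gain values (one per Bayer channel)
        // for every grid point, hence the 4U multiplier in the element count below.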
6964 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
6965 lensShadingMap->lens_shading, 4U * map_width * map_height);
6966 }
6967
6968 IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
6969 uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
6970 camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
6971 }
6972
6973 IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
6974        // Populate ANDROID_TONEMAP_CURVE_* from CAM_INTF_META_TONEMAP_CURVES
6975        /* ch0 = G, ch1 = B, ch2 = R */
6976 if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
6977 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
6978 tonemap->tonemap_points_cnt,
6979 CAM_MAX_TONEMAP_CURVE_SIZE);
6980 tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
6981 }
6982
6983 camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
6984 &tonemap->curves[0].tonemap_points[0][0],
6985 tonemap->tonemap_points_cnt * 2);
6986
6987 camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
6988 &tonemap->curves[1].tonemap_points[0][0],
6989 tonemap->tonemap_points_cnt * 2);
6990
6991 camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
6992 &tonemap->curves[2].tonemap_points[0][0],
6993 tonemap->tonemap_points_cnt * 2);
6994 }
6995
6996 IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
6997 CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
6998 camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
6999 CC_GAIN_MAX);
7000 }
7001
7002 IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
7003 CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
7004 camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
7005 (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
7006 CC_MATRIX_COLS * CC_MATRIX_ROWS);
7007 }
7008
7009 IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
7010 CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
7011 if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7012 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7013 toneCurve->tonemap_points_cnt,
7014 CAM_MAX_TONEMAP_CURVE_SIZE);
7015 toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7016 }
7017 camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
7018 (float*)toneCurve->curve.tonemap_points,
7019 toneCurve->tonemap_points_cnt * 2);
7020 }
7021
7022 IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
7023 CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
7024 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
7025 predColorCorrectionGains->gains, 4);
7026 }
7027
7028 IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
7029 CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
7030 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
7031 (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
7032 CC_MATRIX_ROWS * CC_MATRIX_COLS);
7033 }
7034
7035 IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
7036 camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
7037 }
7038
7039 IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
7040 uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
7041 camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
7042 }
7043
7044 IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
7045 uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
7046 camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
7047 }
7048
7049 IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
7050 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
7051 *effectMode);
7052 if (NAME_NOT_FOUND != val) {
7053 uint8_t fwk_effectMode = (uint8_t)val;
7054 camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
7055 }
7056 }
7057
7058 IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
7059 CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
7060 int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
7061 METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
7062 if (NAME_NOT_FOUND != fwk_testPatternMode) {
7063 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
7064 }
7065 int32_t fwk_testPatternData[4];
7066 fwk_testPatternData[0] = testPatternData->r;
7067 fwk_testPatternData[3] = testPatternData->b;
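        // The two green samples go into slots 1 and 2; which of Gr/Gb comes first
        // depends on the sensor's color filter arrangement, handled below.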
7068 switch (gCamCapability[mCameraId]->color_arrangement) {
7069 case CAM_FILTER_ARRANGEMENT_RGGB:
7070 case CAM_FILTER_ARRANGEMENT_GRBG:
7071 fwk_testPatternData[1] = testPatternData->gr;
7072 fwk_testPatternData[2] = testPatternData->gb;
7073 break;
7074 case CAM_FILTER_ARRANGEMENT_GBRG:
7075 case CAM_FILTER_ARRANGEMENT_BGGR:
7076 fwk_testPatternData[2] = testPatternData->gr;
7077 fwk_testPatternData[1] = testPatternData->gb;
7078 break;
7079 default:
7080 LOGE("color arrangement %d is not supported",
7081 gCamCapability[mCameraId]->color_arrangement);
7082 break;
7083 }
7084 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
7085 }
7086
7087 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
7088 camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
7089 }
7090
7091 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
7092 String8 str((const char *)gps_methods);
7093 camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
7094 }
7095
7096 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
7097 camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
7098 }
7099
7100 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
7101 camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
7102 }
7103
7104 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
7105 uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
7106 camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
7107 }
7108
7109 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
7110 uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
7111 camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
7112 }
7113
7114 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
7115 int32_t fwk_thumb_size[2];
7116 fwk_thumb_size[0] = thumb_size->width;
7117 fwk_thumb_size[1] = thumb_size->height;
7118 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
7119 }
7120
7121 IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
7122 camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
7123 privateData,
7124 MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
7125 }
7126
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007127 IF_META_AVAILABLE(int32_t, meteringMode, CAM_INTF_PARM_AEC_ALGO_TYPE, metadata) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08007128 camMetadata.update(QCAMERA3_EXPOSURE_METER,
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007129 meteringMode, 1);
7130 }
7131
Thierry Strudel54dc9782017-02-15 12:12:10 -08007132 IF_META_AVAILABLE(cam_asd_hdr_scene_data_t, hdr_scene_data,
7133 CAM_INTF_META_ASD_HDR_SCENE_DATA, metadata) {
7134 LOGD("hdr_scene_data: %d %f\n",
7135 hdr_scene_data->is_hdr_scene, hdr_scene_data->hdr_confidence);
7136 uint8_t isHdr = hdr_scene_data->is_hdr_scene;
7137 float isHdrConfidence = hdr_scene_data->hdr_confidence;
7138 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE,
7139 &isHdr, 1);
7140 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE,
7141 &isHdrConfidence, 1);
7142 }
7143
7144
7145
Thierry Strudel3d639192016-09-09 11:52:26 -07007146 if (metadata->is_tuning_params_valid) {
7147 uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
7148 uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
7149 metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
7150
7151
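        // Blob layout: six uint32_t header fields (data version followed by the
        // sensor/VFE/CPP/CAC/mod3 section sizes), then the variable-length
        // sensor, VFE, CPP and CAC payloads copied back to back.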
7152 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
7153 sizeof(uint32_t));
7154 data += sizeof(uint32_t);
7155
7156 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
7157 sizeof(uint32_t));
7158 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
7159 data += sizeof(uint32_t);
7160
7161 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
7162 sizeof(uint32_t));
7163 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
7164 data += sizeof(uint32_t);
7165
7166 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
7167 sizeof(uint32_t));
7168 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
7169 data += sizeof(uint32_t);
7170
7171 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
7172 sizeof(uint32_t));
7173 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
7174 data += sizeof(uint32_t);
7175
7176 metadata->tuning_params.tuning_mod3_data_size = 0;
7177 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
7178 sizeof(uint32_t));
7179 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
7180 data += sizeof(uint32_t);
7181
7182 size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
7183 TUNING_SENSOR_DATA_MAX);
7184 memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
7185 count);
7186 data += count;
7187
7188 count = MIN(metadata->tuning_params.tuning_vfe_data_size,
7189 TUNING_VFE_DATA_MAX);
7190 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
7191 count);
7192 data += count;
7193
7194 count = MIN(metadata->tuning_params.tuning_cpp_data_size,
7195 TUNING_CPP_DATA_MAX);
7196 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
7197 count);
7198 data += count;
7199
7200 count = MIN(metadata->tuning_params.tuning_cac_data_size,
7201 TUNING_CAC_DATA_MAX);
7202 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
7203 count);
7204 data += count;
7205
7206 camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
7207 (int32_t *)(void *)tuning_meta_data_blob,
7208 (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
7209 }
7210
7211 IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
7212 CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
7213 camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
7214 (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
7215 NEUTRAL_COL_POINTS);
7216 }
7217
7218 IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
7219 uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
7220 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
7221 }
7222
7223 IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
7224 int32_t aeRegions[REGIONS_TUPLE_COUNT];
7225 // Adjust crop region from sensor output coordinate system to active
7226 // array coordinate system.
7227 mCropRegionMapper.toActiveArray(hAeRegions->rect.left, hAeRegions->rect.top,
7228 hAeRegions->rect.width, hAeRegions->rect.height);
7229
7230 convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
7231 camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
7232 REGIONS_TUPLE_COUNT);
7233 LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
7234 aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
7235 hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width,
7236 hAeRegions->rect.height);
7237 }
7238
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007239 IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
7240 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
7241 if (NAME_NOT_FOUND != val) {
7242 uint8_t fwkAfMode = (uint8_t)val;
7243 camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
7244 LOGD("Metadata : ANDROID_CONTROL_AF_MODE %d", val);
7245 } else {
7246 LOGH("Metadata not found : ANDROID_CONTROL_AF_MODE %d",
7247 val);
7248 }
7249 }
7250
Thierry Strudel3d639192016-09-09 11:52:26 -07007251 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
7252 uint8_t fwk_afState = (uint8_t) *afState;
7253 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007254 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
Thierry Strudel3d639192016-09-09 11:52:26 -07007255 }
7256
7257 IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
7258 camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
7259 }
7260
7261 IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
7262 camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
7263 }
7264
7265 IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
7266 uint8_t fwk_lensState = *lensState;
7267 camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
7268 }
7269
7270 IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
7271 /*af regions*/
7272 int32_t afRegions[REGIONS_TUPLE_COUNT];
7273 // Adjust crop region from sensor output coordinate system to active
7274 // array coordinate system.
7275 mCropRegionMapper.toActiveArray(hAfRegions->rect.left, hAfRegions->rect.top,
7276 hAfRegions->rect.width, hAfRegions->rect.height);
7277
7278 convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
7279 camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
7280 REGIONS_TUPLE_COUNT);
7281 LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
7282 afRegions[0], afRegions[1], afRegions[2], afRegions[3],
7283 hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width,
7284 hAfRegions->rect.height);
7285 }
7286
7287 IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007288 uint32_t ab_mode = *hal_ab_mode;
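        // The framework only defines a single AUTO antibanding mode, so fold the
        // HAL's region-specific 50Hz/60Hz auto modes back into AUTO before the lookup.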
7289 if (ab_mode == CAM_ANTIBANDING_MODE_AUTO_60HZ ||
7290 ab_mode == CAM_ANTIBANDING_MODE_AUTO_50HZ) {
7291 ab_mode = CAM_ANTIBANDING_MODE_AUTO;
7292 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007293 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007294 ab_mode);
Thierry Strudel3d639192016-09-09 11:52:26 -07007295 if (NAME_NOT_FOUND != val) {
7296 uint8_t fwk_ab_mode = (uint8_t)val;
7297 camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
7298 }
7299 }
7300
7301 IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
7302 int val = lookupFwkName(SCENE_MODES_MAP,
7303 METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
7304 if (NAME_NOT_FOUND != val) {
7305 uint8_t fwkBestshotMode = (uint8_t)val;
7306 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
7307 LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
7308 } else {
7309 LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
7310 }
7311 }
7312
7313 IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
7314 uint8_t fwk_mode = (uint8_t) *mode;
7315 camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
7316 }
7317
7318    /* Constant metadata values to be updated */
7319 uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
7320 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
7321
7322 uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
7323 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
7324
7325 int32_t hotPixelMap[2];
7326 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
7327
7328 // CDS
7329 IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
7330 camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
7331 }
7332
Thierry Strudel04e026f2016-10-10 11:27:36 -07007333 IF_META_AVAILABLE(cam_sensor_hdr_type_t, vhdr, CAM_INTF_PARM_SENSOR_HDR, metadata) {
7334 int32_t fwk_hdr;
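        // Track the staggered video HDR bit in mCurrFeatureState so that HDR
        // on/off transitions can be detected and logged for profiling.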
Thierry Strudel54dc9782017-02-15 12:12:10 -08007335 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007336 if(*vhdr == CAM_SENSOR_HDR_OFF) {
7337 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_OFF;
7338 } else {
7339 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_ON;
7340 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007341
7342 if(fwk_hdr != curr_hdr_state) {
7343 LOGH("PROFILE_META_HDR_TOGGLED value=%d", fwk_hdr);
7344 if(fwk_hdr)
7345 mCurrFeatureState |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7346 else
7347 mCurrFeatureState &= ~CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7348 }
Thierry Strudel04e026f2016-10-10 11:27:36 -07007349 camMetadata.update(QCAMERA3_VIDEO_HDR_MODE, &fwk_hdr, 1);
7350 }
7351
Thierry Strudel54dc9782017-02-15 12:12:10 -08007352 //binning correction
7353 IF_META_AVAILABLE(cam_binning_correction_mode_t, bin_correction,
7354 CAM_INTF_META_BINNING_CORRECTION_MODE, metadata) {
7355 int32_t fwk_bin_mode = (int32_t) *bin_correction;
7356 camMetadata.update(QCAMERA3_BINNING_CORRECTION_MODE, &fwk_bin_mode, 1);
7357 }
7358
Thierry Strudel04e026f2016-10-10 11:27:36 -07007359 IF_META_AVAILABLE(cam_ir_mode_type_t, ir, CAM_INTF_META_IR_MODE, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007360 int32_t fwk_ir = (int32_t) *ir;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007361 int8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR ) != 0);
7362 int8_t is_ir_on = 0;
7363
7364        is_ir_on = (fwk_ir > 0) ? 1 : 0;
7365 if(is_ir_on != curr_ir_state) {
7366 LOGH("PROFILE_META_IR_TOGGLED value=%d", fwk_ir);
7367 if(is_ir_on)
7368 mCurrFeatureState |= CAM_QCOM_FEATURE_IR;
7369 else
7370 mCurrFeatureState &= ~CAM_QCOM_FEATURE_IR;
7371 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007372 camMetadata.update(QCAMERA3_IR_MODE, &fwk_ir, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007373 }
7374
Thierry Strudel269c81a2016-10-12 12:13:59 -07007375 // AEC SPEED
7376 IF_META_AVAILABLE(float, aec, CAM_INTF_META_AEC_CONVERGENCE_SPEED, metadata) {
7377 camMetadata.update(QCAMERA3_AEC_CONVERGENCE_SPEED, aec, 1);
7378 }
7379
7380 // AWB SPEED
7381 IF_META_AVAILABLE(float, awb, CAM_INTF_META_AWB_CONVERGENCE_SPEED, metadata) {
7382 camMetadata.update(QCAMERA3_AWB_CONVERGENCE_SPEED, awb, 1);
7383 }
7384
Thierry Strudel3d639192016-09-09 11:52:26 -07007385 // TNR
7386 IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
7387 uint8_t tnr_enable = tnr->denoise_enable;
7388 int32_t tnr_process_type = (int32_t)tnr->process_plates;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007389 int8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0) ;
7390 int8_t is_tnr_on = 0;
7391
7392        is_tnr_on = (tnr_enable > 0) ? 1 : 0;
7393 if(is_tnr_on != curr_tnr_state) {
7394 LOGH("PROFILE_META_TNR_TOGGLED value=%d", tnr_enable);
7395 if(is_tnr_on)
7396 mCurrFeatureState |= CAM_QTI_FEATURE_SW_TNR;
7397 else
7398 mCurrFeatureState &= ~CAM_QTI_FEATURE_SW_TNR;
7399 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007400
7401 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
7402 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
7403 }
7404
7405 // Reprocess crop data
7406 IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
7407 uint8_t cnt = crop_data->num_of_streams;
7408 if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
7409 // mm-qcamera-daemon only posts crop_data for streams
7410            // not linked to pproc, so the absence of valid crop metadata is not
7411            // necessarily an error.
7412 LOGD("No valid crop metadata entries");
7413 } else {
7414 uint32_t reproc_stream_id;
7415 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7416 LOGD("No reprocessible stream found, ignore crop data");
7417 } else {
7418 int rc = NO_ERROR;
7419 Vector<int32_t> roi_map;
7420 int32_t *crop = new int32_t[cnt*4];
7421 if (NULL == crop) {
7422 rc = NO_MEMORY;
7423 }
7424 if (NO_ERROR == rc) {
7425 int32_t streams_found = 0;
7426 for (size_t i = 0; i < cnt; i++) {
7427 if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
7428 if (pprocDone) {
7429 // HAL already does internal reprocessing,
7430 // either via reprocessing before JPEG encoding,
7431 // or offline postprocessing for pproc bypass case.
7432 crop[0] = 0;
7433 crop[1] = 0;
7434 crop[2] = mInputStreamInfo.dim.width;
7435 crop[3] = mInputStreamInfo.dim.height;
7436 } else {
7437 crop[0] = crop_data->crop_info[i].crop.left;
7438 crop[1] = crop_data->crop_info[i].crop.top;
7439 crop[2] = crop_data->crop_info[i].crop.width;
7440 crop[3] = crop_data->crop_info[i].crop.height;
7441 }
7442 roi_map.add(crop_data->crop_info[i].roi_map.left);
7443 roi_map.add(crop_data->crop_info[i].roi_map.top);
7444 roi_map.add(crop_data->crop_info[i].roi_map.width);
7445 roi_map.add(crop_data->crop_info[i].roi_map.height);
7446 streams_found++;
7447 LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
7448 crop[0], crop[1], crop[2], crop[3]);
7449 LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
7450 crop_data->crop_info[i].roi_map.left,
7451 crop_data->crop_info[i].roi_map.top,
7452 crop_data->crop_info[i].roi_map.width,
7453 crop_data->crop_info[i].roi_map.height);
7454 break;
7455
7456 }
7457 }
7458 camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
7459 &streams_found, 1);
7460 camMetadata.update(QCAMERA3_CROP_REPROCESS,
7461 crop, (size_t)(streams_found * 4));
7462 if (roi_map.array()) {
7463 camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
7464 roi_map.array(), roi_map.size());
7465 }
7466 }
7467 if (crop) {
7468 delete [] crop;
7469 }
7470 }
7471 }
7472 }
7473
7474 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
7475        // Regardless of whether CAC is supported, CTS expects the CAC result to be non-NULL,
7476        // so hardcode the CAC result to OFF mode.
7477 uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7478 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
7479 } else {
7480 IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
7481 int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
7482 *cacMode);
7483 if (NAME_NOT_FOUND != val) {
7484 uint8_t resultCacMode = (uint8_t)val;
7485                // Check whether the CAC result from the callback matches the framework-set CAC mode.
7486                // If not, report the CAC mode that came in the corresponding request.
7487 if (fwk_cacMode != resultCacMode) {
7488 resultCacMode = fwk_cacMode;
7489 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007490 //Check if CAC is disabled by property
7491 if (m_cacModeDisabled) {
7492 resultCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7493 }
7494
Thierry Strudel3d639192016-09-09 11:52:26 -07007495 LOGD("fwk_cacMode=%d resultCacMode=%d", fwk_cacMode, resultCacMode);
7496 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
7497 } else {
7498 LOGE("Invalid CAC camera parameter: %d", *cacMode);
7499 }
7500 }
7501 }
7502
7503 // Post blob of cam_cds_data through vendor tag.
7504 IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
7505 uint8_t cnt = cdsInfo->num_of_streams;
7506 cam_cds_data_t cdsDataOverride;
7507 memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
7508 cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
7509 cdsDataOverride.num_of_streams = 1;
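    // Only the CDS setting of the reprocessible output stream is reported to the
    // framework, so the per-stream info is collapsed into a single-entry override.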
7510 if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
7511 uint32_t reproc_stream_id;
7512 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7513 LOGD("No reprocessible stream found, ignore cds data");
7514 } else {
7515 for (size_t i = 0; i < cnt; i++) {
7516 if (cdsInfo->cds_info[i].stream_id ==
7517 reproc_stream_id) {
7518 cdsDataOverride.cds_info[0].cds_enable =
7519 cdsInfo->cds_info[i].cds_enable;
7520 break;
7521 }
7522 }
7523 }
7524 } else {
7525 LOGD("Invalid stream count %d in CDS_DATA", cnt);
7526 }
7527 camMetadata.update(QCAMERA3_CDS_INFO,
7528 (uint8_t *)&cdsDataOverride,
7529 sizeof(cam_cds_data_t));
7530 }
7531
7532 // Ldaf calibration data
7533 if (!mLdafCalibExist) {
7534 IF_META_AVAILABLE(uint32_t, ldafCalib,
7535 CAM_INTF_META_LDAF_EXIF, metadata) {
7536 mLdafCalibExist = true;
7537 mLdafCalib[0] = ldafCalib[0];
7538 mLdafCalib[1] = ldafCalib[1];
7539 LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
7540 ldafCalib[0], ldafCalib[1]);
7541 }
7542 }
7543
Thierry Strudel54dc9782017-02-15 12:12:10 -08007544 // EXIF debug data through vendor tag
7545 /*
7546 * Mobicat Mask can assume 3 values:
7547 * 1 refers to Mobicat data,
7548     * 2 refers to Stats Debug and Exif Debug Data,
7549     * 3 refers to Mobicat and Stats Debug Data.
7550 * We want to make sure that we are sending Exif debug data
7551 * only when Mobicat Mask is 2.
7552 */
7553 if ((mExifParams.debug_params != NULL) && (getMobicatMask() == 2)) {
7554 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB,
7555 (uint8_t *)(void *)mExifParams.debug_params,
7556 sizeof(mm_jpeg_debug_exif_params_t));
7557 }
7558
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007559 // Reprocess and DDM debug data through vendor tag
7560 cam_reprocess_info_t repro_info;
7561 memset(&repro_info, 0, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007562 IF_META_AVAILABLE(cam_stream_crop_info_t, sensorCropInfo,
7563 CAM_INTF_META_SNAP_CROP_INFO_SENSOR, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007564 memcpy(&(repro_info.sensor_crop_info), sensorCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007565 }
7566 IF_META_AVAILABLE(cam_stream_crop_info_t, camifCropInfo,
7567 CAM_INTF_META_SNAP_CROP_INFO_CAMIF, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007568 memcpy(&(repro_info.camif_crop_info), camifCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007569 }
7570 IF_META_AVAILABLE(cam_stream_crop_info_t, ispCropInfo,
7571 CAM_INTF_META_SNAP_CROP_INFO_ISP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007572 memcpy(&(repro_info.isp_crop_info), ispCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007573 }
7574 IF_META_AVAILABLE(cam_stream_crop_info_t, cppCropInfo,
7575 CAM_INTF_META_SNAP_CROP_INFO_CPP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007576 memcpy(&(repro_info.cpp_crop_info), cppCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007577 }
7578 IF_META_AVAILABLE(cam_focal_length_ratio_t, ratio,
7579 CAM_INTF_META_AF_FOCAL_LENGTH_RATIO, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007580 memcpy(&(repro_info.af_focal_length_ratio), ratio, sizeof(cam_focal_length_ratio_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007581 }
7582 IF_META_AVAILABLE(int32_t, flip, CAM_INTF_PARM_FLIP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007583 memcpy(&(repro_info.pipeline_flip), flip, sizeof(int32_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007584 }
7585 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
7586 CAM_INTF_PARM_ROTATION, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007587 memcpy(&(repro_info.rotation_info), rotationInfo, sizeof(cam_rotation_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007588 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007589 IF_META_AVAILABLE(cam_area_t, afRoi, CAM_INTF_META_AF_ROI, metadata) {
7590 memcpy(&(repro_info.af_roi), afRoi, sizeof(cam_area_t));
7591 }
7592 IF_META_AVAILABLE(cam_dyn_img_data_t, dynMask, CAM_INTF_META_IMG_DYN_FEAT, metadata) {
7593 memcpy(&(repro_info.dyn_mask), dynMask, sizeof(cam_dyn_img_data_t));
7594 }
7595 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB,
7596 (uint8_t *)&repro_info, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007597
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007598 // INSTANT AEC MODE
7599 IF_META_AVAILABLE(uint8_t, instant_aec_mode,
7600 CAM_INTF_PARM_INSTANT_AEC, metadata) {
7601 camMetadata.update(QCAMERA3_INSTANT_AEC_MODE, instant_aec_mode, 1);
7602 }
7603
Shuzhen Wange763e802016-03-31 10:24:29 -07007604 // AF scene change
7605 IF_META_AVAILABLE(uint8_t, afSceneChange, CAM_INTF_META_AF_SCENE_CHANGE, metadata) {
7606 camMetadata.update(NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE, afSceneChange, 1);
7607 }
7608
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007609 /* In batch mode, cache the first metadata in the batch */
7610 if (mBatchSize && firstMetadataInBatch) {
7611 mCachedMetadata.clear();
7612 mCachedMetadata = camMetadata;
7613 }
7614
Thierry Strudel3d639192016-09-09 11:52:26 -07007615 resultMetadata = camMetadata.release();
7616 return resultMetadata;
7617}
7618
7619/*===========================================================================
7620 * FUNCTION : saveExifParams
7621 *
7622 * DESCRIPTION: Cache the 3A/stats EXIF debug parameters from the metadata callback into mExifParams
7623 *
7624 * PARAMETERS :
7625 * @metadata : metadata information from callback
7626 *
7627 * RETURN : none
7628 *
7629 *==========================================================================*/
7630void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
7631{
7632 IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
7633 CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
7634 if (mExifParams.debug_params) {
7635 mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
7636 mExifParams.debug_params->ae_debug_params_valid = TRUE;
7637 }
7638 }
7639 IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
7640 CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
7641 if (mExifParams.debug_params) {
7642 mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
7643 mExifParams.debug_params->awb_debug_params_valid = TRUE;
7644 }
7645 }
7646 IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
7647 CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
7648 if (mExifParams.debug_params) {
7649 mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
7650 mExifParams.debug_params->af_debug_params_valid = TRUE;
7651 }
7652 }
7653 IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
7654 CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
7655 if (mExifParams.debug_params) {
7656 mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
7657 mExifParams.debug_params->asd_debug_params_valid = TRUE;
7658 }
7659 }
7660 IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
7661 CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
7662 if (mExifParams.debug_params) {
7663 mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
7664 mExifParams.debug_params->stats_debug_params_valid = TRUE;
7665 }
7666 }
7667 IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t,bestats_exif_debug_params,
7668 CAM_INTF_META_EXIF_DEBUG_BESTATS, metadata) {
7669 if (mExifParams.debug_params) {
7670 mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
7671 mExifParams.debug_params->bestats_debug_params_valid = TRUE;
7672 }
7673 }
7674 IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
7675 CAM_INTF_META_EXIF_DEBUG_BHIST, metadata) {
7676 if (mExifParams.debug_params) {
7677 mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
7678 mExifParams.debug_params->bhist_debug_params_valid = TRUE;
7679 }
7680 }
7681 IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
7682 CAM_INTF_META_EXIF_DEBUG_3A_TUNING, metadata) {
7683 if (mExifParams.debug_params) {
7684 mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
7685 mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
7686 }
7687 }
7688}
7689
7690/*===========================================================================
7691 * FUNCTION : get3AExifParams
7692 *
7693 * DESCRIPTION: Return the cached EXIF parameters (mExifParams)
7694 *
7695 * PARAMETERS : none
7696 *
7697 *
7698 * RETURN : mm_jpeg_exif_params_t
7699 *
7700 *==========================================================================*/
7701mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
7702{
7703 return mExifParams;
7704}
7705
7706/*===========================================================================
7707 * FUNCTION : translateCbUrgentMetadataToResultMetadata
7708 *
7709 * DESCRIPTION:
7710 *
7711 * PARAMETERS :
7712 * @metadata : metadata information from callback
7713 *
7714 * RETURN : camera_metadata_t*
7715 * metadata in a format specified by fwk
7716 *==========================================================================*/
7717camera_metadata_t*
7718QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
7719 (metadata_buffer_t *metadata)
7720{
7721 CameraMetadata camMetadata;
7722 camera_metadata_t *resultMetadata;
7723
7724
7725 IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
7726 uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
7727 camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
7728 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
7729 }
7730
7731 IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
7732 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
7733 &aecTrigger->trigger, 1);
7734 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
7735 &aecTrigger->trigger_id, 1);
7736 LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
7737 aecTrigger->trigger);
7738 LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
7739 aecTrigger->trigger_id);
7740 }
7741
7742 IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
7743 uint8_t fwk_ae_state = (uint8_t) *ae_state;
7744 camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
7745 LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
7746 }
7747
Thierry Strudel3d639192016-09-09 11:52:26 -07007748 IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
7749 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
7750 &af_trigger->trigger, 1);
7751 LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
7752 af_trigger->trigger);
7753 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &af_trigger->trigger_id, 1);
7754 LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
7755 af_trigger->trigger_id);
7756 }
7757
7758 IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
7759 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
7760 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
7761 if (NAME_NOT_FOUND != val) {
7762 uint8_t fwkWhiteBalanceMode = (uint8_t)val;
7763 camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
7764 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
7765 } else {
7766 LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
7767 }
7768 }
7769
7770 uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
7771 uint32_t aeMode = CAM_AE_MODE_MAX;
7772 int32_t flashMode = CAM_FLASH_MODE_MAX;
7773 int32_t redeye = -1;
7774 IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
7775 aeMode = *pAeMode;
7776 }
7777 IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
7778 flashMode = *pFlashMode;
7779 }
7780 IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
7781 redeye = *pRedeye;
7782 }
7783
7784 if (1 == redeye) {
7785 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
7786 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
7787 } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
7788 int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
7789 flashMode);
7790 if (NAME_NOT_FOUND != val) {
7791 fwk_aeMode = (uint8_t)val;
7792 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
7793 } else {
7794 LOGE("Unsupported flash mode %d", flashMode);
7795 }
7796 } else if (aeMode == CAM_AE_MODE_ON) {
7797 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
7798 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
7799 } else if (aeMode == CAM_AE_MODE_OFF) {
7800 fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
7801 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
7802 } else {
7803 LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
7804 "flashMode:%d, aeMode:%u!!!",
7805 redeye, flashMode, aeMode);
7806 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007807 if (mInstantAEC) {
7808        // Increment the frame index count until the bound is reached for instant AEC.
7809 mInstantAecFrameIdxCount++;
7810 IF_META_AVAILABLE(cam_3a_params_t, ae_params,
7811 CAM_INTF_META_AEC_INFO, metadata) {
7812 LOGH("ae_params->settled = %d",ae_params->settled);
7813            // If AEC has settled, or the number of frames has reached the bound,
7814            // reset instant AEC.
7815 if (ae_params->settled ||
7816 (mInstantAecFrameIdxCount > mAecSkipDisplayFrameBound)) {
7817 LOGH("AEC settled or Frames reached instantAEC bound, resetting instantAEC");
7818 mInstantAEC = false;
7819 mResetInstantAEC = true;
7820 mInstantAecFrameIdxCount = 0;
7821 }
7822 }
7823 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007824 resultMetadata = camMetadata.release();
7825 return resultMetadata;
7826}
7827
7828/*===========================================================================
7829 * FUNCTION : dumpMetadataToFile
7830 *
7831 * DESCRIPTION: Dumps tuning metadata to file system
7832 *
7833 * PARAMETERS :
7834 * @meta : tuning metadata
7835 * @dumpFrameCount : current dump frame count
7836 * @enabled : Enable mask
7837 *
7838 *==========================================================================*/
7839void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
7840 uint32_t &dumpFrameCount,
7841 bool enabled,
7842 const char *type,
7843 uint32_t frameNumber)
7844{
7845 //Some sanity checks
7846 if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
7847 LOGE("Tuning sensor data size bigger than expected %d: %d",
7848 meta.tuning_sensor_data_size,
7849 TUNING_SENSOR_DATA_MAX);
7850 return;
7851 }
7852
7853 if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
7854 LOGE("Tuning VFE data size bigger than expected %d: %d",
7855 meta.tuning_vfe_data_size,
7856 TUNING_VFE_DATA_MAX);
7857 return;
7858 }
7859
7860 if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
7861 LOGE("Tuning CPP data size bigger than expected %d: %d",
7862 meta.tuning_cpp_data_size,
7863 TUNING_CPP_DATA_MAX);
7864 return;
7865 }
7866
7867 if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
7868 LOGE("Tuning CAC data size bigger than expected %d: %d",
7869 meta.tuning_cac_data_size,
7870 TUNING_CAC_DATA_MAX);
7871 return;
7872 }
7873 //
7874
7875 if(enabled){
7876 char timeBuf[FILENAME_MAX];
7877 char buf[FILENAME_MAX];
7878 memset(buf, 0, sizeof(buf));
7879 memset(timeBuf, 0, sizeof(timeBuf));
7880 time_t current_time;
7881 struct tm * timeinfo;
7882 time (&current_time);
7883 timeinfo = localtime (&current_time);
7884 if (timeinfo != NULL) {
7885 strftime (timeBuf, sizeof(timeBuf),
7886 QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
7887 }
7888 String8 filePath(timeBuf);
7889 snprintf(buf,
7890 sizeof(buf),
7891 "%dm_%s_%d.bin",
7892 dumpFrameCount,
7893 type,
7894 frameNumber);
7895 filePath.append(buf);
7896 int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
7897 if (file_fd >= 0) {
7898 ssize_t written_len = 0;
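            // The dump file mirrors the tuning blob layout: version, the five
            // section-size fields, then the sensor/VFE/CPP/CAC payloads.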
7899 meta.tuning_data_version = TUNING_DATA_VERSION;
7900 void *data = (void *)((uint8_t *)&meta.tuning_data_version);
7901 written_len += write(file_fd, data, sizeof(uint32_t));
7902 data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
7903 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
7904 written_len += write(file_fd, data, sizeof(uint32_t));
7905 data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
7906 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
7907 written_len += write(file_fd, data, sizeof(uint32_t));
7908 data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
7909 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
7910 written_len += write(file_fd, data, sizeof(uint32_t));
7911 data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
7912 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
7913 written_len += write(file_fd, data, sizeof(uint32_t));
7914 meta.tuning_mod3_data_size = 0;
7915 data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
7916 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
7917 written_len += write(file_fd, data, sizeof(uint32_t));
7918 size_t total_size = meta.tuning_sensor_data_size;
7919 data = (void *)((uint8_t *)&meta.data);
7920 written_len += write(file_fd, data, total_size);
7921 total_size = meta.tuning_vfe_data_size;
7922 data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
7923 written_len += write(file_fd, data, total_size);
7924 total_size = meta.tuning_cpp_data_size;
7925 data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
7926 written_len += write(file_fd, data, total_size);
7927 total_size = meta.tuning_cac_data_size;
7928 data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
7929 written_len += write(file_fd, data, total_size);
7930 close(file_fd);
7931 }else {
7932 LOGE("fail to open file for metadata dumping");
7933 }
7934 }
7935}
7936
7937/*===========================================================================
7938 * FUNCTION : cleanAndSortStreamInfo
7939 *
7940 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
7941 * and sort them such that raw stream is at the end of the list
7942 * This is a workaround for camera daemon constraint.
7943 *
7944 * PARAMETERS : None
7945 *
7946 *==========================================================================*/
7947void QCamera3HardwareInterface::cleanAndSortStreamInfo()
7948{
7949 List<stream_info_t *> newStreamInfo;
7950
7951 /*clean up invalid streams*/
7952 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
7953 it != mStreamInfo.end();) {
7954 if(((*it)->status) == INVALID){
7955 QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
7956 delete channel;
7957 free(*it);
7958 it = mStreamInfo.erase(it);
7959 } else {
7960 it++;
7961 }
7962 }
7963
7964 // Move preview/video/callback/snapshot streams into newList
7965 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
7966 it != mStreamInfo.end();) {
7967 if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
7968 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
7969 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
7970 newStreamInfo.push_back(*it);
7971 it = mStreamInfo.erase(it);
7972 } else
7973 it++;
7974 }
7975 // Move raw streams into newList
7976 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
7977 it != mStreamInfo.end();) {
7978 newStreamInfo.push_back(*it);
7979 it = mStreamInfo.erase(it);
7980 }
7981
7982 mStreamInfo = newStreamInfo;
7983}
7984
7985/*===========================================================================
7986 * FUNCTION : extractJpegMetadata
7987 *
7988 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
7989 * JPEG metadata is cached in HAL, and return as part of capture
7990 * result when metadata is returned from camera daemon.
7991 *
7992 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
7993 * @request: capture request
7994 *
7995 *==========================================================================*/
7996void QCamera3HardwareInterface::extractJpegMetadata(
7997 CameraMetadata& jpegMetadata,
7998 const camera3_capture_request_t *request)
7999{
8000 CameraMetadata frame_settings;
8001 frame_settings = request->settings;
8002
8003 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
8004 jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
8005 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
8006 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
8007
8008 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
8009 jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
8010 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
8011 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
8012
8013 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
8014 jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
8015 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
8016 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
8017
8018 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
8019 jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
8020 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
8021 frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
8022
8023 if (frame_settings.exists(ANDROID_JPEG_QUALITY))
8024 jpegMetadata.update(ANDROID_JPEG_QUALITY,
8025 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
8026 frame_settings.find(ANDROID_JPEG_QUALITY).count);
8027
8028 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
8029 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
8030 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
8031 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
8032
8033 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
8034 int32_t thumbnail_size[2];
8035 thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
8036 thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
8037 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
8038 int32_t orientation =
8039 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008040 if ((!needJpegExifRotation()) && ((orientation == 90) || (orientation == 270))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07008041 //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
8042 int32_t temp;
8043 temp = thumbnail_size[0];
8044 thumbnail_size[0] = thumbnail_size[1];
8045 thumbnail_size[1] = temp;
8046 }
8047 }
8048 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
8049 thumbnail_size,
8050 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
8051 }
8052
8053}
8054
8055/*===========================================================================
8056 * FUNCTION : convertToRegions
8057 *
8058 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
8059 *
8060 * PARAMETERS :
8061 * @rect : cam_rect_t struct to convert
8062 * @region : int32_t destination array
8063 * @weight : if we are converting from cam_area_t, weight is valid
8064 * else weight = -1
8065 *
8066 *==========================================================================*/
8067void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
8068 int32_t *region, int weight)
8069{
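    // Output follows the Android metadata region layout:
    // [x_min, y_min, x_max, y_max], with an optional trailing weight.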
8070 region[0] = rect.left;
8071 region[1] = rect.top;
8072 region[2] = rect.left + rect.width;
8073 region[3] = rect.top + rect.height;
8074 if (weight > -1) {
8075 region[4] = weight;
8076 }
8077}
8078
8079/*===========================================================================
8080 * FUNCTION : convertFromRegions
8081 *
 * DESCRIPTION: helper method to convert a metadata region array into cam_area_t
 *
 * PARAMETERS :
 *   @roi            : cam_area_t destination struct
 *   @frame_settings : capture request settings containing the region tag
 *   @tag            : metadata tag holding [x_min, y_min, x_max, y_max, weight]
8089 *
8090 *==========================================================================*/
8091void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
Chien-Yu Chen92724a82017-01-06 11:50:30 -08008092 const CameraMetadata &frame_settings, uint32_t tag)
Thierry Strudel3d639192016-09-09 11:52:26 -07008093{
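    // The framework supplies metering/focus regions as
    // [x_min, y_min, x_max, y_max, weight]; convert to left/top/width/height.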
Thierry Strudel3d639192016-09-09 11:52:26 -07008094 int32_t x_min = frame_settings.find(tag).data.i32[0];
8095 int32_t y_min = frame_settings.find(tag).data.i32[1];
8096 int32_t x_max = frame_settings.find(tag).data.i32[2];
8097 int32_t y_max = frame_settings.find(tag).data.i32[3];
8098 roi.weight = frame_settings.find(tag).data.i32[4];
8099 roi.rect.left = x_min;
8100 roi.rect.top = y_min;
8101 roi.rect.width = x_max - x_min;
8102 roi.rect.height = y_max - y_min;
8103}
8104
8105/*===========================================================================
8106 * FUNCTION : resetIfNeededROI
8107 *
 * DESCRIPTION: helper method to clamp the roi to the scaler crop region, or
 *              reject it if it lies completely outside the crop region
8110 *
8111 * PARAMETERS :
8112 * @roi : cam_area_t struct to resize
8113 * @scalerCropRegion : cam_crop_region_t region to compare against
8114 *
8115 *
8116 *==========================================================================*/
8117bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
8118 const cam_crop_region_t* scalerCropRegion)
8119{
8120 int32_t roi_x_max = roi->rect.width + roi->rect.left;
8121 int32_t roi_y_max = roi->rect.height + roi->rect.top;
8122 int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
8123 int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
8124
    /* According to the spec, weight = 0 indicates that the roi should be disabled.
     * Without this check, the calculations below that validate whether the roi
     * is inside the scaler crop region would fail, the roi would not be
     * reset, and the algorithm would keep using a stale roi window.
     */
8130 if (roi->weight == 0) {
8131 return true;
8132 }
8133
    if ((roi_x_max < scalerCropRegion->left) ||
        // right edge of roi window is left of scaler crop's left edge
        (roi_y_max < scalerCropRegion->top)  ||
        // bottom edge of roi window is above scaler crop's top edge
        (roi->rect.left > crop_x_max) ||
        // left edge of roi window is beyond (right of) scaler crop's right edge
        (roi->rect.top > crop_y_max)){
        // top edge of roi window is below scaler crop's bottom edge
8142 return false;
8143 }
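    // The roi overlaps the scaler crop region at least partially;
    // clamp each edge so the roi lies fully inside the crop region.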
8144 if (roi->rect.left < scalerCropRegion->left) {
8145 roi->rect.left = scalerCropRegion->left;
8146 }
8147 if (roi->rect.top < scalerCropRegion->top) {
8148 roi->rect.top = scalerCropRegion->top;
8149 }
8150 if (roi_x_max > crop_x_max) {
8151 roi_x_max = crop_x_max;
8152 }
8153 if (roi_y_max > crop_y_max) {
8154 roi_y_max = crop_y_max;
8155 }
8156 roi->rect.width = roi_x_max - roi->rect.left;
8157 roi->rect.height = roi_y_max - roi->rect.top;
8158 return true;
8159}
8160
8161/*===========================================================================
8162 * FUNCTION : convertLandmarks
8163 *
8164 * DESCRIPTION: helper method to extract the landmarks from face detection info
8165 *
8166 * PARAMETERS :
8167 * @landmark_data : input landmark data to be converted
8168 * @landmarks : int32_t destination array
8169 *
8170 *
8171 *==========================================================================*/
8172void QCamera3HardwareInterface::convertLandmarks(
8173 cam_face_landmarks_info_t landmark_data,
8174 int32_t *landmarks)
8175{
Thierry Strudel04e026f2016-10-10 11:27:36 -07008176 if (landmark_data.is_left_eye_valid) {
8177 landmarks[LEFT_EYE_X] = (int32_t)landmark_data.left_eye_center.x;
8178 landmarks[LEFT_EYE_Y] = (int32_t)landmark_data.left_eye_center.y;
8179 } else {
8180 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8181 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8182 }
8183
8184 if (landmark_data.is_right_eye_valid) {
8185 landmarks[RIGHT_EYE_X] = (int32_t)landmark_data.right_eye_center.x;
8186 landmarks[RIGHT_EYE_Y] = (int32_t)landmark_data.right_eye_center.y;
8187 } else {
8188 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8189 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8190 }
8191
8192 if (landmark_data.is_mouth_valid) {
8193 landmarks[MOUTH_X] = (int32_t)landmark_data.mouth_center.x;
8194 landmarks[MOUTH_Y] = (int32_t)landmark_data.mouth_center.y;
8195 } else {
8196 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8197 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
8198 }
8199}
8200
8201/*===========================================================================
8202 * FUNCTION : setInvalidLandmarks
8203 *
8204 * DESCRIPTION: helper method to set invalid landmarks
8205 *
8206 * PARAMETERS :
8207 * @landmarks : int32_t destination array
8208 *
8209 *
8210 *==========================================================================*/
8211void QCamera3HardwareInterface::setInvalidLandmarks(
8212 int32_t *landmarks)
8213{
8214 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8215 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8216 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8217 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8218 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8219 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
Thierry Strudel3d639192016-09-09 11:52:26 -07008220}
8221
8222#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008223
8224/*===========================================================================
8225 * FUNCTION : getCapabilities
8226 *
8227 * DESCRIPTION: query camera capability from back-end
8228 *
8229 * PARAMETERS :
8230 * @ops : mm-interface ops structure
8231 * @cam_handle : camera handle for which we need capability
8232 *
8233 * RETURN : ptr type of capability structure
8234 * capability for success
8235 * NULL for failure
8236 *==========================================================================*/
8237cam_capability_t *QCamera3HardwareInterface::getCapabilities(mm_camera_ops_t *ops,
8238 uint32_t cam_handle)
8239{
8240 int rc = NO_ERROR;
8241 QCamera3HeapMemory *capabilityHeap = NULL;
8242 cam_capability_t *cap_ptr = NULL;
8243
8244 if (ops == NULL) {
8245 LOGE("Invalid arguments");
8246 return NULL;
8247 }
8248
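    // Sequence: allocate a heap buffer, map it to the camera backend, have the
    // backend fill it via query_capability, then copy the result into a
    // malloc'd cam_capability_t owned by the caller before unmapping.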
8249 capabilityHeap = new QCamera3HeapMemory(1);
8250 if (capabilityHeap == NULL) {
8251 LOGE("creation of capabilityHeap failed");
8252 return NULL;
8253 }
8254
8255 /* Allocate memory for capability buffer */
8256 rc = capabilityHeap->allocate(sizeof(cam_capability_t));
8257 if(rc != OK) {
        LOGE("No memory for capability");
8259 goto allocate_failed;
8260 }
8261
    /* Clear the capability buffer, then map it to the camera backend */
8263 memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
8264
8265 rc = ops->map_buf(cam_handle,
8266 CAM_MAPPING_BUF_TYPE_CAPABILITY, capabilityHeap->getFd(0),
8267 sizeof(cam_capability_t), capabilityHeap->getPtr(0));
8268 if(rc < 0) {
8269 LOGE("failed to map capability buffer");
8270 rc = FAILED_TRANSACTION;
8271 goto map_failed;
8272 }
8273
8274 /* Query Capability */
8275 rc = ops->query_capability(cam_handle);
8276 if(rc < 0) {
8277 LOGE("failed to query capability");
8278 rc = FAILED_TRANSACTION;
8279 goto query_failed;
8280 }
8281
8282 cap_ptr = (cam_capability_t *)malloc(sizeof(cam_capability_t));
8283 if (cap_ptr == NULL) {
8284 LOGE("out of memory");
8285 rc = NO_MEMORY;
8286 goto query_failed;
8287 }
8288
8289 memset(cap_ptr, 0, sizeof(cam_capability_t));
8290 memcpy(cap_ptr, DATA_PTR(capabilityHeap, 0), sizeof(cam_capability_t));
8291
8292 int index;
8293 for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
8294 cam_analysis_info_t *p_analysis_info = &cap_ptr->analysis_info[index];
8295 p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
8296 p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
8297 }
8298
8299query_failed:
8300 ops->unmap_buf(cam_handle, CAM_MAPPING_BUF_TYPE_CAPABILITY);
8301map_failed:
8302 capabilityHeap->deallocate();
8303allocate_failed:
8304 delete capabilityHeap;
8305
8306 if (rc != NO_ERROR) {
8307 return NULL;
8308 } else {
8309 return cap_ptr;
8310 }
8311}
8312
Thierry Strudel3d639192016-09-09 11:52:26 -07008313/*===========================================================================
8314 * FUNCTION : initCapabilities
8315 *
8316 * DESCRIPTION: initialize camera capabilities in static data struct
8317 *
8318 * PARAMETERS :
8319 * @cameraId : camera Id
8320 *
8321 * RETURN : int32_t type of status
8322 * NO_ERROR -- success
 *              non-zero failure code
8324 *==========================================================================*/
8325int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
8326{
8327 int rc = 0;
8328 mm_camera_vtbl_t *cameraHandle = NULL;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008329 uint32_t handle = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07008330
8331 rc = camera_open((uint8_t)cameraId, &cameraHandle);
8332 if (rc) {
8333 LOGE("camera_open failed. rc = %d", rc);
8334 goto open_failed;
8335 }
8336 if (!cameraHandle) {
8337 LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
8338 goto open_failed;
8339 }
8340
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008341 handle = get_main_camera_handle(cameraHandle->camera_handle);
8342 gCamCapability[cameraId] = getCapabilities(cameraHandle->ops, handle);
8343 if (gCamCapability[cameraId] == NULL) {
8344 rc = FAILED_TRANSACTION;
8345 goto failed_op;
Thierry Strudel3d639192016-09-09 11:52:26 -07008346 }
8347
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008348 gCamCapability[cameraId]->camera_index = cameraId;
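    // For dual-camera ids, also query the aux sensor's capability and keep a
    // separate copy of the main capability in main_cam_cap.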
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008349 if (is_dual_camera_by_idx(cameraId)) {
8350 handle = get_aux_camera_handle(cameraHandle->camera_handle);
8351 gCamCapability[cameraId]->aux_cam_cap =
8352 getCapabilities(cameraHandle->ops, handle);
8353 if (gCamCapability[cameraId]->aux_cam_cap == NULL) {
8354 rc = FAILED_TRANSACTION;
8355 free(gCamCapability[cameraId]);
8356 goto failed_op;
8357 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08008358
8359 // Copy the main camera capability to main_cam_cap struct
8360 gCamCapability[cameraId]->main_cam_cap =
8361 (cam_capability_t *)malloc(sizeof(cam_capability_t));
8362 if (gCamCapability[cameraId]->main_cam_cap == NULL) {
8363 LOGE("out of memory");
8364 rc = NO_MEMORY;
8365 goto failed_op;
8366 }
8367 memcpy(gCamCapability[cameraId]->main_cam_cap, gCamCapability[cameraId],
8368 sizeof(cam_capability_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008369 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008370failed_op:
Thierry Strudel3d639192016-09-09 11:52:26 -07008371 cameraHandle->ops->close_camera(cameraHandle->camera_handle);
8372 cameraHandle = NULL;
8373open_failed:
8374 return rc;
8375}
8376
8377/*==========================================================================
 * FUNCTION   : get3AVersion
8379 *
8380 * DESCRIPTION: get the Q3A S/W version
8381 *
8382 * PARAMETERS :
8383 * @sw_version: Reference of Q3A structure which will hold version info upon
8384 * return
8385 *
8386 * RETURN : None
8387 *
8388 *==========================================================================*/
8389void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
8390{
8391 if(gCamCapability[mCameraId])
8392 sw_version = gCamCapability[mCameraId]->q3a_version;
8393 else
8394 LOGE("Capability structure NULL!");
8395}
8396
8397
8398/*===========================================================================
8399 * FUNCTION : initParameters
8400 *
8401 * DESCRIPTION: initialize camera parameters
8402 *
8403 * PARAMETERS :
8404 *
8405 * RETURN : int32_t type of status
8406 * NO_ERROR -- success
 *              non-zero failure code
8408 *==========================================================================*/
8409int QCamera3HardwareInterface::initParameters()
8410{
8411 int rc = 0;
8412
8413 //Allocate Set Param Buffer
8414 mParamHeap = new QCamera3HeapMemory(1);
8415 rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
8416 if(rc != OK) {
8417 rc = NO_MEMORY;
8418 LOGE("Failed to allocate SETPARM Heap memory");
8419 delete mParamHeap;
8420 mParamHeap = NULL;
8421 return rc;
8422 }
8423
8424 //Map memory for parameters buffer
8425 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
8426 CAM_MAPPING_BUF_TYPE_PARM_BUF,
8427 mParamHeap->getFd(0),
8428 sizeof(metadata_buffer_t),
8429 (metadata_buffer_t *) DATA_PTR(mParamHeap,0));
8430 if(rc < 0) {
8431 LOGE("failed to map SETPARM buffer");
8432 rc = FAILED_TRANSACTION;
8433 mParamHeap->deallocate();
8434 delete mParamHeap;
8435 mParamHeap = NULL;
8436 return rc;
8437 }
8438
8439 mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
8440
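    // mParameters points into the heap buffer that was just mapped to the
    // backend; mPrevParameters is a plain heap copy that stays HAL-side.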
8441 mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
8442 return rc;
8443}
8444
8445/*===========================================================================
8446 * FUNCTION : deinitParameters
8447 *
8448 * DESCRIPTION: de-initialize camera parameters
8449 *
8450 * PARAMETERS :
8451 *
8452 * RETURN : NONE
8453 *==========================================================================*/
8454void QCamera3HardwareInterface::deinitParameters()
8455{
8456 mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
8457 CAM_MAPPING_BUF_TYPE_PARM_BUF);
8458
8459 mParamHeap->deallocate();
8460 delete mParamHeap;
8461 mParamHeap = NULL;
8462
8463 mParameters = NULL;
8464
8465 free(mPrevParameters);
8466 mPrevParameters = NULL;
8467}
8468
8469/*===========================================================================
8470 * FUNCTION : calcMaxJpegSize
8471 *
8472 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
8473 *
8474 * PARAMETERS :
8475 *
8476 * RETURN : max_jpeg_size
8477 *==========================================================================*/
8478size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
8479{
8480 size_t max_jpeg_size = 0;
8481 size_t temp_width, temp_height;
8482 size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
8483 MAX_SIZES_CNT);
8484 for (size_t i = 0; i < count; i++) {
8485 temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
8486 temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
8487 if (temp_width * temp_height > max_jpeg_size ) {
8488 max_jpeg_size = temp_width * temp_height;
8489 }
8490 }
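    // Budget 1.5 bytes per pixel of the largest picture size, plus room for the
    // camera3_jpeg_blob_t struct appended at the end of the blob buffer.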
8491 max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
8492 return max_jpeg_size;
8493}
8494
8495/*===========================================================================
8496 * FUNCTION : getMaxRawSize
8497 *
8498 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
8499 *
8500 * PARAMETERS :
8501 *
8502 * RETURN : Largest supported Raw Dimension
8503 *==========================================================================*/
8504cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
8505{
8506 int max_width = 0;
8507 cam_dimension_t maxRawSize;
8508
8509 memset(&maxRawSize, 0, sizeof(cam_dimension_t));
8510 for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
8511 if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
8512 max_width = gCamCapability[camera_id]->raw_dim[i].width;
8513 maxRawSize = gCamCapability[camera_id]->raw_dim[i];
8514 }
8515 }
8516 return maxRawSize;
8517}
8518
8519
8520/*===========================================================================
8521 * FUNCTION : calcMaxJpegDim
8522 *
8523 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
8524 *
8525 * PARAMETERS :
8526 *
8527 * RETURN : max_jpeg_dim
8528 *==========================================================================*/
8529cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
8530{
8531 cam_dimension_t max_jpeg_dim;
8532 cam_dimension_t curr_jpeg_dim;
8533 max_jpeg_dim.width = 0;
8534 max_jpeg_dim.height = 0;
8535 curr_jpeg_dim.width = 0;
8536 curr_jpeg_dim.height = 0;
8537 for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
8538 curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
8539 curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
8540 if (curr_jpeg_dim.width * curr_jpeg_dim.height >
8541 max_jpeg_dim.width * max_jpeg_dim.height ) {
8542 max_jpeg_dim.width = curr_jpeg_dim.width;
8543 max_jpeg_dim.height = curr_jpeg_dim.height;
8544 }
8545 }
8546 return max_jpeg_dim;
8547}
8548
8549/*===========================================================================
8550 * FUNCTION : addStreamConfig
8551 *
8552 * DESCRIPTION: adds the stream configuration to the array
8553 *
8554 * PARAMETERS :
8555 * @available_stream_configs : pointer to stream configuration array
8556 * @scalar_format : scalar format
8557 * @dim : configuration dimension
8558 * @config_type : input or output configuration type
8559 *
8560 * RETURN : NONE
8561 *==========================================================================*/
8562void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
8563 int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
8564{
8565 available_stream_configs.add(scalar_format);
8566 available_stream_configs.add(dim.width);
8567 available_stream_configs.add(dim.height);
8568 available_stream_configs.add(config_type);
8569}
8570
8571/*===========================================================================
 * FUNCTION   : supportBurstCapture
8573 *
8574 * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
8575 *
8576 * PARAMETERS :
8577 * @cameraId : camera Id
8578 *
8579 * RETURN : true if camera supports BURST_CAPTURE
8580 * false otherwise
8581 *==========================================================================*/
8582bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
8583{
8584 const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
8585 const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
8586 const int32_t highResWidth = 3264;
8587 const int32_t highResHeight = 2448;
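    // picture_min_duration[] holds per-resolution minimum frame durations in
    // nanoseconds; index 0 corresponds to the largest picture size.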
8588
8589 if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
8590 // Maximum resolution images cannot be captured at >= 10fps
8591 // -> not supporting BURST_CAPTURE
8592 return false;
8593 }
8594
8595 if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
8596 // Maximum resolution images can be captured at >= 20fps
8597 // --> supporting BURST_CAPTURE
8598 return true;
8599 }
8600
8601 // Find the smallest highRes resolution, or largest resolution if there is none
8602 size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
8603 MAX_SIZES_CNT);
8604 size_t highRes = 0;
8605 while ((highRes + 1 < totalCnt) &&
8606 (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
8607 gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
8608 highResWidth * highResHeight)) {
8609 highRes++;
8610 }
8611 if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
8612 return true;
8613 } else {
8614 return false;
8615 }
8616}
8617
8618/*===========================================================================
8619 * FUNCTION : initStaticMetadata
8620 *
8621 * DESCRIPTION: initialize the static metadata
8622 *
8623 * PARAMETERS :
8624 * @cameraId : camera Id
8625 *
8626 * RETURN : int32_t type of status
8627 * 0 -- success
8628 * non-zero failure code
8629 *==========================================================================*/
8630int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
8631{
8632 int rc = 0;
8633 CameraMetadata staticInfo;
8634 size_t count = 0;
8635 bool limitedDevice = false;
8636 char prop[PROPERTY_VALUE_MAX];
8637 bool supportBurst = false;
8638
8639 supportBurst = supportBurstCapture(cameraId);
8640
8641 /* If sensor is YUV sensor (no raw support) or if per-frame control is not
     * guaranteed, or if the min fps of max resolution is less than 20 fps, it is
     * advertised as a limited device */
8644 limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
8645 (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
8646 (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
8647 !supportBurst;
8648
8649 uint8_t supportedHwLvl = limitedDevice ?
8650 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008651#ifndef USE_HAL_3_3
8652 // LEVEL_3 - This device will support level 3.
8653 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
8654#else
Thierry Strudel3d639192016-09-09 11:52:26 -07008655 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008656#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07008657
8658 staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
8659 &supportedHwLvl, 1);
8660
8661 bool facingBack = false;
8662 if ((gCamCapability[cameraId]->position == CAM_POSITION_BACK) ||
8663 (gCamCapability[cameraId]->position == CAM_POSITION_BACK_AUX)) {
8664 facingBack = true;
8665 }
8666 /*HAL 3 only*/
8667 staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
8668 &gCamCapability[cameraId]->min_focus_distance, 1);
8669
8670 staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
8671 &gCamCapability[cameraId]->hyper_focal_distance, 1);
8672
8673 /*should be using focal lengths but sensor doesn't provide that info now*/
8674 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
8675 &gCamCapability[cameraId]->focal_length,
8676 1);
8677
8678 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
8679 gCamCapability[cameraId]->apertures,
8680 MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
8681
8682 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
8683 gCamCapability[cameraId]->filter_densities,
8684 MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
8685
8686
8687 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
8688 (uint8_t *)gCamCapability[cameraId]->optical_stab_modes,
8689 MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count));
8690
8691 int32_t lens_shading_map_size[] = {
8692 MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
8693 MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
8694 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
8695 lens_shading_map_size,
8696 sizeof(lens_shading_map_size)/sizeof(int32_t));
8697
8698 staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
8699 gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
8700
8701 staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
8702 gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
8703
8704 staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
8705 &gCamCapability[cameraId]->max_frame_duration, 1);
8706
8707 camera_metadata_rational baseGainFactor = {
8708 gCamCapability[cameraId]->base_gain_factor.numerator,
8709 gCamCapability[cameraId]->base_gain_factor.denominator};
8710 staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
8711 &baseGainFactor, 1);
8712
8713 staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
8714 (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
8715
8716 int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
8717 gCamCapability[cameraId]->pixel_array_size.height};
8718 staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
8719 pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
8720
8721 int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
8722 gCamCapability[cameraId]->active_array_size.top,
8723 gCamCapability[cameraId]->active_array_size.width,
8724 gCamCapability[cameraId]->active_array_size.height};
8725 staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
8726 active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
8727
8728 staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
8729 &gCamCapability[cameraId]->white_level, 1);
8730
Shuzhen Wanga5da1022016-07-13 20:18:42 -07008731 int32_t adjusted_bl_per_cfa[BLACK_LEVEL_PATTERN_CNT];
8732 adjustBlackLevelForCFA(gCamCapability[cameraId]->black_level_pattern, adjusted_bl_per_cfa,
8733 gCamCapability[cameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07008734 staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
Shuzhen Wanga5da1022016-07-13 20:18:42 -07008735 adjusted_bl_per_cfa, BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel3d639192016-09-09 11:52:26 -07008736
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008737#ifndef USE_HAL_3_3
8738 bool hasBlackRegions = false;
8739 if (gCamCapability[cameraId]->optical_black_region_count > MAX_OPTICAL_BLACK_REGIONS) {
8740 LOGW("black_region_count: %d is bounded to %d",
8741 gCamCapability[cameraId]->optical_black_region_count, MAX_OPTICAL_BLACK_REGIONS);
8742 gCamCapability[cameraId]->optical_black_region_count = MAX_OPTICAL_BLACK_REGIONS;
8743 }
8744 if (gCamCapability[cameraId]->optical_black_region_count != 0) {
8745 int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
8746 for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i++) {
8747 opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
8748 }
8749 staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
8750 opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
8751 hasBlackRegions = true;
8752 }
8753#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07008754 staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
8755 &gCamCapability[cameraId]->flash_charge_duration, 1);
8756
8757 staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
8758 &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
8759
Shuzhen Wang98d5efb2016-09-07 18:08:22 -07008760 uint8_t timestampSource = (gCamCapability[cameraId]->timestamp_calibrated ?
8761 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME :
8762 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN);
Thierry Strudel3d639192016-09-09 11:52:26 -07008763 staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
8764 &timestampSource, 1);
8765
Thierry Strudel54dc9782017-02-15 12:12:10 -08008766 //update histogram vendor data
8767 staticInfo.update(QCAMERA3_HISTOGRAM_BUCKETS,
Thierry Strudel3d639192016-09-09 11:52:26 -07008768 &gCamCapability[cameraId]->histogram_size, 1);
8769
Thierry Strudel54dc9782017-02-15 12:12:10 -08008770 staticInfo.update(QCAMERA3_HISTOGRAM_MAX_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -07008771 &gCamCapability[cameraId]->max_histogram_count, 1);
8772
8773 int32_t sharpness_map_size[] = {
8774 gCamCapability[cameraId]->sharpness_map_size.width,
8775 gCamCapability[cameraId]->sharpness_map_size.height};
8776
8777 staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
8778 sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
8779
8780 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
8781 &gCamCapability[cameraId]->max_sharpness_map_value, 1);
8782
8783 int32_t scalar_formats[] = {
8784 ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
8785 ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
8786 ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
8787 ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
8788 HAL_PIXEL_FORMAT_RAW10,
8789 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
8790 size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(int32_t);
8791 staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
8792 scalar_formats,
8793 scalar_formats_count);
8794
8795 int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
8796 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
8797 makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
8798 count, MAX_SIZES_CNT, available_processed_sizes);
8799 staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
8800 available_processed_sizes, count * 2);
8801
8802 int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
8803 count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
8804 makeTable(gCamCapability[cameraId]->raw_dim,
8805 count, MAX_SIZES_CNT, available_raw_sizes);
8806 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
8807 available_raw_sizes, count * 2);
8808
8809 int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
8810 count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
8811 makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
8812 count, MAX_SIZES_CNT, available_fps_ranges);
8813 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
8814 available_fps_ranges, count * 2);
8815
8816 camera_metadata_rational exposureCompensationStep = {
8817 gCamCapability[cameraId]->exp_compensation_step.numerator,
8818 gCamCapability[cameraId]->exp_compensation_step.denominator};
8819 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
8820 &exposureCompensationStep, 1);
8821
8822 Vector<uint8_t> availableVstabModes;
8823 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
8824 char eis_prop[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008825 bool eisSupported = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07008826 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008827 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07008828 uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008829 count = IS_TYPE_MAX;
8830 count = MIN(gCamCapability[cameraId]->supported_is_types_cnt, count);
8831 for (size_t i = 0; i < count; i++) {
8832 if ((gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
8833 (gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
8834 eisSupported = true;
8835 break;
8836 }
8837 }
8838 if (facingBack && eis_prop_set && eisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07008839 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
8840 }
8841 staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
8842 availableVstabModes.array(), availableVstabModes.size());
8843
8844 /*HAL 1 and HAL 3 common*/
8845 uint32_t zoomSteps = gCamCapability[cameraId]->zoom_ratio_tbl_cnt;
8846 uint32_t maxZoomStep = gCamCapability[cameraId]->zoom_ratio_tbl[zoomSteps - 1];
8847 uint32_t minZoomStep = 100; //as per HAL1/API1 spec
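    // Use floating-point division so fractional zoom ratios are not truncated.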
    float maxZoom = (float)maxZoomStep / (float)minZoomStep;
8849 staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
8850 &maxZoom, 1);
8851
8852 uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
8853 staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
8854
8855 int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
8856 if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
8857 max3aRegions[2] = 0; /* AF not supported */
8858 staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
8859 max3aRegions, 3);
8860
8861 /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
8862 memset(prop, 0, sizeof(prop));
8863 property_get("persist.camera.facedetect", prop, "1");
8864 uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
8865 LOGD("Support face detection mode: %d",
8866 supportedFaceDetectMode);
8867
8868 int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
    /* supported face detect mode should be OFF if the max number of faces is 0 */
8870 if (maxFaces <= 0) {
8871 supportedFaceDetectMode = 0;
8872 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008873 Vector<uint8_t> availableFaceDetectModes;
8874 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
8875 if (supportedFaceDetectMode == 1) {
8876 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
8877 } else if (supportedFaceDetectMode == 2) {
8878 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
8879 } else if (supportedFaceDetectMode == 3) {
8880 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
8881 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
8882 } else {
8883 maxFaces = 0;
8884 }
8885 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
8886 availableFaceDetectModes.array(),
8887 availableFaceDetectModes.size());
8888 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
8889 (int32_t *)&maxFaces, 1);
Thierry Strudel54dc9782017-02-15 12:12:10 -08008890 uint8_t face_bsgc = gCamCapability[cameraId]->face_bsgc;
8891 staticInfo.update(QCAMERA3_STATS_BSGC_AVAILABLE,
8892 &face_bsgc, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07008893
Emilian Peev7650c122017-01-19 08:24:33 -08008894#ifdef SUPPORT_DEPTH_DATA
8895 //TODO: Update depth size accordingly, currently we use active array
8896 // as reference.
8897 int32_t depthWidth = gCamCapability[cameraId]->active_array_size.width;
8898 int32_t depthHeight = gCamCapability[cameraId]->active_array_size.height;
    // As per the spec, the depth point cloud sample count should be (width * height) / 16
8900 int32_t depthSamplesCount = depthWidth * depthHeight / 16;
8901 assert(0 < depthSamplesCount);
8902 staticInfo.update(ANDROID_DEPTH_MAX_DEPTH_SAMPLES, &depthSamplesCount, 1);
8903
8904 int32_t depthConfigs[] = {HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1,
8905 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT };
8906 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
8907 depthConfigs, sizeof(depthConfigs)/sizeof(depthConfigs[0]));
8908
8909 int64_t depthMinDuration[] = {HAL_PIXEL_FORMAT_BLOB, depthSamplesCount,
8910 1, 1 };
8911 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
8912 depthMinDuration,
8913 sizeof(depthMinDuration) / sizeof(depthMinDuration[0]));
8914
8915 int64_t depthStallDuration[] = {HAL_PIXEL_FORMAT_BLOB, depthSamplesCount,
8916 1, 0 };
8917 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
8918 depthStallDuration,
8919 sizeof(depthStallDuration) / sizeof(depthStallDuration[0]));
8920
8921 uint8_t depthExclusive = ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE_FALSE;
8922 staticInfo.update(ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE, &depthExclusive, 1);
8923#endif
8924
Thierry Strudel3d639192016-09-09 11:52:26 -07008925 int32_t exposureCompensationRange[] = {
8926 gCamCapability[cameraId]->exposure_compensation_min,
8927 gCamCapability[cameraId]->exposure_compensation_max};
8928 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
8929 exposureCompensationRange,
8930 sizeof(exposureCompensationRange)/sizeof(int32_t));
8931
8932 uint8_t lensFacing = (facingBack) ?
8933 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
8934 staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
8935
8936 staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
8937 available_thumbnail_sizes,
8938 sizeof(available_thumbnail_sizes)/sizeof(int32_t));
8939
8940 /*all sizes will be clubbed into this tag*/
8941 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
8942 /*android.scaler.availableStreamConfigurations*/
8943 Vector<int32_t> available_stream_configs;
8944 cam_dimension_t active_array_dim;
8945 active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
8946 active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
Thierry Strudel2896d122017-02-23 19:18:03 -08008947
    /*advertise the list of supported input dimensions based on the property below.
      By default all sizes up to 5MP will be advertised.
8950 Note that the setprop resolution format should be WxH.
8951 e.g: adb shell setprop persist.camera.input.minsize 1280x720
8952 To list all supported sizes, setprop needs to be set with "0x0" */
8953 cam_dimension_t minInputSize = {2592,1944}; //5MP
8954 memset(prop, 0, sizeof(prop));
8955 property_get("persist.camera.input.minsize", prop, "2592x1944");
8956 if (strlen(prop) > 0) {
8957 char *saveptr = NULL;
8958 char *token = strtok_r(prop, "x", &saveptr);
8959 if (token != NULL) {
8960 minInputSize.width = atoi(token);
8961 }
8962 token = strtok_r(NULL, "x", &saveptr);
8963 if (token != NULL) {
8964 minInputSize.height = atoi(token);
8965 }
8966 }
8967
Thierry Strudel3d639192016-09-09 11:52:26 -07008968 /* Add input/output stream configurations for each scalar formats*/
8969 for (size_t j = 0; j < scalar_formats_count; j++) {
8970 switch (scalar_formats[j]) {
8971 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
8972 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
8973 case HAL_PIXEL_FORMAT_RAW10:
8974 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
8975 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
8976 addStreamConfig(available_stream_configs, scalar_formats[j],
8977 gCamCapability[cameraId]->raw_dim[i],
8978 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
8979 }
8980 break;
8981 case HAL_PIXEL_FORMAT_BLOB:
8982 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
8983 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
8984 addStreamConfig(available_stream_configs, scalar_formats[j],
8985 gCamCapability[cameraId]->picture_sizes_tbl[i],
8986 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
8987 }
8988 break;
8989 case HAL_PIXEL_FORMAT_YCbCr_420_888:
8990 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
8991 default:
8992 cam_dimension_t largest_picture_size;
8993 memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
8994 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
8995 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
8996 addStreamConfig(available_stream_configs, scalar_formats[j],
8997 gCamCapability[cameraId]->picture_sizes_tbl[i],
8998 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
                /* For the 2 formats below we also support input streams for reprocessing; advertise those */
9000 if (scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
9001 scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) {
9002 if ((gCamCapability[cameraId]->picture_sizes_tbl[i].width
9003 >= minInputSize.width) || (gCamCapability[cameraId]->
9004 picture_sizes_tbl[i].height >= minInputSize.height)) {
9005 addStreamConfig(available_stream_configs, scalar_formats[j],
9006 gCamCapability[cameraId]->picture_sizes_tbl[i],
9007 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
9008 }
9009 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009010 }
Thierry Strudel2896d122017-02-23 19:18:03 -08009011
Thierry Strudel3d639192016-09-09 11:52:26 -07009012 break;
9013 }
9014 }
9015
9016 staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
9017 available_stream_configs.array(), available_stream_configs.size());
9018 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
9019 staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
9020
9021 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
9022 staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
9023
9024 /* android.scaler.availableMinFrameDurations */
9025 Vector<int64_t> available_min_durations;
9026 for (size_t j = 0; j < scalar_formats_count; j++) {
9027 switch (scalar_formats[j]) {
9028 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9029 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9030 case HAL_PIXEL_FORMAT_RAW10:
9031 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9032 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9033 available_min_durations.add(scalar_formats[j]);
9034 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
9035 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
9036 available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
9037 }
9038 break;
9039 default:
9040 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9041 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9042 available_min_durations.add(scalar_formats[j]);
9043 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
9044 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
9045 available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
9046 }
9047 break;
9048 }
9049 }
9050 staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
9051 available_min_durations.array(), available_min_durations.size());
9052
9053 Vector<int32_t> available_hfr_configs;
9054 for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
9055 int32_t fps = 0;
9056 switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
9057 case CAM_HFR_MODE_60FPS:
9058 fps = 60;
9059 break;
9060 case CAM_HFR_MODE_90FPS:
9061 fps = 90;
9062 break;
9063 case CAM_HFR_MODE_120FPS:
9064 fps = 120;
9065 break;
9066 case CAM_HFR_MODE_150FPS:
9067 fps = 150;
9068 break;
9069 case CAM_HFR_MODE_180FPS:
9070 fps = 180;
9071 break;
9072 case CAM_HFR_MODE_210FPS:
9073 fps = 210;
9074 break;
9075 case CAM_HFR_MODE_240FPS:
9076 fps = 240;
9077 break;
9078 case CAM_HFR_MODE_480FPS:
9079 fps = 480;
9080 break;
9081 case CAM_HFR_MODE_OFF:
9082 case CAM_HFR_MODE_MAX:
9083 default:
9084 break;
9085 }
9086
9087 /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
9088 if (fps >= MIN_FPS_FOR_BATCH_MODE) {
9089 /* For each HFR frame rate, need to advertise one variable fps range
9090 * and one fixed fps range per dimension. Eg: for 120 FPS, advertise [30, 120]
9091 * and [120, 120]. While camcorder preview alone is running [30, 120] is
9092 * set by the app. When video recording is started, [120, 120] is
9093 * set. This way sensor configuration does not change when recording
9094 * is started */
9095
9096 /* (width, height, fps_min, fps_max, batch_size_max) */
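            // batch_size_max is derived as fps / PREVIEW_FPS_FOR_HFR,
            // e.g. a batch of 4 for a 120 fps configuration.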
9097 for (size_t j = 0; j < gCamCapability[cameraId]->hfr_tbl[i].dim_cnt &&
9098 j < MAX_SIZES_CNT; j++) {
9099 available_hfr_configs.add(
9100 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9101 available_hfr_configs.add(
9102 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9103 available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
9104 available_hfr_configs.add(fps);
9105 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9106
9107 /* (width, height, fps_min, fps_max, batch_size_max) */
9108 available_hfr_configs.add(
9109 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9110 available_hfr_configs.add(
9111 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9112 available_hfr_configs.add(fps);
9113 available_hfr_configs.add(fps);
9114 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9115 }
9116 }
9117 }
9118 //Advertise HFR capability only if the property is set
9119 memset(prop, 0, sizeof(prop));
9120 property_get("persist.camera.hal3hfr.enable", prop, "1");
9121 uint8_t hfrEnable = (uint8_t)atoi(prop);
9122
9123 if(hfrEnable && available_hfr_configs.array()) {
9124 staticInfo.update(
9125 ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
9126 available_hfr_configs.array(), available_hfr_configs.size());
9127 }
9128
9129 int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
9130 staticInfo.update(ANDROID_JPEG_MAX_SIZE,
9131 &max_jpeg_size, 1);
9132
9133 uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
9134 size_t size = 0;
9135 count = CAM_EFFECT_MODE_MAX;
9136 count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
9137 for (size_t i = 0; i < count; i++) {
9138 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
9139 gCamCapability[cameraId]->supported_effects[i]);
9140 if (NAME_NOT_FOUND != val) {
9141 avail_effects[size] = (uint8_t)val;
9142 size++;
9143 }
9144 }
9145 staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
9146 avail_effects,
9147 size);
9148
9149 uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
9150 uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
9151 size_t supported_scene_modes_cnt = 0;
9152 count = CAM_SCENE_MODE_MAX;
9153 count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
9154 for (size_t i = 0; i < count; i++) {
9155 if (gCamCapability[cameraId]->supported_scene_modes[i] !=
9156 CAM_SCENE_MODE_OFF) {
9157 int val = lookupFwkName(SCENE_MODES_MAP,
9158 METADATA_MAP_SIZE(SCENE_MODES_MAP),
9159 gCamCapability[cameraId]->supported_scene_modes[i]);
Mansoor Aftab58465fa2017-01-26 15:02:44 -08009160
Thierry Strudel3d639192016-09-09 11:52:26 -07009161 if (NAME_NOT_FOUND != val) {
9162 avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
9163 supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
9164 supported_scene_modes_cnt++;
9165 }
9166 }
9167 }
9168 staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
9169 avail_scene_modes,
9170 supported_scene_modes_cnt);
9171
9172 uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
9173 makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
9174 supported_scene_modes_cnt,
9175 CAM_SCENE_MODE_MAX,
9176 scene_mode_overrides,
9177 supported_indexes,
9178 cameraId);
9179
9180 if (supported_scene_modes_cnt == 0) {
9181 supported_scene_modes_cnt = 1;
9182 avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
9183 }
9184
9185 staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
9186 scene_mode_overrides, supported_scene_modes_cnt * 3);
9187
9188 uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
9189 ANDROID_CONTROL_MODE_AUTO,
9190 ANDROID_CONTROL_MODE_USE_SCENE_MODE};
9191 staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
9192 available_control_modes,
9193 3);
9194
9195 uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
9196 size = 0;
9197 count = CAM_ANTIBANDING_MODE_MAX;
9198 count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
9199 for (size_t i = 0; i < count; i++) {
9200 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
9201 gCamCapability[cameraId]->supported_antibandings[i]);
9202 if (NAME_NOT_FOUND != val) {
9203 avail_antibanding_modes[size] = (uint8_t)val;
9204 size++;
9205 }
9206
9207 }
9208 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
9209 avail_antibanding_modes,
9210 size);
9211
9212 uint8_t avail_abberation_modes[] = {
9213 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
9214 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
9215 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
9216 count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
9217 count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
9218 if (0 == count) {
        // If no aberration correction modes are available for a device, advertise only the OFF mode
        size = 1;
    } else {
        // If count is not zero then at least one of the FAST or HIGH_QUALITY modes is supported.
        // So, advertise all 3 modes if at least one mode is supported, as per the
        // new M requirement
9225 size = 3;
9226 }
9227 staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
9228 avail_abberation_modes,
9229 size);
9230
9231 uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
9232 size = 0;
9233 count = CAM_FOCUS_MODE_MAX;
9234 count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
9235 for (size_t i = 0; i < count; i++) {
9236 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
9237 gCamCapability[cameraId]->supported_focus_modes[i]);
9238 if (NAME_NOT_FOUND != val) {
9239 avail_af_modes[size] = (uint8_t)val;
9240 size++;
9241 }
9242 }
9243 staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
9244 avail_af_modes,
9245 size);
9246
9247 uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
9248 size = 0;
9249 count = CAM_WB_MODE_MAX;
9250 count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
9251 for (size_t i = 0; i < count; i++) {
9252 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
9253 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
9254 gCamCapability[cameraId]->supported_white_balances[i]);
9255 if (NAME_NOT_FOUND != val) {
9256 avail_awb_modes[size] = (uint8_t)val;
9257 size++;
9258 }
9259 }
9260 staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
9261 avail_awb_modes,
9262 size);
9263
9264 uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
9265 count = CAM_FLASH_FIRING_LEVEL_MAX;
9266 count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
9267 count);
9268 for (size_t i = 0; i < count; i++) {
9269 available_flash_levels[i] =
9270 gCamCapability[cameraId]->supported_firing_levels[i];
9271 }
9272 staticInfo.update(ANDROID_FLASH_FIRING_POWER,
9273 available_flash_levels, count);
9274
9275 uint8_t flashAvailable;
9276 if (gCamCapability[cameraId]->flash_available)
9277 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
9278 else
9279 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
9280 staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
9281 &flashAvailable, 1);
9282
9283 Vector<uint8_t> avail_ae_modes;
9284 count = CAM_AE_MODE_MAX;
9285 count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
9286 for (size_t i = 0; i < count; i++) {
9287 avail_ae_modes.add(gCamCapability[cameraId]->supported_ae_modes[i]);
9288 }
9289 if (flashAvailable) {
9290 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
9291 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
Thierry Strudel54dc9782017-02-15 12:12:10 -08009292 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE);
Thierry Strudel3d639192016-09-09 11:52:26 -07009293 }
9294 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
9295 avail_ae_modes.array(),
9296 avail_ae_modes.size());
9297
9298 int32_t sensitivity_range[2];
9299 sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
9300 sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
9301 staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
9302 sensitivity_range,
9303 sizeof(sensitivity_range) / sizeof(int32_t));
9304
9305 staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
9306 &gCamCapability[cameraId]->max_analog_sensitivity,
9307 1);
9308
9309 int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
9310 staticInfo.update(ANDROID_SENSOR_ORIENTATION,
9311 &sensor_orientation,
9312 1);
9313
9314 int32_t max_output_streams[] = {
9315 MAX_STALLING_STREAMS,
9316 MAX_PROCESSED_STREAMS,
9317 MAX_RAW_STREAMS};
9318 staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
9319 max_output_streams,
9320 sizeof(max_output_streams)/sizeof(max_output_streams[0]));
9321
9322 uint8_t avail_leds = 0;
9323 staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
9324 &avail_leds, 0);
9325
9326 uint8_t focus_dist_calibrated;
9327 int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
9328 gCamCapability[cameraId]->focus_dist_calibrated);
9329 if (NAME_NOT_FOUND != val) {
9330 focus_dist_calibrated = (uint8_t)val;
9331 staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
9332 &focus_dist_calibrated, 1);
9333 }
9334
9335 int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
9336 size = 0;
9337 count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
9338 MAX_TEST_PATTERN_CNT);
9339 for (size_t i = 0; i < count; i++) {
9340 int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
9341 gCamCapability[cameraId]->supported_test_pattern_modes[i]);
9342 if (NAME_NOT_FOUND != testpatternMode) {
9343 avail_testpattern_modes[size] = testpatternMode;
9344 size++;
9345 }
9346 }
9347 staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
9348 avail_testpattern_modes,
9349 size);
9350
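    // Reported pipeline depth is the in-flight request budget plus the fixed
    // empty-pipeline and frame-skip delays.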
9351 uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
9352 staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
9353 &max_pipeline_depth,
9354 1);
9355
9356 int32_t partial_result_count = PARTIAL_RESULT_COUNT;
9357 staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
9358 &partial_result_count,
9359 1);
9360
9361 int32_t max_stall_duration = MAX_REPROCESS_STALL;
9362 staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
9363
9364 Vector<uint8_t> available_capabilities;
9365 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
9366 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
9367 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
9368 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
9369 if (supportBurst) {
9370 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
9371 }
9372 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
9373 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
9374 if (hfrEnable && available_hfr_configs.array()) {
9375 available_capabilities.add(
9376 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
9377 }
9378
9379 if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
9380 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
9381 }
9382 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
9383 available_capabilities.array(),
9384 available_capabilities.size());
9385
    //aeLockAvailable to be set to true if capabilities include MANUAL_SENSOR or BURST_CAPTURE
9387 //Assumption is that all bayer cameras support MANUAL_SENSOR.
9388 uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
9389 ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
9390
9391 staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
9392 &aeLockAvailable, 1);
9393
9394    //awbLockAvailable is set to true if the capabilities include MANUAL_POST_PROCESSING or
9395    //BURST_CAPTURE. The assumption is that all Bayer cameras support MANUAL_POST_PROCESSING.
9396 uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
9397 ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
9398
9399 staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
9400 &awbLockAvailable, 1);
9401
9402 int32_t max_input_streams = 1;
9403 staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
9404 &max_input_streams,
9405 1);
9406
9407 /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
9408 int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
9409 HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
9410 HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
9411 HAL_PIXEL_FORMAT_YCbCr_420_888};
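    // Decoded, the flat map above reads:
    //   IMPLEMENTATION_DEFINED -> 2 outputs: BLOB, YCbCr_420_888
    //   YCbCr_420_888          -> 2 outputs: BLOB, YCbCr_420_888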
9412 staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
9413 io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));
9414
9415 int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
9416 staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
9417 &max_latency,
9418 1);
9419
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009420#ifndef USE_HAL_3_3
9421 int32_t isp_sensitivity_range[2];
9422 isp_sensitivity_range[0] =
9423 gCamCapability[cameraId]->isp_sensitivity_range.min_sensitivity;
9424 isp_sensitivity_range[1] =
9425 gCamCapability[cameraId]->isp_sensitivity_range.max_sensitivity;
9426 staticInfo.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
9427 isp_sensitivity_range,
9428 sizeof(isp_sensitivity_range) / sizeof(isp_sensitivity_range[0]));
9429#endif
9430
Thierry Strudel3d639192016-09-09 11:52:26 -07009431 uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
9432 ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
9433 staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
9434 available_hot_pixel_modes,
9435 sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));
9436
9437 uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
9438 ANDROID_SHADING_MODE_FAST,
9439 ANDROID_SHADING_MODE_HIGH_QUALITY};
9440 staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
9441 available_shading_modes,
9442 3);
9443
9444 uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
9445 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
9446 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
9447 available_lens_shading_map_modes,
9448 2);
9449
9450 uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
9451 ANDROID_EDGE_MODE_FAST,
9452 ANDROID_EDGE_MODE_HIGH_QUALITY,
9453 ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
9454 staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
9455 available_edge_modes,
9456 sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));
9457
9458 uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
9459 ANDROID_NOISE_REDUCTION_MODE_FAST,
9460 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
9461 ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
9462 ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
9463 staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
9464 available_noise_red_modes,
9465 sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));
9466
9467 uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
9468 ANDROID_TONEMAP_MODE_FAST,
9469 ANDROID_TONEMAP_MODE_HIGH_QUALITY};
9470 staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
9471 available_tonemap_modes,
9472 sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));
9473
9474 uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
9475 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
9476 available_hot_pixel_map_modes,
9477 sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
9478
9479 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
9480 gCamCapability[cameraId]->reference_illuminant1);
9481 if (NAME_NOT_FOUND != val) {
9482 uint8_t fwkReferenceIlluminant = (uint8_t)val;
9483 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
9484 }
9485
9486 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
9487 gCamCapability[cameraId]->reference_illuminant2);
9488 if (NAME_NOT_FOUND != val) {
9489 uint8_t fwkReferenceIlluminant = (uint8_t)val;
9490 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
9491 }
9492
9493 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
9494 (void *)gCamCapability[cameraId]->forward_matrix1,
9495 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
9496
9497 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
9498 (void *)gCamCapability[cameraId]->forward_matrix2,
9499 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
9500
9501 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
9502 (void *)gCamCapability[cameraId]->color_transform1,
9503 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
9504
9505 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
9506 (void *)gCamCapability[cameraId]->color_transform2,
9507 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
9508
9509 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
9510 (void *)gCamCapability[cameraId]->calibration_transform1,
9511 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
9512
9513 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
9514 (void *)gCamCapability[cameraId]->calibration_transform2,
9515 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
9516
9517 int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
9518 ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
9519 ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
9520 ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
9521 ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
9522 ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
9523 ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
9524 ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
9525 ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
9526 ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
9527 ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
9528 ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
9529 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
9530 ANDROID_JPEG_GPS_COORDINATES,
9531 ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
9532 ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
9533 ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
9534 ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
9535 ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
9536 ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
9537 ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
9538 ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
9539 ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
9540 ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009541#ifndef USE_HAL_3_3
9542 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
9543#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009544 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -08009545 ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -07009546 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
9547 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07009548 ANDROID_BLACK_LEVEL_LOCK, NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Samuel Ha68ba5172016-12-15 18:41:12 -08009549 /* DevCamDebug metadata request_keys_basic */
9550 DEVCAMDEBUG_META_ENABLE,
9551 /* DevCamDebug metadata end */
9552 };
Thierry Strudel3d639192016-09-09 11:52:26 -07009553
9554 size_t request_keys_cnt =
9555 sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
9556 Vector<int32_t> available_request_keys;
9557 available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
9558 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
9559 available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
9560 }
9561
9562 staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
9563 available_request_keys.array(), available_request_keys.size());
9564
9565 int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
9566 ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
9567 ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
9568 ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
9569 ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
9570 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
9571 ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
9572 ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
9573 ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
9574 ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
9575 ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
9576 ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
9577 ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
9578 ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
9579 ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
9580 ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
9581 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -08009582 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -07009583 ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
9584 ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
9585 ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009586 ANDROID_STATISTICS_FACE_SCORES,
9587#ifndef USE_HAL_3_3
9588 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
9589#endif
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07009590 NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Shuzhen Wange763e802016-03-31 10:24:29 -07009591 NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE,
Samuel Ha68ba5172016-12-15 18:41:12 -08009592 // DevCamDebug metadata result_keys_basic
9593 DEVCAMDEBUG_META_ENABLE,
9594 // DevCamDebug metadata result_keys AF
9595 DEVCAMDEBUG_AF_LENS_POSITION,
9596 DEVCAMDEBUG_AF_TOF_CONFIDENCE,
9597 DEVCAMDEBUG_AF_TOF_DISTANCE,
9598 DEVCAMDEBUG_AF_LUMA,
9599 DEVCAMDEBUG_AF_HAF_STATE,
9600 DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
9601 DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
9602 DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
9603 DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
9604 DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
9605 DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
9606 DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
9607 DEVCAMDEBUG_AF_MONITOR_REFOCUS,
9608 DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
9609 DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
9610 DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
9611 DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
9612 DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
9613 DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
9614 DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
9615 DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
9616 DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
9617 DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
9618 DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
9619 DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
9620 DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
9621 DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
9622 // DevCamDebug metadata result_keys AEC
9623 DEVCAMDEBUG_AEC_TARGET_LUMA,
9624 DEVCAMDEBUG_AEC_COMP_LUMA,
9625 DEVCAMDEBUG_AEC_AVG_LUMA,
9626 DEVCAMDEBUG_AEC_CUR_LUMA,
9627 DEVCAMDEBUG_AEC_LINECOUNT,
9628 DEVCAMDEBUG_AEC_REAL_GAIN,
9629 DEVCAMDEBUG_AEC_EXP_INDEX,
9630 DEVCAMDEBUG_AEC_LUX_IDX,
Samuel Ha34229982017-02-17 13:51:11 -08009631 // DevCamDebug metadata result_keys zzHDR
9632 DEVCAMDEBUG_AEC_L_REAL_GAIN,
9633 DEVCAMDEBUG_AEC_L_LINECOUNT,
9634 DEVCAMDEBUG_AEC_S_REAL_GAIN,
9635 DEVCAMDEBUG_AEC_S_LINECOUNT,
9636 DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
9637 DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
9638 // DevCamDebug metadata result_keys ADRC
9639 DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
9640 DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
9641 DEVCAMDEBUG_AEC_GTM_RATIO,
9642 DEVCAMDEBUG_AEC_LTM_RATIO,
9643 DEVCAMDEBUG_AEC_LA_RATIO,
9644 DEVCAMDEBUG_AEC_GAMMA_RATIO,
Samuel Ha68ba5172016-12-15 18:41:12 -08009645 // DevCamDebug metadata result_keys AWB
9646 DEVCAMDEBUG_AWB_R_GAIN,
9647 DEVCAMDEBUG_AWB_G_GAIN,
9648 DEVCAMDEBUG_AWB_B_GAIN,
9649 DEVCAMDEBUG_AWB_CCT,
9650 DEVCAMDEBUG_AWB_DECISION,
9651 /* DevCamDebug metadata end */
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009652 };
9653
Thierry Strudel3d639192016-09-09 11:52:26 -07009654 size_t result_keys_cnt =
9655 sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
9656
9657 Vector<int32_t> available_result_keys;
9658 available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
9659 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
9660 available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
9661 }
9662 if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
9663 available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
9664 available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
9665 }
9666 if (supportedFaceDetectMode == 1) {
9667 available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
9668 available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
9669 } else if ((supportedFaceDetectMode == 2) ||
9670 (supportedFaceDetectMode == 3)) {
9671 available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
9672 available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
9673 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009674#ifndef USE_HAL_3_3
9675 if (hasBlackRegions) {
9676 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
9677 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL);
9678 }
9679#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009680 staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
9681 available_result_keys.array(), available_result_keys.size());
9682
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009683 int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
Thierry Strudel3d639192016-09-09 11:52:26 -07009684 ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
9685 ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
9686 ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
9687 ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
9688 ANDROID_SCALER_CROPPING_TYPE,
9689 ANDROID_SYNC_MAX_LATENCY,
9690 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
9691 ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
9692 ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
9693 ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
9694 ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
9695 ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
9696 ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
9697 ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
9698 ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
9699 ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
9700 ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
9701 ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
9702 ANDROID_LENS_FACING,
9703 ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
9704 ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
9705 ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
9706 ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
9707 ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
9708 ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
9709 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
9710 /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
9711 ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
9712 ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
9713 ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
9714 ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
9715 ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
9716 ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
9717 ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
9718 ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
9719 ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
9720 ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
9721 ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
9722 ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
Thierry Strudel54dc9782017-02-15 12:12:10 -08009723 ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -07009724 ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
9725 ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
9726 ANDROID_EDGE_AVAILABLE_EDGE_MODES,
9727 ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
9728 ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
9729 ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
9730 ANDROID_TONEMAP_MAX_CURVE_POINTS,
9731 ANDROID_CONTROL_AVAILABLE_MODES,
9732 ANDROID_CONTROL_AE_LOCK_AVAILABLE,
9733 ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
9734 ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
9735 ANDROID_SHADING_AVAILABLE_MODES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009736 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
Emilian Peev7650c122017-01-19 08:24:33 -08009737#ifdef SUPPORT_DEPTH_DATA
9738 ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
9739 ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
9740 ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
9741 ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
9742 ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE,
9743#endif
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009744#ifndef USE_HAL_3_3
9745 ANDROID_SENSOR_OPAQUE_RAW_SIZE,
9746 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
9747#endif
9748 };
9749
9750 Vector<int32_t> available_characteristics_keys;
9751 available_characteristics_keys.appendArray(characteristics_keys_basic,
9752 sizeof(characteristics_keys_basic)/sizeof(int32_t));
9753#ifndef USE_HAL_3_3
9754 if (hasBlackRegions) {
9755 available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
9756 }
9757#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009758 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009759 available_characteristics_keys.array(),
9760 available_characteristics_keys.size());
Thierry Strudel3d639192016-09-09 11:52:26 -07009761
9762    /* Available stall durations depend on the HW + SW and will differ across devices. */
9763    /* TODO: add RAW entries after implementation. */
9764 int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
9765 size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
9766
9767 Vector<int64_t> available_stall_durations;
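    // Each supported size below contributes a (format, width, height, stall duration)
    // 4-tuple to this list.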
9768 for (uint32_t j = 0; j < stall_formats_count; j++) {
9769 if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
9770 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
9771 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9772 available_stall_durations.add(stall_formats[j]);
9773 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
9774 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
9775 available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
9776 }
9777 } else {
9778 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
9779 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9780 available_stall_durations.add(stall_formats[j]);
9781 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
9782 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
9783 available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
9784 }
9785 }
9786 }
9787 staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
9788 available_stall_durations.array(),
9789 available_stall_durations.size());
9790
9791 //QCAMERA3_OPAQUE_RAW
9792 uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
9793 cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
9794 switch (gCamCapability[cameraId]->opaque_raw_fmt) {
9795 case LEGACY_RAW:
9796 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
9797 fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
9798 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
9799 fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
9800 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
9801 fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
9802 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
9803 break;
9804 case MIPI_RAW:
9805 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
9806 fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
9807 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
9808 fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
9809 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
9810 fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
9811 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
9812 break;
9813 default:
9814 LOGE("unknown opaque_raw_format %d",
9815 gCamCapability[cameraId]->opaque_raw_fmt);
9816 break;
9817 }
9818 staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
9819
9820 Vector<int32_t> strides;
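    // Each supported RAW dimension contributes a (width, height, plane-0 stride)
    // triple to this list.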
9821 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9822 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9823 cam_stream_buf_plane_info_t buf_planes;
9824 strides.add(gCamCapability[cameraId]->raw_dim[i].width);
9825 strides.add(gCamCapability[cameraId]->raw_dim[i].height);
9826 mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
9827 &gCamCapability[cameraId]->padding_info, &buf_planes);
9828 strides.add(buf_planes.plane_info.mp[0].stride);
9829 }
9830 staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
9831 strides.size());
9832
Mansoor Aftab58465fa2017-01-26 15:02:44 -08009833 //TBD: remove the following line once backend advertises zzHDR in feature mask
9834 gCamCapability[cameraId]->qcom_supported_feature_mask |= CAM_QCOM_FEATURE_ZIGZAG_HDR;
Thierry Strudel04e026f2016-10-10 11:27:36 -07009835 //Video HDR default
9836 if ((gCamCapability[cameraId]->qcom_supported_feature_mask) &
9837 (CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR |
Mansoor Aftab58465fa2017-01-26 15:02:44 -08009838 CAM_QCOM_FEATURE_ZIGZAG_HDR | CAM_QCOM_FEATURE_SENSOR_HDR)) {
Thierry Strudel04e026f2016-10-10 11:27:36 -07009839 int32_t vhdr_mode[] = {
9840 QCAMERA3_VIDEO_HDR_MODE_OFF,
9841 QCAMERA3_VIDEO_HDR_MODE_ON};
9842
9843 size_t vhdr_mode_count = sizeof(vhdr_mode) / sizeof(int32_t);
9844 staticInfo.update(QCAMERA3_AVAILABLE_VIDEO_HDR_MODES,
9845 vhdr_mode, vhdr_mode_count);
9846 }
9847
Thierry Strudel3d639192016-09-09 11:52:26 -07009848 staticInfo.update(QCAMERA3_DUALCAM_CALIB_META_DATA_BLOB,
9849 (const uint8_t*)&gCamCapability[cameraId]->related_cam_calibration,
9850 sizeof(gCamCapability[cameraId]->related_cam_calibration));
9851
9852 uint8_t isMonoOnly =
9853 (gCamCapability[cameraId]->color_arrangement == CAM_FILTER_ARRANGEMENT_Y);
9854 staticInfo.update(QCAMERA3_SENSOR_IS_MONO_ONLY,
9855 &isMonoOnly, 1);
9856
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009857#ifndef USE_HAL_3_3
9858 Vector<int32_t> opaque_size;
9859 for (size_t j = 0; j < scalar_formats_count; j++) {
9860 if (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE) {
9861 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9862 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9863 cam_stream_buf_plane_info_t buf_planes;
9864
9865 rc = mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
9866 &gCamCapability[cameraId]->padding_info, &buf_planes);
9867
9868 if (rc == 0) {
9869 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].width);
9870 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].height);
9871 opaque_size.add(buf_planes.plane_info.frame_len);
9872            } else {
9873 LOGE("raw frame calculation failed!");
9874 }
9875 }
9876 }
9877 }
9878
9879 if ((opaque_size.size() > 0) &&
9880 (opaque_size.size() % PER_CONFIGURATION_SIZE_3 == 0))
9881 staticInfo.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, opaque_size.array(), opaque_size.size());
9882 else
9883 LOGW("Warning: ANDROID_SENSOR_OPAQUE_RAW_SIZE is using rough estimation(2 bytes/pixel)");
9884#endif
9885
Thierry Strudel04e026f2016-10-10 11:27:36 -07009886 if (gCamCapability[cameraId]->supported_ir_mode_cnt > 0) {
9887 int32_t avail_ir_modes[CAM_IR_MODE_MAX];
9888 size = 0;
9889 count = CAM_IR_MODE_MAX;
9890 count = MIN(gCamCapability[cameraId]->supported_ir_mode_cnt, count);
9891 for (size_t i = 0; i < count; i++) {
9892 int val = lookupFwkName(IR_MODES_MAP, METADATA_MAP_SIZE(IR_MODES_MAP),
9893 gCamCapability[cameraId]->supported_ir_modes[i]);
9894 if (NAME_NOT_FOUND != val) {
9895 avail_ir_modes[size] = (int32_t)val;
9896 size++;
9897 }
9898 }
9899 staticInfo.update(QCAMERA3_IR_AVAILABLE_MODES,
9900 avail_ir_modes, size);
9901 }
9902
Thierry Strudel295a0ca2016-11-03 18:38:47 -07009903 if (gCamCapability[cameraId]->supported_instant_aec_modes_cnt > 0) {
9904 int32_t available_instant_aec_modes[CAM_AEC_CONVERGENCE_MAX];
9905 size = 0;
9906 count = CAM_AEC_CONVERGENCE_MAX;
9907 count = MIN(gCamCapability[cameraId]->supported_instant_aec_modes_cnt, count);
9908 for (size_t i = 0; i < count; i++) {
9909 int val = lookupFwkName(INSTANT_AEC_MODES_MAP, METADATA_MAP_SIZE(INSTANT_AEC_MODES_MAP),
9910 gCamCapability[cameraId]->supported_instant_aec_modes[i]);
9911 if (NAME_NOT_FOUND != val) {
9912 available_instant_aec_modes[size] = (int32_t)val;
9913 size++;
9914 }
9915 }
9916 staticInfo.update(QCAMERA3_INSTANT_AEC_AVAILABLE_MODES,
9917 available_instant_aec_modes, size);
9918 }
9919
Thierry Strudel54dc9782017-02-15 12:12:10 -08009920 int32_t sharpness_range[] = {
9921 gCamCapability[cameraId]->sharpness_ctrl.min_value,
9922 gCamCapability[cameraId]->sharpness_ctrl.max_value};
9923 staticInfo.update(QCAMERA3_SHARPNESS_RANGE, sharpness_range, 2);
9924
9925 if (gCamCapability[cameraId]->supported_binning_correction_mode_cnt > 0) {
9926 int32_t avail_binning_modes[CAM_BINNING_CORRECTION_MODE_MAX];
9927 size = 0;
9928 count = CAM_BINNING_CORRECTION_MODE_MAX;
9929 count = MIN(gCamCapability[cameraId]->supported_binning_correction_mode_cnt, count);
9930 for (size_t i = 0; i < count; i++) {
9931 int val = lookupFwkName(BINNING_CORRECTION_MODES_MAP,
9932 METADATA_MAP_SIZE(BINNING_CORRECTION_MODES_MAP),
9933 gCamCapability[cameraId]->supported_binning_modes[i]);
9934 if (NAME_NOT_FOUND != val) {
9935 avail_binning_modes[size] = (int32_t)val;
9936 size++;
9937 }
9938 }
9939 staticInfo.update(QCAMERA3_AVAILABLE_BINNING_CORRECTION_MODES,
9940 avail_binning_modes, size);
9941 }
9942
9943 if (gCamCapability[cameraId]->supported_aec_modes_cnt > 0) {
9944 int32_t available_aec_modes[CAM_AEC_MODE_MAX];
9945 size = 0;
9946 count = MIN(gCamCapability[cameraId]->supported_aec_modes_cnt, CAM_AEC_MODE_MAX);
9947 for (size_t i = 0; i < count; i++) {
9948 int32_t val = lookupFwkName(AEC_MODES_MAP, METADATA_MAP_SIZE(AEC_MODES_MAP),
9949 gCamCapability[cameraId]->supported_aec_modes[i]);
9950 if (NAME_NOT_FOUND != val)
9951 available_aec_modes[size++] = val;
9952 }
9953 staticInfo.update(QCAMERA3_EXPOSURE_METER_AVAILABLE_MODES,
9954 available_aec_modes, size);
9955 }
9956
9957 if (gCamCapability[cameraId]->supported_iso_modes_cnt > 0) {
9958 int32_t available_iso_modes[CAM_ISO_MODE_MAX];
9959 size = 0;
9960 count = MIN(gCamCapability[cameraId]->supported_iso_modes_cnt, CAM_ISO_MODE_MAX);
9961 for (size_t i = 0; i < count; i++) {
9962 int32_t val = lookupFwkName(ISO_MODES_MAP, METADATA_MAP_SIZE(ISO_MODES_MAP),
9963 gCamCapability[cameraId]->supported_iso_modes[i]);
9964 if (NAME_NOT_FOUND != val)
9965 available_iso_modes[size++] = val;
9966 }
9967 staticInfo.update(QCAMERA3_ISO_AVAILABLE_MODES,
9968 available_iso_modes, size);
9969 }
9970
9971 int64_t available_exp_time_range[EXPOSURE_TIME_RANGE_CNT];
9972 for (size_t i = 0; i < count; i++)
9973 available_exp_time_range[i] = gCamCapability[cameraId]->exposure_time_range[i];
9974 staticInfo.update(QCAMERA3_EXP_TIME_RANGE,
9975 available_exp_time_range, EXPOSURE_TIME_RANGE_CNT);
9976
9977 int32_t available_saturation_range[4];
9978 available_saturation_range[0] = gCamCapability[cameraId]->saturation_ctrl.min_value;
9979 available_saturation_range[1] = gCamCapability[cameraId]->saturation_ctrl.max_value;
9980 available_saturation_range[2] = gCamCapability[cameraId]->saturation_ctrl.def_value;
9981 available_saturation_range[3] = gCamCapability[cameraId]->saturation_ctrl.step;
9982 staticInfo.update(QCAMERA3_SATURATION_RANGE,
9983 available_saturation_range, 4);
9984
9985 uint8_t is_hdr_values[2];
9986 is_hdr_values[0] = 0;
9987 is_hdr_values[1] = 1;
9988 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_VALUES,
9989 is_hdr_values, 2);
9990
9991 float is_hdr_confidence_range[2];
9992 is_hdr_confidence_range[0] = 0.0;
9993 is_hdr_confidence_range[1] = 1.0;
9994 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE_RANGE,
9995 is_hdr_confidence_range, 2);
9996
Thierry Strudel3d639192016-09-09 11:52:26 -07009997 gStaticMetadata[cameraId] = staticInfo.release();
9998 return rc;
9999}
10000
10001/*===========================================================================
10002 * FUNCTION : makeTable
10003 *
10004 * DESCRIPTION: make a table of sizes
10005 *
10006 * PARAMETERS :
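 *   @dimTable  : input array of cam_dimension_t entries
 *   @size      : number of valid entries in dimTable
 *   @max_size  : maximum number of entries to copy
 *   @sizeTable : output flat array of {width, height} pairs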
10007 *
10008 *
10009 *==========================================================================*/
10010void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
10011 size_t max_size, int32_t *sizeTable)
10012{
10013 size_t j = 0;
10014 if (size > max_size) {
10015 size = max_size;
10016 }
10017 for (size_t i = 0; i < size; i++) {
10018 sizeTable[j] = dimTable[i].width;
10019 sizeTable[j+1] = dimTable[i].height;
10020 j+=2;
10021 }
10022}
10023
10024/*===========================================================================
10025 * FUNCTION : makeFPSTable
10026 *
10027 * DESCRIPTION: make a table of fps ranges
10028 *
10029 * PARAMETERS :
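 *   @fpsTable       : input array of cam_fps_range_t entries
 *   @size           : number of valid entries in fpsTable
 *   @max_size       : maximum number of ranges to copy
 *   @fpsRangesTable : output flat array of {min_fps, max_fps} pairs (as int32_t)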
10030 *
10031 *==========================================================================*/
10032void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
10033 size_t max_size, int32_t *fpsRangesTable)
10034{
10035 size_t j = 0;
10036 if (size > max_size) {
10037 size = max_size;
10038 }
10039 for (size_t i = 0; i < size; i++) {
10040 fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
10041 fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
10042 j+=2;
10043 }
10044}
10045
10046/*===========================================================================
10047 * FUNCTION : makeOverridesList
10048 *
10049 * DESCRIPTION: make a list of scene mode overrides
10050 *
10051 * PARAMETERS :
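 *   @overridesTable    : per-scene-mode override table from the capability struct
 *   @size              : number of entries in overridesTable
 *   @max_size          : maximum number of scene modes to process
 *   @overridesList     : output list of {ae_mode, awb_mode, af_mode} triples
 *   @supported_indexes : indexes of the framework-supported scene modes
 *   @camera_id         : camera id used to look up capabilities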
10052 *
10053 *
10054 *==========================================================================*/
10055void QCamera3HardwareInterface::makeOverridesList(
10056 cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
10057 uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
10058{
10059    /* The daemon provides a list of overrides for all scene modes.
10060       However, we should send the framework only the overrides for the
10061       scene modes it supports. */
10062 size_t j = 0;
10063 if (size > max_size) {
10064 size = max_size;
10065 }
10066 size_t focus_count = CAM_FOCUS_MODE_MAX;
10067 focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
10068 focus_count);
10069 for (size_t i = 0; i < size; i++) {
10070 bool supt = false;
10071 size_t index = supported_indexes[i];
10072 overridesList[j] = gCamCapability[camera_id]->flash_available ?
10073 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
10074 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
10075 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
10076 overridesTable[index].awb_mode);
10077 if (NAME_NOT_FOUND != val) {
10078 overridesList[j+1] = (uint8_t)val;
10079 }
10080 uint8_t focus_override = overridesTable[index].af_mode;
10081 for (size_t k = 0; k < focus_count; k++) {
10082 if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
10083 supt = true;
10084 break;
10085 }
10086 }
10087 if (supt) {
10088 val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
10089 focus_override);
10090 if (NAME_NOT_FOUND != val) {
10091 overridesList[j+2] = (uint8_t)val;
10092 }
10093 } else {
10094 overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
10095 }
10096 j+=3;
10097 }
10098}
10099
10100/*===========================================================================
10101 * FUNCTION : filterJpegSizes
10102 *
10103 * DESCRIPTION: Returns the supported JPEG sizes, filtered to those no smaller
10104 *              than the active array size downscaled by the given factor
10105 *
10106 * PARAMETERS :
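 *   @jpegSizes         : output flat array of {width, height} pairs
 *   @processedSizes    : input flat array of processed {width, height} pairs
 *   @processedSizesCnt : number of int32_t entries in processedSizes
 *   @maxCount          : maximum number of entries to consider
 *   @active_array_size : sensor active array dimensions
 *   @downscale_factor  : maximum allowed downscale factor (treated as 1 if 0)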
10107 *
10108 * RETURN : length of jpegSizes array
10109 *==========================================================================*/
10110
10111size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
10112 size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
10113 uint8_t downscale_factor)
10114{
10115 if (0 == downscale_factor) {
10116 downscale_factor = 1;
10117 }
10118
10119 int32_t min_width = active_array_size.width / downscale_factor;
10120 int32_t min_height = active_array_size.height / downscale_factor;
10121 size_t jpegSizesCnt = 0;
10122 if (processedSizesCnt > maxCount) {
10123 processedSizesCnt = maxCount;
10124 }
10125 for (size_t i = 0; i < processedSizesCnt; i+=2) {
10126 if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
10127 jpegSizes[jpegSizesCnt] = processedSizes[i];
10128 jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
10129 jpegSizesCnt += 2;
10130 }
10131 }
10132 return jpegSizesCnt;
10133}
10134
10135/*===========================================================================
10136 * FUNCTION : computeNoiseModelEntryS
10137 *
10138 * DESCRIPTION: function to map a given sensitivity to the S noise
10139 * model parameters in the DNG noise model.
10140 *
10141 * PARAMETERS : sens : the sensor sensitivity
10142 *
10143 * RETURN     : S (sensor amplification) noise
10144 *
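 * EXAMPLE    : illustrative only, using hypothetical coefficients
 *              gradient_S = 3.0e-7 and offset_S = 2.0e-6:
 *              S(400) = 3.0e-7 * 400 + 2.0e-6 = 1.22e-4 (negative results clamp to 0.0)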
10145 *==========================================================================*/
10146double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
10147 double s = gCamCapability[mCameraId]->gradient_S * sens +
10148 gCamCapability[mCameraId]->offset_S;
10149 return ((s < 0.0) ? 0.0 : s);
10150}
10151
10152/*===========================================================================
10153 * FUNCTION : computeNoiseModelEntryO
10154 *
10155 * DESCRIPTION: function to map a given sensitivity to the O noise
10156 * model parameters in the DNG noise model.
10157 *
10158 * PARAMETERS : sens : the sensor sensitivity
10159 *
10160 * RETURN     : O (sensor readout) noise
10161 *
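 * EXAMPLE    : illustrative only, using hypothetical values gradient_O = 4.0e-12,
 *              offset_O = 1.0e-8 and max_analog_sensitivity = 800:
 *              O(1600) = 4.0e-12 * 1600^2 + 1.0e-8 * (1600/800)^2 ~= 1.03e-5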
10162 *==========================================================================*/
10163double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
10164 int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
10165 double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
10166 1.0 : (1.0 * sens / max_analog_sens);
10167 double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
10168 gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
10169 return ((o < 0.0) ? 0.0 : o);
10170}
10171
10172/*===========================================================================
10173 * FUNCTION : getSensorSensitivity
10174 *
10175 * DESCRIPTION: convert iso_mode to an integer value
10176 *
10177 * PARAMETERS : iso_mode : the iso_mode supported by sensor
10178 *
10179 * RETURN     : sensitivity supported by sensor
10180 *
10181 *==========================================================================*/
10182int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
10183{
10184 int32_t sensitivity;
10185
10186 switch (iso_mode) {
10187 case CAM_ISO_MODE_100:
10188 sensitivity = 100;
10189 break;
10190 case CAM_ISO_MODE_200:
10191 sensitivity = 200;
10192 break;
10193 case CAM_ISO_MODE_400:
10194 sensitivity = 400;
10195 break;
10196 case CAM_ISO_MODE_800:
10197 sensitivity = 800;
10198 break;
10199 case CAM_ISO_MODE_1600:
10200 sensitivity = 1600;
10201 break;
10202 default:
10203 sensitivity = -1;
10204 break;
10205 }
10206 return sensitivity;
10207}
10208
10209/*===========================================================================
10210 * FUNCTION : getCamInfo
10211 *
10212 * DESCRIPTION: query camera capabilities
10213 *
10214 * PARAMETERS :
10215 * @cameraId : camera Id
10216 * @info : camera info struct to be filled in with camera capabilities
10217 *
10218 * RETURN : int type of status
10219 * NO_ERROR -- success
10220 *              non-zero failure code
10221 *==========================================================================*/
10222int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
10223 struct camera_info *info)
10224{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080010225 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_GET_CAM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070010226 int rc = 0;
10227
10228 pthread_mutex_lock(&gCamLock);
10229 if (NULL == gCamCapability[cameraId]) {
10230 rc = initCapabilities(cameraId);
10231 if (rc < 0) {
10232 pthread_mutex_unlock(&gCamLock);
10233 return rc;
10234 }
10235 }
10236
10237 if (NULL == gStaticMetadata[cameraId]) {
10238 rc = initStaticMetadata(cameraId);
10239 if (rc < 0) {
10240 pthread_mutex_unlock(&gCamLock);
10241 return rc;
10242 }
10243 }
10244
10245 switch(gCamCapability[cameraId]->position) {
10246 case CAM_POSITION_BACK:
10247 case CAM_POSITION_BACK_AUX:
10248 info->facing = CAMERA_FACING_BACK;
10249 break;
10250
10251 case CAM_POSITION_FRONT:
10252 case CAM_POSITION_FRONT_AUX:
10253 info->facing = CAMERA_FACING_FRONT;
10254 break;
10255
10256 default:
10257 LOGE("Unknown position type %d for camera id:%d",
10258 gCamCapability[cameraId]->position, cameraId);
10259 rc = -1;
10260 break;
10261 }
10262
10263
10264 info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010265#ifndef USE_HAL_3_3
10266 info->device_version = CAMERA_DEVICE_API_VERSION_3_4;
10267#else
Thierry Strudel3d639192016-09-09 11:52:26 -070010268 info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010269#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070010270 info->static_camera_characteristics = gStaticMetadata[cameraId];
10271
10272 //For now assume both cameras can operate independently.
10273 info->conflicting_devices = NULL;
10274 info->conflicting_devices_length = 0;
10275
10276 //resource cost is 100 * MIN(1.0, m/M),
10277 //where m is throughput requirement with maximum stream configuration
10278 //and M is CPP maximum throughput.
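    //Illustrative example (hypothetical numbers): for a 4000x3000 active array at
    //max_fps = 30 with max_pixel_bandwidth = 1.2e9, and assuming MAX_PROCESSED_STREAMS
    //is 3: m = 3 * 4000 * 3000 * 30 = 1.08e9, so resource cost = 100 * MIN(1.0, 0.9) = 90.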
10279 float max_fps = 0.0;
10280 for (uint32_t i = 0;
10281 i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
10282 if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
10283 max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
10284 }
10285 float ratio = 1.0 * MAX_PROCESSED_STREAMS *
10286 gCamCapability[cameraId]->active_array_size.width *
10287 gCamCapability[cameraId]->active_array_size.height * max_fps /
10288 gCamCapability[cameraId]->max_pixel_bandwidth;
10289 info->resource_cost = 100 * MIN(1.0, ratio);
10290 LOGI("camera %d resource cost is %d", cameraId,
10291 info->resource_cost);
10292
10293 pthread_mutex_unlock(&gCamLock);
10294 return rc;
10295}
10296
10297/*===========================================================================
10298 * FUNCTION : translateCapabilityToMetadata
10299 *
10300 * DESCRIPTION: translate the capability into camera_metadata_t
10301 *
10302 * PARAMETERS : type of the request
10303 *
10304 *
10305 * RETURN : success: camera_metadata_t*
10306 * failure: NULL
10307 *
10308 *==========================================================================*/
10309camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
10310{
10311 if (mDefaultMetadata[type] != NULL) {
10312 return mDefaultMetadata[type];
10313 }
10314 //first time we are handling this request
10315 //fill up the metadata structure using the wrapper class
10316 CameraMetadata settings;
10317 //translate from cam_capability_t to camera_metadata_tag_t
10318 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
10319 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
10320 int32_t defaultRequestID = 0;
10321 settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
10322
10323 /* OIS disable */
10324 char ois_prop[PROPERTY_VALUE_MAX];
10325 memset(ois_prop, 0, sizeof(ois_prop));
10326 property_get("persist.camera.ois.disable", ois_prop, "0");
10327 uint8_t ois_disable = (uint8_t)atoi(ois_prop);
10328
10329 /* Force video to use OIS */
10330 char videoOisProp[PROPERTY_VALUE_MAX];
10331 memset(videoOisProp, 0, sizeof(videoOisProp));
10332 property_get("persist.camera.ois.video", videoOisProp, "1");
10333 uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
Shuzhen Wang19463d72016-03-08 11:09:52 -080010334
10335 // Hybrid AE enable/disable
10336 char hybrid_ae_prop[PROPERTY_VALUE_MAX];
10337 memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
10338 property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
10339 const uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);
10340
Thierry Strudel3d639192016-09-09 11:52:26 -070010341 uint8_t controlIntent = 0;
10342 uint8_t focusMode;
10343 uint8_t vsMode;
10344 uint8_t optStabMode;
10345 uint8_t cacMode;
10346 uint8_t edge_mode;
10347 uint8_t noise_red_mode;
10348 uint8_t tonemap_mode;
10349 bool highQualityModeEntryAvailable = FALSE;
10350 bool fastModeEntryAvailable = FALSE;
10351 vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
10352 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Mansoor Aftabea39eba2017-01-26 14:58:25 -080010353
Thierry Strudel3d639192016-09-09 11:52:26 -070010354 switch (type) {
10355 case CAMERA3_TEMPLATE_PREVIEW:
10356 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
10357 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10358 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10359 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10360 edge_mode = ANDROID_EDGE_MODE_FAST;
10361 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10362 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10363 break;
10364 case CAMERA3_TEMPLATE_STILL_CAPTURE:
10365 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
10366 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10367 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10368 edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
10369 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
10370 tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
10371 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
10372 // Order of priority for default CAC is HIGH Quality -> FAST -> OFF
10373 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
10374 if (gCamCapability[mCameraId]->aberration_modes[i] ==
10375 CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
10376 highQualityModeEntryAvailable = TRUE;
10377 } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
10378 CAM_COLOR_CORRECTION_ABERRATION_FAST) {
10379 fastModeEntryAvailable = TRUE;
10380 }
10381 }
10382 if (highQualityModeEntryAvailable) {
10383 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
10384 } else if (fastModeEntryAvailable) {
10385 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10386 }
10387 break;
10388 case CAMERA3_TEMPLATE_VIDEO_RECORD:
10389 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
10390 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
10391 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070010392 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10393 edge_mode = ANDROID_EDGE_MODE_FAST;
10394 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10395 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10396 if (forceVideoOis)
10397 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10398 break;
10399 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
10400 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
10401 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
10402 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070010403 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10404 edge_mode = ANDROID_EDGE_MODE_FAST;
10405 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10406 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10407 if (forceVideoOis)
10408 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10409 break;
10410 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
10411 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
10412 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10413 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10414 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10415 edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
10416 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
10417 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10418 break;
10419 case CAMERA3_TEMPLATE_MANUAL:
10420 edge_mode = ANDROID_EDGE_MODE_FAST;
10421 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10422 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10423 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10424 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
10425 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
10426 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
10427 break;
10428 default:
10429 edge_mode = ANDROID_EDGE_MODE_FAST;
10430 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10431 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10432 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10433 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
10434 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10435 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
10436 break;
10437 }
Thierry Strudel04e026f2016-10-10 11:27:36 -070010438    // Set CAC to OFF if the underlying device doesn't support it
10439 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
10440 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
10441 }
Thierry Strudel3d639192016-09-09 11:52:26 -070010442 settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
10443 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
10444 settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
10445 if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
10446 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
10447 }
10448 settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
10449
10450 if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
10451 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
10452 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10453 else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
10454 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
10455 || ois_disable)
10456 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
10457 settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
10458
10459 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
10460 &gCamCapability[mCameraId]->exposure_compensation_default, 1);
10461
10462 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
10463 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
10464
10465 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
10466 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
10467
10468 static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
10469 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
10470
10471 static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
10472 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
10473
10474 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
10475 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
10476
10477 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
10478 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
10479
10480 static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
10481 settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
10482
10483 /*flash*/
10484 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
10485 settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
10486
10487 static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
10488 settings.update(ANDROID_FLASH_FIRING_POWER,
10489 &flashFiringLevel, 1);
10490
10491 /* lens */
10492 float default_aperture = gCamCapability[mCameraId]->apertures[0];
10493 settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
10494
10495 if (gCamCapability[mCameraId]->filter_densities_count) {
10496 float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
10497 settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
10498 gCamCapability[mCameraId]->filter_densities_count);
10499 }
10500
10501 float default_focal_length = gCamCapability[mCameraId]->focal_length;
10502 settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
10503
10504 if (focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
10505 float default_focus_distance = 0;
10506 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &default_focus_distance, 1);
10507 }
10508
10509 static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
10510 settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
10511
10512 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
10513 settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
10514
10515 static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
10516 settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
10517
10518 /* face detection (default to OFF) */
10519 static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
10520 settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
10521
Thierry Strudel54dc9782017-02-15 12:12:10 -080010522 static const uint8_t histogramMode = QCAMERA3_HISTOGRAM_MODE_OFF;
10523 settings.update(QCAMERA3_HISTOGRAM_MODE, &histogramMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070010524
10525 static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
10526 settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
10527
10528 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
10529 settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
10530
10531 static const uint8_t lensShadingMode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
10532 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &lensShadingMode, 1);
10533
10534 static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
10535 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
10536
10537    /* Exposure time (update to the minimum exposure time) */
10538 int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
10539 settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
10540
10541 /* frame duration */
10542 static const int64_t default_frame_duration = NSEC_PER_33MSEC;
10543 settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
10544
10545 /* sensitivity */
10546 static const int32_t default_sensitivity = 100;
10547 settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010548#ifndef USE_HAL_3_3
10549 static const int32_t default_isp_sensitivity =
10550 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
10551 settings.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &default_isp_sensitivity, 1);
10552#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070010553
10554 /*edge mode*/
10555 settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
10556
10557 /*noise reduction mode*/
10558 settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
10559
10560 /*color correction mode*/
10561 static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
10562 settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
10563
10564    /*tonemap mode*/
10565 settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
10566
10567 int32_t scaler_crop_region[4];
10568 scaler_crop_region[0] = 0;
10569 scaler_crop_region[1] = 0;
10570 scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
10571 scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
10572 settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
10573
10574 static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
10575 settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
10576
10577 /*focus distance*/
10578 float focus_distance = 0.0;
10579 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
10580
10581 /*target fps range: use maximum range for picture, and maximum fixed range for video*/
Thierry Strudele80ad7c2016-12-06 10:16:27 -080010582 /* Restrict template max_fps to 30 */
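    /* Illustrative example (hypothetical table): given ranges {15,30}, {30,30} and
     * {60,60}, the 60 fps range is skipped by the cap; preview/still/ZSL templates
     * pick {15,30} (widest span) and the remaining templates pick {30,30} (highest
     * fixed range). */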
Thierry Strudel3d639192016-09-09 11:52:26 -070010583 float max_range = 0.0;
10584 float max_fixed_fps = 0.0;
10585 int32_t fps_range[2] = {0, 0};
10586 for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
10587 i++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080010588 if (gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps >
10589 TEMPLATE_MAX_PREVIEW_FPS) {
10590 continue;
10591 }
Thierry Strudel3d639192016-09-09 11:52:26 -070010592 float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
10593 gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
10594 if (type == CAMERA3_TEMPLATE_PREVIEW ||
10595 type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
10596 type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
10597 if (range > max_range) {
10598 fps_range[0] =
10599 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
10600 fps_range[1] =
10601 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
10602 max_range = range;
10603 }
10604 } else {
10605 if (range < 0.01 && max_fixed_fps <
10606 gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
10607 fps_range[0] =
10608 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
10609 fps_range[1] =
10610 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
10611 max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
10612 }
10613 }
10614 }
10615 settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
10616
10617 /*precapture trigger*/
10618 uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
10619 settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
10620
10621 /*af trigger*/
10622 uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
10623 settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
10624
10625 /* ae & af regions */
10626 int32_t active_region[] = {
10627 gCamCapability[mCameraId]->active_array_size.left,
10628 gCamCapability[mCameraId]->active_array_size.top,
10629 gCamCapability[mCameraId]->active_array_size.left +
10630 gCamCapability[mCameraId]->active_array_size.width,
10631 gCamCapability[mCameraId]->active_array_size.top +
10632 gCamCapability[mCameraId]->active_array_size.height,
10633 0};
10634 settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
10635 sizeof(active_region) / sizeof(active_region[0]));
10636 settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
10637 sizeof(active_region) / sizeof(active_region[0]));
10638
10639 /* black level lock */
10640 uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
10641 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
10642
10643 /* lens shading map mode */
10644 uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
10645 if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
10646 shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
10647 }
10648 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
10649
10650 //special defaults for manual template
10651 if (type == CAMERA3_TEMPLATE_MANUAL) {
10652 static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
10653 settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
10654
10655 static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
10656 settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
10657
10658 static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
10659 settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
10660
10661 static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
10662 settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
10663
10664 static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
10665 settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
10666
10667 static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
10668 settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
10669 }
10670
10671
10672 /* TNR
10673      * We'll use this location to determine for which templates TNR will be enabled.
10674      * TNR is turned on if either the preview or the video stream requires it.
10675      * This is not to be confused with enabling TNR on a per-stream basis; that decision
10676      * is still made per session and is handled as part of stream configuration.
10677 */
10678 uint8_t tnr_enable = 0;
10679
10680 if (m_bTnrPreview || m_bTnrVideo) {
10681
10682 switch (type) {
10683 case CAMERA3_TEMPLATE_VIDEO_RECORD:
10684 tnr_enable = 1;
10685 break;
10686
10687 default:
10688 tnr_enable = 0;
10689 break;
10690 }
10691
10692 int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
10693 settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
10694 settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
10695
10696 LOGD("TNR:%d with process plate %d for template:%d",
10697 tnr_enable, tnr_process_type, type);
10698 }
10699
10700 //Update Link tags to default
10701 int32_t sync_type = CAM_TYPE_STANDALONE;
10702 settings.update(QCAMERA3_DUALCAM_LINK_ENABLE, &sync_type, 1);
10703
10704 int32_t is_main = 0; //this doesn't matter as app should overwrite
10705 settings.update(QCAMERA3_DUALCAM_LINK_IS_MAIN, &is_main, 1);
10706
10707 settings.update(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID, &is_main, 1);
10708
10709 /* CDS default */
10710 char prop[PROPERTY_VALUE_MAX];
10711 memset(prop, 0, sizeof(prop));
10712 property_get("persist.camera.CDS", prop, "Auto");
10713 cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
10714 cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
10715 if (CAM_CDS_MODE_MAX == cds_mode) {
10716 cds_mode = CAM_CDS_MODE_AUTO;
10717 }
10718
10719 /* Disabling CDS in templates which have TNR enabled*/
10720 if (tnr_enable)
10721 cds_mode = CAM_CDS_MODE_OFF;
10722
10723 int32_t mode = cds_mode;
10724 settings.update(QCAMERA3_CDS_MODE, &mode, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -070010725
Thierry Strudel269c81a2016-10-12 12:13:59 -070010726 /* Manual Convergence AEC Speed is disabled by default*/
10727 float default_aec_speed = 0;
10728 settings.update(QCAMERA3_AEC_CONVERGENCE_SPEED, &default_aec_speed, 1);
10729
10730 /* Manual Convergence AWB Speed is disabled by default*/
10731 float default_awb_speed = 0;
10732 settings.update(QCAMERA3_AWB_CONVERGENCE_SPEED, &default_awb_speed, 1);
10733
Thierry Strudel295a0ca2016-11-03 18:38:47 -070010734 // Set instant AEC to normal convergence by default
10735 int32_t instant_aec_mode = (int32_t)QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE;
10736 settings.update(QCAMERA3_INSTANT_AEC_MODE, &instant_aec_mode, 1);
10737
Shuzhen Wang19463d72016-03-08 11:09:52 -080010738 /* hybrid ae */
10739 settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);
10740
Thierry Strudel3d639192016-09-09 11:52:26 -070010741 mDefaultMetadata[type] = settings.release();
10742
10743 return mDefaultMetadata[type];
10744}
10745
10746/*===========================================================================
10747 * FUNCTION : setFrameParameters
10748 *
10749 * DESCRIPTION: set parameters per frame as requested in the metadata from
10750 * framework
10751 *
10752 * PARAMETERS :
10753 * @request : request that needs to be serviced
Thierry Strudelc2ee3302016-11-17 12:33:12 -080010754 * @streamsArray : Stream ID of all the requested streams
Thierry Strudel3d639192016-09-09 11:52:26 -070010755 * @blob_request: Whether this request is a blob request or not
10756 *
10757 * RETURN : success: NO_ERROR
10758 * failure:
10759 *==========================================================================*/
10760int QCamera3HardwareInterface::setFrameParameters(
10761 camera3_capture_request_t *request,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080010762 cam_stream_ID_t streamsArray,
Thierry Strudel3d639192016-09-09 11:52:26 -070010763 int blob_request,
10764 uint32_t snapshotStreamId)
10765{
10766 /*translate from camera_metadata_t type to parm_type_t*/
10767 int rc = 0;
10768 int32_t hal_version = CAM_HAL_V3;
10769
10770 clear_metadata_buffer(mParameters);
10771 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
10772 LOGE("Failed to set hal version in the parameters");
10773 return BAD_VALUE;
10774 }
10775
10776 /*we need to update the frame number in the parameters*/
10777 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
10778 request->frame_number)) {
10779 LOGE("Failed to set the frame number in the parameters");
10780 return BAD_VALUE;
10781 }
10782
10783 /* Update stream id of all the requested buffers */
Thierry Strudelc2ee3302016-11-17 12:33:12 -080010784 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070010785 LOGE("Failed to set stream type mask in the parameters");
10786 return BAD_VALUE;
10787 }
10788
10789 if (mUpdateDebugLevel) {
10790 uint32_t dummyDebugLevel = 0;
10791         /* The value of dummyDebugLevel is irrelevant. On receiving
10792          * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, the debug property is re-read */
10793 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
10794 dummyDebugLevel)) {
10795 LOGE("Failed to set UPDATE_DEBUG_LEVEL");
10796 return BAD_VALUE;
10797 }
10798 mUpdateDebugLevel = false;
10799 }
10800
10801 if(request->settings != NULL){
10802 rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
10803 if (blob_request)
10804 memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
10805 }
10806
10807 return rc;
10808}
10809
10810/*===========================================================================
10811 * FUNCTION : setReprocParameters
10812 *
10813 * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
10814 * return it.
10815 *
10816 * PARAMETERS :
10817 * @request : request that needs to be serviced
10818 *
10819 * RETURN : success: NO_ERROR
10820 * failure:
10821 *==========================================================================*/
10822int32_t QCamera3HardwareInterface::setReprocParameters(
10823 camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
10824 uint32_t snapshotStreamId)
10825{
10826 /*translate from camera_metadata_t type to parm_type_t*/
10827 int rc = 0;
10828
10829 if (NULL == request->settings){
10830 LOGE("Reprocess settings cannot be NULL");
10831 return BAD_VALUE;
10832 }
10833
10834 if (NULL == reprocParam) {
10835 LOGE("Invalid reprocessing metadata buffer");
10836 return BAD_VALUE;
10837 }
10838 clear_metadata_buffer(reprocParam);
10839
10840 /*we need to update the frame number in the parameters*/
10841 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
10842 request->frame_number)) {
10843 LOGE("Failed to set the frame number in the parameters");
10844 return BAD_VALUE;
10845 }
10846
10847 rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
10848 if (rc < 0) {
10849 LOGE("Failed to translate reproc request");
10850 return rc;
10851 }
10852
10853 CameraMetadata frame_settings;
10854 frame_settings = request->settings;
10855 if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
10856 frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
10857 int32_t *crop_count =
10858 frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
10859 int32_t *crop_data =
10860 frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
10861 int32_t *roi_map =
10862 frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
10863 if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
10864 cam_crop_data_t crop_meta;
10865 memset(&crop_meta, 0, sizeof(cam_crop_data_t));
10866 crop_meta.num_of_streams = 1;
10867 crop_meta.crop_info[0].crop.left = crop_data[0];
10868 crop_meta.crop_info[0].crop.top = crop_data[1];
10869 crop_meta.crop_info[0].crop.width = crop_data[2];
10870 crop_meta.crop_info[0].crop.height = crop_data[3];
10871
10872 crop_meta.crop_info[0].roi_map.left =
10873 roi_map[0];
10874 crop_meta.crop_info[0].roi_map.top =
10875 roi_map[1];
10876 crop_meta.crop_info[0].roi_map.width =
10877 roi_map[2];
10878 crop_meta.crop_info[0].roi_map.height =
10879 roi_map[3];
10880
10881 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
10882 rc = BAD_VALUE;
10883 }
10884 LOGD("Found reprocess crop data for stream %p %dx%d, %dx%d",
10885 request->input_buffer->stream,
10886 crop_meta.crop_info[0].crop.left,
10887 crop_meta.crop_info[0].crop.top,
10888 crop_meta.crop_info[0].crop.width,
10889 crop_meta.crop_info[0].crop.height);
10890 LOGD("Found reprocess roi map data for stream %p %dx%d, %dx%d",
10891 request->input_buffer->stream,
10892 crop_meta.crop_info[0].roi_map.left,
10893 crop_meta.crop_info[0].roi_map.top,
10894 crop_meta.crop_info[0].roi_map.width,
10895 crop_meta.crop_info[0].roi_map.height);
10896 } else {
10897 LOGE("Invalid reprocess crop count %d!", *crop_count);
10898 }
10899 } else {
10900 LOGE("No crop data from matching output stream");
10901 }
10902
10903 /* These settings are not needed for regular requests so handle them specially for
10904 reprocess requests; information needed for EXIF tags */
10905 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
10906 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
10907 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
10908 if (NAME_NOT_FOUND != val) {
10909 uint32_t flashMode = (uint32_t)val;
10910 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
10911 rc = BAD_VALUE;
10912 }
10913 } else {
10914 LOGE("Could not map fwk flash mode %d to correct hal flash mode",
10915 frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
10916 }
10917 } else {
10918 LOGH("No flash mode in reprocess settings");
10919 }
10920
10921 if (frame_settings.exists(ANDROID_FLASH_STATE)) {
10922 int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
10923 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
10924 rc = BAD_VALUE;
10925 }
10926 } else {
10927 LOGH("No flash state in reprocess settings");
10928 }
10929
10930 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS)) {
10931 uint8_t *reprocessFlags =
10932 frame_settings.find(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS).data.u8;
10933 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_REPROCESS_FLAGS,
10934 *reprocessFlags)) {
10935 rc = BAD_VALUE;
10936 }
10937 }
10938
Thierry Strudel54dc9782017-02-15 12:12:10 -080010939 // Add exif debug data to internal metadata
10940 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB)) {
10941 mm_jpeg_debug_exif_params_t *debug_params =
10942 (mm_jpeg_debug_exif_params_t *)frame_settings.find
10943 (QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB).data.u8;
10944 // AE
10945 if (debug_params->ae_debug_params_valid == TRUE) {
10946 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AE,
10947 debug_params->ae_debug_params);
10948 }
10949 // AWB
10950 if (debug_params->awb_debug_params_valid == TRUE) {
10951 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AWB,
10952 debug_params->awb_debug_params);
10953 }
10954 // AF
10955 if (debug_params->af_debug_params_valid == TRUE) {
10956 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AF,
10957 debug_params->af_debug_params);
10958 }
10959 // ASD
10960 if (debug_params->asd_debug_params_valid == TRUE) {
10961 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_ASD,
10962 debug_params->asd_debug_params);
10963 }
10964 // Stats
10965 if (debug_params->stats_debug_params_valid == TRUE) {
10966 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_STATS,
10967 debug_params->stats_debug_params);
10968 }
10969 // BE Stats
10970 if (debug_params->bestats_debug_params_valid == TRUE) {
10971 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BESTATS,
10972 debug_params->bestats_debug_params);
10973 }
10974 // BHIST
10975 if (debug_params->bhist_debug_params_valid == TRUE) {
10976 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BHIST,
10977 debug_params->bhist_debug_params);
10978 }
10979 // 3A Tuning
10980 if (debug_params->q3a_tuning_debug_params_valid == TRUE) {
10981 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_3A_TUNING,
10982 debug_params->q3a_tuning_debug_params);
10983 }
10984 }
10985
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070010986 // Add metadata which reprocess needs
10987 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB)) {
10988 cam_reprocess_info_t *repro_info =
10989 (cam_reprocess_info_t *)frame_settings.find
10990 (QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB).data.u8;
Thierry Strudel3d639192016-09-09 11:52:26 -070010991 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_SENSOR,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070010992 repro_info->sensor_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070010993 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CAMIF,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070010994 repro_info->camif_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070010995 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_ISP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070010996 repro_info->isp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070010997 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CPP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070010998 repro_info->cpp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070010999 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_FOCAL_LENGTH_RATIO,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011000 repro_info->af_focal_length_ratio);
Thierry Strudel3d639192016-09-09 11:52:26 -070011001 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_FLIP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011002 repro_info->pipeline_flip);
11003 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_ROI,
11004 repro_info->af_roi);
11005 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_IMG_DYN_FEAT,
11006 repro_info->dyn_mask);
Thierry Strudel3d639192016-09-09 11:52:26 -070011007         /* If ANDROID_JPEG_ORIENTATION is present in the frame settings,
11008            CAM_INTF_PARM_ROTATION metadata has already been added in
11009            translateToHalMetadata, and HAL needs to keep this new rotation
11010            metadata. Otherwise, the old rotation info saved in the vendor tag
11011            is used */
11012 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
11013 CAM_INTF_PARM_ROTATION, reprocParam) {
11014 LOGD("CAM_INTF_PARM_ROTATION metadata is added in translateToHalMetadata");
11015 } else {
11016 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_ROTATION,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011017 repro_info->rotation_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011018 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011019 }
11020
11021     /* Add additional JPEG cropping information. The app adds QCAMERA3_JPEG_ENCODE_CROP_RECT
11022        to ask for cropping and uses the ROI for downscale/upscale during HW JPEG encoding.
11023        roi.width and roi.height would be the final JPEG size.
11024        For now, HAL only checks this for reprocess requests */
11025 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ENABLE) &&
11026 frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_RECT)) {
11027 uint8_t *enable =
11028 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ENABLE).data.u8;
11029 if (*enable == TRUE) {
11030 int32_t *crop_data =
11031 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_RECT).data.i32;
11032 cam_stream_crop_info_t crop_meta;
11033 memset(&crop_meta, 0, sizeof(cam_stream_crop_info_t));
11034 crop_meta.stream_id = 0;
11035 crop_meta.crop.left = crop_data[0];
11036 crop_meta.crop.top = crop_data[1];
11037 crop_meta.crop.width = crop_data[2];
11038 crop_meta.crop.height = crop_data[3];
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011039 // The JPEG crop roi should match cpp output size
11040 IF_META_AVAILABLE(cam_stream_crop_info_t, cpp_crop,
11041 CAM_INTF_META_SNAP_CROP_INFO_CPP, reprocParam) {
11042 crop_meta.roi_map.left = 0;
11043 crop_meta.roi_map.top = 0;
11044 crop_meta.roi_map.width = cpp_crop->crop.width;
11045 crop_meta.roi_map.height = cpp_crop->crop.height;
Thierry Strudel3d639192016-09-09 11:52:26 -070011046 }
11047 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_ENCODE_CROP,
11048 crop_meta);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011049 LOGH("Add JPEG encode crop left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011050 crop_meta.crop.left, crop_meta.crop.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011051 crop_meta.crop.width, crop_meta.crop.height, mCameraId);
11052 LOGH("Add JPEG encode crop ROI left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011053 crop_meta.roi_map.left, crop_meta.roi_map.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011054 crop_meta.roi_map.width, crop_meta.roi_map.height, mCameraId);
11055
11056 // Add JPEG scale information
11057 cam_dimension_t scale_dim;
11058 memset(&scale_dim, 0, sizeof(cam_dimension_t));
11059 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ROI)) {
11060 int32_t *roi =
11061 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ROI).data.i32;
11062 scale_dim.width = roi[2];
11063 scale_dim.height = roi[3];
11064 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_SCALE_DIMENSION,
11065 scale_dim);
11066 LOGH("Add JPEG encode scale width %d, height %d, mCameraId %d",
11067 scale_dim.width, scale_dim.height, mCameraId);
11068 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011069 }
11070 }
11071
11072 return rc;
11073}
11074
11075/*===========================================================================
11076 * FUNCTION : saveRequestSettings
11077 *
11078 * DESCRIPTION: Add any settings that might have changed to the request settings
11079 * and save the settings to be applied on the frame
11080 *
11081 * PARAMETERS :
11082 * @jpegMetadata : the extracted and/or modified jpeg metadata
11083 * @request : request with initial settings
11084 *
11085 * RETURN :
11086 * camera_metadata_t* : pointer to the saved request settings
11087 *==========================================================================*/
11088camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
11089 const CameraMetadata &jpegMetadata,
11090 camera3_capture_request_t *request)
11091{
11092 camera_metadata_t *resultMetadata;
11093 CameraMetadata camMetadata;
11094 camMetadata = request->settings;
11095
11096 if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
11097 int32_t thumbnail_size[2];
11098 thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
11099 thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
11100 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
11101 jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
11102 }
11103
11104 if (request->input_buffer != NULL) {
11105 uint8_t reprocessFlags = 1;
11106 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
11107 (uint8_t*)&reprocessFlags,
11108 sizeof(reprocessFlags));
11109 }
11110
11111 resultMetadata = camMetadata.release();
11112 return resultMetadata;
11113}
11114
11115/*===========================================================================
11116 * FUNCTION : setHalFpsRange
11117 *
11118 * DESCRIPTION: set FPS range parameter
11119 *
11120 *
11121 * PARAMETERS :
11122 * @settings : Metadata from framework
11123 * @hal_metadata: Metadata buffer
11124 *
11125 *
11126 * RETURN : success: NO_ERROR
11127 * failure:
11128 *==========================================================================*/
11129int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
11130 metadata_buffer_t *hal_metadata)
11131{
11132 int32_t rc = NO_ERROR;
11133 cam_fps_range_t fps_range;
11134 fps_range.min_fps = (float)
11135 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
11136 fps_range.max_fps = (float)
11137 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
11138 fps_range.video_min_fps = fps_range.min_fps;
11139 fps_range.video_max_fps = fps_range.max_fps;
11140
11141 LOGD("aeTargetFpsRange fps: [%f %f]",
11142 fps_range.min_fps, fps_range.max_fps);
11143 /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
11144 * follows:
11145 * ---------------------------------------------------------------|
11146 * Video stream is absent in configure_streams |
11147 * (Camcorder preview before the first video record |
11148 * ---------------------------------------------------------------|
11149 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11150 * | | | vid_min/max_fps|
11151 * ---------------------------------------------------------------|
11152 * NO | [ 30, 240] | 240 | [240, 240] |
11153 * |-------------|-------------|----------------|
11154 * | [240, 240] | 240 | [240, 240] |
11155 * ---------------------------------------------------------------|
11156 * Video stream is present in configure_streams |
11157 * ---------------------------------------------------------------|
11158 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11159 * | | | vid_min/max_fps|
11160 * ---------------------------------------------------------------|
11161 * NO | [ 30, 240] | 240 | [240, 240] |
11162 * (camcorder prev |-------------|-------------|----------------|
11163 * after video rec | [240, 240] | 240 | [240, 240] |
11164 * is stopped) | | | |
11165 * ---------------------------------------------------------------|
11166 * YES | [ 30, 240] | 240 | [240, 240] |
11167 * |-------------|-------------|----------------|
11168 * | [240, 240] | 240 | [240, 240] |
11169 * ---------------------------------------------------------------|
11170 * When Video stream is absent in configure_streams,
11171 * preview fps = sensor_fps / batchsize
11172 * Eg: for 240fps at batchSize 4, preview = 60fps
11173 * for 120fps at batchSize 4, preview = 30fps
11174 *
11175 * When video stream is present in configure_streams, preview fps is as per
11176 * the ratio of preview buffers to video buffers requested in process
11177 * capture request
11178 */
11179 mBatchSize = 0;
11180 if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
11181 fps_range.min_fps = fps_range.video_max_fps;
11182 fps_range.video_min_fps = fps_range.video_max_fps;
11183 int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
11184 fps_range.max_fps);
11185 if (NAME_NOT_FOUND != val) {
11186 cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
11187 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
11188 return BAD_VALUE;
11189 }
11190
11191 if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
11192 /* If batchmode is currently in progress and the fps changes,
11193 * set the flag to restart the sensor */
11194 if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
11195 (mHFRVideoFps != fps_range.max_fps)) {
11196 mNeedSensorRestart = true;
11197 }
11198 mHFRVideoFps = fps_range.max_fps;
11199 mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
11200 if (mBatchSize > MAX_HFR_BATCH_SIZE) {
11201 mBatchSize = MAX_HFR_BATCH_SIZE;
11202 }
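                // e.g., an HFR request of 120 fps with a 30 fps preview target yields a
                // batch size of 4, subject to the MAX_HFR_BATCH_SIZE cap above.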
11203 }
11204 LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);
11205
11206 }
11207 } else {
11208 /* HFR mode is session param in backend/ISP. This should be reset when
11209 * in non-HFR mode */
11210 cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
11211 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
11212 return BAD_VALUE;
11213 }
11214 }
11215 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
11216 return BAD_VALUE;
11217 }
11218 LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
11219 fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
11220 return rc;
11221}
11222
11223/*===========================================================================
11224 * FUNCTION : translateToHalMetadata
11225 *
11226 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
11227 *
11228 *
11229 * PARAMETERS :
11230 * @request : request sent from framework
11231 *
11232 *
11233 * RETURN : success: NO_ERROR
11234 * failure:
11235 *==========================================================================*/
11236int QCamera3HardwareInterface::translateToHalMetadata
11237 (const camera3_capture_request_t *request,
11238 metadata_buffer_t *hal_metadata,
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011239 uint32_t snapshotStreamId) {
11240 if (request == nullptr || hal_metadata == nullptr) {
11241 return BAD_VALUE;
11242 }
11243
11244 int64_t minFrameDuration = getMinFrameDuration(request);
11245
11246 return translateFwkMetadataToHalMetadata(request->settings, hal_metadata, snapshotStreamId,
11247 minFrameDuration);
11248}
11249
11250int QCamera3HardwareInterface::translateFwkMetadataToHalMetadata(
11251 const camera_metadata_t *frameworkMetadata, metadata_buffer_t *hal_metadata,
11252 uint32_t snapshotStreamId, int64_t minFrameDuration) {
11253
Thierry Strudel3d639192016-09-09 11:52:26 -070011254 int rc = 0;
11255 CameraMetadata frame_settings;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011256 frame_settings = frameworkMetadata;
Thierry Strudel3d639192016-09-09 11:52:26 -070011257
11258 /* Do not change the order of the following list unless you know what you are
11259 * doing.
11260 * The order is laid out in such a way that parameters in the front of the table
11261 * may be used to override the parameters later in the table. Examples are:
11262 * 1. META_MODE should precede AEC/AWB/AF MODE
11263      * 2. AEC MODE should precede EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
11264 * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
11265      * 4. Any mode should precede its corresponding settings
11266 */
11267 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
11268 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
11269 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
11270 rc = BAD_VALUE;
11271 }
11272 rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
11273 if (rc != NO_ERROR) {
11274 LOGE("extractSceneMode failed");
11275 }
11276 }
11277
11278 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
11279 uint8_t fwk_aeMode =
11280 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
11281 uint8_t aeMode;
11282 int32_t redeye;
11283
11284 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
11285 aeMode = CAM_AE_MODE_OFF;
11286 } else {
11287 aeMode = CAM_AE_MODE_ON;
11288 }
11289 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
11290 redeye = 1;
11291 } else {
11292 redeye = 0;
11293 }
11294
11295 int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
11296 fwk_aeMode);
11297 if (NAME_NOT_FOUND != val) {
11298 int32_t flashMode = (int32_t)val;
11299 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
11300 }
11301
11302 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
11303 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
11304 rc = BAD_VALUE;
11305 }
11306 }
11307
11308 if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
11309 uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
11310 int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
11311 fwk_whiteLevel);
11312 if (NAME_NOT_FOUND != val) {
11313 uint8_t whiteLevel = (uint8_t)val;
11314 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
11315 rc = BAD_VALUE;
11316 }
11317 }
11318 }
11319
11320 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
11321 uint8_t fwk_cacMode =
11322 frame_settings.find(
11323 ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
11324 int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
11325 fwk_cacMode);
11326 if (NAME_NOT_FOUND != val) {
11327 cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
11328 bool entryAvailable = FALSE;
11329 // Check whether Frameworks set CAC mode is supported in device or not
11330 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
11331 if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
11332 entryAvailable = TRUE;
11333 break;
11334 }
11335 }
11336 LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
11337 // If entry not found then set the device supported mode instead of frameworks mode i.e,
11338 // Only HW ISP CAC + NO SW CAC : Advertise all 3 with High doing same as fast by ISP
11339 // NO HW ISP CAC + Only SW CAC : Advertise all 3 with Fast doing the same as OFF
11340 if (entryAvailable == FALSE) {
11341 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
11342 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11343 } else {
11344 if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
11345                         // HIGH is not supported, so set FAST since the spec says the underlying
11346                         // device implementation can be the same for both modes.
11347 cacMode = CAM_COLOR_CORRECTION_ABERRATION_FAST;
11348 } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
11349                         // FAST is not supported; do not fall back to HIGH (to avoid the fps
11350                         // drop caused by high quality) and choose OFF instead
11351 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11352 } else {
11353 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11354 }
11355 }
11356 }
11357 LOGD("Final cacMode is %d", cacMode);
11358 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
11359 rc = BAD_VALUE;
11360 }
11361 } else {
11362 LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
11363 }
11364 }
11365
Thierry Strudel2896d122017-02-23 19:18:03 -080011366 char af_value[PROPERTY_VALUE_MAX];
11367 property_get("persist.camera.af.infinity", af_value, "0");
11368
11369 if (atoi(af_value) == 0) {
11370 if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
11371 uint8_t fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
11372 int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
11373 fwk_focusMode);
11374 if (NAME_NOT_FOUND != val) {
11375 uint8_t focusMode = (uint8_t)val;
11376 LOGD("set focus mode %d", focusMode);
11377 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11378 CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
11379 rc = BAD_VALUE;
11380 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011381 }
11382 }
Thierry Strudel2896d122017-02-23 19:18:03 -080011383 } else {
11384 uint8_t focusMode = (uint8_t)CAM_FOCUS_MODE_INFINITY;
11385 LOGE("Focus forced to infinity %d", focusMode);
11386 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
11387 rc = BAD_VALUE;
11388 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011389 }
11390
11391 if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) {
11392 float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
11393 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
11394 focalDistance)) {
11395 rc = BAD_VALUE;
11396 }
11397 }
11398
11399 if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
11400 uint8_t fwk_antibandingMode =
11401 frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
11402 int val = lookupHalName(ANTIBANDING_MODES_MAP,
11403 METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
11404 if (NAME_NOT_FOUND != val) {
11405 uint32_t hal_antibandingMode = (uint32_t)val;
Shuzhen Wangf6890e02016-08-12 14:28:54 -070011406 if (hal_antibandingMode == CAM_ANTIBANDING_MODE_AUTO) {
11407 if (m60HzZone) {
11408 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_60HZ;
11409 } else {
11410 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_50HZ;
11411 }
11412 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011413 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
11414 hal_antibandingMode)) {
11415 rc = BAD_VALUE;
11416 }
11417 }
11418 }
11419
11420 if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
11421 int32_t expCompensation = frame_settings.find(
11422 ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
11423 if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
11424 expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
11425 if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
11426 expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
Thierry Strudel54dc9782017-02-15 12:12:10 -080011427 LOGD("Setting compensation:%d", expCompensation);
Thierry Strudel3d639192016-09-09 11:52:26 -070011428 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
11429 expCompensation)) {
11430 rc = BAD_VALUE;
11431 }
11432 }
11433
11434 if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
11435 uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
11436 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
11437 rc = BAD_VALUE;
11438 }
11439 }
11440 if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
11441 rc = setHalFpsRange(frame_settings, hal_metadata);
11442 if (rc != NO_ERROR) {
11443 LOGE("setHalFpsRange failed");
11444 }
11445 }
11446
11447 if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
11448 uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
11449 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
11450 rc = BAD_VALUE;
11451 }
11452 }
11453
11454 if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
11455 uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
11456 int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
11457 fwk_effectMode);
11458 if (NAME_NOT_FOUND != val) {
11459 uint8_t effectMode = (uint8_t)val;
11460 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
11461 rc = BAD_VALUE;
11462 }
11463 }
11464 }
11465
11466 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
11467 uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
11468 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
11469 colorCorrectMode)) {
11470 rc = BAD_VALUE;
11471 }
11472 }
11473
11474 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
11475 cam_color_correct_gains_t colorCorrectGains;
11476 for (size_t i = 0; i < CC_GAIN_MAX; i++) {
11477 colorCorrectGains.gains[i] =
11478 frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
11479 }
11480 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
11481 colorCorrectGains)) {
11482 rc = BAD_VALUE;
11483 }
11484 }
11485
11486 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
11487 cam_color_correct_matrix_t colorCorrectTransform;
11488 cam_rational_type_t transform_elem;
11489 size_t num = 0;
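        // The framework supplies the 3x3 transform as rationals in row-major order,
        // so element (i, j) comes from data.r[i * CC_MATRIX_COLS + j].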
11490 for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
11491 for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
11492 transform_elem.numerator =
11493 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
11494 transform_elem.denominator =
11495 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
11496 colorCorrectTransform.transform_matrix[i][j] = transform_elem;
11497 num++;
11498 }
11499 }
11500 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
11501 colorCorrectTransform)) {
11502 rc = BAD_VALUE;
11503 }
11504 }
11505
11506 cam_trigger_t aecTrigger;
11507 aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
11508 aecTrigger.trigger_id = -1;
11509 if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
11510 frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
11511 aecTrigger.trigger =
11512 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
11513 aecTrigger.trigger_id =
11514 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
11515 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
11516 aecTrigger)) {
11517 rc = BAD_VALUE;
11518 }
11519 LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
11520 aecTrigger.trigger, aecTrigger.trigger_id);
11521 }
11522
11523 /*af_trigger must come with a trigger id*/
11524 if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
11525 frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
11526 cam_trigger_t af_trigger;
11527 af_trigger.trigger =
11528 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
11529 af_trigger.trigger_id =
11530 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
11531 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
11532 rc = BAD_VALUE;
11533 }
11534 LOGD("AfTrigger: %d AfTriggerID: %d",
11535 af_trigger.trigger, af_trigger.trigger_id);
11536 }
11537
11538 if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
11539 int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
11540 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
11541 rc = BAD_VALUE;
11542 }
11543 }
11544 if (frame_settings.exists(ANDROID_EDGE_MODE)) {
11545 cam_edge_application_t edge_application;
11546 edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080011547
Thierry Strudel3d639192016-09-09 11:52:26 -070011548 if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
11549 edge_application.sharpness = 0;
11550 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080011551 edge_application.sharpness =
11552 gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
11553 if (frame_settings.exists(QCAMERA3_SHARPNESS_STRENGTH)) {
11554 int32_t sharpness =
11555 frame_settings.find(QCAMERA3_SHARPNESS_STRENGTH).data.i32[0];
11556 if (sharpness >= gCamCapability[mCameraId]->sharpness_ctrl.min_value &&
11557 sharpness <= gCamCapability[mCameraId]->sharpness_ctrl.max_value) {
11558 LOGD("Setting edge mode sharpness %d", sharpness);
11559 edge_application.sharpness = sharpness;
11560 }
11561 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011562 }
11563 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
11564 rc = BAD_VALUE;
11565 }
11566 }
11567
11568 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
11569 int32_t respectFlashMode = 1;
11570 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
11571 uint8_t fwk_aeMode =
11572 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
11573 if (fwk_aeMode > ANDROID_CONTROL_AE_MODE_ON) {
11574 respectFlashMode = 0;
11575 LOGH("AE Mode controls flash, ignore android.flash.mode");
11576 }
11577 }
11578 if (respectFlashMode) {
11579 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
11580 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11581 LOGH("flash mode after mapping %d", val);
11582 // To check: CAM_INTF_META_FLASH_MODE usage
11583 if (NAME_NOT_FOUND != val) {
11584 uint8_t flashMode = (uint8_t)val;
11585 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
11586 rc = BAD_VALUE;
11587 }
11588 }
11589 }
11590 }
11591
11592 if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
11593 uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
11594 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
11595 rc = BAD_VALUE;
11596 }
11597 }
11598
11599 if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
11600 int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
11601 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
11602 flashFiringTime)) {
11603 rc = BAD_VALUE;
11604 }
11605 }
11606
11607 if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
11608 uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
11609 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
11610 hotPixelMode)) {
11611 rc = BAD_VALUE;
11612 }
11613 }
11614
11615 if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
11616 float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
11617 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
11618 lensAperture)) {
11619 rc = BAD_VALUE;
11620 }
11621 }
11622
11623 if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
11624 float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
11625 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
11626 filterDensity)) {
11627 rc = BAD_VALUE;
11628 }
11629 }
11630
11631 if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
11632 float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
11633 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
11634 focalLength)) {
11635 rc = BAD_VALUE;
11636 }
11637 }
11638
11639 if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
11640 uint8_t optStabMode =
11641 frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
11642 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
11643 optStabMode)) {
11644 rc = BAD_VALUE;
11645 }
11646 }
11647
11648 if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
11649 uint8_t videoStabMode =
11650 frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
11651 LOGD("videoStabMode from APP = %d", videoStabMode);
11652 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_VIDEO_STAB_MODE,
11653 videoStabMode)) {
11654 rc = BAD_VALUE;
11655 }
11656 }
11657
11658
11659 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
11660 uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
11661 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
11662 noiseRedMode)) {
11663 rc = BAD_VALUE;
11664 }
11665 }
11666
11667 if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
11668 float reprocessEffectiveExposureFactor =
11669 frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
11670 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
11671 reprocessEffectiveExposureFactor)) {
11672 rc = BAD_VALUE;
11673 }
11674 }
11675
11676 cam_crop_region_t scalerCropRegion;
11677 bool scalerCropSet = false;
11678 if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
11679 scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
11680 scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
11681 scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
11682 scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
11683
11684 // Map coordinate system from active array to sensor output.
11685 mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
11686 scalerCropRegion.width, scalerCropRegion.height);
11687
11688 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
11689 scalerCropRegion)) {
11690 rc = BAD_VALUE;
11691 }
11692 scalerCropSet = true;
11693 }
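    // scalerCropRegion is kept (now in sensor coordinates) so the AE/AF region handling
    // further below can reset ROIs against it via resetIfNeededROI().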
11694
11695 if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
11696 int64_t sensorExpTime =
11697 frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
11698 LOGD("setting sensorExpTime %lld", sensorExpTime);
11699 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
11700 sensorExpTime)) {
11701 rc = BAD_VALUE;
11702 }
11703 }
11704
11705 if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
11706 int64_t sensorFrameDuration =
11707 frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070011708 sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
11709 if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
11710 sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
11711 LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
11712 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
11713 sensorFrameDuration)) {
11714 rc = BAD_VALUE;
11715 }
11716 }
11717
11718 if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
11719 int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
11720 if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
11721 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
11722 if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
11723 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
11724 LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
11725 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
11726 sensorSensitivity)) {
11727 rc = BAD_VALUE;
11728 }
11729 }
11730
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011731#ifndef USE_HAL_3_3
11732 if (frame_settings.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
11733 int32_t ispSensitivity =
11734 frame_settings.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST).data.i32[0];
11735 if (ispSensitivity <
11736 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity) {
11737 ispSensitivity =
11738 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
11739 LOGD("clamp ispSensitivity to %d", ispSensitivity);
11740 }
11741 if (ispSensitivity >
11742 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity) {
11743 ispSensitivity =
11744 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity;
11745 LOGD("clamp ispSensitivity to %d", ispSensitivity);
11746 }
11747 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_ISP_SENSITIVITY,
11748 ispSensitivity)) {
11749 rc = BAD_VALUE;
11750 }
11751 }
11752#endif
11753
Thierry Strudel3d639192016-09-09 11:52:26 -070011754 if (frame_settings.exists(ANDROID_SHADING_MODE)) {
11755 uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
11756 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
11757 rc = BAD_VALUE;
11758 }
11759 }
11760
11761 if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
11762 uint8_t fwk_facedetectMode =
11763 frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
11764
11765 int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
11766 fwk_facedetectMode);
11767
11768 if (NAME_NOT_FOUND != val) {
11769 uint8_t facedetectMode = (uint8_t)val;
11770 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
11771 facedetectMode)) {
11772 rc = BAD_VALUE;
11773 }
11774 }
11775 }
11776
Thierry Strudel54dc9782017-02-15 12:12:10 -080011777 if (frame_settings.exists(QCAMERA3_HISTOGRAM_MODE)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011778 uint8_t histogramMode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080011779 frame_settings.find(QCAMERA3_HISTOGRAM_MODE).data.u8[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070011780 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
11781 histogramMode)) {
11782 rc = BAD_VALUE;
11783 }
11784 }
11785
11786 if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
11787 uint8_t sharpnessMapMode =
11788 frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
11789 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
11790 sharpnessMapMode)) {
11791 rc = BAD_VALUE;
11792 }
11793 }
11794
11795 if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
11796 uint8_t tonemapMode =
11797 frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
11798 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
11799 rc = BAD_VALUE;
11800 }
11801 }
11802 /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
11803 /*All tonemap channels will have the same number of points*/
11804 if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
11805 frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
11806 frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
11807 cam_rgb_tonemap_curves tonemapCurves;
11808 tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
11809 if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
11810 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
11811 tonemapCurves.tonemap_points_cnt,
11812 CAM_MAX_TONEMAP_CURVE_SIZE);
11813 tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
11814 }
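        // Each channel's curve is an interleaved list of (Pin, Pout) pairs, which is why
        // tonemap_points_cnt above is count / 2 and the inner loop copies two floats per point.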
11815
11816 /* ch0 = G*/
11817 size_t point = 0;
11818 cam_tonemap_curve_t tonemapCurveGreen;
11819 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
11820 for (size_t j = 0; j < 2; j++) {
11821 tonemapCurveGreen.tonemap_points[i][j] =
11822 frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
11823 point++;
11824 }
11825 }
11826 tonemapCurves.curves[0] = tonemapCurveGreen;
11827
11828 /* ch 1 = B */
11829 point = 0;
11830 cam_tonemap_curve_t tonemapCurveBlue;
11831 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
11832 for (size_t j = 0; j < 2; j++) {
11833 tonemapCurveBlue.tonemap_points[i][j] =
11834 frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
11835 point++;
11836 }
11837 }
11838 tonemapCurves.curves[1] = tonemapCurveBlue;
11839
11840 /* ch 2 = R */
11841 point = 0;
11842 cam_tonemap_curve_t tonemapCurveRed;
11843 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
11844 for (size_t j = 0; j < 2; j++) {
11845 tonemapCurveRed.tonemap_points[i][j] =
11846 frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
11847 point++;
11848 }
11849 }
11850 tonemapCurves.curves[2] = tonemapCurveRed;
11851
11852 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
11853 tonemapCurves)) {
11854 rc = BAD_VALUE;
11855 }
11856 }
11857
11858 if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
11859 uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
11860 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
11861 captureIntent)) {
11862 rc = BAD_VALUE;
11863 }
11864 }
11865
11866 if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
11867 uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
11868 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
11869 blackLevelLock)) {
11870 rc = BAD_VALUE;
11871 }
11872 }
11873
11874 if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
11875 uint8_t lensShadingMapMode =
11876 frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
11877 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
11878 lensShadingMapMode)) {
11879 rc = BAD_VALUE;
11880 }
11881 }
11882
11883 if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
11884 cam_area_t roi;
11885 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011886 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AE_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070011887
11888 // Map coordinate system from active array to sensor output.
11889 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
11890 roi.rect.height);
11891
11892 if (scalerCropSet) {
11893 reset = resetIfNeededROI(&roi, &scalerCropRegion);
11894 }
11895 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
11896 rc = BAD_VALUE;
11897 }
11898 }
11899
11900 if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
11901 cam_area_t roi;
11902 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011903 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AF_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070011904
11905 // Map coordinate system from active array to sensor output.
11906 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
11907 roi.rect.height);
11908
11909 if (scalerCropSet) {
11910 reset = resetIfNeededROI(&roi, &scalerCropRegion);
11911 }
11912 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
11913 rc = BAD_VALUE;
11914 }
11915 }
11916
11917 // CDS for non-HFR non-video mode
11918 if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
11919 !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
11920 int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
11921 if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
11922 LOGE("Invalid CDS mode %d!", *fwk_cds);
11923 } else {
11924 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11925 CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
11926 rc = BAD_VALUE;
11927 }
11928 }
11929 }
11930
Thierry Strudel04e026f2016-10-10 11:27:36 -070011931 // Video HDR
Mansoor Aftab93a66e52017-01-26 14:58:25 -080011932 cam_video_hdr_mode_t vhdr = CAM_VIDEO_HDR_MODE_OFF;
Thierry Strudel04e026f2016-10-10 11:27:36 -070011933 if (frame_settings.exists(QCAMERA3_VIDEO_HDR_MODE)) {
Mansoor Aftab93a66e52017-01-26 14:58:25 -080011934 vhdr = (cam_video_hdr_mode_t) frame_settings.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
11935 }
11936 if (m_bVideoHdrEnabled)
11937 vhdr = CAM_VIDEO_HDR_MODE_ON;
11938
Thierry Strudel54dc9782017-02-15 12:12:10 -080011939 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
11940
11941 if(vhdr != curr_hdr_state)
11942 LOGH("PROFILE_SET_HDR_MODE %d" ,vhdr);
11943
Mansoor Aftab93a66e52017-01-26 14:58:25 -080011944 rc = setVideoHdrMode(mParameters, vhdr);
11945 if (rc != NO_ERROR) {
11946 LOGE("setVideoHDR is failed");
Thierry Strudel04e026f2016-10-10 11:27:36 -070011947 }
11948
11949 //IR
11950 if(frame_settings.exists(QCAMERA3_IR_MODE)) {
11951 cam_ir_mode_type_t fwk_ir = (cam_ir_mode_type_t)
11952 frame_settings.find(QCAMERA3_IR_MODE).data.i32[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080011953 uint8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR) != 0);
11954 uint8_t isIRon = 0;
11955
11956         isIRon = (fwk_ir > 0) ? 1 : 0;
Thierry Strudel04e026f2016-10-10 11:27:36 -070011957 if ((CAM_IR_MODE_MAX <= fwk_ir) || (0 > fwk_ir)) {
11958 LOGE("Invalid IR mode %d!", fwk_ir);
11959 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080011960 if(isIRon != curr_ir_state )
11961 LOGH("PROFILE_SET_IR_MODE %d" ,isIRon);
11962
Thierry Strudel04e026f2016-10-10 11:27:36 -070011963 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11964 CAM_INTF_META_IR_MODE, fwk_ir)) {
11965 rc = BAD_VALUE;
11966 }
11967 }
11968 }
11969
Thierry Strudel54dc9782017-02-15 12:12:10 -080011970 //Binning Correction Mode
11971 if(frame_settings.exists(QCAMERA3_BINNING_CORRECTION_MODE)) {
11972 cam_binning_correction_mode_t fwk_binning_correction = (cam_binning_correction_mode_t)
11973 frame_settings.find(QCAMERA3_BINNING_CORRECTION_MODE).data.i32[0];
11974 if ((CAM_BINNING_CORRECTION_MODE_MAX <= fwk_binning_correction)
11975 || (0 > fwk_binning_correction)) {
11976 LOGE("Invalid binning correction mode %d!", fwk_binning_correction);
11977 } else {
11978 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11979 CAM_INTF_META_BINNING_CORRECTION_MODE, fwk_binning_correction)) {
11980 rc = BAD_VALUE;
11981 }
11982 }
11983 }
11984
Thierry Strudel269c81a2016-10-12 12:13:59 -070011985 if (frame_settings.exists(QCAMERA3_AEC_CONVERGENCE_SPEED)) {
11986 float aec_speed;
11987 aec_speed = frame_settings.find(QCAMERA3_AEC_CONVERGENCE_SPEED).data.f[0];
11988 LOGD("AEC Speed :%f", aec_speed);
11989 if ( aec_speed < 0 ) {
11990             LOGE("Invalid AEC convergence speed %f!", aec_speed);
11991 } else {
11992 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_CONVERGENCE_SPEED,
11993 aec_speed)) {
11994 rc = BAD_VALUE;
11995 }
11996 }
11997 }
11998
11999 if (frame_settings.exists(QCAMERA3_AWB_CONVERGENCE_SPEED)) {
12000 float awb_speed;
12001 awb_speed = frame_settings.find(QCAMERA3_AWB_CONVERGENCE_SPEED).data.f[0];
12002 LOGD("AWB Speed :%f", awb_speed);
12003 if ( awb_speed < 0 ) {
12004             LOGE("Invalid AWB convergence speed %f!", awb_speed);
12005 } else {
12006 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AWB_CONVERGENCE_SPEED,
12007 awb_speed)) {
12008 rc = BAD_VALUE;
12009 }
12010 }
12011 }
12012
Thierry Strudel3d639192016-09-09 11:52:26 -070012013 // TNR
12014 if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
12015 frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
12016 uint8_t b_TnrRequested = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012017 uint8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0);
Thierry Strudel3d639192016-09-09 11:52:26 -070012018 cam_denoise_param_t tnr;
12019 tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
12020 tnr.process_plates =
12021 (cam_denoise_process_type_t)frame_settings.find(
12022 QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
12023 b_TnrRequested = tnr.denoise_enable;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012024
12025 if(b_TnrRequested != curr_tnr_state)
12026 LOGH("PROFILE_SET_TNR_MODE %d" ,b_TnrRequested);
12027
Thierry Strudel3d639192016-09-09 11:52:26 -070012028 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
12029 rc = BAD_VALUE;
12030 }
12031 }
12032
Thierry Strudel54dc9782017-02-15 12:12:10 -080012033 if (frame_settings.exists(QCAMERA3_EXPOSURE_METER)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012034 int32_t* exposure_metering_mode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012035 frame_settings.find(QCAMERA3_EXPOSURE_METER).data.i32;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012036 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_ALGO_TYPE,
12037 *exposure_metering_mode)) {
12038 rc = BAD_VALUE;
12039 }
12040 }
12041
Thierry Strudel3d639192016-09-09 11:52:26 -070012042 if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
12043 int32_t fwk_testPatternMode =
12044 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
12045 int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
12046 METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
12047
12048 if (NAME_NOT_FOUND != testPatternMode) {
12049 cam_test_pattern_data_t testPatternData;
12050 memset(&testPatternData, 0, sizeof(testPatternData));
12051 testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
12052 if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
12053 frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
12054 int32_t *fwk_testPatternData =
12055 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
12056 testPatternData.r = fwk_testPatternData[0];
12057 testPatternData.b = fwk_testPatternData[3];
12058 switch (gCamCapability[mCameraId]->color_arrangement) {
12059 case CAM_FILTER_ARRANGEMENT_RGGB:
12060 case CAM_FILTER_ARRANGEMENT_GRBG:
12061 testPatternData.gr = fwk_testPatternData[1];
12062 testPatternData.gb = fwk_testPatternData[2];
12063 break;
12064 case CAM_FILTER_ARRANGEMENT_GBRG:
12065 case CAM_FILTER_ARRANGEMENT_BGGR:
12066 testPatternData.gr = fwk_testPatternData[2];
12067 testPatternData.gb = fwk_testPatternData[1];
12068 break;
12069 default:
12070 LOGE("color arrangement %d is not supported",
12071 gCamCapability[mCameraId]->color_arrangement);
12072 break;
12073 }
12074 }
12075 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
12076 testPatternData)) {
12077 rc = BAD_VALUE;
12078 }
12079 } else {
12080 LOGE("Invalid framework sensor test pattern mode %d",
12081 fwk_testPatternMode);
12082 }
12083 }
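    // Illustrative example (values are placeholders, not from this file): a solid
    // green test pattern on an RGGB sensor would arrive as
    //     ANDROID_SENSOR_TEST_PATTERN_MODE = SOLID_COLOR
    //     ANDROID_SENSOR_TEST_PATTERN_DATA = {0, MAX, MAX, 0}
    // where MAX stands for the sensor's maximum raw value; the block above would
    // stage it as testPatternData {r = 0, gr = MAX, gb = MAX, b = 0}.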
12084
12085 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
12086 size_t count = 0;
12087 camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
12088 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
12089 gps_coords.data.d, gps_coords.count, count);
12090 if (gps_coords.count != count) {
12091 rc = BAD_VALUE;
12092 }
12093 }
12094
12095 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
12096 char gps_methods[GPS_PROCESSING_METHOD_SIZE];
12097 size_t count = 0;
12098 const char *gps_methods_src = (const char *)
12099 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
12100 memset(gps_methods, '\0', sizeof(gps_methods));
12101 strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
12102 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
12103 gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
12104 if (GPS_PROCESSING_METHOD_SIZE != count) {
12105 rc = BAD_VALUE;
12106 }
12107 }
12108
12109 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
12110 int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
12111 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
12112 gps_timestamp)) {
12113 rc = BAD_VALUE;
12114 }
12115 }
12116
12117 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
12118 int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
12119 cam_rotation_info_t rotation_info;
12120 if (orientation == 0) {
12121 rotation_info.rotation = ROTATE_0;
12122 } else if (orientation == 90) {
12123 rotation_info.rotation = ROTATE_90;
12124 } else if (orientation == 180) {
12125 rotation_info.rotation = ROTATE_180;
12126 } else if (orientation == 270) {
12127 rotation_info.rotation = ROTATE_270;
12128 }
Shuzhen Wang6ec8eac2016-07-28 23:09:23 -070012129 rotation_info.device_rotation = ROTATE_0;
Thierry Strudel3d639192016-09-09 11:52:26 -070012130 rotation_info.streamId = snapshotStreamId;
12131 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
12132 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
12133 rc = BAD_VALUE;
12134 }
12135 }
12136
12137 if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
12138 uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
12139 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
12140 rc = BAD_VALUE;
12141 }
12142 }
12143
12144 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
12145 uint32_t thumb_quality = (uint32_t)
12146 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
12147 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
12148 thumb_quality)) {
12149 rc = BAD_VALUE;
12150 }
12151 }
12152
12153 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
12154 cam_dimension_t dim;
12155 dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
12156 dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
12157 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
12158 rc = BAD_VALUE;
12159 }
12160 }
12161
12162 // Internal metadata
12163 if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
12164 size_t count = 0;
12165 camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
12166 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
12167 privatedata.data.i32, privatedata.count, count);
12168 if (privatedata.count != count) {
12169 rc = BAD_VALUE;
12170 }
12171 }
12172
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012173 // ISO/Exposure Priority
12174 if (frame_settings.exists(QCAMERA3_USE_ISO_EXP_PRIORITY) &&
12175 frame_settings.exists(QCAMERA3_SELECT_PRIORITY)) {
12176 cam_priority_mode_t mode =
12177 (cam_priority_mode_t)frame_settings.find(QCAMERA3_SELECT_PRIORITY).data.i32[0];
12178 if((CAM_ISO_PRIORITY == mode) || (CAM_EXP_PRIORITY == mode)) {
12179 cam_intf_parm_manual_3a_t use_iso_exp_pty;
12180 use_iso_exp_pty.previewOnly = FALSE;
12181 uint64_t* ptr = (uint64_t*)frame_settings.find(QCAMERA3_USE_ISO_EXP_PRIORITY).data.i64;
12182 use_iso_exp_pty.value = *ptr;
12183
12184 if(CAM_ISO_PRIORITY == mode) {
12185 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ISO,
12186 use_iso_exp_pty)) {
12187 rc = BAD_VALUE;
12188 }
12189 }
12190 else {
12191 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_TIME,
12192 use_iso_exp_pty)) {
12193 rc = BAD_VALUE;
12194 }
12195 }
Thierry Strudel54dc9782017-02-15 12:12:10 -080012196
12197 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 1)) {
12198 rc = BAD_VALUE;
12199 }
12200 }
12201 } else {
12202 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 0)) {
12203 rc = BAD_VALUE;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012204 }
12205 }
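    // The priority handling above also drives CAM_INTF_PARM_ZSL_MODE implicitly:
    // ZSL is enabled while an ISO or exposure priority mode is active and disabled
    // when the vendor tags are absent from the request.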
12206
12207 // Saturation
12208 if (frame_settings.exists(QCAMERA3_USE_SATURATION)) {
12209 int32_t* use_saturation =
12210 frame_settings.find(QCAMERA3_USE_SATURATION).data.i32;
12211 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_SATURATION, *use_saturation)) {
12212 rc = BAD_VALUE;
12213 }
12214 }
12215
Thierry Strudel3d639192016-09-09 11:52:26 -070012216 // EV step
12217 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
12218 gCamCapability[mCameraId]->exp_compensation_step)) {
12219 rc = BAD_VALUE;
12220 }
12221
12222 // CDS info
12223 if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
12224 cam_cds_data_t *cdsData = (cam_cds_data_t *)
12225 frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
12226
12227 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12228 CAM_INTF_META_CDS_DATA, *cdsData)) {
12229 rc = BAD_VALUE;
12230 }
12231 }
12232
Shuzhen Wang19463d72016-03-08 11:09:52 -080012233 // Hybrid AE
12234 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
12235 uint8_t *hybrid_ae = (uint8_t *)
12236 frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;
12237
12238 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12239 CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
12240 rc = BAD_VALUE;
12241 }
12242 }
12243
Thierry Strudel3d639192016-09-09 11:52:26 -070012244 return rc;
12245}
12246
12247/*===========================================================================
12248 * FUNCTION : captureResultCb
12249 *
12250 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
12251 *
12252 * PARAMETERS :
12253 * @frame : frame information from mm-camera-interface
12254 * @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
12255 * @userdata: userdata
12256 *
12257 * RETURN : NONE
12258 *==========================================================================*/
12259void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
12260 camera3_stream_buffer_t *buffer,
12261 uint32_t frame_number, bool isInputBuffer, void *userdata)
12262{
12263 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
12264 if (hw == NULL) {
12265 LOGE("Invalid hw %p", hw);
12266 return;
12267 }
12268
12269 hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
12270 return;
12271}
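// Like the other static entry points in this file, captureResultCb() and
// setBufferErrorStatus() are handed to channels as plain function pointers with
// "this" passed as userdata (see addOfflineReprocChannel() below); they simply
// trampoline into the corresponding member implementations.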
12272
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012273/*===========================================================================
12274 * FUNCTION : setBufferErrorStatus
12275 *
12276 * DESCRIPTION: Callback handler for channels to report any buffer errors
12277 *
12278 * PARAMETERS :
12279 * @ch : Channel on which buffer error is reported from
12280 * @frame_number : frame number on which buffer error is reported on
12281 * @buffer_status : buffer error status
12282 * @userdata: userdata
12283 *
12284 * RETURN : NONE
12285 *==========================================================================*/
12286void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
12287 uint32_t frame_number, camera3_buffer_status_t err, void *userdata)
12288{
12289 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
12290 if (hw == NULL) {
12291 LOGE("Invalid hw %p", hw);
12292 return;
12293 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012294
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012295 hw->setBufferErrorStatus(ch, frame_number, err);
12296 return;
12297}
12298
12299void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
12300 uint32_t frameNumber, camera3_buffer_status_t err)
12301{
12302 LOGD("channel: %p, frame# %d, buf err: %d", ch, frameNumber, err);
12303 pthread_mutex_lock(&mMutex);
12304
12305 for (auto& req : mPendingBuffersMap.mPendingBuffersInRequest) {
12306 if (req.frame_number != frameNumber)
12307 continue;
12308 for (auto& k : req.mPendingBufferList) {
12309 if(k.stream->priv == ch) {
12310 k.bufStatus = CAMERA3_BUFFER_STATUS_ERROR;
12311 }
12312 }
12313 }
12314
12315 pthread_mutex_unlock(&mMutex);
12316 return;
12317}
Thierry Strudel3d639192016-09-09 11:52:26 -070012318/*===========================================================================
12319 * FUNCTION : initialize
12320 *
12321 * DESCRIPTION: Pass framework callback pointers to HAL
12322 *
12323 * PARAMETERS :
12324 *
12325 *
12326 * RETURN : Success : 0
12327 * Failure: -ENODEV
12328 *==========================================================================*/
12329
12330int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
12331 const camera3_callback_ops_t *callback_ops)
12332{
12333 LOGD("E");
12334 QCamera3HardwareInterface *hw =
12335 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12336 if (!hw) {
12337 LOGE("NULL camera device");
12338 return -ENODEV;
12339 }
12340
12341 int rc = hw->initialize(callback_ops);
12342 LOGD("X");
12343 return rc;
12344}
12345
12346/*===========================================================================
12347 * FUNCTION : configure_streams
12348 *
12349 * DESCRIPTION: Configure camera streams for the use case; forwards to configureStreams()
12350 *
12351 * PARAMETERS :
12352 *
12353 *
12354 * RETURN : Success: 0
12355 * Failure: -EINVAL (if stream configuration is invalid)
12356 * -ENODEV (fatal error)
12357 *==========================================================================*/
12358
12359int QCamera3HardwareInterface::configure_streams(
12360 const struct camera3_device *device,
12361 camera3_stream_configuration_t *stream_list)
12362{
12363 LOGD("E");
12364 QCamera3HardwareInterface *hw =
12365 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12366 if (!hw) {
12367 LOGE("NULL camera device");
12368 return -ENODEV;
12369 }
12370 int rc = hw->configureStreams(stream_list);
12371 LOGD("X");
12372 return rc;
12373}
12374
12375/*===========================================================================
12376 * FUNCTION : construct_default_request_settings
12377 *
12378 * DESCRIPTION: Configure a settings buffer to meet the required use case
12379 *
12380 * PARAMETERS :
12381 *
12382 *
12383 * RETURN : Success: Return valid metadata
12384 * Failure: Return NULL
12385 *==========================================================================*/
12386const camera_metadata_t* QCamera3HardwareInterface::
12387 construct_default_request_settings(const struct camera3_device *device,
12388 int type)
12389{
12390
12391 LOGD("E");
12392 camera_metadata_t* fwk_metadata = NULL;
12393 QCamera3HardwareInterface *hw =
12394 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12395 if (!hw) {
12396 LOGE("NULL camera device");
12397 return NULL;
12398 }
12399
12400 fwk_metadata = hw->translateCapabilityToMetadata(type);
12401
12402 LOGD("X");
12403 return fwk_metadata;
12404}
12405
12406/*===========================================================================
12407 * FUNCTION : process_capture_request
12408 *
12409 * DESCRIPTION: Process a capture request from the framework; forwards to orchestrateRequest()
12410 *
12411 * PARAMETERS :
12412 *
12413 *
12414 * RETURN :
12415 *==========================================================================*/
12416int QCamera3HardwareInterface::process_capture_request(
12417 const struct camera3_device *device,
12418 camera3_capture_request_t *request)
12419{
12420 LOGD("E");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012421 CAMSCOPE_UPDATE_FLAGS(CAMSCOPE_SECTION_HAL, kpi_camscope_flags);
Thierry Strudel3d639192016-09-09 11:52:26 -070012422 QCamera3HardwareInterface *hw =
12423 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12424 if (!hw) {
12425 LOGE("NULL camera device");
12426 return -EINVAL;
12427 }
12428
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012429 int rc = hw->orchestrateRequest(request);
Thierry Strudel3d639192016-09-09 11:52:26 -070012430 LOGD("X");
12431 return rc;
12432}
12433
12434/*===========================================================================
12435 * FUNCTION : dump
12436 *
12437 * DESCRIPTION: Dump HAL state to the given file descriptor
12438 *
12439 * PARAMETERS :
12440 *
12441 *
12442 * RETURN :
12443 *==========================================================================*/
12444
12445void QCamera3HardwareInterface::dump(
12446 const struct camera3_device *device, int fd)
12447{
12448 /* Log level property is read when "adb shell dumpsys media.camera" is
12449 called so that the log level can be controlled without restarting
12450 the media server */
12451 getLogLevel();
12452
12453 LOGD("E");
12454 QCamera3HardwareInterface *hw =
12455 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12456 if (!hw) {
12457 LOGE("NULL camera device");
12458 return;
12459 }
12460
12461 hw->dump(fd);
12462 LOGD("X");
12463 return;
12464}
12465
12466/*===========================================================================
12467 * FUNCTION : flush
12468 *
12469 * DESCRIPTION: Flush all in-flight captures; validates HAL state and forwards to flush()
12470 *
12471 * PARAMETERS :
12472 *
12473 *
12474 * RETURN :
12475 *==========================================================================*/
12476
12477int QCamera3HardwareInterface::flush(
12478 const struct camera3_device *device)
12479{
12480 int rc;
12481 LOGD("E");
12482 QCamera3HardwareInterface *hw =
12483 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12484 if (!hw) {
12485 LOGE("NULL camera device");
12486 return -EINVAL;
12487 }
12488
12489 pthread_mutex_lock(&hw->mMutex);
12490 // Validate current state
12491 switch (hw->mState) {
12492 case STARTED:
12493 /* valid state */
12494 break;
12495
12496 case ERROR:
12497 pthread_mutex_unlock(&hw->mMutex);
12498 hw->handleCameraDeviceError();
12499 return -ENODEV;
12500
12501 default:
12502 LOGI("Flush returned during state %d", hw->mState);
12503 pthread_mutex_unlock(&hw->mMutex);
12504 return 0;
12505 }
12506 pthread_mutex_unlock(&hw->mMutex);
12507
12508 rc = hw->flush(true /* restart channels */ );
12509 LOGD("X");
12510 return rc;
12511}
12512
12513/*===========================================================================
12514 * FUNCTION : close_camera_device
12515 *
12516 * DESCRIPTION: Close the camera device and free the HAL instance
12517 *
12518 * PARAMETERS :
12519 *
12520 *
12521 * RETURN :
12522 *==========================================================================*/
12523int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
12524{
12525 int ret = NO_ERROR;
12526 QCamera3HardwareInterface *hw =
12527 reinterpret_cast<QCamera3HardwareInterface *>(
12528 reinterpret_cast<camera3_device_t *>(device)->priv);
12529 if (!hw) {
12530 LOGE("NULL camera device");
12531 return BAD_VALUE;
12532 }
12533
12534 LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
12535 delete hw;
12536 LOGI("[KPI Perf]: X");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012537 CAMSCOPE_DESTROY(CAMSCOPE_SECTION_HAL);
Thierry Strudel3d639192016-09-09 11:52:26 -070012538 return ret;
12539}
12540
12541/*===========================================================================
12542 * FUNCTION : getWaveletDenoiseProcessPlate
12543 *
12544 * DESCRIPTION: query wavelet denoise process plate
12545 *
12546 * PARAMETERS : None
12547 *
12548 * RETURN     : WNR process plate value
12549 *==========================================================================*/
12550cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
12551{
12552 char prop[PROPERTY_VALUE_MAX];
12553 memset(prop, 0, sizeof(prop));
12554 property_get("persist.denoise.process.plates", prop, "0");
12555 int processPlate = atoi(prop);
12556 switch(processPlate) {
12557 case 0:
12558 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
12559 case 1:
12560 return CAM_WAVELET_DENOISE_CBCR_ONLY;
12561 case 2:
12562 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
12563 case 3:
12564 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
12565 default:
12566 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
12567 }
12568}
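// Illustrative usage (assumes a debug build where the property is writable):
//     adb shell setprop persist.denoise.process.plates 2
// makes this function report CAM_WAVELET_DENOISE_STREAMLINE_YCBCR; any value
// outside 0-3 falls back to the same streamlined Y/CbCr plate.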
12569
12570
12571/*===========================================================================
12572 * FUNCTION : getTemporalDenoiseProcessPlate
12573 *
12574 * DESCRIPTION: query temporal denoise process plate
12575 *
12576 * PARAMETERS : None
12577 *
12578 * RETURN     : TNR process plate value
12579 *==========================================================================*/
12580cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
12581{
12582 char prop[PROPERTY_VALUE_MAX];
12583 memset(prop, 0, sizeof(prop));
12584 property_get("persist.tnr.process.plates", prop, "0");
12585 int processPlate = atoi(prop);
12586 switch(processPlate) {
12587 case 0:
12588 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
12589 case 1:
12590 return CAM_WAVELET_DENOISE_CBCR_ONLY;
12591 case 2:
12592 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
12593 case 3:
12594 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
12595 default:
12596 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
12597 }
12598}
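// The TNR plate selection above mirrors getWaveletDenoiseProcessPlate(); only the
// property consulted (persist.tnr.process.plates) differs.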
12599
12600
12601/*===========================================================================
12602 * FUNCTION : extractSceneMode
12603 *
12604 * DESCRIPTION: Extract scene mode from frameworks set metadata
12605 *
12606 * PARAMETERS :
12607 * @frame_settings: CameraMetadata reference
12608 *      @metaMode: ANDROID_CONTROL_MODE
12609 * @hal_metadata: hal metadata structure
12610 *
12611 * RETURN     : NO_ERROR on success, error code on failure
12612 *==========================================================================*/
12613int32_t QCamera3HardwareInterface::extractSceneMode(
12614 const CameraMetadata &frame_settings, uint8_t metaMode,
12615 metadata_buffer_t *hal_metadata)
12616{
12617 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080012618 uint8_t sceneMode = CAM_SCENE_MODE_OFF;
12619
12620 if (ANDROID_CONTROL_MODE_OFF_KEEP_STATE == metaMode) {
12621 LOGD("Ignoring control mode OFF_KEEP_STATE");
12622 return NO_ERROR;
12623 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012624
12625 if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
12626 camera_metadata_ro_entry entry =
12627 frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
12628 if (0 == entry.count)
12629 return rc;
12630
12631 uint8_t fwk_sceneMode = entry.data.u8[0];
12632
12633 int val = lookupHalName(SCENE_MODES_MAP,
12634 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
12635 fwk_sceneMode);
12636 if (NAME_NOT_FOUND != val) {
Mansoor Aftab58465fa2017-01-26 15:02:44 -080012637 sceneMode = (uint8_t)val;
Thierry Strudel3d639192016-09-09 11:52:26 -070012638 LOGD("sceneMode: %d", sceneMode);
Thierry Strudel3d639192016-09-09 11:52:26 -070012639 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080012640 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012641
Mansoor Aftab58465fa2017-01-26 15:02:44 -080012642 if ((sceneMode == CAM_SCENE_MODE_HDR) || m_bSensorHDREnabled) {
12643 rc = setSensorHDR(hal_metadata, (sceneMode == CAM_SCENE_MODE_HDR));
12644 }
12645
12646 if ((rc == NO_ERROR) && !m_bSensorHDREnabled) {
12647         if (sceneMode == CAM_SCENE_MODE_HDR) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012648 cam_hdr_param_t hdr_params;
12649 hdr_params.hdr_enable = 1;
12650 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
12651 hdr_params.hdr_need_1x = false;
12652 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12653 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
12654 rc = BAD_VALUE;
12655 }
12656 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080012657
Thierry Strudel3d639192016-09-09 11:52:26 -070012658 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12659 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
12660 rc = BAD_VALUE;
12661 }
12662 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012663
12664 if (mForceHdrSnapshot) {
12665 cam_hdr_param_t hdr_params;
12666 hdr_params.hdr_enable = 1;
12667 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
12668 hdr_params.hdr_need_1x = false;
12669 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12670 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
12671 rc = BAD_VALUE;
12672 }
12673 }
12674
Thierry Strudel3d639192016-09-09 11:52:26 -070012675 return rc;
12676}
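// Illustrative flow (a sketch of the mapping above, not additional code): a request
// carrying ANDROID_CONTROL_MODE = USE_SCENE_MODE with ANDROID_CONTROL_SCENE_MODE =
// HDR maps to CAM_SCENE_MODE_HDR through SCENE_MODES_MAP, enables multi-frame HDR
// bracketing (CAM_INTF_PARM_HAL_BRACKETING_HDR) when sensor HDR is not active, and
// is forwarded to the backend as CAM_INTF_PARM_BESTSHOT_MODE.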
12677
12678/*===========================================================================
Thierry Strudel04e026f2016-10-10 11:27:36 -070012679 * FUNCTION : setVideoHdrMode
12680 *
12681 * DESCRIPTION: Set Video HDR mode from frameworks set metadata
12682 *
12683 * PARAMETERS :
12684 * @hal_metadata: hal metadata structure
12685 *      @vhdr: requested QCAMERA3_VIDEO_HDR_MODE value
12686 *
12687 * RETURN     : NO_ERROR on success, BAD_VALUE on invalid mode
12688 *==========================================================================*/
12689int32_t QCamera3HardwareInterface::setVideoHdrMode(
12690 metadata_buffer_t *hal_metadata, cam_video_hdr_mode_t vhdr)
12691{
Mansoor Aftab58465fa2017-01-26 15:02:44 -080012692 if ( (vhdr >= CAM_VIDEO_HDR_MODE_OFF) && (vhdr < CAM_VIDEO_HDR_MODE_MAX)) {
12693 return setSensorHDR(hal_metadata, (vhdr == CAM_VIDEO_HDR_MODE_ON), true);
12694 }
12695
12696 LOGE("Invalid Video HDR mode %d!", vhdr);
12697 return BAD_VALUE;
12698}
12699
12700/*===========================================================================
12701 * FUNCTION : setSensorHDR
12702 *
12703 * DESCRIPTION: Enable/disable sensor HDR.
12704 *
12705 * PARAMETERS :
12706 * @hal_metadata: hal metadata structure
12707 * @enable: boolean whether to enable/disable sensor HDR
12708 *
12709 * RETURN     : NO_ERROR on success, error code on failure
12710 *==========================================================================*/
12711int32_t QCamera3HardwareInterface::setSensorHDR(
12712 metadata_buffer_t *hal_metadata, bool enable, bool isVideoHdrEnable)
12713{
Thierry Strudel04e026f2016-10-10 11:27:36 -070012714 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080012715 cam_sensor_hdr_type_t sensor_hdr = CAM_SENSOR_HDR_OFF;
12716
12717 if (enable) {
12718 char sensor_hdr_prop[PROPERTY_VALUE_MAX];
12719 memset(sensor_hdr_prop, 0, sizeof(sensor_hdr_prop));
12720 #ifdef _LE_CAMERA_
12721 //Default to staggered HDR for IOT
12722 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "3");
12723 #else
12724 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "0");
12725 #endif
12726 sensor_hdr = (cam_sensor_hdr_type_t) atoi(sensor_hdr_prop);
12727 }
12728
12729 bool isSupported = false;
12730 switch (sensor_hdr) {
12731 case CAM_SENSOR_HDR_IN_SENSOR:
12732 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
12733 CAM_QCOM_FEATURE_SENSOR_HDR) {
12734 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012735 LOGD("Setting HDR mode In Sensor");
Thierry Strudel04e026f2016-10-10 11:27:36 -070012736 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080012737 break;
12738 case CAM_SENSOR_HDR_ZIGZAG:
12739 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
12740 CAM_QCOM_FEATURE_ZIGZAG_HDR) {
12741 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012742 LOGD("Setting HDR mode Zigzag");
Thierry Strudel04e026f2016-10-10 11:27:36 -070012743 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080012744 break;
12745 case CAM_SENSOR_HDR_STAGGERED:
12746 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
12747 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
12748 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012749 LOGD("Setting HDR mode Staggered");
Thierry Strudel04e026f2016-10-10 11:27:36 -070012750 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080012751 break;
12752 case CAM_SENSOR_HDR_OFF:
12753 isSupported = true;
12754 LOGD("Turning off sensor HDR");
12755 break;
12756 default:
12757 LOGE("HDR mode %d not supported", sensor_hdr);
12758 rc = BAD_VALUE;
12759 break;
12760 }
12761
12762 if(isSupported) {
12763 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12764 CAM_INTF_PARM_SENSOR_HDR, sensor_hdr)) {
12765 rc = BAD_VALUE;
12766 } else {
12767 if(!isVideoHdrEnable)
12768 m_bSensorHDREnabled = (sensor_hdr != CAM_SENSOR_HDR_OFF);
Thierry Strudel04e026f2016-10-10 11:27:36 -070012769 }
12770 }
12771 return rc;
12772}
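// Illustrative example (assumes the property is writable on the target): with HDR
// requested, "adb shell setprop persist.camera.sensor.hdr 3" selects the staggered
// sensor HDR flavor (the IOT default above), provided the capability mask reports
// CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR; a known but unavailable mode is silently
// skipped, while an unknown value returns BAD_VALUE.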
12773
12774/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070012775 * FUNCTION : needRotationReprocess
12776 *
12777 * DESCRIPTION: if rotation needs to be done by reprocess in pp
12778 *
12779 * PARAMETERS : none
12780 *
12781 * RETURN : true: needed
12782 * false: no need
12783 *==========================================================================*/
12784bool QCamera3HardwareInterface::needRotationReprocess()
12785{
12786 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
12787 // current rotation is not zero, and pp has the capability to process rotation
12788 LOGH("need do reprocess for rotation");
12789 return true;
12790 }
12791
12792 return false;
12793}
12794
12795/*===========================================================================
12796 * FUNCTION : needReprocess
12797 *
12798 * DESCRIPTION: if reprocess is needed
12799 *
12800 * PARAMETERS : none
12801 *
12802 * RETURN : true: needed
12803 * false: no need
12804 *==========================================================================*/
12805bool QCamera3HardwareInterface::needReprocess(cam_feature_mask_t postprocess_mask)
12806{
12807 if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
12808 // TODO: add for ZSL HDR later
12809 // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
12810 if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
12811 LOGH("need do reprocess for ZSL WNR or min PP reprocess");
12812 return true;
12813 } else {
12814 LOGH("already post processed frame");
12815 return false;
12816 }
12817 }
12818 return needRotationReprocess();
12819}
12820
12821/*===========================================================================
12822 * FUNCTION : needJpegExifRotation
12823 *
12824 * DESCRIPTION: if JPEG EXIF rotation is needed
12825 *
12826 * PARAMETERS : none
12827 *
12828 * RETURN : true: needed
12829 * false: no need
12830 *==========================================================================*/
12831bool QCamera3HardwareInterface::needJpegExifRotation()
12832{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012833 /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
Thierry Strudel3d639192016-09-09 11:52:26 -070012834 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
12835         LOGD("Need to use Jpeg EXIF Rotation");
12836 return true;
12837 }
12838 return false;
12839}
12840
12841/*===========================================================================
12842 * FUNCTION : addOfflineReprocChannel
12843 *
12844 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
12845 * coming from input channel
12846 *
12847 * PARAMETERS :
12848 * @config : reprocess configuration
12849 * @inputChHandle : pointer to the input (source) channel
12850 *
12851 *
12852 * RETURN : Ptr to the newly created channel obj. NULL if failed.
12853 *==========================================================================*/
12854QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
12855 const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
12856{
12857 int32_t rc = NO_ERROR;
12858 QCamera3ReprocessChannel *pChannel = NULL;
12859
12860 pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012861 mChannelHandle, mCameraHandle->ops, captureResultCb, setBufferErrorStatus,
12862 config.padding, CAM_QCOM_FEATURE_NONE, this, inputChHandle);
Thierry Strudel3d639192016-09-09 11:52:26 -070012863 if (NULL == pChannel) {
12864 LOGE("no mem for reprocess channel");
12865 return NULL;
12866 }
12867
12868 rc = pChannel->initialize(IS_TYPE_NONE);
12869 if (rc != NO_ERROR) {
12870 LOGE("init reprocess channel failed, ret = %d", rc);
12871 delete pChannel;
12872 return NULL;
12873 }
12874
12875 // pp feature config
12876 cam_pp_feature_config_t pp_config;
12877 memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
12878
12879 pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
12880 if (gCamCapability[mCameraId]->qcom_supported_feature_mask
12881 & CAM_QCOM_FEATURE_DSDN) {
12882         //Use CPP CDS in case h/w supports it.
12883 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
12884 pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
12885 }
12886 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
12887 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
12888 }
12889
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012890 if (config.hdr_param.hdr_enable) {
12891 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
12892 pp_config.hdr_param = config.hdr_param;
12893 }
12894
12895 if (mForceHdrSnapshot) {
12896 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
12897 pp_config.hdr_param.hdr_enable = 1;
12898 pp_config.hdr_param.hdr_need_1x = 0;
12899 pp_config.hdr_param.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
12900 }
12901
Thierry Strudel3d639192016-09-09 11:52:26 -070012902 rc = pChannel->addReprocStreamsFromSource(pp_config,
12903 config,
12904 IS_TYPE_NONE,
12905 mMetadataChannel);
12906
12907 if (rc != NO_ERROR) {
12908 delete pChannel;
12909 return NULL;
12910 }
12911 return pChannel;
12912}
12913
12914/*===========================================================================
12915 * FUNCTION : getMobicatMask
12916 *
12917 * DESCRIPTION: returns mobicat mask
12918 *
12919 * PARAMETERS : none
12920 *
12921 * RETURN : mobicat mask
12922 *
12923 *==========================================================================*/
12924uint8_t QCamera3HardwareInterface::getMobicatMask()
12925{
12926 return m_MobicatMask;
12927}
12928
12929/*===========================================================================
12930 * FUNCTION : setMobicat
12931 *
12932 * DESCRIPTION: set Mobicat on/off.
12933 *
12934 * PARAMETERS :
12935 * @params : none
12936 *
12937 * RETURN : int32_t type of status
12938 * NO_ERROR -- success
12939 *              non-zero failure code
12940 *==========================================================================*/
12941int32_t QCamera3HardwareInterface::setMobicat()
12942{
12943 char value [PROPERTY_VALUE_MAX];
12944 property_get("persist.camera.mobicat", value, "0");
12945 int32_t ret = NO_ERROR;
12946 uint8_t enableMobi = (uint8_t)atoi(value);
12947
12948 if (enableMobi) {
12949 tune_cmd_t tune_cmd;
12950 tune_cmd.type = SET_RELOAD_CHROMATIX;
12951 tune_cmd.module = MODULE_ALL;
12952 tune_cmd.value = TRUE;
12953 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
12954 CAM_INTF_PARM_SET_VFE_COMMAND,
12955 tune_cmd);
12956
12957 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
12958 CAM_INTF_PARM_SET_PP_COMMAND,
12959 tune_cmd);
12960 }
12961 m_MobicatMask = enableMobi;
12962
12963 return ret;
12964}
12965
12966/*===========================================================================
12967* FUNCTION : getLogLevel
12968*
12969* DESCRIPTION: Reads the log level property into a variable
12970*
12971* PARAMETERS :
12972* None
12973*
12974* RETURN :
12975* None
12976*==========================================================================*/
12977void QCamera3HardwareInterface::getLogLevel()
12978{
12979 char prop[PROPERTY_VALUE_MAX];
12980 uint32_t globalLogLevel = 0;
12981
12982 property_get("persist.camera.hal.debug", prop, "0");
12983 int val = atoi(prop);
12984 if (0 <= val) {
12985 gCamHal3LogLevel = (uint32_t)val;
12986 }
12987
Thierry Strudel9ec39c62016-12-28 11:30:05 -080012988 property_get("persist.camera.kpi.debug", prop, "0");
Thierry Strudel3d639192016-09-09 11:52:26 -070012989 gKpiDebugLevel = atoi(prop);
12990
12991 property_get("persist.camera.global.debug", prop, "0");
12992 val = atoi(prop);
12993 if (0 <= val) {
12994 globalLogLevel = (uint32_t)val;
12995 }
12996
12997 /* Highest log level among hal.logs and global.logs is selected */
12998 if (gCamHal3LogLevel < globalLogLevel)
12999 gCamHal3LogLevel = globalLogLevel;
13000
13001 return;
13002}
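// Illustrative example: "adb shell setprop persist.camera.global.debug 4" raises the
// effective HAL log level to 4 even if persist.camera.hal.debug is lower, since the
// higher of the two properties wins.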
13003
13004/*===========================================================================
13005 * FUNCTION : validateStreamRotations
13006 *
13007 * DESCRIPTION: Check if the rotations requested are supported
13008 *
13009 * PARAMETERS :
13010 * @stream_list : streams to be configured
13011 *
13012 * RETURN : NO_ERROR on success
13013 * -EINVAL on failure
13014 *
13015 *==========================================================================*/
13016int QCamera3HardwareInterface::validateStreamRotations(
13017 camera3_stream_configuration_t *streamList)
13018{
13019 int rc = NO_ERROR;
13020
13021 /*
13022 * Loop through all streams requested in configuration
13023 * Check if unsupported rotations have been requested on any of them
13024 */
13025 for (size_t j = 0; j < streamList->num_streams; j++){
13026 camera3_stream_t *newStream = streamList->streams[j];
13027
13028 bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
13029 bool isImplDef = (newStream->format ==
13030 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
13031 bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
13032 isImplDef);
13033
13034 if (isRotated && (!isImplDef || isZsl)) {
13035 LOGE("Error: Unsupported rotation of %d requested for stream"
13036                     " type:%d and stream format:%d",
13037 newStream->rotation, newStream->stream_type,
13038 newStream->format);
13039 rc = -EINVAL;
13040 break;
13041 }
13042 }
13043
13044 return rc;
13045}
13046
13047/*===========================================================================
13048* FUNCTION : getFlashInfo
13049*
13050* DESCRIPTION: Retrieve information about whether the device has a flash.
13051*
13052* PARAMETERS :
13053* @cameraId : Camera id to query
13054* @hasFlash : Boolean indicating whether there is a flash device
13055* associated with given camera
13056* @flashNode : If a flash device exists, this will be its device node.
13057*
13058* RETURN :
13059* None
13060*==========================================================================*/
13061void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
13062 bool& hasFlash,
13063 char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
13064{
13065 cam_capability_t* camCapability = gCamCapability[cameraId];
13066 if (NULL == camCapability) {
13067 hasFlash = false;
13068 flashNode[0] = '\0';
13069 } else {
13070 hasFlash = camCapability->flash_available;
13071 strlcpy(flashNode,
13072 (char*)camCapability->flash_dev_name,
13073 QCAMERA_MAX_FILEPATH_LENGTH);
13074 }
13075}
13076
13077/*===========================================================================
13078* FUNCTION : getEepromVersionInfo
13079*
13080* DESCRIPTION: Retrieve version info of the sensor EEPROM data
13081*
13082* PARAMETERS : None
13083*
13084* RETURN : string describing EEPROM version
13085* "\0" if no such info available
13086*==========================================================================*/
13087const char *QCamera3HardwareInterface::getEepromVersionInfo()
13088{
13089 return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
13090}
13091
13092/*===========================================================================
13093* FUNCTION : getLdafCalib
13094*
13095* DESCRIPTION: Retrieve Laser AF calibration data
13096*
13097* PARAMETERS : None
13098*
13099* RETURN : Two uint32_t describing laser AF calibration data
13100* NULL if none is available.
13101*==========================================================================*/
13102const uint32_t *QCamera3HardwareInterface::getLdafCalib()
13103{
13104 if (mLdafCalibExist) {
13105 return &mLdafCalib[0];
13106 } else {
13107 return NULL;
13108 }
13109}
13110
13111/*===========================================================================
13112 * FUNCTION : dynamicUpdateMetaStreamInfo
13113 *
13114 * DESCRIPTION: This function:
13115 * (1) stops all the channels
13116 * (2) returns error on pending requests and buffers
13117 * (3) sends metastream_info in setparams
13118 * (4) starts all channels
13119 * This is useful when sensor has to be restarted to apply any
13120 * settings such as frame rate from a different sensor mode
13121 *
13122 * PARAMETERS : None
13123 *
13124 * RETURN : NO_ERROR on success
13125 * Error codes on failure
13126 *
13127 *==========================================================================*/
13128int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
13129{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013130 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_DYN_UPDATE_META_STRM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070013131 int rc = NO_ERROR;
13132
13133 LOGD("E");
13134
13135 rc = stopAllChannels();
13136 if (rc < 0) {
13137 LOGE("stopAllChannels failed");
13138 return rc;
13139 }
13140
13141 rc = notifyErrorForPendingRequests();
13142 if (rc < 0) {
13143 LOGE("notifyErrorForPendingRequests failed");
13144 return rc;
13145 }
13146
13147 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
13148 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x"
13149 "Format:%d",
13150 mStreamConfigInfo.type[i],
13151 mStreamConfigInfo.stream_sizes[i].width,
13152 mStreamConfigInfo.stream_sizes[i].height,
13153 mStreamConfigInfo.postprocess_mask[i],
13154 mStreamConfigInfo.format[i]);
13155 }
13156
13157 /* Send meta stream info once again so that ISP can start */
13158 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13159 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
13160 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
13161 mParameters);
13162 if (rc < 0) {
13163 LOGE("set Metastreaminfo failed. Sensor mode does not change");
13164 }
13165
13166 rc = startAllChannels();
13167 if (rc < 0) {
13168 LOGE("startAllChannels failed");
13169 return rc;
13170 }
13171
13172 LOGD("X");
13173 return rc;
13174}
13175
13176/*===========================================================================
13177 * FUNCTION : stopAllChannels
13178 *
13179 * DESCRIPTION: This function stops (equivalent to stream-off) all channels
13180 *
13181 * PARAMETERS : None
13182 *
13183 * RETURN : NO_ERROR on success
13184 * Error codes on failure
13185 *
13186 *==========================================================================*/
13187int32_t QCamera3HardwareInterface::stopAllChannels()
13188{
13189 int32_t rc = NO_ERROR;
13190
13191 LOGD("Stopping all channels");
13192 // Stop the Streams/Channels
13193 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13194 it != mStreamInfo.end(); it++) {
13195 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13196 if (channel) {
13197 channel->stop();
13198 }
13199 (*it)->status = INVALID;
13200 }
13201
13202 if (mSupportChannel) {
13203 mSupportChannel->stop();
13204 }
13205 if (mAnalysisChannel) {
13206 mAnalysisChannel->stop();
13207 }
13208 if (mRawDumpChannel) {
13209 mRawDumpChannel->stop();
13210 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013211 if (mHdrPlusRawSrcChannel) {
13212 mHdrPlusRawSrcChannel->stop();
13213 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013214 if (mMetadataChannel) {
13215 /* If content of mStreamInfo is not 0, there is metadata stream */
13216 mMetadataChannel->stop();
13217 }
13218
13219 LOGD("All channels stopped");
13220 return rc;
13221}
13222
13223/*===========================================================================
13224 * FUNCTION : startAllChannels
13225 *
13226 * DESCRIPTION: This function starts (equivalent to stream-on) all channels
13227 *
13228 * PARAMETERS : None
13229 *
13230 * RETURN : NO_ERROR on success
13231 * Error codes on failure
13232 *
13233 *==========================================================================*/
13234int32_t QCamera3HardwareInterface::startAllChannels()
13235{
13236 int32_t rc = NO_ERROR;
13237
13238 LOGD("Start all channels ");
13239 // Start the Streams/Channels
13240 if (mMetadataChannel) {
13241 /* If content of mStreamInfo is not 0, there is metadata stream */
13242 rc = mMetadataChannel->start();
13243 if (rc < 0) {
13244 LOGE("META channel start failed");
13245 return rc;
13246 }
13247 }
13248 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13249 it != mStreamInfo.end(); it++) {
13250 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13251 if (channel) {
13252 rc = channel->start();
13253 if (rc < 0) {
13254 LOGE("channel start failed");
13255 return rc;
13256 }
13257 }
13258 }
13259 if (mAnalysisChannel) {
13260 mAnalysisChannel->start();
13261 }
13262 if (mSupportChannel) {
13263 rc = mSupportChannel->start();
13264 if (rc < 0) {
13265 LOGE("Support channel start failed");
13266 return rc;
13267 }
13268 }
13269 if (mRawDumpChannel) {
13270 rc = mRawDumpChannel->start();
13271 if (rc < 0) {
13272 LOGE("RAW dump channel start failed");
13273 return rc;
13274 }
13275 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013276 if (mHdrPlusRawSrcChannel) {
13277 rc = mHdrPlusRawSrcChannel->start();
13278 if (rc < 0) {
13279 LOGE("HDR+ RAW channel start failed");
13280 return rc;
13281 }
13282 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013283
13284 LOGD("All channels started");
13285 return rc;
13286}
13287
13288/*===========================================================================
13289 * FUNCTION : notifyErrorForPendingRequests
13290 *
13291 * DESCRIPTION: This function sends error for all the pending requests/buffers
13292 *
13293 * PARAMETERS : None
13294 *
13295 * RETURN : Error codes
13296 * NO_ERROR on success
13297 *
13298 *==========================================================================*/
13299int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
13300{
13301 int32_t rc = NO_ERROR;
13302 unsigned int frameNum = 0;
13303 camera3_capture_result_t result;
13304 camera3_stream_buffer_t *pStream_Buf = NULL;
13305
13306 memset(&result, 0, sizeof(camera3_capture_result_t));
13307
13308 if (mPendingRequestsList.size() > 0) {
13309 pendingRequestIterator i = mPendingRequestsList.begin();
13310 frameNum = i->frame_number;
13311 } else {
13312 /* There might still be pending buffers even though there are
13313 no pending requests. Setting the frameNum to MAX so that
13314 all the buffers with smaller frame numbers are returned */
13315 frameNum = UINT_MAX;
13316 }
13317
13318 LOGH("Oldest frame num on mPendingRequestsList = %u",
13319 frameNum);
13320
Emilian Peev7650c122017-01-19 08:24:33 -080013321 notifyErrorFoPendingDepthData(mDepthChannel);
13322
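    // Requests older than the oldest entry still on mPendingRequestsList have already
    // had their result metadata delivered, so they only get per-buffer errors
    // (CAMERA3_MSG_ERROR_BUFFER); newer requests get a CAMERA3_MSG_ERROR_REQUEST
    // notification plus all of their buffers returned in error state.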
Thierry Strudel3d639192016-09-09 11:52:26 -070013323 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
13324 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); ) {
13325
13326 if (req->frame_number < frameNum) {
13327 // Send Error notify to frameworks for each buffer for which
13328 // metadata buffer is already sent
13329 LOGH("Sending ERROR BUFFER for frame %d for %d buffer(s)",
13330 req->frame_number, req->mPendingBufferList.size());
13331
13332 pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
13333 if (NULL == pStream_Buf) {
13334 LOGE("No memory for pending buffers array");
13335 return NO_MEMORY;
13336 }
13337 memset(pStream_Buf, 0,
13338 sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());
13339 result.result = NULL;
13340 result.frame_number = req->frame_number;
13341 result.num_output_buffers = req->mPendingBufferList.size();
13342 result.output_buffers = pStream_Buf;
13343
13344 size_t index = 0;
13345 for (auto info = req->mPendingBufferList.begin();
13346 info != req->mPendingBufferList.end(); ) {
13347
13348 camera3_notify_msg_t notify_msg;
13349 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
13350 notify_msg.type = CAMERA3_MSG_ERROR;
13351 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
13352 notify_msg.message.error.error_stream = info->stream;
13353 notify_msg.message.error.frame_number = req->frame_number;
13354 pStream_Buf[index].acquire_fence = -1;
13355 pStream_Buf[index].release_fence = -1;
13356 pStream_Buf[index].buffer = info->buffer;
13357 pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
13358 pStream_Buf[index].stream = info->stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013359 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070013360 index++;
13361 // Remove buffer from list
13362 info = req->mPendingBufferList.erase(info);
13363 }
13364
13365 // Remove this request from Map
13366 LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
13367 req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
13368 req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);
13369
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013370 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -070013371
13372 delete [] pStream_Buf;
13373 } else {
13374
13375 // Go through the pending requests info and send error request to framework
13376 pendingRequestIterator i = mPendingRequestsList.begin(); //make sure i is at the beginning
13377
13378 LOGH("Sending ERROR REQUEST for frame %d", req->frame_number);
13379
13380 // Send error notify to frameworks
13381 camera3_notify_msg_t notify_msg;
13382 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
13383 notify_msg.type = CAMERA3_MSG_ERROR;
13384 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
13385 notify_msg.message.error.error_stream = NULL;
13386 notify_msg.message.error.frame_number = req->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013387 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070013388
13389 pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
13390 if (NULL == pStream_Buf) {
13391 LOGE("No memory for pending buffers array");
13392 return NO_MEMORY;
13393 }
13394 memset(pStream_Buf, 0, sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());
13395
13396 result.result = NULL;
13397 result.frame_number = req->frame_number;
13398 result.input_buffer = i->input_buffer;
13399 result.num_output_buffers = req->mPendingBufferList.size();
13400 result.output_buffers = pStream_Buf;
13401
13402 size_t index = 0;
13403 for (auto info = req->mPendingBufferList.begin();
13404 info != req->mPendingBufferList.end(); ) {
13405 pStream_Buf[index].acquire_fence = -1;
13406 pStream_Buf[index].release_fence = -1;
13407 pStream_Buf[index].buffer = info->buffer;
13408 pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
13409 pStream_Buf[index].stream = info->stream;
13410 index++;
13411 // Remove buffer from list
13412 info = req->mPendingBufferList.erase(info);
13413 }
13414
13415 // Remove this request from Map
13416 LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
13417 req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
13418 req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);
13419
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013420 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -070013421 delete [] pStream_Buf;
13422 i = erasePendingRequest(i);
13423 }
13424 }
13425
13426 /* Reset pending frame Drop list and requests list */
13427 mPendingFrameDropList.clear();
13428
13429 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
13430 req.mPendingBufferList.clear();
13431 }
13432 mPendingBuffersMap.mPendingBuffersInRequest.clear();
Thierry Strudel3d639192016-09-09 11:52:26 -070013433 LOGH("Cleared all the pending buffers ");
13434
13435 return rc;
13436}
13437
13438bool QCamera3HardwareInterface::isOnEncoder(
13439 const cam_dimension_t max_viewfinder_size,
13440 uint32_t width, uint32_t height)
13441{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013442 return ((width > (uint32_t)max_viewfinder_size.width) ||
13443 (height > (uint32_t)max_viewfinder_size.height) ||
13444 (width > (uint32_t)VIDEO_4K_WIDTH) ||
13445 (height > (uint32_t)VIDEO_4K_HEIGHT));
Thierry Strudel3d639192016-09-09 11:52:26 -070013446}
13447
13448/*===========================================================================
13449 * FUNCTION : setBundleInfo
13450 *
13451 * DESCRIPTION: Set bundle info for all streams that are bundle.
13452 *
13453 * PARAMETERS : None
13454 *
13455 * RETURN : NO_ERROR on success
13456 * Error codes on failure
13457 *==========================================================================*/
int32_t QCamera3HardwareInterface::setBundleInfo()
{
    int32_t rc = NO_ERROR;

    if (mChannelHandle) {
        cam_bundle_config_t bundleInfo;
        memset(&bundleInfo, 0, sizeof(bundleInfo));
        rc = mCameraHandle->ops->get_bundle_info(
                mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
        if (rc != NO_ERROR) {
            LOGE("get_bundle_info failed");
            return rc;
        }
        if (mAnalysisChannel) {
            mAnalysisChannel->setBundleInfo(bundleInfo);
        }
        if (mSupportChannel) {
            mSupportChannel->setBundleInfo(bundleInfo);
        }
        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
                it != mStreamInfo.end(); it++) {
            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
            channel->setBundleInfo(bundleInfo);
        }
        if (mRawDumpChannel) {
            mRawDumpChannel->setBundleInfo(bundleInfo);
        }
        if (mHdrPlusRawSrcChannel) {
            mHdrPlusRawSrcChannel->setBundleInfo(bundleInfo);
        }
    }

    return rc;
}

/*===========================================================================
 * FUNCTION   : setInstantAEC
 *
 * DESCRIPTION: Set Instant AEC related params.
 *
 * PARAMETERS :
 *      @meta: CameraMetadata reference
 *
 * RETURN     : NO_ERROR on success
 *              Error codes on failure
 *==========================================================================*/
int32_t QCamera3HardwareInterface::setInstantAEC(const CameraMetadata &meta)
{
    int32_t rc = NO_ERROR;
    uint8_t val = 0;
    char prop[PROPERTY_VALUE_MAX];

    // First try to configure instant AEC from framework metadata
    if (meta.exists(QCAMERA3_INSTANT_AEC_MODE)) {
        val = (uint8_t)meta.find(QCAMERA3_INSTANT_AEC_MODE).data.i32[0];
    }

    // If framework did not set this value, try to read from set prop.
    if (val == 0) {
        memset(prop, 0, sizeof(prop));
        property_get("persist.camera.instant.aec", prop, "0");
        val = (uint8_t)atoi(prop);
    }

    if ((val >= (uint8_t)CAM_AEC_NORMAL_CONVERGENCE) &&
            (val < (uint8_t)CAM_AEC_CONVERGENCE_MAX)) {
        ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_INSTANT_AEC, val);
        mInstantAEC = val;
        mInstantAECSettledFrameNumber = 0;
        mInstantAecFrameIdxCount = 0;
        LOGH("instantAEC value set %d",val);
        if (mInstantAEC) {
            memset(prop, 0, sizeof(prop));
            property_get("persist.camera.ae.instant.bound", prop, "10");
            int32_t aec_frame_skip_cnt = atoi(prop);
            if (aec_frame_skip_cnt >= 0) {
                mAecSkipDisplayFrameBound = (uint8_t)aec_frame_skip_cnt;
            } else {
                LOGE("Invalid prop for aec frame bound %d", aec_frame_skip_cnt);
                rc = BAD_VALUE;
            }
        }
    } else {
        LOGE("Bad instant aec value set %d", val);
        rc = BAD_VALUE;
    }
    return rc;
}

/*===========================================================================
 * FUNCTION   : get_num_overall_buffers
 *
 * DESCRIPTION: Estimate number of pending buffers across all requests.
 *
 * PARAMETERS : None
 *
 * RETURN     : Number of overall pending buffers
 *
 *==========================================================================*/
uint32_t PendingBuffersMap::get_num_overall_buffers()
{
    uint32_t sum_buffers = 0;
    for (auto &req : mPendingBuffersInRequest) {
        sum_buffers += req.mPendingBufferList.size();
    }
    return sum_buffers;
}

/*===========================================================================
 * FUNCTION   : removeBuf
 *
 * DESCRIPTION: Remove a matching buffer from tracker.
 *
 * PARAMETERS : @buffer: image buffer for the callback
 *
 * RETURN     : None
 *
 *==========================================================================*/
void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
{
    bool buffer_found = false;
    for (auto req = mPendingBuffersInRequest.begin();
            req != mPendingBuffersInRequest.end(); req++) {
        for (auto k = req->mPendingBufferList.begin();
                k != req->mPendingBufferList.end(); k++) {
            if (k->buffer == buffer) {
                LOGD("Frame %d: Found Frame buffer %p, take it out from mPendingBufferList",
                        req->frame_number, buffer);
                k = req->mPendingBufferList.erase(k);
                if (req->mPendingBufferList.empty()) {
                    // Remove this request from Map
                    req = mPendingBuffersInRequest.erase(req);
                }
                buffer_found = true;
                break;
            }
        }
        if (buffer_found) {
            break;
        }
    }
    LOGD("mPendingBuffersMap.num_overall_buffers = %d",
            get_num_overall_buffers());
}

/*===========================================================================
 * FUNCTION   : getBufErrStatus
 *
 * DESCRIPTION: get buffer error status
 *
 * PARAMETERS : @buffer: buffer handle
 *
 * RETURN     : Error status
 *
 *==========================================================================*/
int32_t PendingBuffersMap::getBufErrStatus(buffer_handle_t *buffer)
{
    for (auto& req : mPendingBuffersInRequest) {
        for (auto& k : req.mPendingBufferList) {
            if (k.buffer == buffer)
                return k.bufStatus;
        }
    }
    return CAMERA3_BUFFER_STATUS_OK;
}

/*===========================================================================
 * FUNCTION   : setPAAFSupport
 *
 * DESCRIPTION: Set the preview-assisted auto focus support bit in
 *              feature mask according to stream type and filter
 *              arrangement
 *
 * PARAMETERS : @feature_mask: current feature mask, which may be modified
 *              @stream_type: stream type
 *              @filter_arrangement: filter arrangement
 *
 * RETURN     : None
 *==========================================================================*/
void QCamera3HardwareInterface::setPAAFSupport(
        cam_feature_mask_t& feature_mask,
        cam_stream_type_t stream_type,
        cam_color_filter_arrangement_t filter_arrangement)
{
    LOGD("feature_mask=0x%llx; stream_type=%d, filter_arrangement=%d",
            feature_mask, stream_type, filter_arrangement);

    switch (filter_arrangement) {
    case CAM_FILTER_ARRANGEMENT_RGGB:
    case CAM_FILTER_ARRANGEMENT_GRBG:
    case CAM_FILTER_ARRANGEMENT_GBRG:
    case CAM_FILTER_ARRANGEMENT_BGGR:
        if ((stream_type == CAM_STREAM_TYPE_PREVIEW) ||
                (stream_type == CAM_STREAM_TYPE_ANALYSIS) ||
                (stream_type == CAM_STREAM_TYPE_VIDEO)) {
            if (!(feature_mask & CAM_QTI_FEATURE_PPEISCORE))
                feature_mask |= CAM_QCOM_FEATURE_PAAF;
        }
        break;
    case CAM_FILTER_ARRANGEMENT_Y:
        if (stream_type == CAM_STREAM_TYPE_ANALYSIS) {
            feature_mask |= CAM_QCOM_FEATURE_PAAF;
        }
        break;
    default:
        break;
    }
}

/*===========================================================================
* FUNCTION   : getSensorMountAngle
*
* DESCRIPTION: Retrieve sensor mount angle
*
* PARAMETERS : None
*
* RETURN     : sensor mount angle in uint32_t
*==========================================================================*/
uint32_t QCamera3HardwareInterface::getSensorMountAngle()
{
    return gCamCapability[mCameraId]->sensor_mount_angle;
}

/*===========================================================================
* FUNCTION   : getRelatedCalibrationData
*
* DESCRIPTION: Retrieve related system calibration data
*
* PARAMETERS : None
*
* RETURN     : Pointer of related system calibration data
*==========================================================================*/
const cam_related_system_calibration_data_t *QCamera3HardwareInterface::getRelatedCalibrationData()
{
    return (const cam_related_system_calibration_data_t *)
            &(gCamCapability[mCameraId]->related_cam_calibration);
}

/*===========================================================================
 * FUNCTION   : is60HzZone
 *
 * DESCRIPTION: Whether the phone is in a zone with 60Hz mains frequency
 *
 * PARAMETERS : None
 *
 * RETURN     : True if in 60Hz zone, False otherwise
 *==========================================================================*/
bool QCamera3HardwareInterface::is60HzZone()
{
    time_t t = time(NULL);
    struct tm lt;

    struct tm* r = localtime_r(&t, &lt);

    if (r == NULL || lt.tm_gmtoff <= -2*60*60 || lt.tm_gmtoff >= 8*60*60)
        return true;
    else
        return false;
}

/*===========================================================================
 * FUNCTION   : adjustBlackLevelForCFA
 *
 * DESCRIPTION: Adjust the black level pattern in the order of RGGB to the order
 *              of bayer CFA (Color Filter Array).
 *
 * PARAMETERS : @input: black level pattern in the order of RGGB
 *              @output: black level pattern in the order of CFA
 *              @color_arrangement: CFA color arrangement
 *
 * RETURN     : None
 *==========================================================================*/
template<typename T>
void QCamera3HardwareInterface::adjustBlackLevelForCFA(
        T input[BLACK_LEVEL_PATTERN_CNT],
        T output[BLACK_LEVEL_PATTERN_CNT],
        cam_color_filter_arrangement_t color_arrangement)
{
    switch (color_arrangement) {
    case CAM_FILTER_ARRANGEMENT_GRBG:
        output[0] = input[1];
        output[1] = input[0];
        output[2] = input[3];
        output[3] = input[2];
        break;
    case CAM_FILTER_ARRANGEMENT_GBRG:
        output[0] = input[2];
        output[1] = input[3];
        output[2] = input[0];
        output[3] = input[1];
        break;
    case CAM_FILTER_ARRANGEMENT_BGGR:
        output[0] = input[3];
        output[1] = input[2];
        output[2] = input[1];
        output[3] = input[0];
        break;
    case CAM_FILTER_ARRANGEMENT_RGGB:
        output[0] = input[0];
        output[1] = input[1];
        output[2] = input[2];
        output[3] = input[3];
        break;
    default:
        LOGE("Invalid color arrangement to derive dynamic blacklevel");
        break;
    }
}

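/*===========================================================================
 * FUNCTION   : updateHdrPlusResultMetadata
 *
 * DESCRIPTION: Copy JPEG-related settings (GPS info, orientation, quality,
 *              thumbnail parameters) and the capture intent from the HDR+
 *              request settings into the result metadata returned to the
 *              framework.
 *
 * PARAMETERS : @resultMetadata: result metadata to be updated
 *              @settings: HAL metadata settings of the HDR+ request
 *
 * RETURN     : None
 *==========================================================================*/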
void QCamera3HardwareInterface::updateHdrPlusResultMetadata(
        CameraMetadata &resultMetadata,
        std::shared_ptr<metadata_buffer_t> settings)
{
    if (settings == nullptr) {
        ALOGE("%s: settings is nullptr.", __FUNCTION__);
        return;
    }

    IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, settings) {
        resultMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
    }

    IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, settings) {
        String8 str((const char *)gps_methods);
        resultMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
    }

    IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, settings) {
        resultMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
    }

    IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, settings) {
        resultMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
    }

    IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, settings) {
        uint8_t fwk_jpeg_quality = static_cast<uint8_t>(*jpeg_quality);
        resultMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
    }

    IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, settings) {
        uint8_t fwk_thumb_quality = static_cast<uint8_t>(*thumb_quality);
        resultMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
    }

    IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, settings) {
        int32_t fwk_thumb_size[2];
        fwk_thumb_size[0] = thumb_size->width;
        fwk_thumb_size[1] = thumb_size->height;
        resultMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
    }

    IF_META_AVAILABLE(uint32_t, intent, CAM_INTF_META_CAPTURE_INTENT, settings) {
        uint8_t fwk_intent = intent[0];
        resultMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &fwk_intent, 1);
    }
}

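/*===========================================================================
 * FUNCTION   : trySubmittingHdrPlusRequest
 *
 * DESCRIPTION: Check whether a capture request qualifies as an HDR+ request
 *              (high-quality noise reduction and edge modes, a single JPEG
 *              output buffer) and, if so, get a YUV buffer from the pic
 *              channel and submit the request to the HDR+ service.
 *
 * PARAMETERS : @hdrPlusRequest: pending HDR+ request to be filled in
 *              @request: framework capture request
 *              @metadata: capture request settings
 *
 * RETURN     : true if the request was submitted as an HDR+ request
 *              false otherwise
 *==========================================================================*/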
bool QCamera3HardwareInterface::trySubmittingHdrPlusRequest(HdrPlusPendingRequest *hdrPlusRequest,
        const camera3_capture_request_t &request, const CameraMetadata &metadata)
{
    if (hdrPlusRequest == nullptr) return false;

    // Check noise reduction mode is high quality.
    if (!metadata.exists(ANDROID_NOISE_REDUCTION_MODE) ||
            metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0] !=
            ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY) {
        ALOGD("%s: Not an HDR+ request: ANDROID_NOISE_REDUCTION_MODE is not HQ: %d", __FUNCTION__,
                metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0]);
        return false;
    }

    // Check edge mode is high quality.
    if (!metadata.exists(ANDROID_EDGE_MODE) ||
            metadata.find(ANDROID_EDGE_MODE).data.u8[0] != ANDROID_EDGE_MODE_HIGH_QUALITY) {
        ALOGD("%s: Not an HDR+ request: ANDROID_EDGE_MODE is not HQ.", __FUNCTION__);
        return false;
    }

    if (request.num_output_buffers != 1 ||
            request.output_buffers[0].stream->format != HAL_PIXEL_FORMAT_BLOB) {
        ALOGD("%s: Not an HDR+ request: Only Jpeg output is supported.", __FUNCTION__);
        for (uint32_t i = 0; i < request.num_output_buffers; i++) {
            ALOGD("%s: output_buffers[%u]: %dx%d format %d", __FUNCTION__, i,
                    request.output_buffers[i].stream->width,
                    request.output_buffers[i].stream->height,
                    request.output_buffers[i].stream->format);
        }
        return false;
    }

    // Get a YUV buffer from pic channel.
    QCamera3PicChannel *picChannel = (QCamera3PicChannel*)request.output_buffers[0].stream->priv;
    auto yuvBuffer = std::make_shared<mm_camera_buf_def_t>();
    status_t res = picChannel->getYuvBufferForRequest(yuvBuffer.get(), request.frame_number);
    if (res != OK) {
        ALOGE("%s: Getting an available YUV buffer from pic channel failed: %s (%d)",
                __FUNCTION__, strerror(-res), res);
        return false;
    }

    pbcamera::StreamBuffer buffer;
    buffer.streamId = kPbYuvOutputStreamId;
    buffer.data = yuvBuffer->buffer;
    buffer.dataSize = yuvBuffer->frame_len;

    pbcamera::CaptureRequest pbRequest;
    pbRequest.id = request.frame_number;
    pbRequest.outputBuffers.push_back(buffer);

    // Submit an HDR+ capture request to HDR+ service.
    res = mHdrPlusClient->submitCaptureRequest(&pbRequest);
    if (res != OK) {
        ALOGE("%s: %d: Submitting a capture request failed: %s (%d)", __FUNCTION__, __LINE__,
                strerror(-res), res);
        return false;
    }

    hdrPlusRequest->yuvBuffer = yuvBuffer;
    hdrPlusRequest->frameworkOutputBuffers.push_back(request.output_buffers[0]);

    return true;
}

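/*===========================================================================
 * FUNCTION   : enableHdrPlusModeLocked
 *
 * DESCRIPTION: Connect to the HDR+ service, push the static metadata and
 *              configure the HDR+ streams. The caller is expected to hold
 *              the relevant HAL lock, as the Locked suffix suggests.
 *
 * PARAMETERS : None
 *
 * RETURN     : OK on success
 *              Error codes on failure
 *==========================================================================*/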
status_t QCamera3HardwareInterface::enableHdrPlusModeLocked()
{
    if (mHdrPlusClient == nullptr) {
        ALOGD("%s: HDR+ client is not created.", __FUNCTION__);
        return -ENODEV;
    }

    // Connect to HDR+ service
    status_t res = mHdrPlusClient->connect(this);
    if (res != OK) {
        LOGE("%s: Failed to connect to HDR+ client: %s (%d).", __FUNCTION__,
                strerror(-res), res);
        return res;
    }

    // Set static metadata.
    res = mHdrPlusClient->setStaticMetadata(*gStaticMetadata[mCameraId]);
    if (res != OK) {
        LOGE("%s: Failed to set static metadata in HDR+ client: %s (%d).", __FUNCTION__,
                strerror(-res), res);
        mHdrPlusClient->disconnect();
        return res;
    }

    // Configure stream for HDR+.
    res = configureHdrPlusStreamsLocked();
    if (res != OK) {
        LOGE("%s: Failed to configure HDR+ streams: %s (%d)", __FUNCTION__, strerror(-res), res);
        mHdrPlusClient->disconnect();
        return res;
    }

    mHdrPlusModeEnabled = true;
    ALOGD("%s: HDR+ mode enabled", __FUNCTION__);

    return OK;
}

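/*===========================================================================
 * FUNCTION   : disableHdrPlusModeLocked
 *
 * DESCRIPTION: Disconnect from the HDR+ service if connected and mark HDR+
 *              mode as disabled. The caller is expected to hold the relevant
 *              HAL lock, as the Locked suffix suggests.
 *
 * PARAMETERS : None
 *
 * RETURN     : None
 *==========================================================================*/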
void QCamera3HardwareInterface::disableHdrPlusModeLocked()
{
    // Disconnect from HDR+ service.
    if (mHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
        mHdrPlusClient->disconnect();
    }

    mHdrPlusModeEnabled = false;
    ALOGD("%s: HDR+ mode disabled", __FUNCTION__);
}

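/*===========================================================================
 * FUNCTION   : configureHdrPlusStreamsLocked
 *
 * DESCRIPTION: Build the HDR+ input configuration (either the HDR+ RAW
 *              source channel or direct sensor input) and the YUV output
 *              stream configuration, then configure the streams with the
 *              HDR+ client.
 *
 * PARAMETERS : None
 *
 * RETURN     : OK on success
 *              Error codes on failure
 *==========================================================================*/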
status_t QCamera3HardwareInterface::configureHdrPlusStreamsLocked()
{
    pbcamera::InputConfiguration inputConfig;
    std::vector<pbcamera::StreamConfiguration> outputStreamConfigs;
    status_t res = OK;

    // Configure HDR+ client streams.
    // Get input config.
    if (mHdrPlusRawSrcChannel) {
        // HDR+ input buffers will be provided by HAL.
        res = fillPbStreamConfig(&inputConfig.streamConfig, kPbRaw10InputStreamId,
                HAL_PIXEL_FORMAT_RAW10, mHdrPlusRawSrcChannel, /*stream index*/0);
        if (res != OK) {
            LOGE("%s: Failed to fill stream config for HDR+ raw src stream: %s (%d)",
                    __FUNCTION__, strerror(-res), res);
            return res;
        }

        inputConfig.isSensorInput = false;
    } else {
        // Sensor MIPI will send data to Easel.
        inputConfig.isSensorInput = true;
        inputConfig.sensorMode.pixelArrayWidth = mSensorModeInfo.pixel_array_size.width;
        inputConfig.sensorMode.pixelArrayHeight = mSensorModeInfo.pixel_array_size.height;
        inputConfig.sensorMode.activeArrayWidth = mSensorModeInfo.active_array_size.width;
        inputConfig.sensorMode.activeArrayHeight = mSensorModeInfo.active_array_size.height;
        inputConfig.sensorMode.outputPixelClkHz = mSensorModeInfo.op_pixel_clk;
        if (mSensorModeInfo.num_raw_bits != 10) {
            ALOGE("%s: Only RAW10 is supported but this sensor mode has %d raw bits.", __FUNCTION__,
                    mSensorModeInfo.num_raw_bits);
            return BAD_VALUE;
        }

        inputConfig.sensorMode.format = HAL_PIXEL_FORMAT_RAW10;
    }

    // Get output configurations.
    // Easel may need to output RAW16 buffers if mRawChannel was created.
    // TODO: handle RAW16 outputs.

    // Easel may need to output YUV output buffers if mPictureChannel was created.
    pbcamera::StreamConfiguration yuvOutputConfig;
    if (mPictureChannel != nullptr) {
        res = fillPbStreamConfig(&yuvOutputConfig, kPbYuvOutputStreamId,
                HAL_PIXEL_FORMAT_YCrCb_420_SP, mPictureChannel, /*stream index*/0);
        if (res != OK) {
            LOGE("%s: Failed to fill stream config for YUV stream: %s (%d)",
                    __FUNCTION__, strerror(-res), res);

            return res;
        }

        outputStreamConfigs.push_back(yuvOutputConfig);
    }

    // TODO: consider other channels for YUV output buffers.

    res = mHdrPlusClient->configureStreams(inputConfig, outputStreamConfigs);
    if (res != OK) {
        LOGE("%s: Failed to configure streams with HDR+ client: %s (%d)", __FUNCTION__,
                strerror(-res), res);
        return res;
    }

    return OK;
}

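/*===========================================================================
 * FUNCTION   : onCaptureResult
 *
 * DESCRIPTION: Callback from the HDR+ client for a successful capture. The
 *              returned YUV buffer is handed back to the pic channel for
 *              JPEG encoding and the updated result metadata is sent to the
 *              framework.
 *
 * PARAMETERS : @result: HDR+ capture result containing the output buffers
 *              @resultMetadata: result metadata of the HDR+ capture
 *
 * RETURN     : None
 *==========================================================================*/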
void QCamera3HardwareInterface::onCaptureResult(pbcamera::CaptureResult *result,
        const camera_metadata_t &resultMetadata) {
    if (result != nullptr) {
        if (result->outputBuffers.size() != 1) {
            ALOGE("%s: Number of output buffers (%u) is not supported.", __FUNCTION__,
                    result->outputBuffers.size());
            return;
        }

        if (result->outputBuffers[0].streamId != kPbYuvOutputStreamId) {
            ALOGE("%s: Only YUV output stream is supported. (stream id %d).", __FUNCTION__,
                    result->outputBuffers[0].streamId);
            return;
        }

        // Find the pending HDR+ request.
        HdrPlusPendingRequest pendingRequest;
        {
            Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
            auto req = mHdrPlusPendingRequests.find(result->requestId);
            pendingRequest = req->second;
        }

        // Update the result metadata with the settings of the HDR+ still capture request because
        // the result metadata belongs to a ZSL buffer.
        CameraMetadata metadata;
        metadata = &resultMetadata;
        updateHdrPlusResultMetadata(metadata, pendingRequest.settings);
        camera_metadata_t* updatedResultMetadata = metadata.release();

        QCamera3PicChannel *picChannel =
                (QCamera3PicChannel*)pendingRequest.frameworkOutputBuffers[0].stream->priv;

        // Check if dumping HDR+ YUV output is enabled.
        char prop[PROPERTY_VALUE_MAX];
        property_get("persist.camera.hdrplus.dump_yuv", prop, "0");
        bool dumpYuvOutput = atoi(prop);

        if (dumpYuvOutput) {
            // Dump yuv buffer to a ppm file.
            pbcamera::StreamConfiguration outputConfig;
            status_t rc = fillPbStreamConfig(&outputConfig, kPbYuvOutputStreamId,
                    HAL_PIXEL_FORMAT_YCrCb_420_SP, picChannel, /*stream index*/0);
            if (rc == OK) {
                char buf[FILENAME_MAX] = {};
                snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"s_%d_%d_%dx%d.ppm",
                        result->requestId, result->outputBuffers[0].streamId,
                        outputConfig.image.width, outputConfig.image.height);

                hdrplus_client_utils::writePpm(buf, outputConfig, result->outputBuffers[0]);
            } else {
                LOGW("%s: Couldn't dump YUV buffer because getting stream config failed: %s (%d).",
                        __FUNCTION__, strerror(-rc), rc);
            }
        }

        uint32_t halStreamId = picChannel->getStreamID(picChannel->getStreamTypeMask());
        auto halMetadata = std::make_shared<metadata_buffer_t>();
        clear_metadata_buffer(halMetadata.get());

        // Convert updated result metadata to HAL metadata and return the yuv buffer for Jpeg
        // encoding.
        status_t res = translateFwkMetadataToHalMetadata(updatedResultMetadata, halMetadata.get(),
                halStreamId, /*minFrameDuration*/0);
        if (res == OK) {
            // Return the buffer to pic channel for encoding.
            picChannel->returnYuvBufferAndEncode(pendingRequest.yuvBuffer.get(),
                    pendingRequest.frameworkOutputBuffers[0].buffer, result->requestId,
                    halMetadata);
        } else {
            // Return the buffer without encoding.
            // TODO: This should not happen but we may want to report an error buffer to camera
            //       service.
            picChannel->returnYuvBuffer(pendingRequest.yuvBuffer.get());
            ALOGE("%s: Translate framework metadata to HAL metadata failed: %s (%d).", __FUNCTION__,
                    strerror(-res), res);
        }

        // Send HDR+ metadata to framework.
        {
            pthread_mutex_lock(&mMutex);

            // updatedResultMetadata will be freed in handlePendingResultsWithLock.
            handlePendingResultsWithLock(result->requestId, updatedResultMetadata);
            pthread_mutex_unlock(&mMutex);
        }

        // Remove the HDR+ pending request.
        {
            Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
            auto req = mHdrPlusPendingRequests.find(result->requestId);
            mHdrPlusPendingRequests.erase(req);
        }
    }
}

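/*===========================================================================
 * FUNCTION   : onFailedCaptureResult
 *
 * DESCRIPTION: Callback from the HDR+ client for a failed capture. Returns
 *              the YUV buffer to the pic channel and drops the pending HDR+
 *              request. Reporting the failure to the framework is still a
 *              TODO in this implementation.
 *
 * PARAMETERS : @failedResult: HDR+ capture result that failed
 *
 * RETURN     : None
 *==========================================================================*/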
void QCamera3HardwareInterface::onFailedCaptureResult(pbcamera::CaptureResult *failedResult) {
    // TODO: Handle HDR+ capture failures and send the failure to framework.
    Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
    auto pendingRequest = mHdrPlusPendingRequests.find(failedResult->requestId);

    // Return the buffer to pic channel.
    QCamera3PicChannel *picChannel =
            (QCamera3PicChannel*)pendingRequest->second.frameworkOutputBuffers[0].stream->priv;
    picChannel->returnYuvBuffer(pendingRequest->second.yuvBuffer.get());

    mHdrPlusPendingRequests.erase(pendingRequest);
}

}; //end namespace qcamera