blob: c874ae597850462d3cbb0e7edf67dd98fcee08c2 [file] [log] [blame]
Thierry Strudel3d639192016-09-09 11:52:26 -07001/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
2*
3* Redistribution and use in source and binary forms, with or without
4* modification, are permitted provided that the following conditions are
5* met:
6* * Redistributions of source code must retain the above copyright
7* notice, this list of conditions and the following disclaimer.
8* * Redistributions in binary form must reproduce the above
9* copyright notice, this list of conditions and the following
10* disclaimer in the documentation and/or other materials provided
11* with the distribution.
12* * Neither the name of The Linux Foundation nor the names of its
13* contributors may be used to endorse or promote products derived
14* from this software without specific prior written permission.
15*
16* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27*
28*/
29
30#define LOG_TAG "QCamera3HWI"
31//#define LOG_NDEBUG 0
32
33#define __STDC_LIMIT_MACROS
34
35// To remove
36#include <cutils/properties.h>
37
38// System dependencies
39#include <dlfcn.h>
40#include <fcntl.h>
41#include <stdio.h>
42#include <stdlib.h>
43#include "utils/Timers.h"
44#include "sys/ioctl.h"
Shuzhen Wangf6890e02016-08-12 14:28:54 -070045#include <time.h>
Thierry Strudel3d639192016-09-09 11:52:26 -070046#include <sync/sync.h>
47#include "gralloc_priv.h"
Thierry Strudele80ad7c2016-12-06 10:16:27 -080048#include <map>
Thierry Strudel3d639192016-09-09 11:52:26 -070049
50// Display dependencies
51#include "qdMetaData.h"
52
53// Camera dependencies
54#include "android/QCamera3External.h"
55#include "util/QCameraFlash.h"
56#include "QCamera3HWI.h"
57#include "QCamera3VendorTags.h"
58#include "QCameraTrace.h"
59
Chien-Yu Chene687bd02016-12-07 18:30:26 -080060#include "HdrPlusClientUtils.h"
61
Thierry Strudel3d639192016-09-09 11:52:26 -070062extern "C" {
63#include "mm_camera_dbg.h"
64}
Shuzhen Wangfb961e52016-11-28 11:48:02 -080065#include "cam_cond.h"
Thierry Strudel3d639192016-09-09 11:52:26 -070066
67using namespace android;
68
69namespace qcamera {
70
71#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
72
73#define EMPTY_PIPELINE_DELAY 2
74#define PARTIAL_RESULT_COUNT 2
75#define FRAME_SKIP_DELAY 0
76
77#define MAX_VALUE_8BIT ((1<<8)-1)
78#define MAX_VALUE_10BIT ((1<<10)-1)
79#define MAX_VALUE_12BIT ((1<<12)-1)
80
81#define VIDEO_4K_WIDTH 3840
82#define VIDEO_4K_HEIGHT 2160
83
84#define MAX_EIS_WIDTH 1920
85#define MAX_EIS_HEIGHT 1080
86
87#define MAX_RAW_STREAMS 1
88#define MAX_STALLING_STREAMS 1
89#define MAX_PROCESSED_STREAMS 3
90/* Batch mode is enabled only if FPS set is equal to or greater than this */
91#define MIN_FPS_FOR_BATCH_MODE (120)
92#define PREVIEW_FPS_FOR_HFR (30)
93#define DEFAULT_VIDEO_FPS (30.0)
Thierry Strudele80ad7c2016-12-06 10:16:27 -080094#define TEMPLATE_MAX_PREVIEW_FPS (30.0)
Thierry Strudel3d639192016-09-09 11:52:26 -070095#define MAX_HFR_BATCH_SIZE (8)
96#define REGIONS_TUPLE_COUNT 5
97#define HDR_PLUS_PERF_TIME_OUT (7000) // milliseconds
Thierry Strudel3d639192016-09-09 11:52:26 -070098// Set a threshold for detection of missing buffers //seconds
99#define MISSING_REQUEST_BUF_TIMEOUT 3
Chien-Yu Chene687bd02016-12-07 18:30:26 -0800100#define MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT 30
Thierry Strudel3d639192016-09-09 11:52:26 -0700101#define FLUSH_TIMEOUT 3
102#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))
103
104#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3 ( CAM_QCOM_FEATURE_DENOISE2D |\
105 CAM_QCOM_FEATURE_CROP |\
106 CAM_QCOM_FEATURE_ROTATION |\
107 CAM_QCOM_FEATURE_SHARPNESS |\
108 CAM_QCOM_FEATURE_SCALE |\
109 CAM_QCOM_FEATURE_CAC |\
110 CAM_QCOM_FEATURE_CDS )
Thierry Strudel9e74aae2016-09-22 17:10:18 -0700111/* Per configuration size for static metadata length*/
112#define PER_CONFIGURATION_SIZE_3 (3)
Thierry Strudel3d639192016-09-09 11:52:26 -0700113
114#define TIMEOUT_NEVER -1
115
Thierry Strudel04e026f2016-10-10 11:27:36 -0700116/* Face landmarks indices */
117#define LEFT_EYE_X 0
118#define LEFT_EYE_Y 1
119#define RIGHT_EYE_X 2
120#define RIGHT_EYE_Y 3
121#define MOUTH_X 4
122#define MOUTH_Y 5
123#define TOTAL_LANDMARK_INDICES 6
124
// Per-sensor capability table and cached static metadata, indexed by camera id.
// Populated once at camera-module initialization and shared by all sessions.
cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
extern pthread_mutex_t gCamLock;         // guards cross-session globals (e.g. sessionId)
volatile uint32_t gCamHal3LogLevel = 1;  // runtime-adjustable HAL log verbosity
extern uint8_t gNumCameraSessions;

// String property value -> CDS (chroma downsampling) mode.
const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
    {"On",  CAM_CDS_MODE_ON},
    {"Off", CAM_CDS_MODE_OFF},
    {"Auto",CAM_CDS_MODE_AUTO}
};
// Vendor-tag video HDR mode -> backend video HDR mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_video_hdr_mode_t,
        cam_video_hdr_mode_t> QCamera3HardwareInterface::VIDEO_HDR_MODES_MAP[] = {
    { QCAMERA3_VIDEO_HDR_MODE_OFF, CAM_VIDEO_HDR_MODE_OFF },
    { QCAMERA3_VIDEO_HDR_MODE_ON,  CAM_VIDEO_HDR_MODE_ON }
};

// Vendor-tag binning correction mode -> backend binning correction mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_binning_correction_mode_t,
        cam_binning_correction_mode_t> QCamera3HardwareInterface::BINNING_CORRECTION_MODES_MAP[] = {
    { QCAMERA3_BINNING_CORRECTION_MODE_OFF, CAM_BINNING_CORRECTION_MODE_OFF },
    { QCAMERA3_BINNING_CORRECTION_MODE_ON,  CAM_BINNING_CORRECTION_MODE_ON }
};

// Vendor-tag IR (infrared) mode -> backend IR mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_ir_mode_t,
        cam_ir_mode_type_t> QCamera3HardwareInterface::IR_MODES_MAP [] = {
    {QCAMERA3_IR_MODE_OFF,  CAM_IR_MODE_OFF},
    {QCAMERA3_IR_MODE_ON,   CAM_IR_MODE_ON},
    {QCAMERA3_IR_MODE_AUTO, CAM_IR_MODE_AUTO}
};
Thierry Strudel3d639192016-09-09 11:52:26 -0700157
// ANDROID_CONTROL_EFFECT_MODE_* -> backend effect mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_effect_mode_t,
        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,        CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};

// ANDROID_CONTROL_AWB_MODE_* -> backend white balance mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_awb_mode_t,
        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};

// ANDROID_CONTROL_SCENE_MODE_* -> backend scene mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_scene_mode_t,
        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE},
    { ANDROID_CONTROL_SCENE_MODE_HDR,            CAM_SCENE_MODE_HDR}
};

// ANDROID_CONTROL_AF_MODE_* -> backend focus mode.
// Note: AF_MODE_OFF appears twice on purpose; both FIXED and OFF backend
// focus modes report as OFF to the framework.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_af_mode_t,
        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};
219
// ANDROID_COLOR_CORRECTION_ABERRATION_MODE_* -> backend CAC mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_color_correction_aberration_mode_t,
        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};

// ANDROID_CONTROL_AE_ANTIBANDING_MODE_* -> backend antibanding mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_antibanding_mode_t,
        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

// ANDROID_CONTROL_AE_MODE_* -> flash behavior implied by the AE mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};

// ANDROID_FLASH_MODE_* -> backend flash mode (manual flash control).
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_flash_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};

// ANDROID_STATISTICS_FACE_DETECT_MODE_* -> backend face detection mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_statistics_face_detect_mode_t,
        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL }
};

// Lens focus distance calibration quality mapping.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
            CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
            CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
            CAM_FOCUS_CALIBRATED }
};

// AF lens movement state mapping.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_state_t,
        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
    { ANDROID_LENS_STATE_STATIONARY, CAM_AF_LENS_STATE_STATIONARY},
    { ANDROID_LENS_STATE_MOVING,     CAM_AF_LENS_STATE_MOVING}
};
283
// Supported JPEG thumbnail sizes as flat (width, height) pairs; {0, 0}
// means "no thumbnail" and is required by the camera3 spec.
const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             240, 144,
                                             256, 144,
                                             240, 160,
                                             256, 154,
                                             240, 240,
                                             320, 240};

// ANDROID_SENSOR_TEST_PATTERN_MODE_* -> backend sensor test pattern.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_test_pattern_mode_t,
        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,                     CAM_TEST_PATTERN_OFF },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR,             CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,              CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,                     CAM_TEST_PATTERN_PN9 },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1,                 CAM_TEST_PATTERN_CUSTOM1},
};

/* Since there is no mapping for all the options, some Android enums are not
 * listed. Also, the order in this list is important: when mapping from HAL to
 * Android the lookup traverses from lower to higher index, so for HAL values
 * that map to multiple Android values the first entry found wins.
 */
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_reference_illuminant1_t,
        cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT,            CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT,   CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A,             CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55,                    CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65,                    CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75,                    CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50,                    CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN,    CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT,               CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN,               CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER,           CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER,         CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE,                  CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT,  CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT,      CAM_AWB_COLD_FLO},
};

// Requested frame rate -> backend high-frame-rate mode.
const QCamera3HardwareInterface::QCameraMap<
        int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
    { 60,  CAM_HFR_MODE_60FPS},
    { 90,  CAM_HFR_MODE_90FPS},
    { 120, CAM_HFR_MODE_120FPS},
    { 150, CAM_HFR_MODE_150FPS},
    { 180, CAM_HFR_MODE_180FPS},
    { 210, CAM_HFR_MODE_210FPS},
    { 240, CAM_HFR_MODE_240FPS},
    { 480, CAM_HFR_MODE_480FPS},
};
341
// Vendor-tag instant AEC mode -> backend AEC convergence type.
const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_instant_aec_mode_t,
        cam_aec_convergence_type> QCamera3HardwareInterface::INSTANT_AEC_MODES_MAP[] = {
    { QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE,     CAM_AEC_NORMAL_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_AGGRESSIVE_CONVERGENCE, CAM_AEC_AGGRESSIVE_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_FAST_CONVERGENCE,       CAM_AEC_FAST_CONVERGENCE},
};

// Vendor-tag exposure metering mode -> backend auto-exposure mode.
const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_exposure_meter_mode_t,
        cam_auto_exposure_mode_type> QCamera3HardwareInterface::AEC_MODES_MAP[] = {
    { QCAMERA3_EXP_METER_MODE_FRAME_AVERAGE,       CAM_AEC_MODE_FRAME_AVERAGE },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED,     CAM_AEC_MODE_CENTER_WEIGHTED },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING,       CAM_AEC_MODE_SPOT_METERING },
    { QCAMERA3_EXP_METER_MODE_SMART_METERING,      CAM_AEC_MODE_SMART_METERING },
    { QCAMERA3_EXP_METER_MODE_USER_METERING,       CAM_AEC_MODE_USER_METERING },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING_ADV,   CAM_AEC_MODE_SPOT_METERING_ADV },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED_ADV, CAM_AEC_MODE_CENTER_WEIGHTED_ADV },
};

// Vendor-tag ISO mode -> backend ISO mode.
const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_iso_mode_t,
        cam_iso_mode_type> QCamera3HardwareInterface::ISO_MODES_MAP[] = {
    { QCAMERA3_ISO_MODE_AUTO,   CAM_ISO_MODE_AUTO },
    { QCAMERA3_ISO_MODE_DEBLUR, CAM_ISO_MODE_DEBLUR },
    { QCAMERA3_ISO_MODE_100,    CAM_ISO_MODE_100 },
    { QCAMERA3_ISO_MODE_200,    CAM_ISO_MODE_200 },
    { QCAMERA3_ISO_MODE_400,    CAM_ISO_MODE_400 },
    { QCAMERA3_ISO_MODE_800,    CAM_ISO_MODE_800 },
    { QCAMERA3_ISO_MODE_1600,   CAM_ISO_MODE_1600 },
    { QCAMERA3_ISO_MODE_3200,   CAM_ISO_MODE_3200 },
};
374
// camera3_device_ops vtable handed to the framework; entries left NULL
// (register_stream_buffers, get_metadata_vendor_tag_ops) are deprecated in
// HAL3.2+ and intentionally unimplemented.
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    .initialize                         = QCamera3HardwareInterface::initialize,
    .configure_streams                  = QCamera3HardwareInterface::configure_streams,
    .register_stream_buffers            = NULL,
    .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
    .process_capture_request            = QCamera3HardwareInterface::process_capture_request,
    .get_metadata_vendor_tag_ops        = NULL,
    .dump                               = QCamera3HardwareInterface::dump,
    .flush                              = QCamera3HardwareInterface::flush,
    .reserved                           = {0},
};

// initialise to some default value; 0xDEADBEEF marks "no active session"
// for each camera slot (used by the dual-camera link logic).
uint32_t QCamera3HardwareInterface::sessionId[] = {0xDEADBEEF, 0xDEADBEEF, 0xDEADBEEF};
389
/*===========================================================================
 * FUNCTION   : QCamera3HardwareInterface
 *
 * DESCRIPTION: constructor of QCamera3HardwareInterface. Initializes all
 *              member state to "closed" defaults, fills in the
 *              camera3_device_t the framework talks to, reads the
 *              persist.camera.* debug/tuning properties, and probes the GPU
 *              library for the surface stride alignment.
 *
 * PARAMETERS :
 *   @cameraId  : camera ID
 *   @callbacks : framework module callbacks (stored, not invoked here)
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
        const camera_module_callbacks_t *callbacks)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mSupportChannel(NULL),
      mAnalysisChannel(NULL),
      mRawDumpChannel(NULL),
      mHdrPlusRawSrcChannel(NULL),
      mDummyBatchChannel(NULL),
      mDepthChannel(NULL),
      mPerfLockMgr(),
      mCommon(),
      mChannelHandle(0),
      mFirstConfiguration(true),
      mFlush(false),
      mFlushPerf(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mPrevParameters(NULL),
      m_bIsVideo(false),
      m_bIs4KVideo(false),
      m_bEisSupportedSize(false),
      m_bEisEnable(false),
      m_MobicatMask(0),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      mMetaFrameCount(0U),
      mUpdateDebugLevel(false),
      mCallbacks(callbacks),
      mCaptureIntent(0),
      mCacMode(0),
      mHybridAeEnable(0),
      /* DevCamDebug metadata internal m control*/
      mDevCamDebugMetaEnable(0),
      /* DevCamDebug metadata end */
      mBatchSize(0),
      mToBeQueuedVidBufs(0),
      mHFRVideoFps(DEFAULT_VIDEO_FPS),
      mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
      mStreamConfig(false),
      mFirstFrameNumberInBatch(0),
      mNeedSensorRestart(false),
      mPreviewStarted(false),
      mMinInFlightRequests(MIN_INFLIGHT_REQUESTS),
      mMaxInFlightRequests(MAX_INFLIGHT_REQUESTS),
      mInstantAEC(false),
      mResetInstantAEC(false),
      mInstantAECSettledFrameNumber(0),
      mAecSkipDisplayFrameBound(0),
      mInstantAecFrameIdxCount(0),
      mCurrFeatureState(0),
      mLdafCalibExist(false),
      mLastCustIntentFrmNum(-1),
      mState(CLOSED),
      mIsDeviceLinked(false),
      mIsMainCamera(true),
      mLinkedCameraId(0),
      m_pDualCamCmdHeap(NULL),
      m_pDualCamCmdPtr(NULL),
      mHdrPlusModeEnabled(false),
      mIsApInputUsedForHdrPlus(false),
      mFirstPreviewIntentSeen(false),
      m_bSensorHDREnabled(false)
{
    getLogLevel();
    mCommon.init(gCamCapability[cameraId]);
    // Fill in the camera3_device_t exposed to the framework.
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
#ifndef USE_HAL_3_3
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_4;
#else
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
#endif
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    PTHREAD_COND_INIT(&mBuffersCond);

    PTHREAD_COND_INIT(&mRequestCond);
    mPendingLiveRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

    // Getting system props of different kinds
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.raw.dump", prop, "0");
    mEnableRawDump = atoi(prop);
    property_get("persist.camera.hal3.force.hdr", prop, "0");
    mForceHdrSnapshot = atoi(prop);

    if (mEnableRawDump)
        LOGD("Raw dump from Camera HAL enabled");

    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
    memset(mLdafCalib, 0, sizeof(mLdafCalib));

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.preview", prop, "0");
    m_bTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.swtnr.preview", prop, "1");
    m_bSwTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.video", prop, "0");
    m_bTnrVideo = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.avtimer.debug", prop, "0");
    m_debug_avtimer = (uint8_t)atoi(prop);
    LOGI("AV timer enabled: %d", m_debug_avtimer);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.cacmode.disable", prop, "0");
    m_cacModeDisabled = (uint8_t)atoi(prop);

    //Load and read GPU library to query the surface stride alignment the
    //display/GPU expects; fall back to 32-pixel padding if unavailable.
    lib_surface_utils = NULL;
    LINK_get_surface_pixel_alignment = NULL;
    mSurfaceStridePadding = CAM_PAD_TO_32;
    lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
    if (lib_surface_utils) {
        *(void **)&LINK_get_surface_pixel_alignment =
                dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
        if (LINK_get_surface_pixel_alignment) {
            mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
        }
        dlclose(lib_surface_utils);
    }

    m60HzZone = is60HzZone();
}
547
548/*===========================================================================
549 * FUNCTION : ~QCamera3HardwareInterface
550 *
551 * DESCRIPTION: destructor of QCamera3HardwareInterface
552 *
553 * PARAMETERS : none
554 *
555 * RETURN : none
556 *==========================================================================*/
557QCamera3HardwareInterface::~QCamera3HardwareInterface()
558{
559 LOGD("E");
560
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800561 int32_t rc = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -0700562
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800563 // Disable power hint and enable the perf lock for close camera
564 mPerfLockMgr.releasePerfLock(PERF_LOCK_POWERHINT_ENCODE);
565 mPerfLockMgr.acquirePerfLock(PERF_LOCK_CLOSE_CAMERA);
566
567 // unlink of dualcam during close camera
568 if (mIsDeviceLinked) {
569 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
570 &m_pDualCamCmdPtr->bundle_info;
571 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
572 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
573 pthread_mutex_lock(&gCamLock);
574
575 if (mIsMainCamera == 1) {
576 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
577 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
578 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
579 // related session id should be session id of linked session
580 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
581 } else {
582 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
583 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
584 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
585 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
586 }
587 pthread_mutex_unlock(&gCamLock);
588
589 rc = mCameraHandle->ops->set_dual_cam_cmd(
590 mCameraHandle->camera_handle);
591 if (rc < 0) {
592 LOGE("Dualcam: Unlink failed, but still proceed to close");
593 }
594 }
Thierry Strudel3d639192016-09-09 11:52:26 -0700595
596 /* We need to stop all streams before deleting any stream */
597 if (mRawDumpChannel) {
598 mRawDumpChannel->stop();
599 }
600
Chien-Yu Chen8e599492016-11-01 13:37:46 -0700601 if (mHdrPlusRawSrcChannel) {
602 mHdrPlusRawSrcChannel->stop();
603 }
604
Thierry Strudel3d639192016-09-09 11:52:26 -0700605 // NOTE: 'camera3_stream_t *' objects are already freed at
606 // this stage by the framework
607 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
608 it != mStreamInfo.end(); it++) {
609 QCamera3ProcessingChannel *channel = (*it)->channel;
610 if (channel) {
611 channel->stop();
612 }
613 }
614 if (mSupportChannel)
615 mSupportChannel->stop();
616
617 if (mAnalysisChannel) {
618 mAnalysisChannel->stop();
619 }
620 if (mMetadataChannel) {
621 mMetadataChannel->stop();
622 }
623 if (mChannelHandle) {
624 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
625 mChannelHandle);
626 LOGD("stopping channel %d", mChannelHandle);
627 }
628
629 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
630 it != mStreamInfo.end(); it++) {
631 QCamera3ProcessingChannel *channel = (*it)->channel;
632 if (channel)
633 delete channel;
634 free (*it);
635 }
636 if (mSupportChannel) {
637 delete mSupportChannel;
638 mSupportChannel = NULL;
639 }
640
641 if (mAnalysisChannel) {
642 delete mAnalysisChannel;
643 mAnalysisChannel = NULL;
644 }
645 if (mRawDumpChannel) {
646 delete mRawDumpChannel;
647 mRawDumpChannel = NULL;
648 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -0700649 if (mHdrPlusRawSrcChannel) {
650 delete mHdrPlusRawSrcChannel;
651 mHdrPlusRawSrcChannel = NULL;
652 }
Thierry Strudel3d639192016-09-09 11:52:26 -0700653 if (mDummyBatchChannel) {
654 delete mDummyBatchChannel;
655 mDummyBatchChannel = NULL;
656 }
657
658 mPictureChannel = NULL;
Emilian Peev7650c122017-01-19 08:24:33 -0800659 mDepthChannel = NULL;
Thierry Strudel3d639192016-09-09 11:52:26 -0700660
661 if (mMetadataChannel) {
662 delete mMetadataChannel;
663 mMetadataChannel = NULL;
664 }
665
666 /* Clean up all channels */
667 if (mCameraInitialized) {
668 if(!mFirstConfiguration){
669 //send the last unconfigure
670 cam_stream_size_info_t stream_config_info;
671 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
672 stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
673 stream_config_info.buffer_info.max_buffers =
674 m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
Thierry Strudel9e74aae2016-09-22 17:10:18 -0700675 clear_metadata_buffer(mParameters);
Thierry Strudel3d639192016-09-09 11:52:26 -0700676 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
677 stream_config_info);
678 int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
679 if (rc < 0) {
680 LOGE("set_parms failed for unconfigure");
681 }
682 }
683 deinitParameters();
684 }
685
686 if (mChannelHandle) {
687 mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
688 mChannelHandle);
689 LOGH("deleting channel %d", mChannelHandle);
690 mChannelHandle = 0;
691 }
692
693 if (mState != CLOSED)
694 closeCamera();
695
696 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
697 req.mPendingBufferList.clear();
698 }
699 mPendingBuffersMap.mPendingBuffersInRequest.clear();
Thierry Strudel3d639192016-09-09 11:52:26 -0700700 for (pendingRequestIterator i = mPendingRequestsList.begin();
701 i != mPendingRequestsList.end();) {
702 i = erasePendingRequest(i);
703 }
704 for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
705 if (mDefaultMetadata[i])
706 free_camera_metadata(mDefaultMetadata[i]);
707
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800708 mPerfLockMgr.releasePerfLock(PERF_LOCK_CLOSE_CAMERA);
Thierry Strudel3d639192016-09-09 11:52:26 -0700709
710 pthread_cond_destroy(&mRequestCond);
711
712 pthread_cond_destroy(&mBuffersCond);
713
714 pthread_mutex_destroy(&mMutex);
715 LOGD("X");
716}
717
718/*===========================================================================
719 * FUNCTION : erasePendingRequest
720 *
721 * DESCRIPTION: function to erase a desired pending request after freeing any
722 * allocated memory
723 *
724 * PARAMETERS :
725 * @i : iterator pointing to pending request to be erased
726 *
727 * RETURN : iterator pointing to the next request
728 *==========================================================================*/
729QCamera3HardwareInterface::pendingRequestIterator
730 QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
731{
732 if (i->input_buffer != NULL) {
733 free(i->input_buffer);
734 i->input_buffer = NULL;
735 }
736 if (i->settings != NULL)
737 free_camera_metadata((camera_metadata_t*)i->settings);
738 return mPendingRequestsList.erase(i);
739}
740
741/*===========================================================================
742 * FUNCTION : camEvtHandle
743 *
744 * DESCRIPTION: Function registered to mm-camera-interface to handle events
745 *
746 * PARAMETERS :
747 * @camera_handle : interface layer camera handle
748 * @evt : ptr to event
749 * @user_data : user data ptr
750 *
751 * RETURN : none
752 *==========================================================================*/
753void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
754 mm_camera_event_t *evt,
755 void *user_data)
756{
757 QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
758 if (obj && evt) {
759 switch(evt->server_event_type) {
760 case CAM_EVENT_TYPE_DAEMON_DIED:
761 pthread_mutex_lock(&obj->mMutex);
762 obj->mState = ERROR;
763 pthread_mutex_unlock(&obj->mMutex);
764 LOGE("Fatal, camera daemon died");
765 break;
766
767 case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
768 LOGD("HAL got request pull from Daemon");
769 pthread_mutex_lock(&obj->mMutex);
770 obj->mWokenUpByDaemon = true;
771 obj->unblockRequestIfNecessary();
772 pthread_mutex_unlock(&obj->mMutex);
773 break;
774
775 default:
776 LOGW("Warning: Unhandled event %d",
777 evt->server_event_type);
778 break;
779 }
780 } else {
781 LOGE("NULL user_data/evt");
782 }
783}
784
785/*===========================================================================
786 * FUNCTION : openCamera
787 *
788 * DESCRIPTION: open camera
789 *
790 * PARAMETERS :
791 * @hw_device : double ptr for camera device struct
792 *
793 * RETURN : int32_t type of status
794 * NO_ERROR -- success
795 * none-zero failure code
796 *==========================================================================*/
797int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
798{
799 int rc = 0;
800 if (mState != CLOSED) {
801 *hw_device = NULL;
802 return PERMISSION_DENIED;
803 }
804
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800805 mPerfLockMgr.acquirePerfLock(PERF_LOCK_OPEN_CAMERA);
Thierry Strudel3d639192016-09-09 11:52:26 -0700806 LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
807 mCameraId);
808
809 rc = openCamera();
810 if (rc == 0) {
811 *hw_device = &mCameraDevice.common;
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800812 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -0700813 *hw_device = NULL;
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800814 }
Thierry Strudel3d639192016-09-09 11:52:26 -0700815
Thierry Strudel3d639192016-09-09 11:52:26 -0700816 LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
817 mCameraId, rc);
818
819 if (rc == NO_ERROR) {
820 mState = OPENED;
821 }
822 return rc;
823}
824
/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: Open the backend camera session. Performs, in order:
 *              flash reservation, camera_open(), event-callback
 *              registration, EXIF debug-param allocation, display-HAL
 *              session notification, session-id retrieval, dual-cam sync
 *              buffer allocation/mapping, and optional HDR+ client creation.
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              none-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera()
{
    int rc = 0;
    char value[PROPERTY_VALUE_MAX];

    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_OPENCAMERA);
    // A non-NULL handle means a session already exists for this instance.
    if (mCameraHandle) {
        LOGE("Failure: Camera already opened");
        return ALREADY_EXISTS;
    }

    // Reserve the flash unit first so torch users are preempted before the
    // backend session is created.
    rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
    if (rc < 0) {
        LOGE("Failed to reserve flash for camera id: %d",
                mCameraId);
        return UNKNOWN_ERROR;
    }

    rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
    if (rc) {
        LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
        return rc;
    }

    // Defensive check: camera_open() may report success yet leave the
    // handle NULL.
    if (!mCameraHandle) {
        LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
        return -ENODEV;
    }

    rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
            camEvtHandle, (void *)this);

    if (rc < 0) {
        LOGE("Error, failed to register event callback");
        /* Not closing camera here since it is already handled in destructor */
        return FAILED_TRANSACTION;
    }

    mExifParams.debug_params =
            (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
    if (mExifParams.debug_params) {
        memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
    } else {
        LOGE("Out of Memory. Allocation failed for 3A debug exif params");
        return NO_MEMORY;
    }
    // Force a full (re)configuration on the first configure_streams call.
    mFirstConfiguration = true;

    //Notify display HAL that a camera session is active.
    //But avoid calling the same during bootup because camera service might open/close
    //cameras at boot time during its initialization and display service will also internally
    //wait for camera service to initialize first while calling this display API, resulting in a
    //deadlock situation. Since boot time camera open/close calls are made only to fetch
    //capabilities, no need of this display bw optimization.
    //Use "service.bootanim.exit" property to know boot status.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        // gNumCameraSessions is shared across instances; guard with gCamLock.
        pthread_mutex_lock(&gCamLock);
        if (gNumCameraSessions++ == 0) {
            setCameraLaunchStatus(true);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    //fill the session id needed while linking dual cam
    pthread_mutex_lock(&gCamLock);
    rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
        &sessionId[mCameraId]);
    pthread_mutex_unlock(&gCamLock);

    if (rc < 0) {
        LOGE("Error, failed to get sessiion id");
        return UNKNOWN_ERROR;
    } else {
        //Allocate related cam sync buffer
        //this is needed for the payload that goes along with bundling cmd for related
        //camera use cases
        m_pDualCamCmdHeap = new QCamera3HeapMemory(1);
        rc = m_pDualCamCmdHeap->allocate(sizeof(cam_dual_camera_cmd_info_t));
        if(rc != OK) {
            rc = NO_MEMORY;
            LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
            // NOTE(review): m_pDualCamCmdHeap is left allocated here; cleanup
            // relies on closeCamera()/destructor — confirm those run on this
            // failure path.
            return NO_MEMORY;
        }

        //Map memory for related cam sync buffer
        rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
                CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF,
                m_pDualCamCmdHeap->getFd(0),
                sizeof(cam_dual_camera_cmd_info_t),
                m_pDualCamCmdHeap->getPtr(0));
        if(rc < 0) {
            LOGE("Dualcam: failed to map Related cam sync buffer");
            // NOTE(review): rc is set to FAILED_TRANSACTION but NO_MEMORY is
            // returned — the assignment is a dead store; likely unintentional.
            rc = FAILED_TRANSACTION;
            return NO_MEMORY;
        }
        m_pDualCamCmdPtr =
                (cam_dual_camera_cmd_info_t*) DATA_PTR(m_pDualCamCmdHeap,0);
    }

    LOGH("mCameraId=%d",mCameraId);

    // Create an HDR+ client instance.
    // TODO: detect if Easel exists instead of property.
    bool enableHdrPlus = property_get_bool("persist.camera.hdrplus.enable",
            false);
    ALOGD("%s: HDR+ in Camera HAL %s.", __FUNCTION__, enableHdrPlus ?
            "enabled" : "disabled");
    if (enableHdrPlus) {
        mHdrPlusClient = std::make_shared<HdrPlusClient>();
        mIsApInputUsedForHdrPlus =
                property_get_bool("persist.camera.hdrplus.apinput", false);
        ALOGD("%s: HDR+ input is provided by %s.", __FUNCTION__,
                mIsApInputUsedForHdrPlus ? "AP" : "Easel");
    }

    return NO_ERROR;
}
954
/*===========================================================================
 * FUNCTION   : closeCamera
 *
 * DESCRIPTION: Close the backend camera session. Unmaps/frees the dual-cam
 *              sync buffer, closes the backend handle, drops the HDR+
 *              client, invalidates the session id, notifies display HAL,
 *              frees EXIF debug params, releases the flash unit, and moves
 *              the state machine to CLOSED.
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              none-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::closeCamera()
{
    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CLOSECAMERA);
    int rc = NO_ERROR;
    char value[PROPERTY_VALUE_MAX];

    LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
             mCameraId);

    // unmap memory for related cam sync buffer
    // NOTE(review): mCameraHandle is dereferenced without a NULL check;
    // callers are expected to only reach here with an open session — verify.
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF);
    if (NULL != m_pDualCamCmdHeap) {
        m_pDualCamCmdHeap->deallocate();
        delete m_pDualCamCmdHeap;
        m_pDualCamCmdHeap = NULL;
        m_pDualCamCmdPtr = NULL;
    }

    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
    mCameraHandle = NULL;

    // Release the HDR+ client (if created in openCamera()).
    mHdrPlusClient = nullptr;

    //reset session id to some invalid id
    pthread_mutex_lock(&gCamLock);
    sessionId[mCameraId] = 0xDEADBEEF;
    pthread_mutex_unlock(&gCamLock);

    //Notify display HAL that there is no active camera session
    //but avoid calling the same during bootup. Refer to openCamera
    //for more details.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (--gNumCameraSessions == 0) {
            setCameraLaunchStatus(false);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    if (mExifParams.debug_params) {
        free(mExifParams.debug_params);
        mExifParams.debug_params = NULL;
    }
    if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
        LOGW("Failed to release flash for camera id: %d",
                mCameraId);
    }
    mState = CLOSED;
    LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
         mCameraId, rc);
    return rc;
}
1020
1021/*===========================================================================
1022 * FUNCTION : initialize
1023 *
1024 * DESCRIPTION: Initialize frameworks callback functions
1025 *
1026 * PARAMETERS :
1027 * @callback_ops : callback function to frameworks
1028 *
1029 * RETURN :
1030 *
1031 *==========================================================================*/
1032int QCamera3HardwareInterface::initialize(
1033 const struct camera3_callback_ops *callback_ops)
1034{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001035 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_INIT);
Thierry Strudel3d639192016-09-09 11:52:26 -07001036 int rc;
1037
1038 LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
1039 pthread_mutex_lock(&mMutex);
1040
1041 // Validate current state
1042 switch (mState) {
1043 case OPENED:
1044 /* valid state */
1045 break;
1046 default:
1047 LOGE("Invalid state %d", mState);
1048 rc = -ENODEV;
1049 goto err1;
1050 }
1051
1052 rc = initParameters();
1053 if (rc < 0) {
1054 LOGE("initParamters failed %d", rc);
1055 goto err1;
1056 }
1057 mCallbackOps = callback_ops;
1058
1059 mChannelHandle = mCameraHandle->ops->add_channel(
1060 mCameraHandle->camera_handle, NULL, NULL, this);
1061 if (mChannelHandle == 0) {
1062 LOGE("add_channel failed");
1063 rc = -ENOMEM;
1064 pthread_mutex_unlock(&mMutex);
1065 return rc;
1066 }
1067
1068 pthread_mutex_unlock(&mMutex);
1069 mCameraInitialized = true;
1070 mState = INITIALIZED;
1071 LOGI("X");
1072 return 0;
1073
1074err1:
1075 pthread_mutex_unlock(&mMutex);
1076 return rc;
1077}
1078
/*===========================================================================
 * FUNCTION   : validateStreamDimensions
 *
 * DESCRIPTION: Check that every requested stream size matches one advertised
 *              in the static capabilities (raw table, picture-size table, or
 *              active array / depth-sample size depending on format), and
 *              that at most one input stream is present.
 *
 * PARAMETERS :
 *   @streamList : streams to be configured
 *
 * RETURN     : NO_ERROR when all streams are valid; -EINVAL otherwise
 *
 *==========================================================================*/
int QCamera3HardwareInterface::validateStreamDimensions(
        camera3_stream_configuration_t *streamList)
{
    int rc = NO_ERROR;
    size_t count = 0;
    // Depth streams are validated against the active-array pixel count.
    uint32_t depthWidth =
            gCamCapability[mCameraId]->active_array_size.width;
    uint32_t depthHeight =
            gCamCapability[mCameraId]->active_array_size.height;

    camera3_stream_t *inputStream = NULL;
    /*
    * Loop through all streams to find input stream if it exists*
    */
    for (size_t i = 0; i< streamList->num_streams; i++) {
        if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
            if (inputStream != NULL) {
                LOGE("Error, Multiple input streams requested");
                return -EINVAL;
            }
            inputStream = streamList->streams[i];
        }
    }
    /*
    * Loop through all streams requested in configuration
    * Check if unsupported sizes have been requested on any of them
    */
    for (size_t j = 0; j < streamList->num_streams; j++) {
        bool sizeFound = false;
        camera3_stream_t *newStream = streamList->streams[j];

        // For 90/270-degree rotation the advertised tables are matched
        // against the swapped dimensions.
        uint32_t rotatedHeight = newStream->height;
        uint32_t rotatedWidth = newStream->width;
        if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
                (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
            rotatedHeight = newStream->width;
            rotatedWidth = newStream->height;
        }

        /*
        * Sizes are different for each type of stream format check against
        * appropriate table.
        */
        switch (newStream->format) {
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
        case HAL_PIXEL_FORMAT_RAW10:
            // Raw formats: match against the supported raw dimension table.
            count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
                        (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_BLOB:
            // Depth BLOB streams are encoded as a 1-row point cloud; other
            // BLOB (JPEG) streams fall through to the picture-size table.
            if (newStream->data_space == HAL_DATASPACE_DEPTH) {
                //As per spec. depth cloud should be sample count / 16
                uint32_t depthSamplesCount = depthWidth * depthHeight / 16;
                if ((depthSamplesCount == newStream->width) &&
                        (1 == newStream->height)) {
                    sizeFound = true;
                }
                break;
            }
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
            /* Verify set size against generated sizes table */
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_YCbCr_420_888:
        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
        default:
            // ZSL/input/bidirectional streams may match the full active
            // array size directly (the inner break exits the switch).
            if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
                    || newStream->stream_type == CAMERA3_STREAM_INPUT
                    || IS_USAGE_ZSL(newStream->usage)) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->active_array_size.width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->active_array_size.height)) {
                    sizeFound = true;
                    break;
                }
                /* We could potentially break here to enforce ZSL stream
                 * set from frameworks always is full active array size
                 * but it is not clear from the spec if framework will always
                 * follow that, also we have logic to override to full array
                 * size, so keeping the logic lenient at the moment
                 */
            }
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
                    MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        } /* End of switch(newStream->format) */

        /* We error out even if a single stream has unsupported size set */
        if (!sizeFound) {
            LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
                    rotatedWidth, rotatedHeight, newStream->format,
                    gCamCapability[mCameraId]->active_array_size.width,
                    gCamCapability[mCameraId]->active_array_size.height);
            rc = -EINVAL;
            break;
        }
    } /* End of for each stream */
    return rc;
}
1214
1215/*==============================================================================
1216 * FUNCTION : isSupportChannelNeeded
1217 *
1218 * DESCRIPTION: Simple heuristic func to determine if support channels is needed
1219 *
1220 * PARAMETERS :
1221 * @stream_list : streams to be configured
1222 * @stream_config_info : the config info for streams to be configured
1223 *
1224 * RETURN : Boolen true/false decision
1225 *
1226 *==========================================================================*/
1227bool QCamera3HardwareInterface::isSupportChannelNeeded(
1228 camera3_stream_configuration_t *streamList,
1229 cam_stream_size_info_t stream_config_info)
1230{
1231 uint32_t i;
1232 bool pprocRequested = false;
1233 /* Check for conditions where PProc pipeline does not have any streams*/
1234 for (i = 0; i < stream_config_info.num_streams; i++) {
1235 if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
1236 stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
1237 pprocRequested = true;
1238 break;
1239 }
1240 }
1241
1242 if (pprocRequested == false )
1243 return true;
1244
1245 /* Dummy stream needed if only raw or jpeg streams present */
1246 for (i = 0; i < streamList->num_streams; i++) {
1247 switch(streamList->streams[i]->format) {
1248 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1249 case HAL_PIXEL_FORMAT_RAW10:
1250 case HAL_PIXEL_FORMAT_RAW16:
1251 case HAL_PIXEL_FORMAT_BLOB:
1252 break;
1253 default:
1254 return false;
1255 }
1256 }
1257 return true;
1258}
1259
/*==============================================================================
 * FUNCTION   : getSensorModeInfo
 *
 * DESCRIPTION: Get sensor mode information based on current stream configuration
 *
 * PARAMETERS :
 *   @sensorModeInfo : sensor mode information (output)
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              none-zero failure code
 *
 *==========================================================================*/
int32_t QCamera3HardwareInterface::getSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
{
    int32_t rc = NO_ERROR;

    // Compute the maximum width and height over all configured streams; the
    // backend picks the sensor mode that can cover this dimension.
    cam_dimension_t max_dim = {0, 0};
    for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
        if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
            max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
        if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
            max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
    }

    // Step 1: push the max dimension to the backend.
    clear_metadata_buffer(mParameters);

    rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
            max_dim);
    if (rc != NO_ERROR) {
        LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
        return rc;
    }

    rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
    if (rc != NO_ERROR) {
        LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
        return rc;
    }

    // Step 2: query the sensor mode selected for that dimension. The batch
    // buffer is reused, so it must be cleared between set and get.
    clear_metadata_buffer(mParameters);
    ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO);

    rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
            mParameters);
    if (rc != NO_ERROR) {
        LOGE("Failed to get CAM_INTF_PARM_SENSOR_MODE_INFO");
        return rc;
    }

    READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO, sensorModeInfo);
    LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
            "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
            sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
            sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
            sensorModeInfo.num_raw_bits);

    return rc;
}
1319
/*==============================================================================
 * FUNCTION   : addToPPFeatureMask
 *
 * DESCRIPTION: add additional features to pp feature mask based on
 *              stream type and usecase
 *
 * PARAMETERS :
 *  @stream_format : stream type for feature mask
 *  @stream_idx : stream idx within postprocess_mask list to change
 *
 * RETURN     : NULL
 *
 *==========================================================================*/
void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
        uint32_t stream_idx)
{
    char feature_mask_value[PROPERTY_VALUE_MAX];
    cam_feature_mask_t feature_mask;
    int args_converted;
    int property_len;

    /* Get feature mask from property */
#ifdef _LE_CAMERA_
    // LE builds default the property to SW TNR enabled.
    char swtnr_feature_mask_value[PROPERTY_VALUE_MAX];
    snprintf(swtnr_feature_mask_value, PROPERTY_VALUE_MAX, "%lld", CAM_QTI_FEATURE_SW_TNR);
    property_len = property_get("persist.camera.hal3.feature",
            feature_mask_value, swtnr_feature_mask_value);
#else
    property_len = property_get("persist.camera.hal3.feature",
            feature_mask_value, "0");
#endif
    // The property accepts either a hex ("0x...") or decimal mask.
    if ((property_len > 2) && (feature_mask_value[0] == '0') &&
            (feature_mask_value[1] == 'x')) {
        args_converted = sscanf(feature_mask_value, "0x%llx", &feature_mask);
    } else {
        args_converted = sscanf(feature_mask_value, "%lld", &feature_mask);
    }
    if (1 != args_converted) {
        feature_mask = 0;
        LOGE("Wrong feature mask %s", feature_mask_value);
        return;
    }

    switch (stream_format) {
    case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
        /* Add LLVD to pp feature mask only if video hint is enabled */
        if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
            mStreamConfigInfo.postprocess_mask[stream_idx]
                    |= CAM_QTI_FEATURE_SW_TNR;
            LOGH("Added SW TNR to pp feature mask");
        } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
            mStreamConfigInfo.postprocess_mask[stream_idx]
                    |= CAM_QCOM_FEATURE_LLVD;
            LOGH("Added LLVD SeeMore to pp feature mask");
        }
        // Staggered video HDR is added whenever the capability advertises it.
        if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
                CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
            mStreamConfigInfo.postprocess_mask[stream_idx] |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
        }
        // Binning correction is video-only and capability-gated.
        if ((m_bIsVideo) && (gCamCapability[mCameraId]->qcom_supported_feature_mask &
                CAM_QTI_FEATURE_BINNING_CORRECTION)) {
            mStreamConfigInfo.postprocess_mask[stream_idx] |=
                    CAM_QTI_FEATURE_BINNING_CORRECTION;
        }
        break;
    }
    default:
        break;
    }
    LOGD("PP feature mask %llx",
            mStreamConfigInfo.postprocess_mask[stream_idx]);
}
1392
1393/*==============================================================================
1394 * FUNCTION : updateFpsInPreviewBuffer
1395 *
1396 * DESCRIPTION: update FPS information in preview buffer.
1397 *
1398 * PARAMETERS :
1399 * @metadata : pointer to metadata buffer
1400 * @frame_number: frame_number to look for in pending buffer list
1401 *
1402 * RETURN : None
1403 *
1404 *==========================================================================*/
1405void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
1406 uint32_t frame_number)
1407{
1408 // Mark all pending buffers for this particular request
1409 // with corresponding framerate information
1410 for (List<PendingBuffersInRequest>::iterator req =
1411 mPendingBuffersMap.mPendingBuffersInRequest.begin();
1412 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1413 for(List<PendingBufferInfo>::iterator j =
1414 req->mPendingBufferList.begin();
1415 j != req->mPendingBufferList.end(); j++) {
1416 QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
1417 if ((req->frame_number == frame_number) &&
1418 (channel->getStreamTypeMask() &
1419 (1U << CAM_STREAM_TYPE_PREVIEW))) {
1420 IF_META_AVAILABLE(cam_fps_range_t, float_range,
1421 CAM_INTF_PARM_FPS_RANGE, metadata) {
1422 typeof (MetaData_t::refreshrate) cameraFps = float_range->max_fps;
1423 struct private_handle_t *priv_handle =
1424 (struct private_handle_t *)(*(j->buffer));
1425 setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
1426 }
1427 }
1428 }
1429 }
1430}
1431
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001432/*==============================================================================
1433 * FUNCTION : updateTimeStampInPendingBuffers
1434 *
1435 * DESCRIPTION: update timestamp in display metadata for all pending buffers
1436 * of a frame number
1437 *
1438 * PARAMETERS :
1439 * @frame_number: frame_number. Timestamp will be set on pending buffers of this frame number
1440 * @timestamp : timestamp to be set
1441 *
1442 * RETURN : None
1443 *
1444 *==========================================================================*/
1445void QCamera3HardwareInterface::updateTimeStampInPendingBuffers(
1446 uint32_t frameNumber, nsecs_t timestamp)
1447{
1448 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
1449 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1450 if (req->frame_number != frameNumber)
1451 continue;
1452
1453 for (auto k = req->mPendingBufferList.begin();
1454 k != req->mPendingBufferList.end(); k++ ) {
1455 struct private_handle_t *priv_handle =
1456 (struct private_handle_t *) (*(k->buffer));
1457 setMetaData(priv_handle, SET_VT_TIMESTAMP, &timestamp);
1458 }
1459 }
1460 return;
1461}
1462
Thierry Strudel3d639192016-09-09 11:52:26 -07001463/*===========================================================================
1464 * FUNCTION : configureStreams
1465 *
1466 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1467 * and output streams.
1468 *
1469 * PARAMETERS :
1470 * @stream_list : streams to be configured
1471 *
1472 * RETURN :
1473 *
1474 *==========================================================================*/
1475int QCamera3HardwareInterface::configureStreams(
1476 camera3_stream_configuration_t *streamList)
1477{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001478 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS);
Thierry Strudel3d639192016-09-09 11:52:26 -07001479 int rc = 0;
1480
1481 // Acquire perfLock before configure streams
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001482 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001483 rc = configureStreamsPerfLocked(streamList);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001484 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001485
1486 return rc;
1487}
1488
1489/*===========================================================================
1490 * FUNCTION : configureStreamsPerfLocked
1491 *
1492 * DESCRIPTION: configureStreams while perfLock is held.
1493 *
1494 * PARAMETERS :
1495 * @stream_list : streams to be configured
1496 *
1497 * RETURN : int32_t type of status
1498 * NO_ERROR -- success
1499 * none-zero failure code
1500 *==========================================================================*/
1501int QCamera3HardwareInterface::configureStreamsPerfLocked(
1502 camera3_stream_configuration_t *streamList)
1503{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001504 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS_PERF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07001505 int rc = 0;
1506
1507 // Sanity check stream_list
1508 if (streamList == NULL) {
1509 LOGE("NULL stream configuration");
1510 return BAD_VALUE;
1511 }
1512 if (streamList->streams == NULL) {
1513 LOGE("NULL stream list");
1514 return BAD_VALUE;
1515 }
1516
1517 if (streamList->num_streams < 1) {
1518 LOGE("Bad number of streams requested: %d",
1519 streamList->num_streams);
1520 return BAD_VALUE;
1521 }
1522
1523 if (streamList->num_streams >= MAX_NUM_STREAMS) {
1524 LOGE("Maximum number of streams %d exceeded: %d",
1525 MAX_NUM_STREAMS, streamList->num_streams);
1526 return BAD_VALUE;
1527 }
1528
1529 mOpMode = streamList->operation_mode;
1530 LOGD("mOpMode: %d", mOpMode);
1531
1532 /* first invalidate all the steams in the mStreamList
1533 * if they appear again, they will be validated */
1534 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1535 it != mStreamInfo.end(); it++) {
1536 QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1537 if (channel) {
1538 channel->stop();
1539 }
1540 (*it)->status = INVALID;
1541 }
1542
1543 if (mRawDumpChannel) {
1544 mRawDumpChannel->stop();
1545 delete mRawDumpChannel;
1546 mRawDumpChannel = NULL;
1547 }
1548
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001549 if (mHdrPlusRawSrcChannel) {
1550 mHdrPlusRawSrcChannel->stop();
1551 delete mHdrPlusRawSrcChannel;
1552 mHdrPlusRawSrcChannel = NULL;
1553 }
1554
Thierry Strudel3d639192016-09-09 11:52:26 -07001555 if (mSupportChannel)
1556 mSupportChannel->stop();
1557
1558 if (mAnalysisChannel) {
1559 mAnalysisChannel->stop();
1560 }
1561 if (mMetadataChannel) {
1562 /* If content of mStreamInfo is not 0, there is metadata stream */
1563 mMetadataChannel->stop();
1564 }
1565 if (mChannelHandle) {
1566 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
1567 mChannelHandle);
1568 LOGD("stopping channel %d", mChannelHandle);
1569 }
1570
1571 pthread_mutex_lock(&mMutex);
1572
1573 // Check state
1574 switch (mState) {
1575 case INITIALIZED:
1576 case CONFIGURED:
1577 case STARTED:
1578 /* valid state */
1579 break;
1580 default:
1581 LOGE("Invalid state %d", mState);
1582 pthread_mutex_unlock(&mMutex);
1583 return -ENODEV;
1584 }
1585
1586 /* Check whether we have video stream */
1587 m_bIs4KVideo = false;
1588 m_bIsVideo = false;
1589 m_bEisSupportedSize = false;
1590 m_bTnrEnabled = false;
Mansoor Aftab93a66e52017-01-26 14:58:25 -08001591 m_bVideoHdrEnabled = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001592 bool isZsl = false;
Emilian Peev7650c122017-01-19 08:24:33 -08001593 bool depthPresent = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001594 uint32_t videoWidth = 0U;
1595 uint32_t videoHeight = 0U;
1596 size_t rawStreamCnt = 0;
1597 size_t stallStreamCnt = 0;
1598 size_t processedStreamCnt = 0;
1599 // Number of streams on ISP encoder path
1600 size_t numStreamsOnEncoder = 0;
1601 size_t numYuv888OnEncoder = 0;
1602 bool bYuv888OverrideJpeg = false;
1603 cam_dimension_t largeYuv888Size = {0, 0};
1604 cam_dimension_t maxViewfinderSize = {0, 0};
1605 bool bJpegExceeds4K = false;
1606 bool bJpegOnEncoder = false;
1607 bool bUseCommonFeatureMask = false;
1608 cam_feature_mask_t commonFeatureMask = 0;
1609 bool bSmallJpegSize = false;
1610 uint32_t width_ratio;
1611 uint32_t height_ratio;
1612 maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1613 camera3_stream_t *inputStream = NULL;
1614 bool isJpeg = false;
1615 cam_dimension_t jpegSize = {0, 0};
Thierry Strudel9ec39c62016-12-28 11:30:05 -08001616 cam_dimension_t previewSize = {0, 0};
Thierry Strudel3d639192016-09-09 11:52:26 -07001617
1618 cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
1619
1620 /*EIS configuration*/
Thierry Strudel3d639192016-09-09 11:52:26 -07001621 bool oisSupported = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001622 uint8_t eis_prop_set;
1623 uint32_t maxEisWidth = 0;
1624 uint32_t maxEisHeight = 0;
1625
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001626 // Initialize all instant AEC related variables
1627 mInstantAEC = false;
1628 mResetInstantAEC = false;
1629 mInstantAECSettledFrameNumber = 0;
1630 mAecSkipDisplayFrameBound = 0;
1631 mInstantAecFrameIdxCount = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08001632 mCurrFeatureState = 0;
1633 mStreamConfig = true;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001634
Thierry Strudel3d639192016-09-09 11:52:26 -07001635 memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1636
1637 size_t count = IS_TYPE_MAX;
1638 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1639 for (size_t i = 0; i < count; i++) {
1640 if ((gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001641 (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
1642 m_bEisSupported = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07001643 break;
1644 }
1645 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001646 count = CAM_OPT_STAB_MAX;
1647 count = MIN(gCamCapability[mCameraId]->optical_stab_modes_count, count);
1648 for (size_t i = 0; i < count; i++) {
1649 if (gCamCapability[mCameraId]->optical_stab_modes[i] == CAM_OPT_STAB_ON) {
1650 oisSupported = true;
1651 break;
1652 }
1653 }
1654
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001655 if (m_bEisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001656 maxEisWidth = MAX_EIS_WIDTH;
1657 maxEisHeight = MAX_EIS_HEIGHT;
1658 }
1659
1660 /* EIS setprop control */
1661 char eis_prop[PROPERTY_VALUE_MAX];
1662 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001663 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07001664 eis_prop_set = (uint8_t)atoi(eis_prop);
1665
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001666 m_bEisEnable = eis_prop_set && (!oisSupported && m_bEisSupported) &&
Thierry Strudel3d639192016-09-09 11:52:26 -07001667 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
1668
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001669 LOGD("m_bEisEnable: %d, eis_prop_set: %d, m_bEisSupported: %d, oisSupported:%d ",
1670 m_bEisEnable, eis_prop_set, m_bEisSupported, oisSupported);
1671
Thierry Strudel3d639192016-09-09 11:52:26 -07001672 /* stream configurations */
1673 for (size_t i = 0; i < streamList->num_streams; i++) {
1674 camera3_stream_t *newStream = streamList->streams[i];
1675 LOGI("stream[%d] type = %d, format = %d, width = %d, "
1676 "height = %d, rotation = %d, usage = 0x%x",
1677 i, newStream->stream_type, newStream->format,
1678 newStream->width, newStream->height, newStream->rotation,
1679 newStream->usage);
1680 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1681 newStream->stream_type == CAMERA3_STREAM_INPUT){
1682 isZsl = true;
1683 }
1684 if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1685 inputStream = newStream;
1686 }
1687
Emilian Peev7650c122017-01-19 08:24:33 -08001688 if ((newStream->format == HAL_PIXEL_FORMAT_BLOB) &&
1689 (newStream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001690 isJpeg = true;
1691 jpegSize.width = newStream->width;
1692 jpegSize.height = newStream->height;
1693 if (newStream->width > VIDEO_4K_WIDTH ||
1694 newStream->height > VIDEO_4K_HEIGHT)
1695 bJpegExceeds4K = true;
1696 }
1697
1698 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1699 (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1700 m_bIsVideo = true;
1701 videoWidth = newStream->width;
1702 videoHeight = newStream->height;
1703 if ((VIDEO_4K_WIDTH <= newStream->width) &&
1704 (VIDEO_4K_HEIGHT <= newStream->height)) {
1705 m_bIs4KVideo = true;
1706 }
1707 m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
1708 (newStream->height <= maxEisHeight);
1709 }
1710 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1711 newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
1712 switch (newStream->format) {
1713 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08001714 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
1715 depthPresent = true;
1716 break;
1717 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001718 stallStreamCnt++;
1719 if (isOnEncoder(maxViewfinderSize, newStream->width,
1720 newStream->height)) {
1721 numStreamsOnEncoder++;
1722 bJpegOnEncoder = true;
1723 }
1724 width_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.width,
1725 newStream->width);
1726 height_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.height,
1727 newStream->height);;
1728 FATAL_IF(gCamCapability[mCameraId]->max_downscale_factor == 0,
1729 "FATAL: max_downscale_factor cannot be zero and so assert");
1730 if ( (width_ratio > gCamCapability[mCameraId]->max_downscale_factor) ||
1731 (height_ratio > gCamCapability[mCameraId]->max_downscale_factor)) {
1732 LOGH("Setting small jpeg size flag to true");
1733 bSmallJpegSize = true;
1734 }
1735 break;
1736 case HAL_PIXEL_FORMAT_RAW10:
1737 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1738 case HAL_PIXEL_FORMAT_RAW16:
1739 rawStreamCnt++;
1740 break;
1741 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1742 processedStreamCnt++;
1743 if (isOnEncoder(maxViewfinderSize, newStream->width,
1744 newStream->height)) {
1745 if (newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL &&
1746 !IS_USAGE_ZSL(newStream->usage)) {
1747 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1748 }
1749 numStreamsOnEncoder++;
1750 }
1751 break;
1752 case HAL_PIXEL_FORMAT_YCbCr_420_888:
1753 processedStreamCnt++;
1754 if (isOnEncoder(maxViewfinderSize, newStream->width,
1755 newStream->height)) {
1756 // If Yuv888 size is not greater than 4K, set feature mask
1757 // to SUPERSET so that it support concurrent request on
1758 // YUV and JPEG.
1759 if (newStream->width <= VIDEO_4K_WIDTH &&
1760 newStream->height <= VIDEO_4K_HEIGHT) {
1761 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1762 }
1763 numStreamsOnEncoder++;
1764 numYuv888OnEncoder++;
1765 largeYuv888Size.width = newStream->width;
1766 largeYuv888Size.height = newStream->height;
1767 }
1768 break;
1769 default:
1770 processedStreamCnt++;
1771 if (isOnEncoder(maxViewfinderSize, newStream->width,
1772 newStream->height)) {
1773 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1774 numStreamsOnEncoder++;
1775 }
1776 break;
1777 }
1778
1779 }
1780 }
1781
1782 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
1783 gCamCapability[mCameraId]->position == CAM_POSITION_FRONT_AUX ||
1784 !m_bIsVideo) {
1785 m_bEisEnable = false;
1786 }
1787
Thierry Strudel54dc9782017-02-15 12:12:10 -08001788 uint8_t forceEnableTnr = 0;
1789 char tnr_prop[PROPERTY_VALUE_MAX];
1790 memset(tnr_prop, 0, sizeof(tnr_prop));
1791 property_get("debug.camera.tnr.forceenable", tnr_prop, "0");
1792 forceEnableTnr = (uint8_t)atoi(tnr_prop);
1793
Thierry Strudel3d639192016-09-09 11:52:26 -07001794 /* Logic to enable/disable TNR based on specific config size/etc.*/
1795 if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
1796 ((videoWidth == 1920 && videoHeight == 1080) ||
1797 (videoWidth == 1280 && videoHeight == 720)) &&
1798 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
1799 m_bTnrEnabled = true;
Thierry Strudel54dc9782017-02-15 12:12:10 -08001800 else if (forceEnableTnr)
1801 m_bTnrEnabled = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07001802
Mansoor Aftab93a66e52017-01-26 14:58:25 -08001803 char videoHdrProp[PROPERTY_VALUE_MAX];
1804 memset(videoHdrProp, 0, sizeof(videoHdrProp));
1805 property_get("persist.camera.hdr.video", videoHdrProp, "0");
1806 uint8_t hdr_mode_prop = (uint8_t)atoi(videoHdrProp);
1807
1808 if (hdr_mode_prop == 1 && m_bIsVideo &&
1809 mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
1810 m_bVideoHdrEnabled = true;
1811 else
1812 m_bVideoHdrEnabled = false;
1813
1814
Thierry Strudel3d639192016-09-09 11:52:26 -07001815 /* Check if num_streams is sane */
1816 if (stallStreamCnt > MAX_STALLING_STREAMS ||
1817 rawStreamCnt > MAX_RAW_STREAMS ||
1818 processedStreamCnt > MAX_PROCESSED_STREAMS) {
1819 LOGE("Invalid stream configu: stall: %d, raw: %d, processed %d",
1820 stallStreamCnt, rawStreamCnt, processedStreamCnt);
1821 pthread_mutex_unlock(&mMutex);
1822 return -EINVAL;
1823 }
1824 /* Check whether we have zsl stream or 4k video case */
Thierry Strudel9ec39c62016-12-28 11:30:05 -08001825 if (isZsl && m_bIs4KVideo) {
1826 LOGE("Currently invalid configuration ZSL & 4K Video!");
Thierry Strudel3d639192016-09-09 11:52:26 -07001827 pthread_mutex_unlock(&mMutex);
1828 return -EINVAL;
1829 }
1830 /* Check if stream sizes are sane */
1831 if (numStreamsOnEncoder > 2) {
1832 LOGE("Number of streams on ISP encoder path exceeds limits of 2");
1833 pthread_mutex_unlock(&mMutex);
1834 return -EINVAL;
1835 } else if (1 < numStreamsOnEncoder){
1836 bUseCommonFeatureMask = true;
1837 LOGH("Multiple streams above max viewfinder size, common mask needed");
1838 }
1839
1840 /* Check if BLOB size is greater than 4k in 4k recording case */
1841 if (m_bIs4KVideo && bJpegExceeds4K) {
1842 LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
1843 pthread_mutex_unlock(&mMutex);
1844 return -EINVAL;
1845 }
1846
Emilian Peev7650c122017-01-19 08:24:33 -08001847 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
1848 depthPresent) {
1849 LOGE("HAL doesn't support depth streams in HFR mode!");
1850 pthread_mutex_unlock(&mMutex);
1851 return -EINVAL;
1852 }
1853
Thierry Strudel3d639192016-09-09 11:52:26 -07001854 // When JPEG and preview streams share VFE output, CPP will not apply CAC2
1855 // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
1856 // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
1857 // is not true. Otherwise testMandatoryOutputCombinations will fail with following
1858 // configurations:
1859 // {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
1860 // {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
1861 // (These two configurations will not have CAC2 enabled even in HQ modes.)
1862 if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
1863 ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
1864 __func__);
1865 pthread_mutex_unlock(&mMutex);
1866 return -EINVAL;
1867 }
1868
1869 // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
1870 // the YUV stream's size is greater or equal to the JPEG size, set common
1871 // postprocess mask to NONE, so that we can take advantage of postproc bypass.
1872 if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
1873 jpegSize.width, jpegSize.height) &&
1874 largeYuv888Size.width > jpegSize.width &&
1875 largeYuv888Size.height > jpegSize.height) {
1876 bYuv888OverrideJpeg = true;
1877 } else if (!isJpeg && numStreamsOnEncoder > 1) {
1878 commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1879 }
1880
1881 LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %llx",
1882 maxViewfinderSize.width, maxViewfinderSize.height, isZsl, bUseCommonFeatureMask,
1883 commonFeatureMask);
1884 LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
1885 numStreamsOnEncoder, processedStreamCnt, stallStreamCnt, bSmallJpegSize);
1886
1887 rc = validateStreamDimensions(streamList);
1888 if (rc == NO_ERROR) {
1889 rc = validateStreamRotations(streamList);
1890 }
1891 if (rc != NO_ERROR) {
1892 LOGE("Invalid stream configuration requested!");
1893 pthread_mutex_unlock(&mMutex);
1894 return rc;
1895 }
1896
1897 camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
1898 for (size_t i = 0; i < streamList->num_streams; i++) {
1899 camera3_stream_t *newStream = streamList->streams[i];
1900 LOGH("newStream type = %d, stream format = %d "
1901 "stream size : %d x %d, stream rotation = %d",
1902 newStream->stream_type, newStream->format,
1903 newStream->width, newStream->height, newStream->rotation);
1904 //if the stream is in the mStreamList validate it
1905 bool stream_exists = false;
1906 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
1907 it != mStreamInfo.end(); it++) {
1908 if ((*it)->stream == newStream) {
1909 QCamera3ProcessingChannel *channel =
1910 (QCamera3ProcessingChannel*)(*it)->stream->priv;
1911 stream_exists = true;
1912 if (channel)
1913 delete channel;
1914 (*it)->status = VALID;
1915 (*it)->stream->priv = NULL;
1916 (*it)->channel = NULL;
1917 }
1918 }
1919 if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
1920 //new stream
1921 stream_info_t* stream_info;
1922 stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
1923 if (!stream_info) {
1924 LOGE("Could not allocate stream info");
1925 rc = -ENOMEM;
1926 pthread_mutex_unlock(&mMutex);
1927 return rc;
1928 }
1929 stream_info->stream = newStream;
1930 stream_info->status = VALID;
1931 stream_info->channel = NULL;
1932 mStreamInfo.push_back(stream_info);
1933 }
1934 /* Covers Opaque ZSL and API1 F/W ZSL */
1935 if (IS_USAGE_ZSL(newStream->usage)
1936 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
1937 if (zslStream != NULL) {
1938 LOGE("Multiple input/reprocess streams requested!");
1939 pthread_mutex_unlock(&mMutex);
1940 return BAD_VALUE;
1941 }
1942 zslStream = newStream;
1943 }
1944 /* Covers YUV reprocess */
1945 if (inputStream != NULL) {
1946 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
1947 && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
1948 && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
1949 && inputStream->width == newStream->width
1950 && inputStream->height == newStream->height) {
1951 if (zslStream != NULL) {
1952 /* This scenario indicates multiple YUV streams with same size
1953 * as input stream have been requested, since zsl stream handle
1954 * is solely use for the purpose of overriding the size of streams
1955 * which share h/w streams we will just make a guess here as to
1956 * which of the stream is a ZSL stream, this will be refactored
1957 * once we make generic logic for streams sharing encoder output
1958 */
1959 LOGH("Warning, Multiple ip/reprocess streams requested!");
1960 }
1961 zslStream = newStream;
1962 }
1963 }
1964 }
1965
1966 /* If a zsl stream is set, we know that we have configured at least one input or
1967 bidirectional stream */
1968 if (NULL != zslStream) {
1969 mInputStreamInfo.dim.width = (int32_t)zslStream->width;
1970 mInputStreamInfo.dim.height = (int32_t)zslStream->height;
1971 mInputStreamInfo.format = zslStream->format;
1972 mInputStreamInfo.usage = zslStream->usage;
1973 LOGD("Input stream configured! %d x %d, format %d, usage %d",
1974 mInputStreamInfo.dim.width,
1975 mInputStreamInfo.dim.height,
1976 mInputStreamInfo.format, mInputStreamInfo.usage);
1977 }
1978
1979 cleanAndSortStreamInfo();
1980 if (mMetadataChannel) {
1981 delete mMetadataChannel;
1982 mMetadataChannel = NULL;
1983 }
1984 if (mSupportChannel) {
1985 delete mSupportChannel;
1986 mSupportChannel = NULL;
1987 }
1988
1989 if (mAnalysisChannel) {
1990 delete mAnalysisChannel;
1991 mAnalysisChannel = NULL;
1992 }
1993
1994 if (mDummyBatchChannel) {
1995 delete mDummyBatchChannel;
1996 mDummyBatchChannel = NULL;
1997 }
1998
Emilian Peev7650c122017-01-19 08:24:33 -08001999 if (mDepthChannel) {
2000 mDepthChannel = NULL;
2001 }
2002
Thierry Strudel3d639192016-09-09 11:52:26 -07002003 //Create metadata channel and initialize it
2004 cam_feature_mask_t metadataFeatureMask = CAM_QCOM_FEATURE_NONE;
2005 setPAAFSupport(metadataFeatureMask, CAM_STREAM_TYPE_METADATA,
2006 gCamCapability[mCameraId]->color_arrangement);
2007 mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
2008 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002009 setBufferErrorStatus, &padding_info, metadataFeatureMask, this);
Thierry Strudel3d639192016-09-09 11:52:26 -07002010 if (mMetadataChannel == NULL) {
2011 LOGE("failed to allocate metadata channel");
2012 rc = -ENOMEM;
2013 pthread_mutex_unlock(&mMutex);
2014 return rc;
2015 }
2016 rc = mMetadataChannel->initialize(IS_TYPE_NONE);
2017 if (rc < 0) {
2018 LOGE("metadata channel initialization failed");
2019 delete mMetadataChannel;
2020 mMetadataChannel = NULL;
2021 pthread_mutex_unlock(&mMutex);
2022 return rc;
2023 }
2024
Thierry Strudel3d639192016-09-09 11:52:26 -07002025 bool isRawStreamRequested = false;
2026 memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
2027 /* Allocate channel objects for the requested streams */
2028 for (size_t i = 0; i < streamList->num_streams; i++) {
2029 camera3_stream_t *newStream = streamList->streams[i];
2030 uint32_t stream_usage = newStream->usage;
2031 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
2032 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
2033 struct camera_info *p_info = NULL;
2034 pthread_mutex_lock(&gCamLock);
2035 p_info = get_cam_info(mCameraId, &mStreamConfigInfo.sync_type);
2036 pthread_mutex_unlock(&gCamLock);
2037 if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
2038 || IS_USAGE_ZSL(newStream->usage)) &&
2039 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
2040 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
2041 if (bUseCommonFeatureMask) {
2042 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2043 commonFeatureMask;
2044 } else {
2045 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2046 CAM_QCOM_FEATURE_NONE;
2047 }
2048
2049 } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
2050 LOGH("Input stream configured, reprocess config");
2051 } else {
2052 //for non zsl streams find out the format
2053 switch (newStream->format) {
2054 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
2055 {
2056 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2057 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2058 /* add additional features to pp feature mask */
2059 addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
2060 mStreamConfigInfo.num_streams);
2061
2062 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
2063 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2064 CAM_STREAM_TYPE_VIDEO;
2065 if (m_bTnrEnabled && m_bTnrVideo) {
2066 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2067 CAM_QCOM_FEATURE_CPP_TNR;
2068 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2069 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2070 ~CAM_QCOM_FEATURE_CDS;
2071 }
2072 } else {
2073 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2074 CAM_STREAM_TYPE_PREVIEW;
2075 if (m_bTnrEnabled && m_bTnrPreview) {
2076 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2077 CAM_QCOM_FEATURE_CPP_TNR;
2078 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2079 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2080 ~CAM_QCOM_FEATURE_CDS;
2081 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002082 if(!m_bSwTnrPreview) {
2083 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2084 ~CAM_QTI_FEATURE_SW_TNR;
2085 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002086 padding_info.width_padding = mSurfaceStridePadding;
2087 padding_info.height_padding = CAM_PAD_TO_2;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002088 previewSize.width = (int32_t)newStream->width;
2089 previewSize.height = (int32_t)newStream->height;
Thierry Strudel3d639192016-09-09 11:52:26 -07002090 }
2091 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
2092 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
2093 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2094 newStream->height;
2095 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2096 newStream->width;
2097 }
2098 }
2099 break;
2100 case HAL_PIXEL_FORMAT_YCbCr_420_888:
2101 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
2102 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2103 if (bUseCommonFeatureMask)
2104 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2105 commonFeatureMask;
2106 else
2107 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2108 CAM_QCOM_FEATURE_NONE;
2109 } else {
2110 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2111 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2112 }
2113 break;
2114 case HAL_PIXEL_FORMAT_BLOB:
2115 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
2116 // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
2117 if ((m_bIs4KVideo && !isZsl) || (bSmallJpegSize && !isZsl)) {
2118 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2119 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2120 } else {
2121 if (bUseCommonFeatureMask &&
2122 isOnEncoder(maxViewfinderSize, newStream->width,
2123 newStream->height)) {
2124 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
2125 } else {
2126 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2127 }
2128 }
2129 if (isZsl) {
2130 if (zslStream) {
2131 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2132 (int32_t)zslStream->width;
2133 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2134 (int32_t)zslStream->height;
2135 } else {
2136 LOGE("Error, No ZSL stream identified");
2137 pthread_mutex_unlock(&mMutex);
2138 return -EINVAL;
2139 }
2140 } else if (m_bIs4KVideo) {
2141 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)videoWidth;
2142 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)videoHeight;
2143 } else if (bYuv888OverrideJpeg) {
2144 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2145 (int32_t)largeYuv888Size.width;
2146 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2147 (int32_t)largeYuv888Size.height;
2148 }
2149 break;
2150 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2151 case HAL_PIXEL_FORMAT_RAW16:
2152 case HAL_PIXEL_FORMAT_RAW10:
2153 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2154 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2155 isRawStreamRequested = true;
2156 break;
2157 default:
2158 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
2159 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2160 break;
2161 }
2162 }
2163
2164 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2165 (cam_stream_type_t) mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2166 gCamCapability[mCameraId]->color_arrangement);
2167
2168 if (newStream->priv == NULL) {
2169 //New stream, construct channel
2170 switch (newStream->stream_type) {
2171 case CAMERA3_STREAM_INPUT:
2172 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
2173 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
2174 break;
2175 case CAMERA3_STREAM_BIDIRECTIONAL:
2176 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
2177 GRALLOC_USAGE_HW_CAMERA_WRITE;
2178 break;
2179 case CAMERA3_STREAM_OUTPUT:
2180 /* For video encoding stream, set read/write rarely
2181 * flag so that they may be set to un-cached */
2182 if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
2183 newStream->usage |=
2184 (GRALLOC_USAGE_SW_READ_RARELY |
2185 GRALLOC_USAGE_SW_WRITE_RARELY |
2186 GRALLOC_USAGE_HW_CAMERA_WRITE);
2187 else if (IS_USAGE_ZSL(newStream->usage))
2188 {
2189 LOGD("ZSL usage flag skipping");
2190 }
2191 else if (newStream == zslStream
2192 || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
2193 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
2194 } else
2195 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
2196 break;
2197 default:
2198 LOGE("Invalid stream_type %d", newStream->stream_type);
2199 break;
2200 }
2201
2202 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
2203 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
2204 QCamera3ProcessingChannel *channel = NULL;
2205 switch (newStream->format) {
2206 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2207 if ((newStream->usage &
2208 private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
2209 (streamList->operation_mode ==
2210 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2211 ) {
2212 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2213 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002214 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002215 this,
2216 newStream,
2217 (cam_stream_type_t)
2218 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2219 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2220 mMetadataChannel,
2221 0); //heap buffers are not required for HFR video channel
2222 if (channel == NULL) {
2223 LOGE("allocation of channel failed");
2224 pthread_mutex_unlock(&mMutex);
2225 return -ENOMEM;
2226 }
2227 //channel->getNumBuffers() will return 0 here so use
2228 //MAX_INFLIGH_HFR_REQUESTS
2229 newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
2230 newStream->priv = channel;
2231 LOGI("num video buffers in HFR mode: %d",
2232 MAX_INFLIGHT_HFR_REQUESTS);
2233 } else {
2234 /* Copy stream contents in HFR preview only case to create
2235 * dummy batch channel so that sensor streaming is in
2236 * HFR mode */
2237 if (!m_bIsVideo && (streamList->operation_mode ==
2238 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
2239 mDummyBatchStream = *newStream;
2240 }
2241 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2242 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002243 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002244 this,
2245 newStream,
2246 (cam_stream_type_t)
2247 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2248 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2249 mMetadataChannel,
2250 MAX_INFLIGHT_REQUESTS);
2251 if (channel == NULL) {
2252 LOGE("allocation of channel failed");
2253 pthread_mutex_unlock(&mMutex);
2254 return -ENOMEM;
2255 }
2256 newStream->max_buffers = channel->getNumBuffers();
2257 newStream->priv = channel;
2258 }
2259 break;
2260 case HAL_PIXEL_FORMAT_YCbCr_420_888: {
2261 channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
2262 mChannelHandle,
2263 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002264 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002265 this,
2266 newStream,
2267 (cam_stream_type_t)
2268 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2269 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2270 mMetadataChannel);
2271 if (channel == NULL) {
2272 LOGE("allocation of YUV channel failed");
2273 pthread_mutex_unlock(&mMutex);
2274 return -ENOMEM;
2275 }
2276 newStream->max_buffers = channel->getNumBuffers();
2277 newStream->priv = channel;
2278 break;
2279 }
2280 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2281 case HAL_PIXEL_FORMAT_RAW16:
2282 case HAL_PIXEL_FORMAT_RAW10:
2283 mRawChannel = new QCamera3RawChannel(
2284 mCameraHandle->camera_handle, mChannelHandle,
2285 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002286 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002287 this, newStream,
2288 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2289 mMetadataChannel,
2290 (newStream->format == HAL_PIXEL_FORMAT_RAW16));
2291 if (mRawChannel == NULL) {
2292 LOGE("allocation of raw channel failed");
2293 pthread_mutex_unlock(&mMutex);
2294 return -ENOMEM;
2295 }
2296 newStream->max_buffers = mRawChannel->getNumBuffers();
2297 newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
2298 break;
2299 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08002300 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
2301 mDepthChannel = new QCamera3DepthChannel(
2302 mCameraHandle->camera_handle, mChannelHandle,
2303 mCameraHandle->ops, NULL, NULL, &padding_info,
2304 0, this, MAX_INFLIGHT_REQUESTS, newStream,
2305 mMetadataChannel);
2306 if (NULL == mDepthChannel) {
2307 LOGE("Allocation of depth channel failed");
2308 pthread_mutex_unlock(&mMutex);
2309 return NO_MEMORY;
2310 }
2311 newStream->priv = mDepthChannel;
2312 newStream->max_buffers = MAX_INFLIGHT_REQUESTS;
2313 } else {
2314 // Max live snapshot inflight buffer is 1. This is to mitigate
2315 // frame drop issues for video snapshot. The more buffers being
2316 // allocated, the more frame drops there are.
2317 mPictureChannel = new QCamera3PicChannel(
2318 mCameraHandle->camera_handle, mChannelHandle,
2319 mCameraHandle->ops, captureResultCb,
2320 setBufferErrorStatus, &padding_info, this, newStream,
2321 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2322 m_bIs4KVideo, isZsl, mMetadataChannel,
2323 (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
2324 if (mPictureChannel == NULL) {
2325 LOGE("allocation of channel failed");
2326 pthread_mutex_unlock(&mMutex);
2327 return -ENOMEM;
2328 }
2329 newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
2330 newStream->max_buffers = mPictureChannel->getNumBuffers();
2331 mPictureChannel->overrideYuvSize(
2332 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
2333 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
Thierry Strudel3d639192016-09-09 11:52:26 -07002334 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002335 break;
2336
2337 default:
2338 LOGE("not a supported format 0x%x", newStream->format);
2339 break;
2340 }
2341 } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
2342 newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
2343 } else {
2344 LOGE("Error, Unknown stream type");
2345 pthread_mutex_unlock(&mMutex);
2346 return -EINVAL;
2347 }
2348
2349 QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
2350 if (channel != NULL && channel->isUBWCEnabled()) {
2351 cam_format_t fmt = channel->getStreamDefaultFormat(
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07002352 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2353 newStream->width, newStream->height);
Thierry Strudel3d639192016-09-09 11:52:26 -07002354 if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
2355 newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
2356 }
2357 }
2358
2359 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2360 it != mStreamInfo.end(); it++) {
2361 if ((*it)->stream == newStream) {
2362 (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
2363 break;
2364 }
2365 }
2366 } else {
2367 // Channel already exists for this stream
2368 // Do nothing for now
2369 }
2370 padding_info = gCamCapability[mCameraId]->padding_info;
2371
Emilian Peev7650c122017-01-19 08:24:33 -08002372 /* Do not add entries for input&depth stream in metastream info
Thierry Strudel3d639192016-09-09 11:52:26 -07002373 * since there is no real stream associated with it
2374 */
Emilian Peev7650c122017-01-19 08:24:33 -08002375 if ((newStream->stream_type != CAMERA3_STREAM_INPUT) &&
2376 (newStream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002377 mStreamConfigInfo.num_streams++;
Emilian Peev7650c122017-01-19 08:24:33 -08002378 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002379 }
2380
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002381 // Create analysis stream all the time, even when h/w support is not available
2382 {
2383 cam_feature_mask_t analysisFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2384 setPAAFSupport(analysisFeatureMask, CAM_STREAM_TYPE_ANALYSIS,
2385 gCamCapability[mCameraId]->color_arrangement);
2386 cam_analysis_info_t analysisInfo;
2387 int32_t ret = NO_ERROR;
2388 ret = mCommon.getAnalysisInfo(
2389 FALSE,
2390 analysisFeatureMask,
2391 &analysisInfo);
2392 if (ret == NO_ERROR) {
2393 cam_dimension_t analysisDim;
2394 analysisDim = mCommon.getMatchingDimension(previewSize,
2395 analysisInfo.analysis_recommended_res);
2396
2397 mAnalysisChannel = new QCamera3SupportChannel(
2398 mCameraHandle->camera_handle,
2399 mChannelHandle,
2400 mCameraHandle->ops,
2401 &analysisInfo.analysis_padding_info,
2402 analysisFeatureMask,
2403 CAM_STREAM_TYPE_ANALYSIS,
2404 &analysisDim,
2405 (analysisInfo.analysis_format
2406 == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
2407 : CAM_FORMAT_YUV_420_NV21),
2408 analysisInfo.hw_analysis_supported,
2409 gCamCapability[mCameraId]->color_arrangement,
2410 this,
2411 0); // force buffer count to 0
2412 } else {
2413 LOGW("getAnalysisInfo failed, ret = %d", ret);
2414 }
2415 if (!mAnalysisChannel) {
2416 LOGW("Analysis channel cannot be created");
2417 }
2418 }
2419
Thierry Strudel3d639192016-09-09 11:52:26 -07002420 //RAW DUMP channel
2421 if (mEnableRawDump && isRawStreamRequested == false){
2422 cam_dimension_t rawDumpSize;
2423 rawDumpSize = getMaxRawSize(mCameraId);
2424 cam_feature_mask_t rawDumpFeatureMask = CAM_QCOM_FEATURE_NONE;
2425 setPAAFSupport(rawDumpFeatureMask,
2426 CAM_STREAM_TYPE_RAW,
2427 gCamCapability[mCameraId]->color_arrangement);
2428 mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
2429 mChannelHandle,
2430 mCameraHandle->ops,
2431 rawDumpSize,
2432 &padding_info,
2433 this, rawDumpFeatureMask);
2434 if (!mRawDumpChannel) {
2435 LOGE("Raw Dump channel cannot be created");
2436 pthread_mutex_unlock(&mMutex);
2437 return -ENOMEM;
2438 }
2439 }
2440
Chien-Yu Chenee335912017-02-09 17:53:20 -08002441 // Initialize HDR+ Raw Source channel if AP is providing RAW input to Easel.
2442 if (mHdrPlusClient != nullptr && mIsApInputUsedForHdrPlus) {
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002443 if (isRawStreamRequested || mRawDumpChannel) {
Chien-Yu Chenee335912017-02-09 17:53:20 -08002444 ALOGE("%s: Enabling HDR+ while RAW output stream is configured is not supported. "
2445 "HDR+ RAW source channel is not created.",
2446 __FUNCTION__);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002447 } else {
2448 cam_dimension_t rawSize = getMaxRawSize(mCameraId);
2449 cam_feature_mask_t hdrPlusRawFeatureMask = CAM_QCOM_FEATURE_NONE;
2450 setPAAFSupport(hdrPlusRawFeatureMask,
2451 CAM_STREAM_TYPE_RAW,
2452 gCamCapability[mCameraId]->color_arrangement);
2453 mHdrPlusRawSrcChannel = new QCamera3HdrPlusRawSrcChannel(mCameraHandle->camera_handle,
2454 mChannelHandle,
2455 mCameraHandle->ops,
2456 rawSize,
2457 &padding_info,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08002458 this, hdrPlusRawFeatureMask,
2459 mHdrPlusClient,
2460 kPbRaw10InputStreamId);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002461 if (!mHdrPlusRawSrcChannel) {
2462 LOGE("HDR+ Raw Source channel cannot be created");
2463 pthread_mutex_unlock(&mMutex);
2464 return -ENOMEM;
2465 }
2466 }
2467 }
2468
Thierry Strudel3d639192016-09-09 11:52:26 -07002469 if (mAnalysisChannel) {
2470 cam_analysis_info_t analysisInfo;
2471 memset(&analysisInfo, 0, sizeof(cam_analysis_info_t));
2472 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2473 CAM_STREAM_TYPE_ANALYSIS;
2474 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2475 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2476 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2477 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2478 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002479 rc = mCommon.getAnalysisInfo(FALSE,
Thierry Strudel3d639192016-09-09 11:52:26 -07002480 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2481 &analysisInfo);
2482 if (rc != NO_ERROR) {
2483 LOGE("getAnalysisInfo failed, ret = %d", rc);
2484 pthread_mutex_unlock(&mMutex);
2485 return rc;
2486 }
2487 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002488 mCommon.getMatchingDimension(previewSize,
2489 analysisInfo.analysis_recommended_res);
Thierry Strudel3d639192016-09-09 11:52:26 -07002490 mStreamConfigInfo.num_streams++;
2491 }
2492
2493 if (isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
2494 cam_analysis_info_t supportInfo;
2495 memset(&supportInfo, 0, sizeof(cam_analysis_info_t));
2496 cam_feature_mask_t callbackFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2497 setPAAFSupport(callbackFeatureMask,
2498 CAM_STREAM_TYPE_CALLBACK,
2499 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002500 int32_t ret = NO_ERROR;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002501 ret = mCommon.getAnalysisInfo(FALSE, callbackFeatureMask, &supportInfo);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002502 if (ret != NO_ERROR) {
2503 /* Ignore the error for Mono camera
2504 * because the PAAF bit mask is only set
2505 * for CAM_STREAM_TYPE_ANALYSIS stream type
2506 */
2507 if (gCamCapability[mCameraId]->color_arrangement != CAM_FILTER_ARRANGEMENT_Y) {
2508 LOGW("getAnalysisInfo failed, ret = %d", ret);
2509 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002510 }
2511 mSupportChannel = new QCamera3SupportChannel(
2512 mCameraHandle->camera_handle,
2513 mChannelHandle,
2514 mCameraHandle->ops,
2515 &gCamCapability[mCameraId]->padding_info,
2516 callbackFeatureMask,
2517 CAM_STREAM_TYPE_CALLBACK,
2518 &QCamera3SupportChannel::kDim,
2519 CAM_FORMAT_YUV_420_NV21,
2520 supportInfo.hw_analysis_supported,
2521 gCamCapability[mCameraId]->color_arrangement,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002522 this, 0);
Thierry Strudel3d639192016-09-09 11:52:26 -07002523 if (!mSupportChannel) {
2524 LOGE("dummy channel cannot be created");
2525 pthread_mutex_unlock(&mMutex);
2526 return -ENOMEM;
2527 }
2528 }
2529
2530 if (mSupportChannel) {
2531 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2532 QCamera3SupportChannel::kDim;
2533 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2534 CAM_STREAM_TYPE_CALLBACK;
2535 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2536 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2537 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2538 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2539 gCamCapability[mCameraId]->color_arrangement);
2540 mStreamConfigInfo.num_streams++;
2541 }
2542
2543 if (mRawDumpChannel) {
2544 cam_dimension_t rawSize;
2545 rawSize = getMaxRawSize(mCameraId);
2546 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2547 rawSize;
2548 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2549 CAM_STREAM_TYPE_RAW;
2550 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2551 CAM_QCOM_FEATURE_NONE;
2552 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2553 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2554 gCamCapability[mCameraId]->color_arrangement);
2555 mStreamConfigInfo.num_streams++;
2556 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002557
2558 if (mHdrPlusRawSrcChannel) {
2559 cam_dimension_t rawSize;
2560 rawSize = getMaxRawSize(mCameraId);
2561 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] = rawSize;
2562 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2563 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2564 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2565 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2566 gCamCapability[mCameraId]->color_arrangement);
2567 mStreamConfigInfo.num_streams++;
2568 }
2569
Thierry Strudel3d639192016-09-09 11:52:26 -07002570 /* In HFR mode, if video stream is not added, create a dummy channel so that
2571 * ISP can create a batch mode even for preview only case. This channel is
2572 * never 'start'ed (no stream-on), it is only 'initialized' */
2573 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2574 !m_bIsVideo) {
2575 cam_feature_mask_t dummyFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2576 setPAAFSupport(dummyFeatureMask,
2577 CAM_STREAM_TYPE_VIDEO,
2578 gCamCapability[mCameraId]->color_arrangement);
2579 mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2580 mChannelHandle,
2581 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002582 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002583 this,
2584 &mDummyBatchStream,
2585 CAM_STREAM_TYPE_VIDEO,
2586 dummyFeatureMask,
2587 mMetadataChannel);
2588 if (NULL == mDummyBatchChannel) {
2589 LOGE("creation of mDummyBatchChannel failed."
2590 "Preview will use non-hfr sensor mode ");
2591 }
2592 }
2593 if (mDummyBatchChannel) {
2594 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2595 mDummyBatchStream.width;
2596 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2597 mDummyBatchStream.height;
2598 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2599 CAM_STREAM_TYPE_VIDEO;
2600 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2601 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2602 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2603 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2604 gCamCapability[mCameraId]->color_arrangement);
2605 mStreamConfigInfo.num_streams++;
2606 }
2607
2608 mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
2609 mStreamConfigInfo.buffer_info.max_buffers =
2610 m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
2611
2612 /* Initialize mPendingRequestInfo and mPendingBuffersMap */
2613 for (pendingRequestIterator i = mPendingRequestsList.begin();
2614 i != mPendingRequestsList.end();) {
2615 i = erasePendingRequest(i);
2616 }
2617 mPendingFrameDropList.clear();
2618 // Initialize/Reset the pending buffers list
2619 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
2620 req.mPendingBufferList.clear();
2621 }
2622 mPendingBuffersMap.mPendingBuffersInRequest.clear();
2623
Thierry Strudel3d639192016-09-09 11:52:26 -07002624 mCurJpegMeta.clear();
2625 //Get min frame duration for this streams configuration
2626 deriveMinFrameDuration();
2627
Chien-Yu Chenee335912017-02-09 17:53:20 -08002628 mFirstPreviewIntentSeen = false;
2629
    // Disable HDR+ if it's enabled.
2631 disableHdrPlusModeLocked();
2632
Thierry Strudel3d639192016-09-09 11:52:26 -07002633 // Update state
2634 mState = CONFIGURED;
2635
2636 pthread_mutex_unlock(&mMutex);
2637
2638 return rc;
2639}
2640
2641/*===========================================================================
2642 * FUNCTION : validateCaptureRequest
2643 *
2644 * DESCRIPTION: validate a capture request from camera service
2645 *
2646 * PARAMETERS :
2647 * @request : request from framework to process
2648 *
2649 * RETURN :
2650 *
2651 *==========================================================================*/
2652int QCamera3HardwareInterface::validateCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002653 camera3_capture_request_t *request,
2654 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07002655{
2656 ssize_t idx = 0;
2657 const camera3_stream_buffer_t *b;
2658 CameraMetadata meta;
2659
2660 /* Sanity check the request */
2661 if (request == NULL) {
2662 LOGE("NULL capture request");
2663 return BAD_VALUE;
2664 }
2665
2666 if ((request->settings == NULL) && (mState == CONFIGURED)) {
2667 /*settings cannot be null for the first request*/
2668 return BAD_VALUE;
2669 }
2670
2671 uint32_t frameNumber = request->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002672 if ((request->num_output_buffers < 1 || request->output_buffers == NULL)
2673 && (internallyRequestedStreams.size() == 0)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002674 LOGE("Request %d: No output buffers provided!",
2675 __FUNCTION__, frameNumber);
2676 return BAD_VALUE;
2677 }
2678 if (request->num_output_buffers >= MAX_NUM_STREAMS) {
2679 LOGE("Number of buffers %d equals or is greater than maximum number of streams!",
2680 request->num_output_buffers, MAX_NUM_STREAMS);
2681 return BAD_VALUE;
2682 }
2683 if (request->input_buffer != NULL) {
2684 b = request->input_buffer;
2685 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2686 LOGE("Request %d: Buffer %ld: Status not OK!",
2687 frameNumber, (long)idx);
2688 return BAD_VALUE;
2689 }
2690 if (b->release_fence != -1) {
2691 LOGE("Request %d: Buffer %ld: Has a release fence!",
2692 frameNumber, (long)idx);
2693 return BAD_VALUE;
2694 }
2695 if (b->buffer == NULL) {
2696 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
2697 frameNumber, (long)idx);
2698 return BAD_VALUE;
2699 }
2700 }
2701
2702 // Validate all buffers
2703 b = request->output_buffers;
Thierry Strudel54dc9782017-02-15 12:12:10 -08002704 if (b == NULL) {
2705 return BAD_VALUE;
2706 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002707 while (idx < (ssize_t)request->num_output_buffers) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002708 QCamera3ProcessingChannel *channel =
2709 static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
2710 if (channel == NULL) {
2711 LOGE("Request %d: Buffer %ld: Unconfigured stream!",
2712 frameNumber, (long)idx);
2713 return BAD_VALUE;
2714 }
2715 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2716 LOGE("Request %d: Buffer %ld: Status not OK!",
2717 frameNumber, (long)idx);
2718 return BAD_VALUE;
2719 }
2720 if (b->release_fence != -1) {
2721 LOGE("Request %d: Buffer %ld: Has a release fence!",
2722 frameNumber, (long)idx);
2723 return BAD_VALUE;
2724 }
2725 if (b->buffer == NULL) {
2726 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
2727 frameNumber, (long)idx);
2728 return BAD_VALUE;
2729 }
2730 if (*(b->buffer) == NULL) {
2731 LOGE("Request %d: Buffer %ld: NULL private handle!",
2732 frameNumber, (long)idx);
2733 return BAD_VALUE;
2734 }
2735 idx++;
2736 b = request->output_buffers + idx;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002737 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002738 return NO_ERROR;
2739}
2740
2741/*===========================================================================
2742 * FUNCTION : deriveMinFrameDuration
2743 *
 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
2745 * on currently configured streams.
2746 *
2747 * PARAMETERS : NONE
2748 *
2749 * RETURN : NONE
2750 *
2751 *==========================================================================*/
2752void QCamera3HardwareInterface::deriveMinFrameDuration()
2753{
2754 int32_t maxJpegDim, maxProcessedDim, maxRawDim;
2755
2756 maxJpegDim = 0;
2757 maxProcessedDim = 0;
2758 maxRawDim = 0;
2759
2760 // Figure out maximum jpeg, processed, and raw dimensions
2761 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
2762 it != mStreamInfo.end(); it++) {
2763
2764 // Input stream doesn't have valid stream_type
2765 if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
2766 continue;
2767
2768 int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
2769 if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
2770 if (dimension > maxJpegDim)
2771 maxJpegDim = dimension;
2772 } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
2773 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
2774 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
2775 if (dimension > maxRawDim)
2776 maxRawDim = dimension;
2777 } else {
2778 if (dimension > maxProcessedDim)
2779 maxProcessedDim = dimension;
2780 }
2781 }
2782
2783 size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
2784 MAX_SIZES_CNT);
2785
2786 //Assume all jpeg dimensions are in processed dimensions.
2787 if (maxJpegDim > maxProcessedDim)
2788 maxProcessedDim = maxJpegDim;
2789 //Find the smallest raw dimension that is greater or equal to jpeg dimension
2790 if (maxProcessedDim > maxRawDim) {
2791 maxRawDim = INT32_MAX;
2792
2793 for (size_t i = 0; i < count; i++) {
2794 int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
2795 gCamCapability[mCameraId]->raw_dim[i].height;
2796 if (dimension >= maxProcessedDim && dimension < maxRawDim)
2797 maxRawDim = dimension;
2798 }
2799 }
2800
2801 //Find minimum durations for processed, jpeg, and raw
2802 for (size_t i = 0; i < count; i++) {
2803 if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
2804 gCamCapability[mCameraId]->raw_dim[i].height) {
2805 mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
2806 break;
2807 }
2808 }
2809 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
2810 for (size_t i = 0; i < count; i++) {
2811 if (maxProcessedDim ==
2812 gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
2813 gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
2814 mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
2815 mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
2816 break;
2817 }
2818 }
2819}
2820
/*===========================================================================
 * FUNCTION   : getMinFrameDuration
 *
 * DESCRIPTION: get minimum frame duration based on the current minimum frame
 *              durations and the current request configuration.
 *
 * PARAMETERS : @request: request sent by the frameworks
 *
 * RETURN     : min frame duration for a particular request
 *
 *==========================================================================*/
2832int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
2833{
2834 bool hasJpegStream = false;
2835 bool hasRawStream = false;
2836 for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
2837 const camera3_stream_t *stream = request->output_buffers[i].stream;
2838 if (stream->format == HAL_PIXEL_FORMAT_BLOB)
2839 hasJpegStream = true;
2840 else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
2841 stream->format == HAL_PIXEL_FORMAT_RAW10 ||
2842 stream->format == HAL_PIXEL_FORMAT_RAW16)
2843 hasRawStream = true;
2844 }
2845
2846 if (!hasJpegStream)
2847 return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
2848 else
2849 return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
2850}
2851
2852/*===========================================================================
2853 * FUNCTION : handleBuffersDuringFlushLock
2854 *
2855 * DESCRIPTION: Account for buffers returned from back-end during flush
2856 * This function is executed while mMutex is held by the caller.
2857 *
2858 * PARAMETERS :
2859 * @buffer: image buffer for the callback
2860 *
2861 * RETURN :
2862 *==========================================================================*/
2863void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
2864{
2865 bool buffer_found = false;
2866 for (List<PendingBuffersInRequest>::iterator req =
2867 mPendingBuffersMap.mPendingBuffersInRequest.begin();
2868 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
2869 for (List<PendingBufferInfo>::iterator i =
2870 req->mPendingBufferList.begin();
2871 i != req->mPendingBufferList.end(); i++) {
2872 if (i->buffer == buffer->buffer) {
2873 mPendingBuffersMap.numPendingBufsAtFlush--;
2874 LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
2875 buffer->buffer, req->frame_number,
2876 mPendingBuffersMap.numPendingBufsAtFlush);
2877 buffer_found = true;
2878 break;
2879 }
2880 }
2881 if (buffer_found) {
2882 break;
2883 }
2884 }
2885 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
2886 //signal the flush()
2887 LOGD("All buffers returned to HAL. Continue flush");
2888 pthread_cond_signal(&mBuffersCond);
2889 }
2890}
2891
Thierry Strudel3d639192016-09-09 11:52:26 -07002892/*===========================================================================
2893 * FUNCTION : handleBatchMetadata
2894 *
2895 * DESCRIPTION: Handles metadata buffer callback in batch mode
2896 *
2897 * PARAMETERS : @metadata_buf: metadata buffer
2898 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
2899 * the meta buf in this method
2900 *
2901 * RETURN :
2902 *
2903 *==========================================================================*/
void QCamera3HardwareInterface::handleBatchMetadata(
        mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BATCH_METADATA);

    if (NULL == metadata_buf) {
        LOGE("metadata_buf is NULL");
        return;
    }
    /* In batch mode, the metadata will contain the frame number and timestamp
     * of the last frame in the batch. Eg: a batch containing buffers from
     * request 5,6,7 and 8 will have frame number and timestamp corresponding
     * to 8. multiple process_capture_requests => 1 set_param =>
     * 1 handleBatchMetadata => multiple process_capture_results */
    metadata_buffer_t *metadata =
            (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
    int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
    uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
    uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
    uint32_t frame_number = 0, urgent_frame_number = 0;
    int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
    bool invalid_metadata = false;
    size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
    size_t loopCount = 1;
    // Set by handleMetadataWithLock if it keeps (queues) the metadata buffer;
    // in that case this function must not buf-done/free it below.
    bool is_metabuf_queued = false;

    // Pull the batch bookkeeping fields out of the metadata buffer. Any of
    // these may be absent, in which case the batch is treated as invalid.
    int32_t *p_frame_number_valid =
            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
    uint32_t *p_frame_number =
            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
    int64_t *p_capture_time =
            POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
    int32_t *p_urgent_frame_number_valid =
            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
    uint32_t *p_urgent_frame_number =
            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);

    if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
            (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
            (NULL == p_urgent_frame_number)) {
        LOGE("Invalid metadata");
        // Note: invalid metadata is still pushed through the per-frame loop
        // below (see comment there) rather than dropped outright.
        invalid_metadata = true;
    } else {
        frame_number_valid = *p_frame_number_valid;
        last_frame_number = *p_frame_number;
        last_frame_capture_time = *p_capture_time;
        urgent_frame_number_valid = *p_urgent_frame_number_valid;
        last_urgent_frame_number = *p_urgent_frame_number;
    }

    /* In batchmode, when no video buffers are requested, set_parms are sent
     * for every capture_request. The difference between consecutive urgent
     * frame numbers and frame numbers should be used to interpolate the
     * corresponding frame numbers and time stamps */
    pthread_mutex_lock(&mMutex);
    if (urgent_frame_number_valid) {
        // Map the batch's last urgent frame number back to the first frame
        // number of the batch, recorded in mPendingBatchMap at request time.
        ssize_t idx = mPendingBatchMap.indexOfKey(last_urgent_frame_number);
        if(idx < 0) {
            LOGE("Invalid urgent frame number received: %d. Irrecoverable error",
                last_urgent_frame_number);
            mState = ERROR;
            pthread_mutex_unlock(&mMutex);
            return;
        }
        first_urgent_frame_number = mPendingBatchMap.valueAt(idx);
        urgentFrameNumDiff = last_urgent_frame_number + 1 -
                first_urgent_frame_number;

        LOGD("urgent_frm: valid: %d frm_num: %d - %d",
                urgent_frame_number_valid,
                first_urgent_frame_number, last_urgent_frame_number);
    }

    if (frame_number_valid) {
        // Same mapping for the regular frame number; this path also retires
        // the batch entry from the pending map.
        ssize_t idx = mPendingBatchMap.indexOfKey(last_frame_number);
        if(idx < 0) {
            LOGE("Invalid frame number received: %d. Irrecoverable error",
                last_frame_number);
            mState = ERROR;
            pthread_mutex_unlock(&mMutex);
            return;
        }
        first_frame_number = mPendingBatchMap.valueAt(idx);
        frameNumDiff = last_frame_number + 1 -
                first_frame_number;
        mPendingBatchMap.removeItem(last_frame_number);

        LOGD("frm: valid: %d frm_num: %d - %d",
                frame_number_valid,
                first_frame_number, last_frame_number);

    }
    pthread_mutex_unlock(&mMutex);

    if (urgent_frame_number_valid || frame_number_valid) {
        // Iterate once per frame in the batch; the two diffs can differ, so
        // take the larger. A diff above the max batch size indicates a
        // bookkeeping problem and is logged but not fatal.
        loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
        if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
            LOGE("urgentFrameNumDiff: %d urgentFrameNum: %d",
                    urgentFrameNumDiff, last_urgent_frame_number);
        if (frameNumDiff > MAX_HFR_BATCH_SIZE)
            LOGE("frameNumDiff: %d frameNum: %d",
                    frameNumDiff, last_frame_number);
    }

    for (size_t i = 0; i < loopCount; i++) {
        /* handleMetadataWithLock is called even for invalid_metadata for
         * pipeline depth calculation */
        if (!invalid_metadata) {
            /* Infer frame number. Batch metadata contains frame number of the
             * last frame */
            if (urgent_frame_number_valid) {
                if (i < urgentFrameNumDiff) {
                    // Rewrite the (shared) metadata buffer in place with the
                    // interpolated urgent frame number for this iteration.
                    urgent_frame_number =
                            first_urgent_frame_number + i;
                    LOGD("inferred urgent frame_number: %d",
                            urgent_frame_number);
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                            CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
                } else {
                    /* This is to handle when urgentFrameNumDiff < frameNumDiff */
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                            CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
                }
            }

            /* Infer frame number. Batch metadata contains frame number of the
             * last frame */
            if (frame_number_valid) {
                if (i < frameNumDiff) {
                    frame_number = first_frame_number + i;
                    LOGD("inferred frame_number: %d", frame_number);
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                            CAM_INTF_META_FRAME_NUMBER, frame_number);
                } else {
                    /* This is to handle when urgentFrameNumDiff > frameNumDiff */
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                            CAM_INTF_META_FRAME_NUMBER_VALID, 0);
                }
            }

            if (last_frame_capture_time) {
                //Infer timestamp
                // The batch carries only the last frame's timestamp; spread
                // the earlier frames evenly at the HFR video frame interval.
                first_frame_capture_time = last_frame_capture_time -
                        (((loopCount - 1) * NSEC_PER_SEC) / (double) mHFRVideoFps);
                capture_time =
                        first_frame_capture_time + (i * NSEC_PER_SEC / (double) mHFRVideoFps);
                ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                        CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
                LOGD("batch capture_time: %lld, capture_time: %lld",
                        last_frame_capture_time, capture_time);
            }
        }
        pthread_mutex_lock(&mMutex);
        handleMetadataWithLock(metadata_buf,
                false /* free_and_bufdone_meta_buf */,
                (i == 0) /* first metadata in the batch metadata */,
                &is_metabuf_queued /* if metabuf isqueued or not */);
        pthread_mutex_unlock(&mMutex);
    }

    /* BufDone metadata buffer */
    // Only release the buffer if the callee did not queue it for later use.
    if (free_and_bufdone_meta_buf && !is_metabuf_queued) {
        mMetadataChannel->bufDone(metadata_buf);
        free(metadata_buf);
        metadata_buf = NULL;
    }
}
3071
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003072void QCamera3HardwareInterface::notifyError(uint32_t frameNumber,
3073 camera3_error_msg_code_t errorCode)
3074{
3075 camera3_notify_msg_t notify_msg;
3076 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3077 notify_msg.type = CAMERA3_MSG_ERROR;
3078 notify_msg.message.error.error_code = errorCode;
3079 notify_msg.message.error.error_stream = NULL;
3080 notify_msg.message.error.frame_number = frameNumber;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003081 orchestrateNotify(&notify_msg);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003082
3083 return;
3084}
Thierry Strudel3d639192016-09-09 11:52:26 -07003085/*===========================================================================
3086 * FUNCTION : handleMetadataWithLock
3087 *
3088 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
3089 *
3090 * PARAMETERS : @metadata_buf: metadata buffer
3091 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3092 * the meta buf in this method
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003093 * @firstMetadataInBatch: Boolean to indicate whether this is the
3094 * first metadata in a batch. Valid only for batch mode
Thierry Strudel54dc9782017-02-15 12:12:10 -08003095 * @p_is_metabuf_queued: Pointer to Boolean to check if metadata
3096 * buffer is enqueued or not.
Thierry Strudel3d639192016-09-09 11:52:26 -07003097 *
3098 * RETURN :
3099 *
3100 *==========================================================================*/
void QCamera3HardwareInterface::handleMetadataWithLock(
        mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
        bool firstMetadataInBatch, bool *p_is_metabuf_queued)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_METADATA_LKD);
    if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
        //during flush do not send metadata from this thread
        LOGD("not sending metadata during flush or when mState is error");
        if (free_and_bufdone_meta_buf) {
            mMetadataChannel->bufDone(metadata_buf);
            free(metadata_buf);
        }
        return;
    }

    //not in flush
    metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
    int32_t frame_number_valid, urgent_frame_number_valid;
    uint32_t frame_number, urgent_frame_number;
    int64_t capture_time;
    nsecs_t currentSysTime;

    // Pointers into the HAL metadata buffer; all five must be present for
    // this buffer to be usable.
    int32_t *p_frame_number_valid =
            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
    uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
    int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
    int32_t *p_urgent_frame_number_valid =
            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
    uint32_t *p_urgent_frame_number =
            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
    // Optional: only bound when the backend reported dropped stream buffers.
    IF_META_AVAILABLE(cam_stream_ID_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
            metadata) {
        LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
                *p_frame_number_valid, *p_frame_number);
    }

    // Filled in below when a pending request matches this frame; handed to
    // handlePendingResultsWithLock() at the end.
    camera_metadata_t *resultMetadata = nullptr;

    if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
            (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
        LOGE("Invalid metadata");
        if (free_and_bufdone_meta_buf) {
            mMetadataChannel->bufDone(metadata_buf);
            free(metadata_buf);
        }
        goto done_metadata;
    }
    frame_number_valid = *p_frame_number_valid;
    frame_number = *p_frame_number;
    capture_time = *p_capture_time;
    urgent_frame_number_valid = *p_urgent_frame_number_valid;
    urgent_frame_number = *p_urgent_frame_number;
    currentSysTime = systemTime(CLOCK_MONOTONIC);

    // Detect if buffers from any requests are overdue
    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        int64_t timeout;
        {
            Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
            // If there is a pending HDR+ request, the following requests may be blocked until the
            // HDR+ request is done. So allow a longer timeout.
            timeout = (mHdrPlusPendingRequests.size() > 0) ?
                    MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT : MISSING_REQUEST_BUF_TIMEOUT;
        }

        if ( (currentSysTime - req.timestamp) > s2ns(timeout) ) {
            // Request has been pending longer than the timeout: cancel each
            // of its outstanding buffers on the owning channel.
            for (auto &missed : req.mPendingBufferList) {
                assert(missed.stream->priv);
                if (missed.stream->priv) {
                    QCamera3Channel *ch = (QCamera3Channel *)(missed.stream->priv);
                    assert(ch->mStreams[0]);
                    if (ch->mStreams[0]) {
                        LOGE("Cancel missing frame = %d, buffer = %p,"
                            "stream type = %d, stream format = %d",
                            req.frame_number, missed.buffer,
                            ch->mStreams[0]->getMyType(), missed.stream->format);
                        ch->timeoutFrame(req.frame_number);
                    }
                }
            }
        }
    }
    //Partial result on process_capture_result for timestamp
    if (urgent_frame_number_valid) {
        LOGD("valid urgent frame_number = %u, capture_time = %lld",
                urgent_frame_number, capture_time);

        //Received an urgent Frame Number, handle it
        //using partial results
        for (pendingRequestIterator i =
                mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
            LOGD("Iterator Frame = %d urgent frame = %d",
                    i->frame_number, urgent_frame_number);

            // Older live requests that never got their partial result mean
            // the HAL skipped urgent metadata for them — log only.
            if ((!i->input_buffer) && (i->frame_number < urgent_frame_number) &&
                    (i->partial_result_cnt == 0)) {
                LOGE("Error: HAL missed urgent metadata for frame number %d",
                        i->frame_number);
            }

            if (i->frame_number == urgent_frame_number &&
                    i->bUrgentReceived == 0) {

                camera3_capture_result_t result;
                memset(&result, 0, sizeof(camera3_capture_result_t));

                i->partial_result_cnt++;
                i->bUrgentReceived = 1;
                // Extract 3A metadata
                result.result =
                    translateCbUrgentMetadataToResultMetadata(metadata);
                // Populate metadata result
                result.frame_number = urgent_frame_number;
                result.num_output_buffers = 0;
                result.output_buffers = NULL;
                result.partial_result = i->partial_result_cnt;

                if (mHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
                    // Notify HDR+ client about the partial metadata.
                    mHdrPlusClient->notifyFrameMetadata(result.frame_number, *result.result,
                            result.partial_result == PARTIAL_RESULT_COUNT);
                }

                orchestrateResult(&result);
                LOGD("urgent frame_number = %u, capture_time = %lld",
                        result.frame_number, capture_time);
                if (mResetInstantAEC && mInstantAECSettledFrameNumber == 0) {
                    // Instant AEC settled for this frame.
                    LOGH("instant AEC settled for frame number %d", urgent_frame_number);
                    mInstantAECSettledFrameNumber = urgent_frame_number;
                }
                // translateCbUrgentMetadataToResultMetadata allocated this;
                // the framework copy was made during orchestrateResult.
                free_camera_metadata((camera_metadata_t *)result.result);
                break;
            }
        }
    }

    if (!frame_number_valid) {
        LOGD("Not a valid normal frame number, used as SOF only");
        if (free_and_bufdone_meta_buf) {
            mMetadataChannel->bufDone(metadata_buf);
            free(metadata_buf);
        }
        goto done_metadata;
    }
    LOGH("valid frame_number = %u, capture_time = %lld",
            frame_number, capture_time);

    if (metadata->is_depth_data_valid) {
        handleDepthDataLocked(metadata->depth_data, frame_number);
    }

    // Check whether any stream buffer corresponding to this is dropped or not
    // If dropped, then send the ERROR_BUFFER for the corresponding stream
    // OR check if instant AEC is enabled, then need to drop frames until AEC is settled.
    for (auto & pendingRequest : mPendingRequestsList) {
        if (p_cam_frame_drop || (mInstantAEC || pendingRequest.frame_number <
                mInstantAECSettledFrameNumber)) {
            camera3_notify_msg_t notify_msg = {};
            for (auto & buffer : pendingRequest.buffers) {
                bool dropFrame = false;
                QCamera3ProcessingChannel *channel =
                        (QCamera3ProcessingChannel *)buffer.stream->priv;
                uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
                if (p_cam_frame_drop) {
                    for (uint32_t k = 0; k < p_cam_frame_drop->num_streams; k++) {
                        if (streamID == p_cam_frame_drop->stream_request[k].streamID) {
                            // Got the stream ID for drop frame.
                            dropFrame = true;
                            break;
                        }
                    }
                } else {
                    // This is instant AEC case.
                    // For instant AEC drop the stream until AEC is settled.
                    dropFrame = true;
                }

                if (dropFrame) {
                    // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
                    if (p_cam_frame_drop) {
                        // Treat msg as error for system buffer drops
                        LOGE("Start of reporting error frame#=%u, streamID=%u",
                                pendingRequest.frame_number, streamID);
                    } else {
                        // For instant AEC, inform frame drop and frame number
                        LOGH("Start of reporting error frame#=%u for instant AEC, streamID=%u, "
                                "AEC settled frame number = %u",
                                pendingRequest.frame_number, streamID,
                                mInstantAECSettledFrameNumber);
                    }
                    notify_msg.type = CAMERA3_MSG_ERROR;
                    notify_msg.message.error.frame_number = pendingRequest.frame_number;
                    notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
                    notify_msg.message.error.error_stream = buffer.stream;
                    orchestrateNotify(&notify_msg);
                    if (p_cam_frame_drop) {
                        // Treat msg as error for system buffer drops
                        LOGE("End of reporting error frame#=%u, streamID=%u",
                                pendingRequest.frame_number, streamID);
                    } else {
                        // For instant AEC, inform frame drop and frame number
                        LOGH("End of reporting error frame#=%u for instant AEC, streamID=%u, "
                                "AEC settled frame number = %u",
                                pendingRequest.frame_number, streamID,
                                mInstantAECSettledFrameNumber);
                    }
                    // Remember the drop so handleBufferWithLock() can mark the
                    // buffer CAMERA3_BUFFER_STATUS_ERROR when it arrives.
                    PendingFrameDropInfo PendingFrameDrop;
                    PendingFrameDrop.frame_number = pendingRequest.frame_number;
                    PendingFrameDrop.stream_ID = streamID;
                    // Add the Frame drop info to mPendingFrameDropList
                    mPendingFrameDropList.push_back(PendingFrameDrop);
                }
            }
        }
    }

    for (auto & pendingRequest : mPendingRequestsList) {
        // Find the pending request with the frame number.
        if (pendingRequest.frame_number == frame_number) {
            // Update the sensor timestamp.
            pendingRequest.timestamp = capture_time;

            /* Set the timestamp in display metadata so that clients aware of
               private_handle such as VT can use this un-modified timestamps.
               Camera framework is unaware of this timestamp and cannot change this */
            updateTimeStampInPendingBuffers(pendingRequest.frame_number, pendingRequest.timestamp);

            // Find channel requiring metadata, meaning internal offline postprocess
            // is needed.
            //TODO: for now, we don't support two streams requiring metadata at the same time.
            // (because we are not making copies, and metadata buffer is not reference counted.
            bool internalPproc = false;
            for (pendingBufferIterator iter = pendingRequest.buffers.begin();
                    iter != pendingRequest.buffers.end(); iter++) {
                if (iter->need_metadata) {
                    internalPproc = true;
                    QCamera3ProcessingChannel *channel =
                            (QCamera3ProcessingChannel *)iter->stream->priv;
                    channel->queueReprocMetadata(metadata_buf);
                    // Tell the caller the metadata buffer is now owned by the
                    // reprocess channel (so it must not free it).
                    if(p_is_metabuf_queued != NULL) {
                        *p_is_metabuf_queued = true;
                    }
                    break;
                }
            }
            // Same check for internally generated (HAL-initiated) requests.
            for (auto itr = pendingRequest.internalRequestList.begin();
                    itr != pendingRequest.internalRequestList.end(); itr++) {
                if (itr->need_metadata) {
                    internalPproc = true;
                    QCamera3ProcessingChannel *channel =
                            (QCamera3ProcessingChannel *)itr->stream->priv;
                    channel->queueReprocMetadata(metadata_buf);
                    break;
                }
            }

            saveExifParams(metadata);
            resultMetadata = translateFromHalMetadata(metadata,
                    pendingRequest.timestamp, pendingRequest.request_id,
                    pendingRequest.jpegMetadata, pendingRequest.pipeline_depth,
                    pendingRequest.capture_intent,
                    pendingRequest.hybrid_ae_enable,
                    /* DevCamDebug metadata translateFromHalMetadata function call*/
                    pendingRequest.DevCamDebug_meta_enable,
                    /* DevCamDebug metadata end */
                    internalPproc, pendingRequest.fwkCacMode,
                    firstMetadataInBatch);

            updateFpsInPreviewBuffer(metadata, pendingRequest.frame_number);

            if (pendingRequest.blob_request) {
                //Dump tuning metadata if enabled and available
                char prop[PROPERTY_VALUE_MAX];
                memset(prop, 0, sizeof(prop));
                property_get("persist.camera.dumpmetadata", prop, "0");
                int32_t enabled = atoi(prop);
                if (enabled && metadata->is_tuning_params_valid) {
                    dumpMetadataToFile(metadata->tuning_params,
                            mMetaFrameCount,
                            enabled,
                            "Snapshot",
                            frame_number);
                }
            }

            if (!internalPproc) {
                LOGD("couldn't find need_metadata for this metadata");
                // Return metadata buffer
                if (free_and_bufdone_meta_buf) {
                    mMetadataChannel->bufDone(metadata_buf);
                    free(metadata_buf);
                }
            }

            break;
        }
    }

    // Try to send out shutter callbacks and capture results.
    handlePendingResultsWithLock(frame_number, resultMetadata);
    return;

done_metadata:
    // Error/SOF-only path: the frame still advanced the pipeline, so bump
    // pipeline depth for every pending request before unblocking.
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end() ;i++) {
        i->pipeline_depth++;
    }
    LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
    unblockRequestIfNecessary();
}
3413
3414/*===========================================================================
 * FUNCTION   : handleDepthDataLocked
3416 *
3417 * DESCRIPTION: Handles incoming depth data
3418 *
3419 * PARAMETERS : @depthData : Depth data
3420 * @frameNumber: Frame number of the incoming depth data
3421 *
3422 * RETURN :
3423 *
3424 *==========================================================================*/
3425void QCamera3HardwareInterface::handleDepthDataLocked(
3426 const cam_depth_data_t &depthData, uint32_t frameNumber) {
3427 uint32_t currentFrameNumber;
3428 buffer_handle_t *depthBuffer;
3429
3430 if (nullptr == mDepthChannel) {
3431 LOGE("Depth channel not present!");
3432 return;
3433 }
3434
3435 camera3_stream_buffer_t resultBuffer =
3436 {.acquire_fence = -1,
3437 .release_fence = -1,
3438 .status = CAMERA3_BUFFER_STATUS_OK,
3439 .buffer = nullptr,
3440 .stream = mDepthChannel->getStream()};
3441 camera3_capture_result_t result =
3442 {.result = nullptr,
3443 .num_output_buffers = 1,
3444 .output_buffers = &resultBuffer,
3445 .partial_result = 0,
3446 .frame_number = 0};
3447
3448 do {
3449 depthBuffer = mDepthChannel->getOldestFrame(currentFrameNumber);
3450 if (nullptr == depthBuffer) {
3451 break;
3452 }
3453
3454 result.frame_number = currentFrameNumber;
3455 resultBuffer.buffer = depthBuffer;
3456 if (currentFrameNumber == frameNumber) {
3457 int32_t rc = mDepthChannel->populateDepthData(depthData,
3458 frameNumber);
3459 if (NO_ERROR != rc) {
3460 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3461 } else {
3462 resultBuffer.status = CAMERA3_BUFFER_STATUS_OK;
3463 }
3464 } else if (currentFrameNumber > frameNumber) {
3465 break;
3466 } else {
3467 camera3_notify_msg_t notify_msg = {.type = CAMERA3_MSG_ERROR,
3468 {{currentFrameNumber, mDepthChannel->getStream(),
3469 CAMERA3_MSG_ERROR_BUFFER}}};
3470 orchestrateNotify(&notify_msg);
3471
3472 LOGE("Depth buffer for frame number: %d is missing "
3473 "returning back!", currentFrameNumber);
3474 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3475 }
3476 mDepthChannel->unmapBuffer(currentFrameNumber);
3477
3478 orchestrateResult(&result);
3479 } while (currentFrameNumber < frameNumber);
3480}
3481
3482/*===========================================================================
3483 * FUNCTION : notifyErrorFoPendingDepthData
3484 *
3485 * DESCRIPTION: Returns error for any pending depth buffers
3486 *
3487 * PARAMETERS : depthCh - depth channel that needs to get flushed
3488 *
3489 * RETURN :
3490 *
3491 *==========================================================================*/
3492void QCamera3HardwareInterface::notifyErrorFoPendingDepthData(
3493 QCamera3DepthChannel *depthCh) {
3494 uint32_t currentFrameNumber;
3495 buffer_handle_t *depthBuffer;
3496
3497 if (nullptr == depthCh) {
3498 return;
3499 }
3500
3501 camera3_notify_msg_t notify_msg =
3502 {.type = CAMERA3_MSG_ERROR,
3503 {{0, depthCh->getStream(), CAMERA3_MSG_ERROR_BUFFER}}};
3504 camera3_stream_buffer_t resultBuffer =
3505 {.acquire_fence = -1,
3506 .release_fence = -1,
3507 .buffer = nullptr,
3508 .stream = depthCh->getStream(),
3509 .status = CAMERA3_BUFFER_STATUS_ERROR};
3510 camera3_capture_result_t result =
3511 {.result = nullptr,
3512 .frame_number = 0,
3513 .num_output_buffers = 1,
3514 .partial_result = 0,
3515 .output_buffers = &resultBuffer};
3516
3517 while (nullptr !=
3518 (depthBuffer = depthCh->getOldestFrame(currentFrameNumber))) {
3519 depthCh->unmapBuffer(currentFrameNumber);
3520
3521 notify_msg.message.error.frame_number = currentFrameNumber;
3522 orchestrateNotify(&notify_msg);
3523
3524 resultBuffer.buffer = depthBuffer;
3525 result.frame_number = currentFrameNumber;
3526 orchestrateResult(&result);
3527 };
3528}
3529
3530/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07003531 * FUNCTION : hdrPlusPerfLock
3532 *
3533 * DESCRIPTION: perf lock for HDR+ using custom intent
3534 *
3535 * PARAMETERS : @metadata_buf: Metadata super_buf pointer
3536 *
3537 * RETURN : None
3538 *
3539 *==========================================================================*/
3540void QCamera3HardwareInterface::hdrPlusPerfLock(
3541 mm_camera_super_buf_t *metadata_buf)
3542{
3543 if (NULL == metadata_buf) {
3544 LOGE("metadata_buf is NULL");
3545 return;
3546 }
3547 metadata_buffer_t *metadata =
3548 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3549 int32_t *p_frame_number_valid =
3550 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3551 uint32_t *p_frame_number =
3552 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3553
3554 if (p_frame_number_valid == NULL || p_frame_number == NULL) {
3555 LOGE("%s: Invalid metadata", __func__);
3556 return;
3557 }
3558
3559 //acquire perf lock for 5 sec after the last HDR frame is captured
3560 if ((p_frame_number_valid != NULL) && *p_frame_number_valid) {
3561 if ((p_frame_number != NULL) &&
3562 (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003563 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT, HDR_PLUS_PERF_TIME_OUT);
Thierry Strudel3d639192016-09-09 11:52:26 -07003564 }
3565 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003566}
3567
3568/*===========================================================================
3569 * FUNCTION : handleInputBufferWithLock
3570 *
3571 * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
3572 *
3573 * PARAMETERS : @frame_number: frame number of the input buffer
3574 *
3575 * RETURN :
3576 *
3577 *==========================================================================*/
void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_IN_BUF_LKD);
    // Locate the pending (reprocess) request matching this frame number.
    pendingRequestIterator i = mPendingRequestsList.begin();
    while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
        i++;
    }
    if (i != mPendingRequestsList.end() && i->input_buffer) {
        //found the right request
        if (!i->shutter_notified) {
            CameraMetadata settings;
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
            // Fall back to the current monotonic time if the input settings
            // don't carry a sensor timestamp.
            nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
            if(i->settings) {
                settings = i->settings;
                if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
                    capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
                } else {
                    LOGE("No timestamp in input settings! Using current one.");
                }
            } else {
                LOGE("Input settings missing!");
            }

            // Send the shutter notification before delivering the result.
            notify_msg.type = CAMERA3_MSG_SHUTTER;
            notify_msg.message.shutter.frame_number = frame_number;
            notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
            orchestrateNotify(&notify_msg);
            i->shutter_notified = true;
            LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
                    i->frame_number, notify_msg.message.shutter.timestamp);
        }

        // Wait for (and close) the input buffer's release fence before
        // returning it to the framework.
        if (i->input_buffer->release_fence != -1) {
            int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
            close(i->input_buffer->release_fence);
            if (rc != OK) {
                LOGE("input buffer sync wait failed %d", rc);
            }
        }

        // Reprocess result: settings echo back as the result metadata and
        // the input buffer is returned in the same capture result.
        camera3_capture_result result;
        memset(&result, 0, sizeof(camera3_capture_result));
        result.frame_number = frame_number;
        result.result = i->settings;
        result.input_buffer = i->input_buffer;
        result.partial_result = PARTIAL_RESULT_COUNT;

        orchestrateResult(&result);
        LOGD("Input request metadata and input buffer frame_number = %u",
                i->frame_number);
        // Request is fully serviced — drop it from the pending list.
        i = erasePendingRequest(i);
    } else {
        LOGE("Could not find input request for frame number %d", frame_number);
    }
}
3635
3636/*===========================================================================
3637 * FUNCTION : handleBufferWithLock
3638 *
3639 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
3640 *
3641 * PARAMETERS : @buffer: image buffer for the callback
3642 * @frame_number: frame number of the image buffer
3643 *
3644 * RETURN :
3645 *
3646 *==========================================================================*/
void QCamera3HardwareInterface::handleBufferWithLock(
        camera3_stream_buffer_t *buffer, uint32_t frame_number)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BUF_LKD);

    // A completed BLOB (JPEG) buffer means the snapshot is done — release
    // the snapshot perf lock.
    if (buffer->stream->format == HAL_PIXEL_FORMAT_BLOB) {
        mPerfLockMgr.releasePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
    }

    /* Nothing to be done during error state */
    if ((ERROR == mState) || (DEINIT == mState)) {
        return;
    }
    if (mFlushPerf) {
        handleBuffersDuringFlushLock(buffer);
        return;
    }
    //not in flush
    // If the frame number doesn't exist in the pending request list,
    // directly send the buffer to the frameworks, and update pending buffers map
    // Otherwise, book-keep the buffer.
    pendingRequestIterator i = mPendingRequestsList.begin();
    while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
        i++;
    }
    if (i == mPendingRequestsList.end()) {
        // Verify all pending requests frame_numbers are greater
        for (pendingRequestIterator j = mPendingRequestsList.begin();
                j != mPendingRequestsList.end(); j++) {
            if ((j->frame_number < frame_number) && !(j->input_buffer)) {
                LOGW("Error: pending live frame number %d is smaller than %d",
                        j->frame_number, frame_number);
            }
        }
        camera3_capture_result_t result;
        memset(&result, 0, sizeof(camera3_capture_result_t));
        result.result = NULL;
        result.frame_number = frame_number;
        result.num_output_buffers = 1;
        result.partial_result = 0;
        // If this buffer was flagged as a dropped frame earlier (by
        // handleMetadataWithLock), mark it as an error before returning it.
        for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
                m != mPendingFrameDropList.end(); m++) {
            QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
            uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
            if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
                buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
                LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
                        frame_number, streamID);
                m = mPendingFrameDropList.erase(m);
                break;
            }
        }
        // Merge in any error status recorded against the native buffer.
        buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
        result.output_buffers = buffer;
        LOGH("result frame_number = %d, buffer = %p",
                frame_number, buffer->buffer);

        mPendingBuffersMap.removeBuf(buffer->buffer);

        orchestrateResult(&result);
    } else {
        if (i->input_buffer) {
            // Reprocess request: honor the input buffer's release fence
            // before the buffer can be considered done.
            if (i->input_buffer->release_fence != -1) {
                int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
                close(i->input_buffer->release_fence);
                if (rc != OK) {
                    LOGE("input buffer sync wait failed %d", rc);
                }
            }
        }

        // Put buffer into the pending request
        for (auto &requestedBuffer : i->buffers) {
            if (requestedBuffer.stream == buffer->stream) {
                if (requestedBuffer.buffer != nullptr) {
                    LOGE("Error: buffer is already set");
                } else {
                    // Cache a heap copy of the stream buffer; it is freed in
                    // handlePendingResultsWithLock() when the result is sent.
                    requestedBuffer.buffer = (camera3_stream_buffer_t *)malloc(
                        sizeof(camera3_stream_buffer_t));
                    *(requestedBuffer.buffer) = *buffer;
                    LOGH("cache buffer %p at result frame_number %u",
                        buffer->buffer, frame_number);
                }
            }
        }

        if (i->input_buffer) {
            // For a reprocessing request, try to send out shutter callback and result metadata.
            handlePendingResultsWithLock(frame_number, nullptr);
        }
    }

    // First preview buffer out: drop startup perf locks and switch to the
    // steady-state encode power hint.
    if (mPreviewStarted == false) {
        QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
        if ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask()) {
            mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
            mPerfLockMgr.releasePerfLock(PERF_LOCK_OPEN_CAMERA);
            mPreviewStarted = true;

            // Set power hint for preview
            mPerfLockMgr.acquirePerfLock(PERF_LOCK_POWERHINT_ENCODE, 0);
        }
    }
}
3751
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003752void QCamera3HardwareInterface::handlePendingResultsWithLock(uint32_t frameNumber,
3753 const camera_metadata_t *resultMetadata)
3754{
3755 // Find the pending request for this result metadata.
3756 auto requestIter = mPendingRequestsList.begin();
3757 while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != frameNumber) {
3758 requestIter++;
3759 }
3760
3761 if (requestIter == mPendingRequestsList.end()) {
3762 ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, frameNumber);
3763 return;
3764 }
3765
3766 // Update the result metadata
3767 requestIter->resultMetadata = resultMetadata;
3768
3769 // Check what type of request this is.
3770 bool liveRequest = false;
3771 if (requestIter->hdrplus) {
3772 // HDR+ request doesn't have partial results.
3773 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
3774 } else if (requestIter->input_buffer != nullptr) {
3775 // Reprocessing request result is the same as settings.
3776 requestIter->resultMetadata = requestIter->settings;
3777 // Reprocessing request doesn't have partial results.
3778 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
3779 } else {
3780 liveRequest = true;
3781 requestIter->partial_result_cnt++;
3782 mPendingLiveRequest--;
3783
3784 // For a live request, send the metadata to HDR+ client.
Chien-Yu Chenee335912017-02-09 17:53:20 -08003785 if (mHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003786 mHdrPlusClient->notifyFrameMetadata(frameNumber, *resultMetadata,
3787 requestIter->partial_result_cnt == PARTIAL_RESULT_COUNT);
3788 }
3789 }
3790
3791 // The pending requests are ordered by increasing frame numbers. The shutter callback and
3792 // result metadata are ready to be sent if all previous pending requests are ready to be sent.
3793 bool readyToSend = true;
3794
3795 // Iterate through the pending requests to send out shutter callbacks and results that are
3796 // ready. Also if this result metadata belongs to a live request, notify errors for previous
3797 // live requests that don't have result metadata yet.
3798 auto iter = mPendingRequestsList.begin();
3799 while (iter != mPendingRequestsList.end()) {
3800 // Check if current pending request is ready. If it's not ready, the following pending
3801 // requests are also not ready.
3802 if (readyToSend && iter->resultMetadata == nullptr) {
3803 readyToSend = false;
3804 }
3805
3806 bool thisLiveRequest = iter->hdrplus == false && iter->input_buffer == nullptr;
3807
3808 std::vector<camera3_stream_buffer_t> outputBuffers;
3809
3810 camera3_capture_result_t result = {};
3811 result.frame_number = iter->frame_number;
3812 result.result = iter->resultMetadata;
3813 result.partial_result = iter->partial_result_cnt;
3814
3815 // If this pending buffer has result metadata, we may be able to send out shutter callback
3816 // and result metadata.
3817 if (iter->resultMetadata != nullptr) {
3818 if (!readyToSend) {
3819 // If any of the previous pending request is not ready, this pending request is
3820 // also not ready to send in order to keep shutter callbacks and result metadata
3821 // in order.
3822 iter++;
3823 continue;
3824 }
3825
3826 // Invoke shutter callback if not yet.
3827 if (!iter->shutter_notified) {
3828 int64_t timestamp = systemTime(CLOCK_MONOTONIC);
3829
3830 // Find the timestamp in HDR+ result metadata
3831 camera_metadata_ro_entry_t entry;
3832 status_t res = find_camera_metadata_ro_entry(iter->resultMetadata,
3833 ANDROID_SENSOR_TIMESTAMP, &entry);
3834 if (res != OK) {
3835 ALOGE("%s: Cannot find sensor timestamp for frame number %d: %s (%d)",
3836 __FUNCTION__, iter->frame_number, strerror(-res), res);
3837 } else {
3838 timestamp = entry.data.i64[0];
3839 }
3840
3841 camera3_notify_msg_t notify_msg = {};
3842 notify_msg.type = CAMERA3_MSG_SHUTTER;
3843 notify_msg.message.shutter.frame_number = iter->frame_number;
3844 notify_msg.message.shutter.timestamp = timestamp;
3845 orchestrateNotify(&notify_msg);
3846 iter->shutter_notified = true;
3847 }
3848
3849 result.input_buffer = iter->input_buffer;
3850
3851 // Prepare output buffer array
3852 for (auto bufferInfoIter = iter->buffers.begin();
3853 bufferInfoIter != iter->buffers.end(); bufferInfoIter++) {
3854 if (bufferInfoIter->buffer != nullptr) {
3855
3856 QCamera3Channel *channel =
3857 (QCamera3Channel *)bufferInfoIter->buffer->stream->priv;
3858 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
3859
3860 // Check if this buffer is a dropped frame.
3861 auto frameDropIter = mPendingFrameDropList.begin();
3862 while (frameDropIter != mPendingFrameDropList.end()) {
3863 if((frameDropIter->stream_ID == streamID) &&
3864 (frameDropIter->frame_number == frameNumber)) {
3865 bufferInfoIter->buffer->status = CAMERA3_BUFFER_STATUS_ERROR;
3866 LOGE("Stream STATUS_ERROR frame_number=%u, streamID=%u", frameNumber,
3867 streamID);
3868 mPendingFrameDropList.erase(frameDropIter);
3869 break;
3870 } else {
3871 frameDropIter++;
3872 }
3873 }
3874
3875 // Check buffer error status
3876 bufferInfoIter->buffer->status |= mPendingBuffersMap.getBufErrStatus(
3877 bufferInfoIter->buffer->buffer);
3878 mPendingBuffersMap.removeBuf(bufferInfoIter->buffer->buffer);
3879
3880 outputBuffers.push_back(*(bufferInfoIter->buffer));
3881 free(bufferInfoIter->buffer);
3882 bufferInfoIter->buffer = NULL;
3883 }
3884 }
3885
3886 result.output_buffers = outputBuffers.size() > 0 ? &outputBuffers[0] : nullptr;
3887 result.num_output_buffers = outputBuffers.size();
3888 } else if (iter->frame_number < frameNumber && liveRequest && thisLiveRequest) {
3889 // If the result metadata belongs to a live request, notify errors for previous pending
3890 // live requests.
3891 mPendingLiveRequest--;
3892
3893 CameraMetadata dummyMetadata;
3894 dummyMetadata.update(ANDROID_REQUEST_ID, &(iter->request_id), 1);
3895 result.result = dummyMetadata.release();
3896
3897 notifyError(iter->frame_number, CAMERA3_MSG_ERROR_RESULT);
3898 } else {
3899 iter++;
3900 continue;
3901 }
3902
3903 orchestrateResult(&result);
3904
3905 // For reprocessing, result metadata is the same as settings so do not free it here to
3906 // avoid double free.
3907 if (result.result != iter->settings) {
3908 free_camera_metadata((camera_metadata_t *)result.result);
3909 }
3910 iter->resultMetadata = nullptr;
3911 iter = erasePendingRequest(iter);
3912 }
3913
3914 if (liveRequest) {
3915 for (auto &iter : mPendingRequestsList) {
3916 // Increment pipeline depth for the following pending requests.
3917 if (iter.frame_number > frameNumber) {
3918 iter.pipeline_depth++;
3919 }
3920 }
3921 }
3922
3923 unblockRequestIfNecessary();
3924}
3925
/*===========================================================================
 * FUNCTION   : unblockRequestIfNecessary
 *
 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
 *              that mMutex is held when this function is called.
 *
 * PARAMETERS : none
 *
 * RETURN     : none
 *
 *==========================================================================*/
void QCamera3HardwareInterface::unblockRequestIfNecessary()
{
    // Unblock process_capture_request
    // Uses signal (not broadcast) — presumably only the single
    // process_capture_request thread ever waits on mRequestCond; confirm if
    // additional waiters are ever added. Caller holds mMutex (see above).
    pthread_cond_signal(&mRequestCond);
}
3942
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003943/*===========================================================================
3944 * FUNCTION : isHdrSnapshotRequest
3945 *
3946 * DESCRIPTION: Function to determine if the request is for a HDR snapshot
3947 *
3948 * PARAMETERS : camera3 request structure
3949 *
3950 * RETURN : boolean decision variable
3951 *
3952 *==========================================================================*/
3953bool QCamera3HardwareInterface::isHdrSnapshotRequest(camera3_capture_request *request)
3954{
3955 if (request == NULL) {
3956 LOGE("Invalid request handle");
3957 assert(0);
3958 return false;
3959 }
3960
3961 if (!mForceHdrSnapshot) {
3962 CameraMetadata frame_settings;
3963 frame_settings = request->settings;
3964
3965 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
3966 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
3967 if (metaMode != ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
3968 return false;
3969 }
3970 } else {
3971 return false;
3972 }
3973
3974 if (frame_settings.exists(ANDROID_CONTROL_SCENE_MODE)) {
3975 uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
3976 if (fwk_sceneMode != ANDROID_CONTROL_SCENE_MODE_HDR) {
3977 return false;
3978 }
3979 } else {
3980 return false;
3981 }
3982 }
3983
3984 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
3985 if (request->output_buffers[i].stream->format
3986 == HAL_PIXEL_FORMAT_BLOB) {
3987 return true;
3988 }
3989 }
3990
3991 return false;
3992}
/*===========================================================================
 * FUNCTION   : orchestrateRequest
 *
 * DESCRIPTION: Orchestrates a capture request from camera service. A regular
 *              request is mapped to a fresh internal frame number and passed
 *              straight to processCaptureRequest(). An HDR snapshot request
 *              (no input buffer) is expanded into a bracketed sequence of
 *              internal requests at different exposure compensations; only
 *              the request tied to the original framework frame number
 *              produces framework-visible output.
 *
 * PARAMETERS :
 *   @request : request from framework to process
 *
 * RETURN     : Error status codes
 *
 *==========================================================================*/
int32_t QCamera3HardwareInterface::orchestrateRequest(
        camera3_capture_request_t *request)
{

    // Saved so the fields of |request| mutated below can be restored for the
    // framework-visible leg and at exit.
    uint32_t originalFrameNumber = request->frame_number;
    uint32_t originalOutputCount = request->num_output_buffers;
    const camera_metadata_t *original_settings = request->settings;
    List<InternalRequest> internallyRequestedStreams;
    List<InternalRequest> emptyInternalList;

    if (isHdrSnapshotRequest(request) && request->input_buffer == NULL) {
        LOGD("Framework requested:%d buffers in HDR snapshot", request->num_output_buffers);
        uint32_t internalFrameNumber;
        CameraMetadata modified_meta;


        /* Add Blob channel to list of internally requested streams */
        // Internal legs target only the BLOB (JPEG) streams of the original
        // request; they start as metering-only (no buffer/metadata needed).
        for (uint32_t i = 0; i < request->num_output_buffers; i++) {
            if (request->output_buffers[i].stream->format
                    == HAL_PIXEL_FORMAT_BLOB) {
                InternalRequest streamRequested;
                streamRequested.meteringOnly = 1;
                streamRequested.need_metadata = 0;
                streamRequested.stream = request->output_buffers[i].stream;
                internallyRequestedStreams.push_back(streamRequested);
            }
        }
        // Internal legs must not fill the framework's output buffers.
        request->num_output_buffers = 0;
        auto itr =  internallyRequestedStreams.begin();

        /* Modify setting to set compensation */
        // AE is locked for the whole bracket so only the explicit exposure
        // compensation varies between legs.
        modified_meta = request->settings;
        int32_t expCompensation = GB_HDR_HALF_STEP_EV;
        uint8_t aeLock = 1;
        modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
        modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
        camera_metadata_t *modified_settings = modified_meta.release();
        request->settings = modified_settings;

        /* Capture Settling & -2x frame */
        // Metering-only internal leg under a generated (unmapped) frame number.
        _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
        request->frame_number = internalFrameNumber;
        processCaptureRequest(request, internallyRequestedStreams);

        // Framework-visible leg: restore the output buffers and map the
        // original framework frame number to a new internal one so results
        // and notifies can be translated back (see orchestrateResult).
        request->num_output_buffers = originalOutputCount;
        _orchestrationDb.allocStoreInternalFrameNumber(originalFrameNumber, internalFrameNumber);
        request->frame_number = internalFrameNumber;
        processCaptureRequest(request, emptyInternalList);
        request->num_output_buffers = 0;

        // Re-adopt the previous settings buffer and switch to 0 EV.
        modified_meta = modified_settings;
        expCompensation = 0;
        aeLock = 1;
        modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
        modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
        modified_settings = modified_meta.release();
        request->settings = modified_settings;

        /* Capture Settling & 0X frame */

        // First a metering-only settling leg ...
        itr = internallyRequestedStreams.begin();
        if (itr == internallyRequestedStreams.end()) {
            LOGE("Error Internally Requested Stream list is empty");
            assert(0);
        } else {
            itr->need_metadata = 0;
            itr->meteringOnly = 1;
        }

        _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
        request->frame_number = internalFrameNumber;
        processCaptureRequest(request, internallyRequestedStreams);

        // ... then the actual 0x data leg (buffer + metadata requested).
        itr = internallyRequestedStreams.begin();
        if (itr == internallyRequestedStreams.end()) {
            ALOGE("Error Internally Requested Stream list is empty");
            assert(0);
        } else {
            itr->need_metadata = 1;
            itr->meteringOnly = 0;
        }

        _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
        request->frame_number = internalFrameNumber;
        processCaptureRequest(request, internallyRequestedStreams);

        /* Capture 2X frame*/
        // Same settle-then-capture pattern at +2x exposure compensation.
        modified_meta = modified_settings;
        expCompensation = GB_HDR_2X_STEP_EV;
        aeLock = 1;
        modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
        modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
        modified_settings = modified_meta.release();
        request->settings = modified_settings;

        itr = internallyRequestedStreams.begin();
        if (itr == internallyRequestedStreams.end()) {
            ALOGE("Error Internally Requested Stream list is empty");
            assert(0);
        } else {
            itr->need_metadata = 0;
            itr->meteringOnly = 1;
        }
        _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
        request->frame_number = internalFrameNumber;
        processCaptureRequest(request, internallyRequestedStreams);

        itr = internallyRequestedStreams.begin();
        if (itr == internallyRequestedStreams.end()) {
            ALOGE("Error Internally Requested Stream list is empty");
            assert(0);
        } else {
            itr->need_metadata = 1;
            itr->meteringOnly = 0;
        }

        _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
        request->frame_number = internalFrameNumber;
        processCaptureRequest(request, internallyRequestedStreams);


        /* Capture 2X on original streaming config*/
        internallyRequestedStreams.clear();

        /* Restore original settings pointer */
        // NOTE(review): modified_settings was obtained from
        // CameraMetadata::release() and is dropped here without an explicit
        // free — looks like a leak unless processCaptureRequest takes
        // ownership of request->settings; confirm ownership semantics.
        request->settings = original_settings;
    } else {
        // Non-HDR (or reprocess) request: translate the framework frame
        // number to an internal one and forward directly.
        uint32_t internalFrameNumber;
        _orchestrationDb.allocStoreInternalFrameNumber(request->frame_number, internalFrameNumber);
        request->frame_number = internalFrameNumber;
        return processCaptureRequest(request, internallyRequestedStreams);
    }

    return NO_ERROR;
}
4139
4140/*===========================================================================
4141 * FUNCTION : orchestrateResult
4142 *
4143 * DESCRIPTION: Orchestrates a capture result to camera service
4144 *
4145 * PARAMETERS :
4146 * @request : request from framework to process
4147 *
4148 * RETURN :
4149 *
4150 *==========================================================================*/
4151void QCamera3HardwareInterface::orchestrateResult(
4152 camera3_capture_result_t *result)
4153{
4154 uint32_t frameworkFrameNumber;
4155 int32_t rc = _orchestrationDb.getFrameworkFrameNumber(result->frame_number,
4156 frameworkFrameNumber);
4157 if (rc != NO_ERROR) {
4158 LOGE("Cannot find translated frameworkFrameNumber");
4159 assert(0);
4160 } else {
4161 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004162 LOGD("Internal Request drop the result");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004163 } else {
4164 result->frame_number = frameworkFrameNumber;
4165 mCallbackOps->process_capture_result(mCallbackOps, result);
4166 }
4167 }
4168}
4169
4170/*===========================================================================
4171 * FUNCTION : orchestrateNotify
4172 *
4173 * DESCRIPTION: Orchestrates a notify to camera service
4174 *
4175 * PARAMETERS :
4176 * @request : request from framework to process
4177 *
4178 * RETURN :
4179 *
4180 *==========================================================================*/
4181void QCamera3HardwareInterface::orchestrateNotify(camera3_notify_msg_t *notify_msg)
4182{
4183 uint32_t frameworkFrameNumber;
4184 uint32_t internalFrameNumber = notify_msg->message.shutter.frame_number;
4185 int32_t rc = _orchestrationDb.getFrameworkFrameNumber(internalFrameNumber,
4186 frameworkFrameNumber);
4187 if (rc != NO_ERROR) {
4188 LOGE("Cannot find translated frameworkFrameNumber");
4189 assert(0);
4190 } else {
4191 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004192 LOGD("Internal Request drop the notifyCb");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004193 } else {
4194 notify_msg->message.shutter.frame_number = frameworkFrameNumber;
4195 mCallbackOps->notify(mCallbackOps, notify_msg);
4196 }
4197 }
4198}
4199
4200/*===========================================================================
4201 * FUNCTION : FrameNumberRegistry
4202 *
4203 * DESCRIPTION: Constructor
4204 *
4205 * PARAMETERS :
4206 *
4207 * RETURN :
4208 *
4209 *==========================================================================*/
4210FrameNumberRegistry::FrameNumberRegistry()
4211{
4212 _nextFreeInternalNumber = INTERNAL_FRAME_STARTING_NUMBER;
4213}
4214
4215/*===========================================================================
4216 * FUNCTION : ~FrameNumberRegistry
4217 *
4218 * DESCRIPTION: Destructor
4219 *
4220 * PARAMETERS :
4221 *
4222 * RETURN :
4223 *
4224 *==========================================================================*/
4225FrameNumberRegistry::~FrameNumberRegistry()
4226{
4227}
4228
4229/*===========================================================================
4230 * FUNCTION : PurgeOldEntriesLocked
4231 *
4232 * DESCRIPTION: Maintainance function to trigger LRU cleanup mechanism
4233 *
4234 * PARAMETERS :
4235 *
4236 * RETURN : NONE
4237 *
4238 *==========================================================================*/
4239void FrameNumberRegistry::purgeOldEntriesLocked()
4240{
4241 while (_register.begin() != _register.end()) {
4242 auto itr = _register.begin();
4243 if (itr->first < (_nextFreeInternalNumber - FRAME_REGISTER_LRU_SIZE)) {
4244 _register.erase(itr);
4245 } else {
4246 return;
4247 }
4248 }
4249}
4250
4251/*===========================================================================
4252 * FUNCTION : allocStoreInternalFrameNumber
4253 *
4254 * DESCRIPTION: Method to note down a framework request and associate a new
4255 * internal request number against it
4256 *
4257 * PARAMETERS :
4258 * @fFrameNumber: Identifier given by framework
4259 * @internalFN : Output parameter which will have the newly generated internal
4260 * entry
4261 *
4262 * RETURN : Error code
4263 *
4264 *==========================================================================*/
4265int32_t FrameNumberRegistry::allocStoreInternalFrameNumber(uint32_t frameworkFrameNumber,
4266 uint32_t &internalFrameNumber)
4267{
4268 Mutex::Autolock lock(mRegistryLock);
4269 internalFrameNumber = _nextFreeInternalNumber++;
4270 LOGD("Storing ff#:%d, with internal:%d", frameworkFrameNumber, internalFrameNumber);
4271 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, frameworkFrameNumber));
4272 purgeOldEntriesLocked();
4273 return NO_ERROR;
4274}
4275
4276/*===========================================================================
4277 * FUNCTION : generateStoreInternalFrameNumber
4278 *
4279 * DESCRIPTION: Method to associate a new internal request number independent
4280 * of any associate with framework requests
4281 *
4282 * PARAMETERS :
4283 * @internalFrame#: Output parameter which will have the newly generated internal
4284 *
4285 *
4286 * RETURN : Error code
4287 *
4288 *==========================================================================*/
4289int32_t FrameNumberRegistry::generateStoreInternalFrameNumber(uint32_t &internalFrameNumber)
4290{
4291 Mutex::Autolock lock(mRegistryLock);
4292 internalFrameNumber = _nextFreeInternalNumber++;
4293 LOGD("Generated internal framenumber:%d", internalFrameNumber);
4294 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, EMPTY_FRAMEWORK_FRAME_NUMBER));
4295 purgeOldEntriesLocked();
4296 return NO_ERROR;
4297}
4298
4299/*===========================================================================
4300 * FUNCTION : getFrameworkFrameNumber
4301 *
4302 * DESCRIPTION: Method to query the framework framenumber given an internal #
4303 *
4304 * PARAMETERS :
4305 * @internalFrame#: Internal reference
4306 * @frameworkframenumber: Output parameter holding framework frame entry
4307 *
4308 * RETURN : Error code
4309 *
4310 *==========================================================================*/
4311int32_t FrameNumberRegistry::getFrameworkFrameNumber(uint32_t internalFrameNumber,
4312 uint32_t &frameworkFrameNumber)
4313{
4314 Mutex::Autolock lock(mRegistryLock);
4315 auto itr = _register.find(internalFrameNumber);
4316 if (itr == _register.end()) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004317 LOGE("Cannot find internal#: %d", internalFrameNumber);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004318 return -ENOENT;
4319 }
4320
4321 frameworkFrameNumber = itr->second;
4322 purgeOldEntriesLocked();
4323 return NO_ERROR;
4324}
Thierry Strudel3d639192016-09-09 11:52:26 -07004325
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004326status_t QCamera3HardwareInterface::fillPbStreamConfig(
4327 pbcamera::StreamConfiguration *config, uint32_t pbStreamId, int pbStreamFormat,
4328 QCamera3Channel *channel, uint32_t streamIndex) {
4329 if (config == nullptr) {
4330 LOGE("%s: config is null", __FUNCTION__);
4331 return BAD_VALUE;
4332 }
4333
4334 if (channel == nullptr) {
4335 LOGE("%s: channel is null", __FUNCTION__);
4336 return BAD_VALUE;
4337 }
4338
4339 QCamera3Stream *stream = channel->getStreamByIndex(streamIndex);
4340 if (stream == nullptr) {
4341 LOGE("%s: Failed to get stream %d in channel.", __FUNCTION__, streamIndex);
4342 return NAME_NOT_FOUND;
4343 }
4344
4345 const cam_stream_info_t* streamInfo = stream->getStreamInfo();
4346 if (streamInfo == nullptr) {
4347 LOGE("%s: Failed to get stream info for stream %d in channel.", __FUNCTION__, streamIndex);
4348 return NAME_NOT_FOUND;
4349 }
4350
4351 config->id = pbStreamId;
4352 config->image.width = streamInfo->dim.width;
4353 config->image.height = streamInfo->dim.height;
4354 config->image.padding = 0;
4355 config->image.format = pbStreamFormat;
4356
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004357 uint32_t totalPlaneSize = 0;
4358
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004359 // Fill plane information.
4360 for (uint32_t i = 0; i < streamInfo->buf_planes.plane_info.num_planes; i++) {
4361 pbcamera::PlaneConfiguration plane;
4362 plane.stride = streamInfo->buf_planes.plane_info.mp[i].stride_in_bytes;
4363 plane.scanline = streamInfo->buf_planes.plane_info.mp[i].scanline;
4364 config->image.planes.push_back(plane);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004365
4366 totalPlaneSize += (plane.stride * plane.scanline);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004367 }
4368
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004369 config->image.padding = streamInfo->buf_planes.plane_info.frame_len - totalPlaneSize;
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004370 return OK;
4371}
4372
Thierry Strudel3d639192016-09-09 11:52:26 -07004373/*===========================================================================
4374 * FUNCTION : processCaptureRequest
4375 *
4376 * DESCRIPTION: process a capture request from camera service
4377 *
4378 * PARAMETERS :
4379 * @request : request from framework to process
4380 *
4381 * RETURN :
4382 *
4383 *==========================================================================*/
4384int QCamera3HardwareInterface::processCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004385 camera3_capture_request_t *request,
4386 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07004387{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004388 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PROC_CAP_REQ);
Thierry Strudel3d639192016-09-09 11:52:26 -07004389 int rc = NO_ERROR;
4390 int32_t request_id;
4391 CameraMetadata meta;
Thierry Strudel3d639192016-09-09 11:52:26 -07004392 bool isVidBufRequested = false;
4393 camera3_stream_buffer_t *pInputBuffer = NULL;
Thierry Strudel54dc9782017-02-15 12:12:10 -08004394 char prop[PROPERTY_VALUE_MAX];
Thierry Strudel3d639192016-09-09 11:52:26 -07004395
4396 pthread_mutex_lock(&mMutex);
4397
4398 // Validate current state
4399 switch (mState) {
4400 case CONFIGURED:
4401 case STARTED:
4402 /* valid state */
4403 break;
4404
4405 case ERROR:
4406 pthread_mutex_unlock(&mMutex);
4407 handleCameraDeviceError();
4408 return -ENODEV;
4409
4410 default:
4411 LOGE("Invalid state %d", mState);
4412 pthread_mutex_unlock(&mMutex);
4413 return -ENODEV;
4414 }
4415
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004416 rc = validateCaptureRequest(request, internallyRequestedStreams);
Thierry Strudel3d639192016-09-09 11:52:26 -07004417 if (rc != NO_ERROR) {
4418 LOGE("incoming request is not valid");
4419 pthread_mutex_unlock(&mMutex);
4420 return rc;
4421 }
4422
4423 meta = request->settings;
4424
4425 // For first capture request, send capture intent, and
4426 // stream on all streams
4427 if (mState == CONFIGURED) {
4428 // send an unconfigure to the backend so that the isp
4429 // resources are deallocated
4430 if (!mFirstConfiguration) {
4431 cam_stream_size_info_t stream_config_info;
4432 int32_t hal_version = CAM_HAL_V3;
4433 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
4434 stream_config_info.buffer_info.min_buffers =
4435 MIN_INFLIGHT_REQUESTS;
4436 stream_config_info.buffer_info.max_buffers =
4437 m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
4438 clear_metadata_buffer(mParameters);
4439 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4440 CAM_INTF_PARM_HAL_VERSION, hal_version);
4441 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4442 CAM_INTF_META_STREAM_INFO, stream_config_info);
4443 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4444 mParameters);
4445 if (rc < 0) {
4446 LOGE("set_parms for unconfigure failed");
4447 pthread_mutex_unlock(&mMutex);
4448 return rc;
4449 }
4450 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004451 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07004452 /* get eis information for stream configuration */
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004453 cam_is_type_t isTypeVideo, isTypePreview, is_type=IS_TYPE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07004454 char is_type_value[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004455 property_get("persist.camera.is_type", is_type_value, "4");
4456 isTypeVideo = static_cast<cam_is_type_t>(atoi(is_type_value));
4457 // Make default value for preview IS_TYPE as IS_TYPE_EIS_2_0
4458 property_get("persist.camera.is_type_preview", is_type_value, "4");
4459 isTypePreview = static_cast<cam_is_type_t>(atoi(is_type_value));
4460 LOGD("isTypeVideo: %d isTypePreview: %d", isTypeVideo, isTypePreview);
Thierry Strudel3d639192016-09-09 11:52:26 -07004461
4462 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4463 int32_t hal_version = CAM_HAL_V3;
4464 uint8_t captureIntent =
4465 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4466 mCaptureIntent = captureIntent;
4467 clear_metadata_buffer(mParameters);
4468 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
4469 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
4470 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004471 if (mFirstConfiguration) {
4472 // configure instant AEC
4473 // Instant AEC is a session based parameter and it is needed only
4474 // once per complete session after open camera.
4475 // i.e. This is set only once for the first capture request, after open camera.
4476 setInstantAEC(meta);
4477 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004478 uint8_t fwkVideoStabMode=0;
4479 if (meta.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
4480 fwkVideoStabMode = meta.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
4481 }
4482
4483 // If EIS setprop is enabled & if first capture setting has EIS enabled then only
4484 // turn it on for video/preview
4485 bool setEis = m_bEisEnable && fwkVideoStabMode && m_bEisSupportedSize &&
4486 (isTypeVideo >= IS_TYPE_EIS_2_0);
Thierry Strudel3d639192016-09-09 11:52:26 -07004487 int32_t vsMode;
4488 vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
4489 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
4490 rc = BAD_VALUE;
4491 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004492 LOGD("setEis %d", setEis);
4493 bool eis3Supported = false;
4494 size_t count = IS_TYPE_MAX;
4495 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
4496 for (size_t i = 0; i < count; i++) {
4497 if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0) {
4498 eis3Supported = true;
4499 break;
4500 }
4501 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004502
4503 //IS type will be 0 unless EIS is supported. If EIS is supported
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004504 //it could either be 4 or 5 depending on the stream and video size
Thierry Strudel3d639192016-09-09 11:52:26 -07004505 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4506 if (setEis) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004507 if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_PREVIEW) {
4508 is_type = isTypePreview;
4509 } else if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_VIDEO ) {
4510 if ( (isTypeVideo == IS_TYPE_EIS_3_0) && (eis3Supported == FALSE) ) {
4511 LOGW(" EIS_3.0 is not supported and so setting EIS_2.0");
Thierry Strudel3d639192016-09-09 11:52:26 -07004512 is_type = IS_TYPE_EIS_2_0;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004513 } else {
4514 is_type = isTypeVideo;
Thierry Strudel3d639192016-09-09 11:52:26 -07004515 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004516 } else {
4517 is_type = IS_TYPE_NONE;
4518 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004519 mStreamConfigInfo.is_type[i] = is_type;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004520 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004521 mStreamConfigInfo.is_type[i] = IS_TYPE_NONE;
4522 }
4523 }
4524
4525 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4526 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
4527
Thierry Strudel54dc9782017-02-15 12:12:10 -08004528 //Disable tintless only if the property is set to 0
4529 memset(prop, 0, sizeof(prop));
4530 property_get("persist.camera.tintless.enable", prop, "1");
4531 int32_t tintless_value = atoi(prop);
4532
Thierry Strudel3d639192016-09-09 11:52:26 -07004533 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4534 CAM_INTF_PARM_TINTLESS, tintless_value);
Thierry Strudel54dc9782017-02-15 12:12:10 -08004535
Thierry Strudel3d639192016-09-09 11:52:26 -07004536 //Disable CDS for HFR mode or if DIS/EIS is on.
4537 //CDS is a session parameter in the backend/ISP, so need to be set/reset
4538 //after every configure_stream
4539 if ((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
4540 (m_bIsVideo)) {
4541 int32_t cds = CAM_CDS_MODE_OFF;
4542 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4543 CAM_INTF_PARM_CDS_MODE, cds))
4544 LOGE("Failed to disable CDS for HFR mode");
4545
4546 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004547
4548 if (m_debug_avtimer || meta.exists(QCAMERA3_USE_AV_TIMER)) {
4549 uint8_t* use_av_timer = NULL;
4550
4551 if (m_debug_avtimer){
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004552 LOGI(" Enabling AV timer through setprop");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004553 use_av_timer = &m_debug_avtimer;
4554 }
4555 else{
4556 use_av_timer =
4557 meta.find(QCAMERA3_USE_AV_TIMER).data.u8;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004558 if (use_av_timer) {
4559 LOGI("Enabling AV timer through Metadata: use_av_timer: %d", *use_av_timer);
4560 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004561 }
4562
4563 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
4564 rc = BAD_VALUE;
4565 }
4566 }
4567
Thierry Strudel3d639192016-09-09 11:52:26 -07004568 setMobicat();
4569
4570 /* Set fps and hfr mode while sending meta stream info so that sensor
4571 * can configure appropriate streaming mode */
4572 mHFRVideoFps = DEFAULT_VIDEO_FPS;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004573 mMinInFlightRequests = MIN_INFLIGHT_REQUESTS;
4574 mMaxInFlightRequests = MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004575 if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
4576 rc = setHalFpsRange(meta, mParameters);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004577 if (rc == NO_ERROR) {
4578 int32_t max_fps =
4579 (int32_t) meta.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
Zhijun He21b864a2016-06-24 13:41:19 -07004580 if (max_fps == 60 || mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004581 mMinInFlightRequests = MIN_INFLIGHT_60FPS_REQUESTS;
4582 }
4583 /* For HFR, more buffers are dequeued upfront to improve the performance */
4584 if (mBatchSize) {
4585 mMinInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
4586 mMaxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
4587 }
4588 }
4589 else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004590 LOGE("setHalFpsRange failed");
4591 }
4592 }
4593 if (meta.exists(ANDROID_CONTROL_MODE)) {
4594 uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
4595 rc = extractSceneMode(meta, metaMode, mParameters);
4596 if (rc != NO_ERROR) {
4597 LOGE("extractSceneMode failed");
4598 }
4599 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004600 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07004601
Thierry Strudel04e026f2016-10-10 11:27:36 -07004602 if (meta.exists(QCAMERA3_VIDEO_HDR_MODE)) {
4603 cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
4604 meta.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
4605 rc = setVideoHdrMode(mParameters, vhdr);
4606 if (rc != NO_ERROR) {
4607 LOGE("setVideoHDR is failed");
4608 }
4609 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004610
Thierry Strudel3d639192016-09-09 11:52:26 -07004611 //TODO: validate the arguments, HSV scenemode should have only the
4612 //advertised fps ranges
4613
4614 /*set the capture intent, hal version, tintless, stream info,
4615 *and disenable parameters to the backend*/
4616 LOGD("set_parms META_STREAM_INFO " );
4617 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4618 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x "
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004619 "Format:%d is_type: %d",
Thierry Strudel3d639192016-09-09 11:52:26 -07004620 mStreamConfigInfo.type[i],
4621 mStreamConfigInfo.stream_sizes[i].width,
4622 mStreamConfigInfo.stream_sizes[i].height,
4623 mStreamConfigInfo.postprocess_mask[i],
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004624 mStreamConfigInfo.format[i],
4625 mStreamConfigInfo.is_type[i]);
Thierry Strudel3d639192016-09-09 11:52:26 -07004626 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004627
Thierry Strudel3d639192016-09-09 11:52:26 -07004628 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4629 mParameters);
4630 if (rc < 0) {
4631 LOGE("set_parms failed for hal version, stream info");
4632 }
4633
Chien-Yu Chenee335912017-02-09 17:53:20 -08004634 memset(&mSensorModeInfo, 0, sizeof(mSensorModeInfo));
4635 rc = getSensorModeInfo(mSensorModeInfo);
Thierry Strudel3d639192016-09-09 11:52:26 -07004636 if (rc != NO_ERROR) {
4637 LOGE("Failed to get sensor output size");
4638 pthread_mutex_unlock(&mMutex);
4639 goto error_exit;
4640 }
4641
4642 mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
4643 gCamCapability[mCameraId]->active_array_size.height,
Chien-Yu Chenee335912017-02-09 17:53:20 -08004644 mSensorModeInfo.active_array_size.width,
4645 mSensorModeInfo.active_array_size.height);
Thierry Strudel3d639192016-09-09 11:52:26 -07004646
4647 /* Set batchmode before initializing channel. Since registerBuffer
4648 * internally initializes some of the channels, better set batchmode
4649 * even before first register buffer */
4650 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4651 it != mStreamInfo.end(); it++) {
4652 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
4653 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
4654 && mBatchSize) {
4655 rc = channel->setBatchSize(mBatchSize);
4656 //Disable per frame map unmap for HFR/batchmode case
4657 rc |= channel->setPerFrameMapUnmap(false);
4658 if (NO_ERROR != rc) {
4659 LOGE("Channel init failed %d", rc);
4660 pthread_mutex_unlock(&mMutex);
4661 goto error_exit;
4662 }
4663 }
4664 }
4665
4666 //First initialize all streams
4667 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4668 it != mStreamInfo.end(); it++) {
4669 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
4670 if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
4671 ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004672 setEis) {
4673 for (size_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4674 if ( (1U << mStreamConfigInfo.type[i]) == channel->getStreamTypeMask() ) {
4675 is_type = mStreamConfigInfo.is_type[i];
4676 break;
4677 }
4678 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004679 rc = channel->initialize(is_type);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004680 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004681 rc = channel->initialize(IS_TYPE_NONE);
4682 }
4683 if (NO_ERROR != rc) {
4684 LOGE("Channel initialization failed %d", rc);
4685 pthread_mutex_unlock(&mMutex);
4686 goto error_exit;
4687 }
4688 }
4689
4690 if (mRawDumpChannel) {
4691 rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
4692 if (rc != NO_ERROR) {
4693 LOGE("Error: Raw Dump Channel init failed");
4694 pthread_mutex_unlock(&mMutex);
4695 goto error_exit;
4696 }
4697 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004698 if (mHdrPlusRawSrcChannel) {
4699 rc = mHdrPlusRawSrcChannel->initialize(IS_TYPE_NONE);
4700 if (rc != NO_ERROR) {
4701 LOGE("Error: HDR+ RAW Source Channel init failed");
4702 pthread_mutex_unlock(&mMutex);
4703 goto error_exit;
4704 }
4705 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004706 if (mSupportChannel) {
4707 rc = mSupportChannel->initialize(IS_TYPE_NONE);
4708 if (rc < 0) {
4709 LOGE("Support channel initialization failed");
4710 pthread_mutex_unlock(&mMutex);
4711 goto error_exit;
4712 }
4713 }
4714 if (mAnalysisChannel) {
4715 rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
4716 if (rc < 0) {
4717 LOGE("Analysis channel initialization failed");
4718 pthread_mutex_unlock(&mMutex);
4719 goto error_exit;
4720 }
4721 }
4722 if (mDummyBatchChannel) {
4723 rc = mDummyBatchChannel->setBatchSize(mBatchSize);
4724 if (rc < 0) {
4725 LOGE("mDummyBatchChannel setBatchSize failed");
4726 pthread_mutex_unlock(&mMutex);
4727 goto error_exit;
4728 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004729 rc = mDummyBatchChannel->initialize(IS_TYPE_NONE);
Thierry Strudel3d639192016-09-09 11:52:26 -07004730 if (rc < 0) {
4731 LOGE("mDummyBatchChannel initialization failed");
4732 pthread_mutex_unlock(&mMutex);
4733 goto error_exit;
4734 }
4735 }
4736
4737 // Set bundle info
4738 rc = setBundleInfo();
4739 if (rc < 0) {
4740 LOGE("setBundleInfo failed %d", rc);
4741 pthread_mutex_unlock(&mMutex);
4742 goto error_exit;
4743 }
4744
4745 //update settings from app here
4746 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
4747 mIsDeviceLinked = meta.find(QCAMERA3_DUALCAM_LINK_ENABLE).data.u8[0];
4748 LOGH("Dualcam: setting On=%d id =%d", mIsDeviceLinked, mCameraId);
4749 }
4750 if (meta.exists(QCAMERA3_DUALCAM_LINK_IS_MAIN)) {
4751 mIsMainCamera = meta.find(QCAMERA3_DUALCAM_LINK_IS_MAIN).data.u8[0];
4752 LOGH("Dualcam: Is this main camera = %d id =%d", mIsMainCamera, mCameraId);
4753 }
4754 if (meta.exists(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID)) {
4755 mLinkedCameraId = meta.find(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID).data.u8[0];
4756 LOGH("Dualcam: Linked camera Id %d id =%d", mLinkedCameraId, mCameraId);
4757
4758 if ( (mLinkedCameraId >= MM_CAMERA_MAX_NUM_SENSORS) &&
4759 (mLinkedCameraId != mCameraId) ) {
4760 LOGE("Dualcam: mLinkedCameraId %d is invalid, current cam id = %d",
4761 mLinkedCameraId, mCameraId);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004762 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07004763 goto error_exit;
4764 }
4765 }
4766
4767 // add bundle related cameras
4768 LOGH("%s: Dualcam: id =%d, mIsDeviceLinked=%d", __func__,mCameraId, mIsDeviceLinked);
4769 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004770 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
4771 &m_pDualCamCmdPtr->bundle_info;
4772 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07004773 if (mIsDeviceLinked)
4774 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_ON;
4775 else
4776 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
4777
4778 pthread_mutex_lock(&gCamLock);
4779
4780 if (sessionId[mLinkedCameraId] == 0xDEADBEEF) {
4781 LOGE("Dualcam: Invalid Session Id ");
4782 pthread_mutex_unlock(&gCamLock);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004783 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07004784 goto error_exit;
4785 }
4786
4787 if (mIsMainCamera == 1) {
4788 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
4789 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07004790 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004791 m_pRelCamSyncBuf->cam_role = CAM_ROLE_BAYER;
Thierry Strudel3d639192016-09-09 11:52:26 -07004792 // related session id should be session id of linked session
4793 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
4794 } else {
4795 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
4796 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07004797 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004798 m_pRelCamSyncBuf->cam_role = CAM_ROLE_MONO;
Thierry Strudel3d639192016-09-09 11:52:26 -07004799 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
4800 }
4801 pthread_mutex_unlock(&gCamLock);
4802
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004803 rc = mCameraHandle->ops->set_dual_cam_cmd(
4804 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07004805 if (rc < 0) {
4806 LOGE("Dualcam: link failed");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004807 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07004808 goto error_exit;
4809 }
4810 }
4811
4812 //Then start them.
4813 LOGH("Start META Channel");
4814 rc = mMetadataChannel->start();
4815 if (rc < 0) {
4816 LOGE("META channel start failed");
4817 pthread_mutex_unlock(&mMutex);
4818 goto error_exit;
4819 }
4820
4821 if (mAnalysisChannel) {
4822 rc = mAnalysisChannel->start();
4823 if (rc < 0) {
4824 LOGE("Analysis channel start failed");
4825 mMetadataChannel->stop();
4826 pthread_mutex_unlock(&mMutex);
4827 goto error_exit;
4828 }
4829 }
4830
4831 if (mSupportChannel) {
4832 rc = mSupportChannel->start();
4833 if (rc < 0) {
4834 LOGE("Support channel start failed");
4835 mMetadataChannel->stop();
4836            /* Although support and analysis are mutually exclusive today
4837               adding it in any case for future proofing */
4838 if (mAnalysisChannel) {
4839 mAnalysisChannel->stop();
4840 }
4841 pthread_mutex_unlock(&mMutex);
4842 goto error_exit;
4843 }
4844 }
4845 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4846 it != mStreamInfo.end(); it++) {
4847 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
4848 LOGH("Start Processing Channel mask=%d",
4849 channel->getStreamTypeMask());
4850 rc = channel->start();
4851 if (rc < 0) {
4852 LOGE("channel start failed");
4853 pthread_mutex_unlock(&mMutex);
4854 goto error_exit;
4855 }
4856 }
4857
4858 if (mRawDumpChannel) {
4859 LOGD("Starting raw dump stream");
4860 rc = mRawDumpChannel->start();
4861 if (rc != NO_ERROR) {
4862 LOGE("Error Starting Raw Dump Channel");
4863 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4864 it != mStreamInfo.end(); it++) {
4865 QCamera3Channel *channel =
4866 (QCamera3Channel *)(*it)->stream->priv;
4867 LOGH("Stopping Processing Channel mask=%d",
4868 channel->getStreamTypeMask());
4869 channel->stop();
4870 }
4871 if (mSupportChannel)
4872 mSupportChannel->stop();
4873 if (mAnalysisChannel) {
4874 mAnalysisChannel->stop();
4875 }
4876 mMetadataChannel->stop();
4877 pthread_mutex_unlock(&mMutex);
4878 goto error_exit;
4879 }
4880 }
4881
4882 if (mChannelHandle) {
4883
4884 rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
4885 mChannelHandle);
4886 if (rc != NO_ERROR) {
4887 LOGE("start_channel failed %d", rc);
4888 pthread_mutex_unlock(&mMutex);
4889 goto error_exit;
4890 }
4891 }
4892
4893 goto no_error;
4894error_exit:
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004895 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07004896 return rc;
4897no_error:
Thierry Strudel3d639192016-09-09 11:52:26 -07004898 mWokenUpByDaemon = false;
4899 mPendingLiveRequest = 0;
4900 mFirstConfiguration = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07004901 }
4902
Chien-Yu Chenee335912017-02-09 17:53:20 -08004903 // Enable HDR+ mode for the first PREVIEW_INTENT request.
4904 if (mHdrPlusClient != nullptr && !mFirstPreviewIntentSeen &&
4905 meta.exists(ANDROID_CONTROL_CAPTURE_INTENT) &&
4906 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0] ==
4907 ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW) {
4908 rc = enableHdrPlusModeLocked();
4909 if (rc != OK) {
4910 LOGE("%s: Failed to configure HDR+ streams.", __FUNCTION__);
4911 pthread_mutex_unlock(&mMutex);
4912 return rc;
4913 }
4914
4915 // Start HDR+ RAW source channel if AP provides RAW input buffers.
4916 if (mHdrPlusRawSrcChannel) {
4917 rc = mHdrPlusRawSrcChannel->start();
4918 if (rc != OK) {
4919 LOGE("Error Starting HDR+ RAW Channel");
4920 pthread_mutex_unlock(&mMutex);
4921 return rc;
4922 }
4923 }
4924 mFirstPreviewIntentSeen = true;
4925 }
4926
Thierry Strudel3d639192016-09-09 11:52:26 -07004927 uint32_t frameNumber = request->frame_number;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004928 cam_stream_ID_t streamsArray;
Thierry Strudel3d639192016-09-09 11:52:26 -07004929
4930 if (mFlushPerf) {
4931 //we cannot accept any requests during flush
4932 LOGE("process_capture_request cannot proceed during flush");
4933 pthread_mutex_unlock(&mMutex);
4934 return NO_ERROR; //should return an error
4935 }
4936
4937 if (meta.exists(ANDROID_REQUEST_ID)) {
4938 request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
4939 mCurrentRequestId = request_id;
4940 LOGD("Received request with id: %d", request_id);
4941 } else if (mState == CONFIGURED || mCurrentRequestId == -1){
4942 LOGE("Unable to find request id field, \
4943 & no previous id available");
4944 pthread_mutex_unlock(&mMutex);
4945 return NAME_NOT_FOUND;
4946 } else {
4947 LOGD("Re-using old request id");
4948 request_id = mCurrentRequestId;
4949 }
4950
4951 LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
4952 request->num_output_buffers,
4953 request->input_buffer,
4954 frameNumber);
4955 // Acquire all request buffers first
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004956 streamsArray.num_streams = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07004957 int blob_request = 0;
Emilian Peev7650c122017-01-19 08:24:33 -08004958 bool depthRequestPresent = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07004959 uint32_t snapshotStreamId = 0;
4960 for (size_t i = 0; i < request->num_output_buffers; i++) {
4961 const camera3_stream_buffer_t& output = request->output_buffers[i];
4962 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
4963
Emilian Peev7650c122017-01-19 08:24:33 -08004964 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
4965 (output.stream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004966 //FIXME??:Call function to store local copy of jpeg data for encode params.
Thierry Strudel3d639192016-09-09 11:52:26 -07004967 blob_request = 1;
4968 snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
4969 }
4970
4971 if (output.acquire_fence != -1) {
4972 rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
4973 close(output.acquire_fence);
4974 if (rc != OK) {
4975 LOGE("sync wait failed %d", rc);
4976 pthread_mutex_unlock(&mMutex);
4977 return rc;
4978 }
4979 }
4980
Emilian Peev7650c122017-01-19 08:24:33 -08004981 if (output.stream->data_space == HAL_DATASPACE_DEPTH) {
4982 depthRequestPresent = true;
4983 continue;
4984 }
4985
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004986 streamsArray.stream_request[streamsArray.num_streams++].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07004987 channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel3d639192016-09-09 11:52:26 -07004988
4989 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
4990 isVidBufRequested = true;
4991 }
4992 }
4993
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004994    //FIXME: Add checks to ensure no dups in validateCaptureRequest
4995 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
4996 itr++) {
4997 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
4998 streamsArray.stream_request[streamsArray.num_streams++].streamID =
4999 channel->getStreamID(channel->getStreamTypeMask());
5000
5001 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5002 isVidBufRequested = true;
5003 }
5004 }
5005
Thierry Strudel3d639192016-09-09 11:52:26 -07005006 if (blob_request) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005007 KPI_ATRACE_CAMSCOPE_INT("SNAPSHOT", CAMSCOPE_HAL3_SNAPSHOT, 1);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005008 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
Thierry Strudel3d639192016-09-09 11:52:26 -07005009 }
5010 if (blob_request && mRawDumpChannel) {
5011 LOGD("Trigger Raw based on blob request if Raw dump is enabled");
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005012 streamsArray.stream_request[streamsArray.num_streams].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005013 mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005014 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
Thierry Strudel3d639192016-09-09 11:52:26 -07005015 }
5016
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005017 {
5018 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5019 // Request a RAW buffer if
5020 // 1. mHdrPlusRawSrcChannel is valid.
5021 // 2. frameNumber is multiples of kHdrPlusRawPeriod (in order to limit RAW capture rate.)
5022 // 3. There is no pending HDR+ request.
5023 if (mHdrPlusRawSrcChannel && frameNumber % kHdrPlusRawPeriod == 0 &&
5024 mHdrPlusPendingRequests.size() == 0) {
5025 streamsArray.stream_request[streamsArray.num_streams].streamID =
5026 mHdrPlusRawSrcChannel->getStreamID(mHdrPlusRawSrcChannel->getStreamTypeMask());
5027 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
5028 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005029 }
5030
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005031 //extract capture intent
5032 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
5033 mCaptureIntent =
5034 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
5035 }
5036
5037 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5038 mCacMode =
5039 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5040 }
5041
5042 bool hdrPlusRequest = false;
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005043 HdrPlusPendingRequest pendingHdrPlusRequest = {};
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005044
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -08005045 // If this request has a still capture intent, try to submit an HDR+ request.
Chien-Yu Chenee335912017-02-09 17:53:20 -08005046 if (mHdrPlusClient != nullptr && mHdrPlusModeEnabled &&
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -08005047 mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE) {
5048 hdrPlusRequest = trySubmittingHdrPlusRequest(&pendingHdrPlusRequest, *request, meta);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005049 }
5050
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005051 if (hdrPlusRequest) {
5052 // For a HDR+ request, just set the frame parameters.
5053 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
5054 if (rc < 0) {
5055 LOGE("fail to set frame parameters");
5056 pthread_mutex_unlock(&mMutex);
5057 return rc;
5058 }
5059 } else if(request->input_buffer == NULL) {
Thierry Strudel3d639192016-09-09 11:52:26 -07005060 /* Parse the settings:
5061 * - For every request in NORMAL MODE
5062 * - For every request in HFR mode during preview only case
5063 * - For first request of every batch in HFR mode during video
5064 * recording. In batchmode the same settings except frame number is
5065 * repeated in each request of the batch.
5066 */
5067 if (!mBatchSize ||
5068 (mBatchSize && !isVidBufRequested) ||
5069 (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005070 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
Thierry Strudel3d639192016-09-09 11:52:26 -07005071 if (rc < 0) {
5072 LOGE("fail to set frame parameters");
5073 pthread_mutex_unlock(&mMutex);
5074 return rc;
5075 }
5076 }
5077 /* For batchMode HFR, setFrameParameters is not called for every
5078 * request. But only frame number of the latest request is parsed.
5079 * Keep track of first and last frame numbers in a batch so that
5080 * metadata for the frame numbers of batch can be duplicated in
5081         * handleBatchMetadata */
5082 if (mBatchSize) {
5083 if (!mToBeQueuedVidBufs) {
5084 //start of the batch
5085 mFirstFrameNumberInBatch = request->frame_number;
5086 }
5087 if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5088 CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
5089 LOGE("Failed to set the frame number in the parameters");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005090 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005091 return BAD_VALUE;
5092 }
5093 }
5094 if (mNeedSensorRestart) {
5095 /* Unlock the mutex as restartSensor waits on the channels to be
5096 * stopped, which in turn calls stream callback functions -
5097 * handleBufferWithLock and handleMetadataWithLock */
5098 pthread_mutex_unlock(&mMutex);
5099 rc = dynamicUpdateMetaStreamInfo();
5100 if (rc != NO_ERROR) {
5101 LOGE("Restarting the sensor failed");
5102 return BAD_VALUE;
5103 }
5104 mNeedSensorRestart = false;
5105 pthread_mutex_lock(&mMutex);
5106 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005107 if(mResetInstantAEC) {
5108 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5109 CAM_INTF_PARM_INSTANT_AEC, (uint8_t)CAM_AEC_NORMAL_CONVERGENCE);
5110 mResetInstantAEC = false;
5111 }
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005112 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005113 if (request->input_buffer->acquire_fence != -1) {
5114 rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
5115 close(request->input_buffer->acquire_fence);
5116 if (rc != OK) {
5117 LOGE("input buffer sync wait failed %d", rc);
5118 pthread_mutex_unlock(&mMutex);
5119 return rc;
5120 }
5121 }
5122 }
5123
5124 if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
5125 mLastCustIntentFrmNum = frameNumber;
5126 }
5127 /* Update pending request list and pending buffers map */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005128 PendingRequestInfo pendingRequest = {};
Thierry Strudel3d639192016-09-09 11:52:26 -07005129 pendingRequestIterator latestRequest;
5130 pendingRequest.frame_number = frameNumber;
Emilian Peev7650c122017-01-19 08:24:33 -08005131 pendingRequest.num_buffers = depthRequestPresent ?
5132 (request->num_output_buffers - 1 ) : request->num_output_buffers;
Thierry Strudel3d639192016-09-09 11:52:26 -07005133 pendingRequest.request_id = request_id;
5134 pendingRequest.blob_request = blob_request;
5135 pendingRequest.timestamp = 0;
5136 pendingRequest.bUrgentReceived = 0;
5137 if (request->input_buffer) {
5138 pendingRequest.input_buffer =
5139 (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
5140 *(pendingRequest.input_buffer) = *(request->input_buffer);
5141 pInputBuffer = pendingRequest.input_buffer;
5142 } else {
5143 pendingRequest.input_buffer = NULL;
5144 pInputBuffer = NULL;
5145 }
5146
5147 pendingRequest.pipeline_depth = 0;
5148 pendingRequest.partial_result_cnt = 0;
5149 extractJpegMetadata(mCurJpegMeta, request);
5150 pendingRequest.jpegMetadata = mCurJpegMeta;
5151 pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
5152 pendingRequest.shutter_notified = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005153 pendingRequest.capture_intent = mCaptureIntent;
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07005154 if (meta.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
5155 mHybridAeEnable =
5156 meta.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8[0];
5157 }
5158 pendingRequest.hybrid_ae_enable = mHybridAeEnable;
Samuel Ha68ba5172016-12-15 18:41:12 -08005159 /* DevCamDebug metadata processCaptureRequest */
5160 if (meta.exists(DEVCAMDEBUG_META_ENABLE)) {
5161 mDevCamDebugMetaEnable =
5162 meta.find(DEVCAMDEBUG_META_ENABLE).data.u8[0];
5163 }
5164 pendingRequest.DevCamDebug_meta_enable = mDevCamDebugMetaEnable;
5165 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07005166
5167 //extract CAC info
5168 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5169 mCacMode =
5170 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5171 }
5172 pendingRequest.fwkCacMode = mCacMode;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005173 pendingRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005174
5175 PendingBuffersInRequest bufsForCurRequest;
5176 bufsForCurRequest.frame_number = frameNumber;
5177 // Mark current timestamp for the new request
5178 bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005179 bufsForCurRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005180
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005181 if (hdrPlusRequest) {
5182 // Save settings for this request.
5183 pendingHdrPlusRequest.settings = std::make_shared<metadata_buffer_t>();
5184 memcpy(pendingHdrPlusRequest.settings.get(), mParameters, sizeof(metadata_buffer_t));
5185
5186 // Add to pending HDR+ request queue.
5187 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5188 mHdrPlusPendingRequests.emplace(frameNumber, pendingHdrPlusRequest);
5189
5190 ALOGD("%s: frame number %u is an HDR+ request.", __FUNCTION__, frameNumber);
5191 }
5192
Thierry Strudel3d639192016-09-09 11:52:26 -07005193 for (size_t i = 0; i < request->num_output_buffers; i++) {
Emilian Peev7650c122017-01-19 08:24:33 -08005194 if (request->output_buffers[i].stream->data_space ==
5195 HAL_DATASPACE_DEPTH) {
5196 continue;
5197 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005198 RequestedBufferInfo requestedBuf;
5199 memset(&requestedBuf, 0, sizeof(requestedBuf));
5200 requestedBuf.stream = request->output_buffers[i].stream;
5201 requestedBuf.buffer = NULL;
5202 pendingRequest.buffers.push_back(requestedBuf);
5203
5204 // Add to buffer handle the pending buffers list
5205 PendingBufferInfo bufferInfo;
5206 bufferInfo.buffer = request->output_buffers[i].buffer;
5207 bufferInfo.stream = request->output_buffers[i].stream;
5208 bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
5209 QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
5210 LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
5211 frameNumber, bufferInfo.buffer,
5212 channel->getStreamTypeMask(), bufferInfo.stream->format);
5213 }
5214 // Add this request packet into mPendingBuffersMap
5215 mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
5216 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
5217 mPendingBuffersMap.get_num_overall_buffers());
5218
5219 latestRequest = mPendingRequestsList.insert(
5220 mPendingRequestsList.end(), pendingRequest);
5221 if(mFlush) {
5222 LOGI("mFlush is true");
5223 pthread_mutex_unlock(&mMutex);
5224 return NO_ERROR;
5225 }
5226
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005227 // If this is not an HDR+ request, send the request to metadata and each output buffer's
5228 // channel.
5229 if (!hdrPlusRequest) {
5230 int indexUsed;
5231 // Notify metadata channel we receive a request
5232 mMetadataChannel->request(NULL, frameNumber, indexUsed);
Thierry Strudel3d639192016-09-09 11:52:26 -07005233
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005234 if(request->input_buffer != NULL){
5235 LOGD("Input request, frame_number %d", frameNumber);
5236 rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
5237 if (NO_ERROR != rc) {
5238 LOGE("fail to set reproc parameters");
5239 pthread_mutex_unlock(&mMutex);
5240 return rc;
5241 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005242 }
5243
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005244 // Call request on other streams
5245 uint32_t streams_need_metadata = 0;
5246 pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
5247 for (size_t i = 0; i < request->num_output_buffers; i++) {
5248 const camera3_stream_buffer_t& output = request->output_buffers[i];
5249 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5250
5251 if (channel == NULL) {
5252 LOGW("invalid channel pointer for stream");
5253 continue;
5254 }
5255
5256 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
5257 LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
5258 output.buffer, request->input_buffer, frameNumber);
5259 if(request->input_buffer != NULL){
Thierry Strudel3d639192016-09-09 11:52:26 -07005260 rc = channel->request(output.buffer, frameNumber,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005261 pInputBuffer, &mReprocMeta, indexUsed, false, false);
5262 if (rc < 0) {
5263 LOGE("Fail to request on picture channel");
5264 pthread_mutex_unlock(&mMutex);
5265 return rc;
5266 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005267 } else {
Emilian Peev7650c122017-01-19 08:24:33 -08005268 if (HAL_DATASPACE_DEPTH == output.stream->data_space) {
5269 assert(NULL != mDepthChannel);
5270 assert(mDepthChannel == output.stream->priv);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005271
Emilian Peev7650c122017-01-19 08:24:33 -08005272 rc = mDepthChannel->mapBuffer(output.buffer, request->frame_number);
5273 if (rc < 0) {
5274 LOGE("Fail to map on depth buffer");
5275 pthread_mutex_unlock(&mMutex);
5276 return rc;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005277 }
Emilian Peev7650c122017-01-19 08:24:33 -08005278 } else {
5279 LOGD("snapshot request with buffer %p, frame_number %d",
5280 output.buffer, frameNumber);
5281 if (!request->settings) {
5282 rc = channel->request(output.buffer, frameNumber,
5283 NULL, mPrevParameters, indexUsed);
5284 } else {
5285 rc = channel->request(output.buffer, frameNumber,
5286 NULL, mParameters, indexUsed);
5287 }
5288 if (rc < 0) {
5289 LOGE("Fail to request on picture channel");
5290 pthread_mutex_unlock(&mMutex);
5291 return rc;
5292 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005293
Emilian Peev7650c122017-01-19 08:24:33 -08005294 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5295 uint32_t j = 0;
5296 for (j = 0; j < streamsArray.num_streams; j++) {
5297 if (streamsArray.stream_request[j].streamID == streamId) {
5298 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5299 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5300 else
5301 streamsArray.stream_request[j].buf_index = indexUsed;
5302 break;
5303 }
5304 }
5305 if (j == streamsArray.num_streams) {
5306 LOGE("Did not find matching stream to update index");
5307 assert(0);
5308 }
5309
5310 pendingBufferIter->need_metadata = true;
5311 streams_need_metadata++;
5312 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005313 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005314 } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
5315 bool needMetadata = false;
5316 QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
5317 rc = yuvChannel->request(output.buffer, frameNumber,
5318 pInputBuffer, (pInputBuffer ? &mReprocMeta : mParameters),
5319 needMetadata, indexUsed, false, false);
Thierry Strudel3d639192016-09-09 11:52:26 -07005320 if (rc < 0) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005321 LOGE("Fail to request on YUV channel");
Thierry Strudel3d639192016-09-09 11:52:26 -07005322 pthread_mutex_unlock(&mMutex);
5323 return rc;
5324 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005325
5326 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5327 uint32_t j = 0;
5328 for (j = 0; j < streamsArray.num_streams; j++) {
5329 if (streamsArray.stream_request[j].streamID == streamId) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005330 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5331 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5332 else
5333 streamsArray.stream_request[j].buf_index = indexUsed;
5334 break;
5335 }
5336 }
5337 if (j == streamsArray.num_streams) {
5338 LOGE("Did not find matching stream to update index");
5339 assert(0);
5340 }
5341
5342 pendingBufferIter->need_metadata = needMetadata;
5343 if (needMetadata)
5344 streams_need_metadata += 1;
5345 LOGD("calling YUV channel request, need_metadata is %d",
5346 needMetadata);
5347 } else {
5348 LOGD("request with buffer %p, frame_number %d",
5349 output.buffer, frameNumber);
5350
5351 rc = channel->request(output.buffer, frameNumber, indexUsed);
5352
5353 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5354 uint32_t j = 0;
5355 for (j = 0; j < streamsArray.num_streams; j++) {
5356 if (streamsArray.stream_request[j].streamID == streamId) {
5357 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5358 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5359 else
5360 streamsArray.stream_request[j].buf_index = indexUsed;
5361 break;
5362 }
5363 }
5364 if (j == streamsArray.num_streams) {
5365 LOGE("Did not find matching stream to update index");
5366 assert(0);
5367 }
5368
5369 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5370 && mBatchSize) {
5371 mToBeQueuedVidBufs++;
5372 if (mToBeQueuedVidBufs == mBatchSize) {
5373 channel->queueBatchBuf();
5374 }
5375 }
5376 if (rc < 0) {
5377 LOGE("request failed");
5378 pthread_mutex_unlock(&mMutex);
5379 return rc;
5380 }
5381 }
5382 pendingBufferIter++;
5383 }
5384
5385 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5386 itr++) {
5387 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5388
5389 if (channel == NULL) {
5390 LOGE("invalid channel pointer for stream");
5391 assert(0);
5392 return BAD_VALUE;
5393 }
5394
5395 InternalRequest requestedStream;
5396 requestedStream = (*itr);
5397
5398
5399 if ((*itr).stream->format == HAL_PIXEL_FORMAT_BLOB) {
5400 LOGD("snapshot request internally input buffer %p, frame_number %d",
5401 request->input_buffer, frameNumber);
5402 if(request->input_buffer != NULL){
5403 rc = channel->request(NULL, frameNumber,
5404 pInputBuffer, &mReprocMeta, indexUsed, true,
5405 requestedStream.meteringOnly);
5406 if (rc < 0) {
5407 LOGE("Fail to request on picture channel");
5408 pthread_mutex_unlock(&mMutex);
5409 return rc;
5410 }
5411 } else {
5412 LOGD("snapshot request with frame_number %d", frameNumber);
5413 if (!request->settings) {
5414 rc = channel->request(NULL, frameNumber,
5415 NULL, mPrevParameters, indexUsed, true,
5416 requestedStream.meteringOnly);
5417 } else {
5418 rc = channel->request(NULL, frameNumber,
5419 NULL, mParameters, indexUsed, true, requestedStream.meteringOnly);
5420 }
5421 if (rc < 0) {
5422 LOGE("Fail to request on picture channel");
5423 pthread_mutex_unlock(&mMutex);
5424 return rc;
5425 }
5426
5427 if ((*itr).meteringOnly != 1) {
5428 requestedStream.need_metadata = 1;
5429 streams_need_metadata++;
5430 }
5431 }
5432
5433 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5434 uint32_t j = 0;
5435 for (j = 0; j < streamsArray.num_streams; j++) {
5436 if (streamsArray.stream_request[j].streamID == streamId) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005437 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5438 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5439 else
5440 streamsArray.stream_request[j].buf_index = indexUsed;
5441 break;
5442 }
5443 }
5444 if (j == streamsArray.num_streams) {
5445 LOGE("Did not find matching stream to update index");
5446 assert(0);
5447 }
5448
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005449 } else {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005450 LOGE("Internal requests not supported on this stream type");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005451 assert(0);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005452 return INVALID_OPERATION;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005453 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005454 latestRequest->internalRequestList.push_back(requestedStream);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005455 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005456
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005457 //If 2 streams have need_metadata set to true, fail the request, unless
5458 //we copy/reference count the metadata buffer
5459 if (streams_need_metadata > 1) {
5460 LOGE("not supporting request in which two streams requires"
5461 " 2 HAL metadata for reprocessing");
5462 pthread_mutex_unlock(&mMutex);
5463 return -EINVAL;
5464 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005465
Emilian Peev7650c122017-01-19 08:24:33 -08005466 int32_t pdafEnable = depthRequestPresent ? 1 : 0;
5467 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5468 CAM_INTF_META_PDAF_DATA_ENABLE, pdafEnable)) {
5469 LOGE("%s: Failed to enable PDAF data in parameters!", __func__);
5470 pthread_mutex_unlock(&mMutex);
5471 return BAD_VALUE;
5472 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005473 if (request->input_buffer == NULL) {
5474 /* Set the parameters to backend:
5475 * - For every request in NORMAL MODE
5476 * - For every request in HFR mode during preview only case
5477 * - Once every batch in HFR mode during video recording
5478 */
5479 if (!mBatchSize ||
5480 (mBatchSize && !isVidBufRequested) ||
5481 (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
5482 LOGD("set_parms batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
5483 mBatchSize, isVidBufRequested,
5484 mToBeQueuedVidBufs);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005485
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005486 if(mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize)) {
5487 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5488 uint32_t m = 0;
5489 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5490 if (streamsArray.stream_request[k].streamID ==
5491 mBatchedStreamsArray.stream_request[m].streamID)
5492 break;
5493 }
5494 if (m == mBatchedStreamsArray.num_streams) {
5495 mBatchedStreamsArray.stream_request\
5496 [mBatchedStreamsArray.num_streams].streamID =
5497 streamsArray.stream_request[k].streamID;
5498 mBatchedStreamsArray.stream_request\
5499 [mBatchedStreamsArray.num_streams].buf_index =
5500 streamsArray.stream_request[k].buf_index;
5501 mBatchedStreamsArray.num_streams =
5502 mBatchedStreamsArray.num_streams + 1;
5503 }
5504 }
5505 streamsArray = mBatchedStreamsArray;
5506 }
5507 /* Update stream id of all the requested buffers */
5508 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID,
5509 streamsArray)) {
5510 LOGE("Failed to set stream type mask in the parameters");
5511 return BAD_VALUE;
5512 }
5513
5514 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5515 mParameters);
5516 if (rc < 0) {
5517 LOGE("set_parms failed");
5518 }
5519 /* reset to zero coz, the batch is queued */
5520 mToBeQueuedVidBufs = 0;
5521 mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
5522 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
5523 } else if (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs != mBatchSize)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005524 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5525 uint32_t m = 0;
5526 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5527 if (streamsArray.stream_request[k].streamID ==
5528 mBatchedStreamsArray.stream_request[m].streamID)
5529 break;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005530 }
5531 if (m == mBatchedStreamsArray.num_streams) {
5532 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5533 streamID = streamsArray.stream_request[k].streamID;
5534 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5535 buf_index = streamsArray.stream_request[k].buf_index;
5536 mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
5537 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005538 }
5539 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005540 mPendingLiveRequest++;
Thierry Strudel3d639192016-09-09 11:52:26 -07005541 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005542 }
5543
5544 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
5545
5546 mState = STARTED;
5547 // Added a timed condition wait
5548 struct timespec ts;
5549 uint8_t isValidTimeout = 1;
Shuzhen Wangfb961e52016-11-28 11:48:02 -08005550 rc = clock_gettime(CLOCK_MONOTONIC, &ts);
Thierry Strudel3d639192016-09-09 11:52:26 -07005551 if (rc < 0) {
5552 isValidTimeout = 0;
5553 LOGE("Error reading the real time clock!!");
5554 }
5555 else {
5556 // Make timeout as 5 sec for request to be honored
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005557 int64_t timeout = 5;
5558 {
5559 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5560 // If there is a pending HDR+ request, the following requests may be blocked until the
5561 // HDR+ request is done. So allow a longer timeout.
5562 if (mHdrPlusPendingRequests.size() > 0) {
5563 timeout = MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT;
5564 }
5565 }
5566 ts.tv_sec += timeout;
Thierry Strudel3d639192016-09-09 11:52:26 -07005567 }
5568 //Block on conditional variable
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005569 while ((mPendingLiveRequest >= mMinInFlightRequests) && !pInputBuffer &&
Thierry Strudel3d639192016-09-09 11:52:26 -07005570 (mState != ERROR) && (mState != DEINIT)) {
5571 if (!isValidTimeout) {
5572 LOGD("Blocking on conditional wait");
5573 pthread_cond_wait(&mRequestCond, &mMutex);
5574 }
5575 else {
5576 LOGD("Blocking on timed conditional wait");
5577 rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
5578 if (rc == ETIMEDOUT) {
5579 rc = -ENODEV;
5580 LOGE("Unblocked on timeout!!!!");
5581 break;
5582 }
5583 }
5584 LOGD("Unblocked");
5585 if (mWokenUpByDaemon) {
5586 mWokenUpByDaemon = false;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005587 if (mPendingLiveRequest < mMaxInFlightRequests)
Thierry Strudel3d639192016-09-09 11:52:26 -07005588 break;
5589 }
5590 }
5591 pthread_mutex_unlock(&mMutex);
5592
5593 return rc;
5594}
5595
5596/*===========================================================================
5597 * FUNCTION : dump
5598 *
 * DESCRIPTION: Dumps pending requests, pending buffers and the pending
 *              frame-drop list to the given file descriptor (dumpsys).
5600 *
5601 * PARAMETERS :
5602 *
5603 *
5604 * RETURN :
5605 *==========================================================================*/
5606void QCamera3HardwareInterface::dump(int fd)
5607{
5608 pthread_mutex_lock(&mMutex);
5609 dprintf(fd, "\n Camera HAL3 information Begin \n");
5610
5611 dprintf(fd, "\nNumber of pending requests: %zu \n",
5612 mPendingRequestsList.size());
5613 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
5614 dprintf(fd, " Frame | Number of Buffers | Req Id: | Blob Req | Input buffer present\n");
5615 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
5616 for(pendingRequestIterator i = mPendingRequestsList.begin();
5617 i != mPendingRequestsList.end(); i++) {
5618 dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
5619 i->frame_number, i->num_buffers, i->request_id, i->blob_request,
5620 i->input_buffer);
5621 }
5622 dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
5623 mPendingBuffersMap.get_num_overall_buffers());
5624 dprintf(fd, "-------+------------------\n");
5625 dprintf(fd, " Frame | Stream type mask \n");
5626 dprintf(fd, "-------+------------------\n");
5627 for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
5628 for(auto &j : req.mPendingBufferList) {
5629 QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
5630 dprintf(fd, " %5d | %11d \n",
5631 req.frame_number, channel->getStreamTypeMask());
5632 }
5633 }
5634 dprintf(fd, "-------+------------------\n");
5635
5636 dprintf(fd, "\nPending frame drop list: %zu\n",
5637 mPendingFrameDropList.size());
5638 dprintf(fd, "-------+-----------\n");
5639 dprintf(fd, " Frame | Stream ID \n");
5640 dprintf(fd, "-------+-----------\n");
5641 for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
5642 i != mPendingFrameDropList.end(); i++) {
5643 dprintf(fd, " %5d | %9d \n",
5644 i->frame_number, i->stream_ID);
5645 }
5646 dprintf(fd, "-------+-----------\n");
5647
5648 dprintf(fd, "\n Camera HAL3 information End \n");
5649
5650 /* use dumpsys media.camera as trigger to send update debug level event */
5651 mUpdateDebugLevel = true;
5652 pthread_mutex_unlock(&mMutex);
5653 return;
5654}
5655
5656/*===========================================================================
5657 * FUNCTION : flush
5658 *
5659 * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
5660 * conditionally restarts channels
5661 *
5662 * PARAMETERS :
5663 * @ restartChannels: re-start all channels
5664 *
5665 *
5666 * RETURN :
5667 * 0 on success
5668 * Error code on failure
5669 *==========================================================================*/
5670int QCamera3HardwareInterface::flush(bool restartChannels)
5671{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08005672 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07005673 int32_t rc = NO_ERROR;
5674
5675 LOGD("Unblocking Process Capture Request");
5676 pthread_mutex_lock(&mMutex);
5677 mFlush = true;
5678 pthread_mutex_unlock(&mMutex);
5679
5680 rc = stopAllChannels();
5681 // unlink of dualcam
5682 if (mIsDeviceLinked) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005683 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
5684 &m_pDualCamCmdPtr->bundle_info;
5685 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005686 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
5687 pthread_mutex_lock(&gCamLock);
5688
5689 if (mIsMainCamera == 1) {
5690 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
5691 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005692 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07005693 // related session id should be session id of linked session
5694 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5695 } else {
5696 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
5697 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005698 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07005699 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5700 }
5701 pthread_mutex_unlock(&gCamLock);
5702
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005703 rc = mCameraHandle->ops->set_dual_cam_cmd(
5704 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07005705 if (rc < 0) {
5706 LOGE("Dualcam: Unlink failed, but still proceed to close");
5707 }
5708 }
5709
5710 if (rc < 0) {
5711 LOGE("stopAllChannels failed");
5712 return rc;
5713 }
5714 if (mChannelHandle) {
5715 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
5716 mChannelHandle);
5717 }
5718
5719 // Reset bundle info
5720 rc = setBundleInfo();
5721 if (rc < 0) {
5722 LOGE("setBundleInfo failed %d", rc);
5723 return rc;
5724 }
5725
5726 // Mutex Lock
5727 pthread_mutex_lock(&mMutex);
5728
5729 // Unblock process_capture_request
5730 mPendingLiveRequest = 0;
5731 pthread_cond_signal(&mRequestCond);
5732
5733 rc = notifyErrorForPendingRequests();
5734 if (rc < 0) {
5735 LOGE("notifyErrorForPendingRequests failed");
5736 pthread_mutex_unlock(&mMutex);
5737 return rc;
5738 }
5739
5740 mFlush = false;
5741
5742 // Start the Streams/Channels
5743 if (restartChannels) {
5744 rc = startAllChannels();
5745 if (rc < 0) {
5746 LOGE("startAllChannels failed");
5747 pthread_mutex_unlock(&mMutex);
5748 return rc;
5749 }
5750 }
5751
5752 if (mChannelHandle) {
5753 mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
5754 mChannelHandle);
5755 if (rc < 0) {
5756 LOGE("start_channel failed");
5757 pthread_mutex_unlock(&mMutex);
5758 return rc;
5759 }
5760 }
5761
5762 pthread_mutex_unlock(&mMutex);
5763
5764 return 0;
5765}
5766
5767/*===========================================================================
5768 * FUNCTION : flushPerf
5769 *
5770 * DESCRIPTION: This is the performance optimization version of flush that does
5771 * not use stream off, rather flushes the system
5772 *
5773 * PARAMETERS :
5774 *
5775 *
5776 * RETURN : 0 : success
5777 * -EINVAL: input is malformed (device is not valid)
5778 * -ENODEV: if the device has encountered a serious error
5779 *==========================================================================*/
5780int QCamera3HardwareInterface::flushPerf()
5781{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08005782 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07005783 int32_t rc = 0;
5784 struct timespec timeout;
5785 bool timed_wait = false;
5786
5787 pthread_mutex_lock(&mMutex);
5788 mFlushPerf = true;
5789 mPendingBuffersMap.numPendingBufsAtFlush =
5790 mPendingBuffersMap.get_num_overall_buffers();
5791 LOGD("Calling flush. Wait for %d buffers to return",
5792 mPendingBuffersMap.numPendingBufsAtFlush);
5793
5794 /* send the flush event to the backend */
5795 rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
5796 if (rc < 0) {
5797 LOGE("Error in flush: IOCTL failure");
5798 mFlushPerf = false;
5799 pthread_mutex_unlock(&mMutex);
5800 return -ENODEV;
5801 }
5802
5803 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
5804 LOGD("No pending buffers in HAL, return flush");
5805 mFlushPerf = false;
5806 pthread_mutex_unlock(&mMutex);
5807 return rc;
5808 }
5809
5810 /* wait on a signal that buffers were received */
Shuzhen Wangfb961e52016-11-28 11:48:02 -08005811 rc = clock_gettime(CLOCK_MONOTONIC, &timeout);
Thierry Strudel3d639192016-09-09 11:52:26 -07005812 if (rc < 0) {
5813 LOGE("Error reading the real time clock, cannot use timed wait");
5814 } else {
5815 timeout.tv_sec += FLUSH_TIMEOUT;
5816 timed_wait = true;
5817 }
5818
5819 //Block on conditional variable
5820 while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
5821 LOGD("Waiting on mBuffersCond");
5822 if (!timed_wait) {
5823 rc = pthread_cond_wait(&mBuffersCond, &mMutex);
5824 if (rc != 0) {
5825 LOGE("pthread_cond_wait failed due to rc = %s",
5826 strerror(rc));
5827 break;
5828 }
5829 } else {
5830 rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
5831 if (rc != 0) {
5832 LOGE("pthread_cond_timedwait failed due to rc = %s",
5833 strerror(rc));
5834 break;
5835 }
5836 }
5837 }
5838 if (rc != 0) {
5839 mFlushPerf = false;
5840 pthread_mutex_unlock(&mMutex);
5841 return -ENODEV;
5842 }
5843
5844 LOGD("Received buffers, now safe to return them");
5845
5846 //make sure the channels handle flush
5847 //currently only required for the picture channel to release snapshot resources
5848 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5849 it != mStreamInfo.end(); it++) {
5850 QCamera3Channel *channel = (*it)->channel;
5851 if (channel) {
5852 rc = channel->flush();
5853 if (rc) {
5854 LOGE("Flushing the channels failed with error %d", rc);
5855 // even though the channel flush failed we need to continue and
5856 // return the buffers we have to the framework, however the return
5857 // value will be an error
5858 rc = -ENODEV;
5859 }
5860 }
5861 }
5862
5863 /* notify the frameworks and send errored results */
5864 rc = notifyErrorForPendingRequests();
5865 if (rc < 0) {
5866 LOGE("notifyErrorForPendingRequests failed");
5867 pthread_mutex_unlock(&mMutex);
5868 return rc;
5869 }
5870
5871 //unblock process_capture_request
5872 mPendingLiveRequest = 0;
5873 unblockRequestIfNecessary();
5874
5875 mFlushPerf = false;
5876 pthread_mutex_unlock(&mMutex);
5877 LOGD ("Flush Operation complete. rc = %d", rc);
5878 return rc;
5879}
5880
5881/*===========================================================================
5882 * FUNCTION : handleCameraDeviceError
5883 *
5884 * DESCRIPTION: This function calls internal flush and notifies the error to
5885 * framework and updates the state variable.
5886 *
5887 * PARAMETERS : None
5888 *
5889 * RETURN : NO_ERROR on Success
5890 * Error code on failure
5891 *==========================================================================*/
int32_t QCamera3HardwareInterface::handleCameraDeviceError()
{
    int32_t rc = NO_ERROR;

    {
        // Hold mFlushLock across the whole error-handling sequence; mMutex
        // is taken only for the short state reads/writes and is never held
        // across the flush() call below.
        Mutex::Autolock lock(mFlushLock);
        pthread_mutex_lock(&mMutex);
        if (mState != ERROR) {
            //if mState != ERROR, nothing to be done
            pthread_mutex_unlock(&mMutex);
            return NO_ERROR;
        }
        pthread_mutex_unlock(&mMutex);

        // Internal flush without restarting channels. Best effort: even if
        // it fails we still transition to DEINIT and notify the framework.
        rc = flush(false /* restart channels */);
        if (NO_ERROR != rc) {
            LOGE("internal flush to handle mState = ERROR failed");
        }

        pthread_mutex_lock(&mMutex);
        mState = DEINIT;
        pthread_mutex_unlock(&mMutex);
    }

    // Report an unrecoverable device error to the framework (frame_number 0,
    // no associated stream).
    camera3_notify_msg_t notify_msg;
    memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
    notify_msg.type = CAMERA3_MSG_ERROR;
    notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
    notify_msg.message.error.error_stream = NULL;
    notify_msg.message.error.frame_number = 0;
    orchestrateNotify(&notify_msg);

    return rc;
}
5926
5927/*===========================================================================
5928 * FUNCTION : captureResultCb
5929 *
5930 * DESCRIPTION: Callback handler for all capture result
5931 * (streams, as well as metadata)
5932 *
5933 * PARAMETERS :
5934 * @metadata : metadata information
5935 * @buffer : actual gralloc buffer to be returned to frameworks.
5936 * NULL if metadata.
5937 *
5938 * RETURN : NONE
5939 *==========================================================================*/
void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
                camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
{
    if (metadata_buf) {
        // Snapshot mBatchSize under mMutex, then release the lock before
        // dispatching so the handlers below are free to acquire it.
        pthread_mutex_lock(&mMutex);
        uint8_t batchSize = mBatchSize;
        pthread_mutex_unlock(&mMutex);
        if (batchSize) {
            // HFR batch mode: handleBatchMetadata is called without mMutex.
            handleBatchMetadata(metadata_buf,
                    true /* free_and_bufdone_meta_buf */);
        } else { /* mBatchSize = 0 */
            hdrPlusPerfLock(metadata_buf);
            pthread_mutex_lock(&mMutex);
            handleMetadataWithLock(metadata_buf,
                    true /* free_and_bufdone_meta_buf */,
                    false /* first frame of batch metadata */ ,
                    NULL);
            pthread_mutex_unlock(&mMutex);
        }
    } else if (isInputBuffer) {
        // Completion of a reprocess input buffer.
        pthread_mutex_lock(&mMutex);
        handleInputBufferWithLock(frame_number);
        pthread_mutex_unlock(&mMutex);
    } else {
        // Completion of a regular output stream buffer.
        pthread_mutex_lock(&mMutex);
        handleBufferWithLock(buffer, frame_number);
        pthread_mutex_unlock(&mMutex);
    }
    return;
}
5970
5971/*===========================================================================
5972 * FUNCTION : getReprocessibleOutputStreamId
5973 *
5974 * DESCRIPTION: Get source output stream id for the input reprocess stream
5975 * based on size and format, which would be the largest
5976 * output stream if an input stream exists.
5977 *
5978 * PARAMETERS :
5979 * @id : return the stream id if found
5980 *
5981 * RETURN : int32_t type of status
5982 * NO_ERROR -- success
5983 * none-zero failure code
5984 *==========================================================================*/
5985int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
5986{
5987 /* check if any output or bidirectional stream with the same size and format
5988 and return that stream */
5989 if ((mInputStreamInfo.dim.width > 0) &&
5990 (mInputStreamInfo.dim.height > 0)) {
5991 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5992 it != mStreamInfo.end(); it++) {
5993
5994 camera3_stream_t *stream = (*it)->stream;
5995 if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
5996 (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
5997 (stream->format == mInputStreamInfo.format)) {
5998 // Usage flag for an input stream and the source output stream
5999 // may be different.
6000 LOGD("Found reprocessible output stream! %p", *it);
6001 LOGD("input stream usage 0x%x, current stream usage 0x%x",
6002 stream->usage, mInputStreamInfo.usage);
6003
6004 QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
6005 if (channel != NULL && channel->mStreams[0]) {
6006 id = channel->mStreams[0]->getMyServerID();
6007 return NO_ERROR;
6008 }
6009 }
6010 }
6011 } else {
6012 LOGD("No input stream, so no reprocessible output stream");
6013 }
6014 return NAME_NOT_FOUND;
6015}
6016
6017/*===========================================================================
6018 * FUNCTION : lookupFwkName
6019 *
6020 * DESCRIPTION: In case the enum is not same in fwk and backend
6021 * make sure the parameter is correctly propogated
6022 *
6023 * PARAMETERS :
6024 * @arr : map between the two enums
6025 * @len : len of the map
6026 * @hal_name : name of the hal_parm to map
6027 *
6028 * RETURN : int type of status
6029 * fwk_name -- success
6030 * none-zero failure code
6031 *==========================================================================*/
6032template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
6033 size_t len, halType hal_name)
6034{
6035
6036 for (size_t i = 0; i < len; i++) {
6037 if (arr[i].hal_name == hal_name) {
6038 return arr[i].fwk_name;
6039 }
6040 }
6041
6042 /* Not able to find matching framework type is not necessarily
6043 * an error case. This happens when mm-camera supports more attributes
6044 * than the frameworks do */
6045 LOGH("Cannot find matching framework type");
6046 return NAME_NOT_FOUND;
6047}
6048
6049/*===========================================================================
6050 * FUNCTION : lookupHalName
6051 *
6052 * DESCRIPTION: In case the enum is not same in fwk and backend
6053 * make sure the parameter is correctly propogated
6054 *
6055 * PARAMETERS :
6056 * @arr : map between the two enums
6057 * @len : len of the map
6058 * @fwk_name : name of the hal_parm to map
6059 *
6060 * RETURN : int32_t type of status
6061 * hal_name -- success
6062 * none-zero failure code
6063 *==========================================================================*/
6064template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
6065 size_t len, fwkType fwk_name)
6066{
6067 for (size_t i = 0; i < len; i++) {
6068 if (arr[i].fwk_name == fwk_name) {
6069 return arr[i].hal_name;
6070 }
6071 }
6072
6073 LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
6074 return NAME_NOT_FOUND;
6075}
6076
6077/*===========================================================================
6078 * FUNCTION : lookupProp
6079 *
6080 * DESCRIPTION: lookup a value by its name
6081 *
6082 * PARAMETERS :
6083 * @arr : map between the two enums
6084 * @len : size of the map
6085 * @name : name to be looked up
6086 *
6087 * RETURN : Value if found
6088 * CAM_CDS_MODE_MAX if not found
6089 *==========================================================================*/
6090template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
6091 size_t len, const char *name)
6092{
6093 if (name) {
6094 for (size_t i = 0; i < len; i++) {
6095 if (!strcmp(arr[i].desc, name)) {
6096 return arr[i].val;
6097 }
6098 }
6099 }
6100 return CAM_CDS_MODE_MAX;
6101}
6102
6103/*===========================================================================
6104 *
6105 * DESCRIPTION:
6106 *
6107 * PARAMETERS :
6108 * @metadata : metadata information from callback
6109 * @timestamp: metadata buffer timestamp
6110 * @request_id: request id
6111 * @jpegMetadata: additional jpeg metadata
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006112 * @hybrid_ae_enable: whether hybrid ae is enabled
Samuel Ha68ba5172016-12-15 18:41:12 -08006113 * @DevCamDebug_meta_enable: enable DevCamDebug meta
6114 * // DevCamDebug metadata end
Thierry Strudel3d639192016-09-09 11:52:26 -07006115 * @pprocDone: whether internal offline postprocsesing is done
6116 *
6117 * RETURN : camera_metadata_t*
6118 * metadata in a format specified by fwk
6119 *==========================================================================*/
6120camera_metadata_t*
6121QCamera3HardwareInterface::translateFromHalMetadata(
6122 metadata_buffer_t *metadata,
6123 nsecs_t timestamp,
6124 int32_t request_id,
6125 const CameraMetadata& jpegMetadata,
6126 uint8_t pipeline_depth,
6127 uint8_t capture_intent,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006128 uint8_t hybrid_ae_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08006129 /* DevCamDebug metadata translateFromHalMetadata argument */
6130 uint8_t DevCamDebug_meta_enable,
6131 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07006132 bool pprocDone,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006133 uint8_t fwk_cacMode,
6134 bool firstMetadataInBatch)
Thierry Strudel3d639192016-09-09 11:52:26 -07006135{
6136 CameraMetadata camMetadata;
6137 camera_metadata_t *resultMetadata;
6138
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006139 if (mBatchSize && !firstMetadataInBatch) {
6140 /* In batch mode, use cached metadata from the first metadata
6141 in the batch */
6142 camMetadata.clear();
6143 camMetadata = mCachedMetadata;
6144 }
6145
Thierry Strudel3d639192016-09-09 11:52:26 -07006146 if (jpegMetadata.entryCount())
6147 camMetadata.append(jpegMetadata);
6148
6149 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
6150 camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
6151 camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pipeline_depth, 1);
6152 camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &capture_intent, 1);
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006153 camMetadata.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae_enable, 1);
Samuel Ha68ba5172016-12-15 18:41:12 -08006154 if (mBatchSize == 0) {
6155 // DevCamDebug metadata translateFromHalMetadata. Only update this one for non-HFR mode
6156 camMetadata.update(DEVCAMDEBUG_META_ENABLE, &DevCamDebug_meta_enable, 1);
6157 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006158
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006159 if (mBatchSize && !firstMetadataInBatch) {
6160 /* In batch mode, use cached metadata instead of parsing metadata buffer again */
6161 resultMetadata = camMetadata.release();
6162 return resultMetadata;
6163 }
6164
Samuel Ha68ba5172016-12-15 18:41:12 -08006165 // atrace_begin(ATRACE_TAG_ALWAYS, "DevCamDebugInfo");
6166 // Only update DevCameraDebug metadta conditionally: non-HFR mode and it is enabled.
6167 if (mBatchSize == 0 && DevCamDebug_meta_enable != 0) {
6168 // DevCamDebug metadata translateFromHalMetadata AF
6169 IF_META_AVAILABLE(int32_t, DevCamDebug_af_lens_position,
6170 CAM_INTF_META_DEV_CAM_AF_LENS_POSITION, metadata) {
6171 int32_t fwk_DevCamDebug_af_lens_position = *DevCamDebug_af_lens_position;
6172 camMetadata.update(DEVCAMDEBUG_AF_LENS_POSITION, &fwk_DevCamDebug_af_lens_position, 1);
6173 }
6174 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_confidence,
6175 CAM_INTF_META_DEV_CAM_AF_TOF_CONFIDENCE, metadata) {
6176 int32_t fwk_DevCamDebug_af_tof_confidence = *DevCamDebug_af_tof_confidence;
6177 camMetadata.update(DEVCAMDEBUG_AF_TOF_CONFIDENCE, &fwk_DevCamDebug_af_tof_confidence, 1);
6178 }
6179 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_distance,
6180 CAM_INTF_META_DEV_CAM_AF_TOF_DISTANCE, metadata) {
6181 int32_t fwk_DevCamDebug_af_tof_distance = *DevCamDebug_af_tof_distance;
6182 camMetadata.update(DEVCAMDEBUG_AF_TOF_DISTANCE, &fwk_DevCamDebug_af_tof_distance, 1);
6183 }
6184 IF_META_AVAILABLE(int32_t, DevCamDebug_af_luma,
6185 CAM_INTF_META_DEV_CAM_AF_LUMA, metadata) {
6186 int32_t fwk_DevCamDebug_af_luma = *DevCamDebug_af_luma;
6187 camMetadata.update(DEVCAMDEBUG_AF_LUMA, &fwk_DevCamDebug_af_luma, 1);
6188 }
6189 IF_META_AVAILABLE(int32_t, DevCamDebug_af_haf_state,
6190 CAM_INTF_META_DEV_CAM_AF_HAF_STATE, metadata) {
6191 int32_t fwk_DevCamDebug_af_haf_state = *DevCamDebug_af_haf_state;
6192 camMetadata.update(DEVCAMDEBUG_AF_HAF_STATE, &fwk_DevCamDebug_af_haf_state, 1);
6193 }
6194 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_target_pos,
6195 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_TARGET_POS, metadata) {
6196 int32_t fwk_DevCamDebug_af_monitor_pdaf_target_pos =
6197 *DevCamDebug_af_monitor_pdaf_target_pos;
6198 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
6199 &fwk_DevCamDebug_af_monitor_pdaf_target_pos, 1);
6200 }
6201 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_confidence,
6202 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_CONFIDENCE, metadata) {
6203 int32_t fwk_DevCamDebug_af_monitor_pdaf_confidence =
6204 *DevCamDebug_af_monitor_pdaf_confidence;
6205 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
6206 &fwk_DevCamDebug_af_monitor_pdaf_confidence, 1);
6207 }
6208 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_refocus,
6209 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_REFOCUS, metadata) {
6210 int32_t fwk_DevCamDebug_af_monitor_pdaf_refocus = *DevCamDebug_af_monitor_pdaf_refocus;
6211 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
6212 &fwk_DevCamDebug_af_monitor_pdaf_refocus, 1);
6213 }
6214 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_target_pos,
6215 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_TARGET_POS, metadata) {
6216 int32_t fwk_DevCamDebug_af_monitor_tof_target_pos =
6217 *DevCamDebug_af_monitor_tof_target_pos;
6218 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
6219 &fwk_DevCamDebug_af_monitor_tof_target_pos, 1);
6220 }
6221 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_confidence,
6222 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_CONFIDENCE, metadata) {
6223 int32_t fwk_DevCamDebug_af_monitor_tof_confidence =
6224 *DevCamDebug_af_monitor_tof_confidence;
6225 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
6226 &fwk_DevCamDebug_af_monitor_tof_confidence, 1);
6227 }
6228 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_refocus,
6229 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_REFOCUS, metadata) {
6230 int32_t fwk_DevCamDebug_af_monitor_tof_refocus = *DevCamDebug_af_monitor_tof_refocus;
6231 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
6232 &fwk_DevCamDebug_af_monitor_tof_refocus, 1);
6233 }
6234 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_type_select,
6235 CAM_INTF_META_DEV_CAM_AF_MONITOR_TYPE_SELECT, metadata) {
6236 int32_t fwk_DevCamDebug_af_monitor_type_select = *DevCamDebug_af_monitor_type_select;
6237 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
6238 &fwk_DevCamDebug_af_monitor_type_select, 1);
6239 }
6240 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_refocus,
6241 CAM_INTF_META_DEV_CAM_AF_MONITOR_REFOCUS, metadata) {
6242 int32_t fwk_DevCamDebug_af_monitor_refocus = *DevCamDebug_af_monitor_refocus;
6243 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_REFOCUS,
6244 &fwk_DevCamDebug_af_monitor_refocus, 1);
6245 }
6246 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_target_pos,
6247 CAM_INTF_META_DEV_CAM_AF_MONITOR_TARGET_POS, metadata) {
6248 int32_t fwk_DevCamDebug_af_monitor_target_pos = *DevCamDebug_af_monitor_target_pos;
6249 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
6250 &fwk_DevCamDebug_af_monitor_target_pos, 1);
6251 }
6252 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_target_pos,
6253 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_TARGET_POS, metadata) {
6254 int32_t fwk_DevCamDebug_af_search_pdaf_target_pos =
6255 *DevCamDebug_af_search_pdaf_target_pos;
6256 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
6257 &fwk_DevCamDebug_af_search_pdaf_target_pos, 1);
6258 }
6259 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_next_pos,
6260 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEXT_POS, metadata) {
6261 int32_t fwk_DevCamDebug_af_search_pdaf_next_pos = *DevCamDebug_af_search_pdaf_next_pos;
6262 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
6263 &fwk_DevCamDebug_af_search_pdaf_next_pos, 1);
6264 }
6265 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_near_pos,
6266 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEAR_POS, metadata) {
6267 int32_t fwk_DevCamDebug_af_search_pdaf_near_pos = *DevCamDebug_af_search_pdaf_near_pos;
6268 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
6269 &fwk_DevCamDebug_af_search_pdaf_near_pos, 1);
6270 }
6271 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_far_pos,
6272 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_FAR_POS, metadata) {
6273 int32_t fwk_DevCamDebug_af_search_pdaf_far_pos = *DevCamDebug_af_search_pdaf_far_pos;
6274 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
6275 &fwk_DevCamDebug_af_search_pdaf_far_pos, 1);
6276 }
6277 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_confidence,
6278 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_CONFIDENCE, metadata) {
6279 int32_t fwk_DevCamDebug_af_search_pdaf_confidence = *DevCamDebug_af_search_pdaf_confidence;
6280 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
6281 &fwk_DevCamDebug_af_search_pdaf_confidence, 1);
6282 }
6283 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_target_pos,
6284 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_TARGET_POS, metadata) {
6285 int32_t fwk_DevCamDebug_af_search_tof_target_pos =
6286 *DevCamDebug_af_search_tof_target_pos;
6287 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
6288 &fwk_DevCamDebug_af_search_tof_target_pos, 1);
6289 }
6290 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_next_pos,
6291 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEXT_POS, metadata) {
6292 int32_t fwk_DevCamDebug_af_search_tof_next_pos = *DevCamDebug_af_search_tof_next_pos;
6293 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
6294 &fwk_DevCamDebug_af_search_tof_next_pos, 1);
6295 }
6296 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_near_pos,
6297 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEAR_POS, metadata) {
6298 int32_t fwk_DevCamDebug_af_search_tof_near_pos = *DevCamDebug_af_search_tof_near_pos;
6299 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
6300 &fwk_DevCamDebug_af_search_tof_near_pos, 1);
6301 }
6302 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_far_pos,
6303 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_FAR_POS, metadata) {
6304 int32_t fwk_DevCamDebug_af_search_tof_far_pos = *DevCamDebug_af_search_tof_far_pos;
6305 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
6306 &fwk_DevCamDebug_af_search_tof_far_pos, 1);
6307 }
6308 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_confidence,
6309 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_CONFIDENCE, metadata) {
6310 int32_t fwk_DevCamDebug_af_search_tof_confidence = *DevCamDebug_af_search_tof_confidence;
6311 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
6312 &fwk_DevCamDebug_af_search_tof_confidence, 1);
6313 }
6314 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_type_select,
6315 CAM_INTF_META_DEV_CAM_AF_SEARCH_TYPE_SELECT, metadata) {
6316 int32_t fwk_DevCamDebug_af_search_type_select = *DevCamDebug_af_search_type_select;
6317 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
6318 &fwk_DevCamDebug_af_search_type_select, 1);
6319 }
6320 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_next_pos,
6321 CAM_INTF_META_DEV_CAM_AF_SEARCH_NEXT_POS, metadata) {
6322 int32_t fwk_DevCamDebug_af_search_next_pos = *DevCamDebug_af_search_next_pos;
6323 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
6324 &fwk_DevCamDebug_af_search_next_pos, 1);
6325 }
6326 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_target_pos,
6327 CAM_INTF_META_DEV_CAM_AF_SEARCH_TARGET_POS, metadata) {
6328 int32_t fwk_DevCamDebug_af_search_target_pos = *DevCamDebug_af_search_target_pos;
6329 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
6330 &fwk_DevCamDebug_af_search_target_pos, 1);
6331 }
6332 // DevCamDebug metadata translateFromHalMetadata AEC
6333 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_target_luma,
6334 CAM_INTF_META_DEV_CAM_AEC_TARGET_LUMA, metadata) {
6335 int32_t fwk_DevCamDebug_aec_target_luma = *DevCamDebug_aec_target_luma;
6336 camMetadata.update(DEVCAMDEBUG_AEC_TARGET_LUMA, &fwk_DevCamDebug_aec_target_luma, 1);
6337 }
6338 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_comp_luma,
6339 CAM_INTF_META_DEV_CAM_AEC_COMP_LUMA, metadata) {
6340 int32_t fwk_DevCamDebug_aec_comp_luma = *DevCamDebug_aec_comp_luma;
6341 camMetadata.update(DEVCAMDEBUG_AEC_COMP_LUMA, &fwk_DevCamDebug_aec_comp_luma, 1);
6342 }
6343 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_avg_luma,
6344 CAM_INTF_META_DEV_CAM_AEC_AVG_LUMA, metadata) {
6345 int32_t fwk_DevCamDebug_aec_avg_luma = *DevCamDebug_aec_avg_luma;
6346 camMetadata.update(DEVCAMDEBUG_AEC_AVG_LUMA, &fwk_DevCamDebug_aec_avg_luma, 1);
6347 }
6348 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_cur_luma,
6349 CAM_INTF_META_DEV_CAM_AEC_CUR_LUMA, metadata) {
6350 int32_t fwk_DevCamDebug_aec_cur_luma = *DevCamDebug_aec_cur_luma;
6351 camMetadata.update(DEVCAMDEBUG_AEC_CUR_LUMA, &fwk_DevCamDebug_aec_cur_luma, 1);
6352 }
6353 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_linecount,
6354 CAM_INTF_META_DEV_CAM_AEC_LINECOUNT, metadata) {
6355 int32_t fwk_DevCamDebug_aec_linecount = *DevCamDebug_aec_linecount;
6356 camMetadata.update(DEVCAMDEBUG_AEC_LINECOUNT, &fwk_DevCamDebug_aec_linecount, 1);
6357 }
6358 IF_META_AVAILABLE(float, DevCamDebug_aec_real_gain,
6359 CAM_INTF_META_DEV_CAM_AEC_REAL_GAIN, metadata) {
6360 float fwk_DevCamDebug_aec_real_gain = *DevCamDebug_aec_real_gain;
6361 camMetadata.update(DEVCAMDEBUG_AEC_REAL_GAIN, &fwk_DevCamDebug_aec_real_gain, 1);
6362 }
6363 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_exp_index,
6364 CAM_INTF_META_DEV_CAM_AEC_EXP_INDEX, metadata) {
6365 int32_t fwk_DevCamDebug_aec_exp_index = *DevCamDebug_aec_exp_index;
6366 camMetadata.update(DEVCAMDEBUG_AEC_EXP_INDEX, &fwk_DevCamDebug_aec_exp_index, 1);
6367 }
6368 IF_META_AVAILABLE(float, DevCamDebug_aec_lux_idx,
6369 CAM_INTF_META_DEV_CAM_AEC_LUX_IDX, metadata) {
6370 float fwk_DevCamDebug_aec_lux_idx = *DevCamDebug_aec_lux_idx;
6371 camMetadata.update(DEVCAMDEBUG_AEC_LUX_IDX, &fwk_DevCamDebug_aec_lux_idx, 1);
6372 }
6373 // DevCamDebug metadata translateFromHalMetadata AWB
6374 IF_META_AVAILABLE(float, DevCamDebug_awb_r_gain,
6375 CAM_INTF_META_DEV_CAM_AWB_R_GAIN, metadata) {
6376 float fwk_DevCamDebug_awb_r_gain = *DevCamDebug_awb_r_gain;
6377 camMetadata.update(DEVCAMDEBUG_AWB_R_GAIN, &fwk_DevCamDebug_awb_r_gain, 1);
6378 }
6379 IF_META_AVAILABLE(float, DevCamDebug_awb_g_gain,
6380 CAM_INTF_META_DEV_CAM_AWB_G_GAIN, metadata) {
6381 float fwk_DevCamDebug_awb_g_gain = *DevCamDebug_awb_g_gain;
6382 camMetadata.update(DEVCAMDEBUG_AWB_G_GAIN, &fwk_DevCamDebug_awb_g_gain, 1);
6383 }
6384 IF_META_AVAILABLE(float, DevCamDebug_awb_b_gain,
6385 CAM_INTF_META_DEV_CAM_AWB_B_GAIN, metadata) {
6386 float fwk_DevCamDebug_awb_b_gain = *DevCamDebug_awb_b_gain;
6387 camMetadata.update(DEVCAMDEBUG_AWB_B_GAIN, &fwk_DevCamDebug_awb_b_gain, 1);
6388 }
6389 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_cct,
6390 CAM_INTF_META_DEV_CAM_AWB_CCT, metadata) {
6391 int32_t fwk_DevCamDebug_awb_cct = *DevCamDebug_awb_cct;
6392 camMetadata.update(DEVCAMDEBUG_AWB_CCT, &fwk_DevCamDebug_awb_cct, 1);
6393 }
6394 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_decision,
6395 CAM_INTF_META_DEV_CAM_AWB_DECISION, metadata) {
6396 int32_t fwk_DevCamDebug_awb_decision = *DevCamDebug_awb_decision;
6397 camMetadata.update(DEVCAMDEBUG_AWB_DECISION, &fwk_DevCamDebug_awb_decision, 1);
6398 }
6399 }
6400 // atrace_end(ATRACE_TAG_ALWAYS);
6401
Thierry Strudel3d639192016-09-09 11:52:26 -07006402 IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
6403 int64_t fwk_frame_number = *frame_number;
6404 camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
6405 }
6406
6407 IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
6408 int32_t fps_range[2];
6409 fps_range[0] = (int32_t)float_range->min_fps;
6410 fps_range[1] = (int32_t)float_range->max_fps;
6411 camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
6412 fps_range, 2);
6413 LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
6414 fps_range[0], fps_range[1]);
6415 }
6416
6417 IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
6418 camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
6419 }
6420
6421 IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
6422 int val = (uint8_t)lookupFwkName(SCENE_MODES_MAP,
6423 METADATA_MAP_SIZE(SCENE_MODES_MAP),
6424 *sceneMode);
6425 if (NAME_NOT_FOUND != val) {
6426 uint8_t fwkSceneMode = (uint8_t)val;
6427 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
6428 LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
6429 fwkSceneMode);
6430 }
6431 }
6432
6433 IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
6434 uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
6435 camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
6436 }
6437
6438 IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
6439 uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
6440 camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
6441 }
6442
6443 IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
6444 uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
6445 camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
6446 }
6447
6448 IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
6449 CAM_INTF_META_EDGE_MODE, metadata) {
6450 camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
6451 }
6452
6453 IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
6454 uint8_t fwk_flashPower = (uint8_t) *flashPower;
6455 camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
6456 }
6457
6458 IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
6459 camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
6460 }
6461
6462 IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
6463 if (0 <= *flashState) {
6464 uint8_t fwk_flashState = (uint8_t) *flashState;
6465 if (!gCamCapability[mCameraId]->flash_available) {
6466 fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
6467 }
6468 camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
6469 }
6470 }
6471
6472 IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
6473 int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
6474 if (NAME_NOT_FOUND != val) {
6475 uint8_t fwk_flashMode = (uint8_t)val;
6476 camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
6477 }
6478 }
6479
6480 IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
6481 uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
6482 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
6483 }
6484
6485 IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
6486 camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
6487 }
6488
6489 IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
6490 camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
6491 }
6492
6493 IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
6494 camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
6495 }
6496
6497 IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
6498 uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
6499 camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
6500 }
6501
6502 IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
6503 uint8_t fwk_videoStab = (uint8_t) *videoStab;
6504 LOGD("fwk_videoStab = %d", fwk_videoStab);
6505 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
6506 } else {
6507 // Regardless of Video stab supports or not, CTS is expecting the EIS result to be non NULL
6508 // and so hardcoding the Video Stab result to OFF mode.
6509 uint8_t fwkVideoStabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
6510 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006511 LOGD("EIS result default to OFF mode");
Thierry Strudel3d639192016-09-09 11:52:26 -07006512 }
6513
6514 IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
6515 uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
6516 camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
6517 }
6518
6519 IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
6520 camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
6521 }
6522
Thierry Strudel3d639192016-09-09 11:52:26 -07006523 IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
6524 CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006525 float fwk_blackLevelInd[BLACK_LEVEL_PATTERN_CNT];
Thierry Strudel3d639192016-09-09 11:52:26 -07006526
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006527 adjustBlackLevelForCFA(blackLevelAppliedPattern->cam_black_level, fwk_blackLevelInd,
6528 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07006529
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006530 LOGD("applied dynamicblackLevel in RGGB order = %f %f %f %f",
Thierry Strudel3d639192016-09-09 11:52:26 -07006531 blackLevelAppliedPattern->cam_black_level[0],
6532 blackLevelAppliedPattern->cam_black_level[1],
6533 blackLevelAppliedPattern->cam_black_level[2],
6534 blackLevelAppliedPattern->cam_black_level[3]);
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006535 camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd,
6536 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006537
6538#ifndef USE_HAL_3_3
6539 // Update the ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
Zhijun Heb753c672016-06-15 14:50:48 -07006540 // Need convert the internal 12 bit depth to sensor 10 bit sensor raw
6541 // depth space.
6542 fwk_blackLevelInd[0] /= 4.0;
6543 fwk_blackLevelInd[1] /= 4.0;
6544 fwk_blackLevelInd[2] /= 4.0;
6545 fwk_blackLevelInd[3] /= 4.0;
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006546 camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd,
6547 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006548#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07006549 }
6550
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006551#ifndef USE_HAL_3_3
6552 // Fixed whitelevel is used by ISP/Sensor
6553 camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
6554 &gCamCapability[mCameraId]->white_level, 1);
6555#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07006556
6557 IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
6558 CAM_INTF_META_SCALER_CROP_REGION, metadata) {
6559 int32_t scalerCropRegion[4];
6560 scalerCropRegion[0] = hScalerCropRegion->left;
6561 scalerCropRegion[1] = hScalerCropRegion->top;
6562 scalerCropRegion[2] = hScalerCropRegion->width;
6563 scalerCropRegion[3] = hScalerCropRegion->height;
6564
6565 // Adjust crop region from sensor output coordinate system to active
6566 // array coordinate system.
6567 mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
6568 scalerCropRegion[2], scalerCropRegion[3]);
6569
6570 camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
6571 }
6572
6573 IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
6574 LOGD("sensorExpTime = %lld", *sensorExpTime);
6575 camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
6576 }
6577
6578 IF_META_AVAILABLE(int64_t, sensorFameDuration,
6579 CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
6580 LOGD("sensorFameDuration = %lld", *sensorFameDuration);
6581 camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
6582 }
6583
6584 IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
6585 CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
6586 LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
6587 camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
6588 sensorRollingShutterSkew, 1);
6589 }
6590
6591 IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
6592 LOGD("sensorSensitivity = %d", *sensorSensitivity);
6593 camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
6594
6595 //calculate the noise profile based on sensitivity
6596 double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
6597 double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
6598 double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
6599 for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
6600 noise_profile[i] = noise_profile_S;
6601 noise_profile[i+1] = noise_profile_O;
6602 }
6603 LOGD("noise model entry (S, O) is (%f, %f)",
6604 noise_profile_S, noise_profile_O);
6605 camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
6606 (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
6607 }
6608
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006609#ifndef USE_HAL_3_3
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07006610 int32_t fwk_ispSensitivity = 100;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006611 IF_META_AVAILABLE(int32_t, ispSensitivity, CAM_INTF_META_ISP_SENSITIVITY, metadata) {
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07006612 fwk_ispSensitivity = (int32_t) *ispSensitivity;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006613 }
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07006614 IF_META_AVAILABLE(float, postStatsSensitivity, CAM_INTF_META_ISP_POST_STATS_SENSITIVITY, metadata) {
6615 fwk_ispSensitivity = (int32_t) (*postStatsSensitivity * fwk_ispSensitivity);
6616 }
6617 camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &fwk_ispSensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006618#endif
6619
Thierry Strudel3d639192016-09-09 11:52:26 -07006620 IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
6621 uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
6622 camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
6623 }
6624
6625 IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
6626 int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
6627 *faceDetectMode);
6628 if (NAME_NOT_FOUND != val) {
6629 uint8_t fwk_faceDetectMode = (uint8_t)val;
6630 camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
6631
6632 if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
6633 IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
6634 CAM_INTF_META_FACE_DETECTION, metadata) {
6635 uint8_t numFaces = MIN(
6636 faceDetectionInfo->num_faces_detected, MAX_ROI);
6637 int32_t faceIds[MAX_ROI];
6638 uint8_t faceScores[MAX_ROI];
6639 int32_t faceRectangles[MAX_ROI * 4];
6640 int32_t faceLandmarks[MAX_ROI * 6];
6641 size_t j = 0, k = 0;
6642
6643 for (size_t i = 0; i < numFaces; i++) {
6644 faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
6645 // Adjust crop region from sensor output coordinate system to active
6646 // array coordinate system.
6647 cam_rect_t& rect = faceDetectionInfo->faces[i].face_boundary;
6648 mCropRegionMapper.toActiveArray(rect.left, rect.top,
6649 rect.width, rect.height);
6650
6651 convertToRegions(faceDetectionInfo->faces[i].face_boundary,
6652 faceRectangles+j, -1);
6653
6654 j+= 4;
6655 }
6656 if (numFaces <= 0) {
6657 memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
6658 memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
6659 memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
6660 memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
6661 }
6662
6663 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
6664 numFaces);
6665 camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
6666 faceRectangles, numFaces * 4U);
6667 if (fwk_faceDetectMode ==
6668 ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
6669 IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
6670 CAM_INTF_META_FACE_LANDMARK, metadata) {
6671
6672 for (size_t i = 0; i < numFaces; i++) {
6673 // Map the co-ordinate sensor output coordinate system to active
6674 // array coordinate system.
6675 mCropRegionMapper.toActiveArray(
6676 landmarks->face_landmarks[i].left_eye_center.x,
6677 landmarks->face_landmarks[i].left_eye_center.y);
6678 mCropRegionMapper.toActiveArray(
6679 landmarks->face_landmarks[i].right_eye_center.x,
6680 landmarks->face_landmarks[i].right_eye_center.y);
6681 mCropRegionMapper.toActiveArray(
6682 landmarks->face_landmarks[i].mouth_center.x,
6683 landmarks->face_landmarks[i].mouth_center.y);
6684
6685 convertLandmarks(landmarks->face_landmarks[i], faceLandmarks+k);
Thierry Strudel04e026f2016-10-10 11:27:36 -07006686 k+= TOTAL_LANDMARK_INDICES;
6687 }
6688 } else {
6689 for (size_t i = 0; i < numFaces; i++) {
6690 setInvalidLandmarks(faceLandmarks+k);
6691 k+= TOTAL_LANDMARK_INDICES;
Thierry Strudel3d639192016-09-09 11:52:26 -07006692 }
6693 }
6694
6695 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
6696 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
6697 faceLandmarks, numFaces * 6U);
6698 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08006699 IF_META_AVAILABLE(cam_face_blink_data_t, blinks,
6700 CAM_INTF_META_FACE_BLINK, metadata) {
6701 uint8_t detected[MAX_ROI];
6702 uint8_t degree[MAX_ROI * 2];
6703 for (size_t i = 0; i < numFaces; i++) {
6704 detected[i] = blinks->blink[i].blink_detected;
6705 degree[2 * i] = blinks->blink[i].left_blink;
6706 degree[2 * i + 1] = blinks->blink[i].right_blink;
6707 }
6708 camMetadata.update(QCAMERA3_STATS_BLINK_DETECTED,
6709 detected, numFaces);
6710 camMetadata.update(QCAMERA3_STATS_BLINK_DEGREE,
6711 degree, numFaces * 2);
6712 }
6713 IF_META_AVAILABLE(cam_face_smile_data_t, smiles,
6714 CAM_INTF_META_FACE_SMILE, metadata) {
6715 uint8_t degree[MAX_ROI];
6716 uint8_t confidence[MAX_ROI];
6717 for (size_t i = 0; i < numFaces; i++) {
6718 degree[i] = smiles->smile[i].smile_degree;
6719 confidence[i] = smiles->smile[i].smile_confidence;
6720 }
6721 camMetadata.update(QCAMERA3_STATS_SMILE_DEGREE,
6722 degree, numFaces);
6723 camMetadata.update(QCAMERA3_STATS_SMILE_CONFIDENCE,
6724 confidence, numFaces);
6725 }
6726 IF_META_AVAILABLE(cam_face_gaze_data_t, gazes,
6727 CAM_INTF_META_FACE_GAZE, metadata) {
6728 int8_t angle[MAX_ROI];
6729 int32_t direction[MAX_ROI * 3];
6730 int8_t degree[MAX_ROI * 2];
6731 for (size_t i = 0; i < numFaces; i++) {
6732 angle[i] = gazes->gaze[i].gaze_angle;
6733 direction[3 * i] = gazes->gaze[i].updown_dir;
6734 direction[3 * i + 1] = gazes->gaze[i].leftright_dir;
6735 direction[3 * i + 2] = gazes->gaze[i].roll_dir;
6736 degree[2 * i] = gazes->gaze[i].left_right_gaze;
6737 degree[2 * i + 1] = gazes->gaze[i].top_bottom_gaze;
6738 }
6739 camMetadata.update(QCAMERA3_STATS_GAZE_ANGLE,
6740 (uint8_t *)angle, numFaces);
6741 camMetadata.update(QCAMERA3_STATS_GAZE_DIRECTION,
6742 direction, numFaces * 3);
6743 camMetadata.update(QCAMERA3_STATS_GAZE_DEGREE,
6744 (uint8_t *)degree, numFaces * 2);
6745 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006746 }
6747 }
6748 }
6749 }
6750
6751 IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
6752 uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
Thierry Strudel54dc9782017-02-15 12:12:10 -08006753 camMetadata.update(QCAMERA3_HISTOGRAM_MODE, &fwk_histogramMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006754
Thierry Strudel54dc9782017-02-15 12:12:10 -08006755 if (fwk_histogramMode == QCAMERA3_HISTOGRAM_MODE_ON) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006756 IF_META_AVAILABLE(cam_hist_stats_t, stats_data, CAM_INTF_META_HISTOGRAM, metadata) {
6757 // process histogram statistics info
Thierry Strudel54dc9782017-02-15 12:12:10 -08006758 uint32_t hist_buf[4][CAM_HISTOGRAM_STATS_SIZE];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006759 uint32_t hist_size = sizeof(cam_histogram_data_t::hist_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08006760 cam_histogram_data_t rHistData, grHistData, gbHistData, bHistData;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006761 memset(&rHistData, 0, sizeof(rHistData));
Thierry Strudel54dc9782017-02-15 12:12:10 -08006762 memset(&grHistData, 0, sizeof(grHistData));
6763 memset(&gbHistData, 0, sizeof(gbHistData));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006764 memset(&bHistData, 0, sizeof(bHistData));
6765
6766 switch (stats_data->type) {
6767 case CAM_HISTOGRAM_TYPE_BAYER:
6768 switch (stats_data->bayer_stats.data_type) {
6769 case CAM_STATS_CHANNEL_GR:
Thierry Strudel54dc9782017-02-15 12:12:10 -08006770 rHistData = grHistData = gbHistData = bHistData =
6771 stats_data->bayer_stats.gr_stats;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006772 break;
6773 case CAM_STATS_CHANNEL_GB:
Thierry Strudel54dc9782017-02-15 12:12:10 -08006774 rHistData = grHistData = gbHistData = bHistData =
6775 stats_data->bayer_stats.gb_stats;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006776 break;
6777 case CAM_STATS_CHANNEL_B:
Thierry Strudel54dc9782017-02-15 12:12:10 -08006778 rHistData = grHistData = gbHistData = bHistData =
6779 stats_data->bayer_stats.b_stats;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006780 break;
6781 case CAM_STATS_CHANNEL_ALL:
6782 rHistData = stats_data->bayer_stats.r_stats;
Thierry Strudel54dc9782017-02-15 12:12:10 -08006783 gbHistData = stats_data->bayer_stats.gb_stats;
6784 grHistData = stats_data->bayer_stats.gr_stats;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006785 bHistData = stats_data->bayer_stats.b_stats;
6786 break;
6787 case CAM_STATS_CHANNEL_Y:
6788 case CAM_STATS_CHANNEL_R:
6789 default:
Thierry Strudel54dc9782017-02-15 12:12:10 -08006790 rHistData = grHistData = gbHistData = bHistData =
6791 stats_data->bayer_stats.r_stats;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006792 break;
6793 }
6794 break;
6795 case CAM_HISTOGRAM_TYPE_YUV:
Thierry Strudel54dc9782017-02-15 12:12:10 -08006796 rHistData = grHistData = gbHistData = bHistData =
6797 stats_data->yuv_stats;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006798 break;
6799 }
6800
6801 memcpy(hist_buf, rHistData.hist_buf, hist_size);
Thierry Strudel54dc9782017-02-15 12:12:10 -08006802 memcpy(hist_buf[1], gbHistData.hist_buf, hist_size);
6803 memcpy(hist_buf[2], grHistData.hist_buf, hist_size);
6804 memcpy(hist_buf[3], bHistData.hist_buf, hist_size);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006805
Thierry Strudel54dc9782017-02-15 12:12:10 -08006806 camMetadata.update(QCAMERA3_HISTOGRAM_STATS, (int32_t*)hist_buf, hist_size*4);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006807 }
6808 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006809 }
6810
6811 IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
6812 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
6813 uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
6814 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
6815 }
6816
6817 IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
6818 CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
6819 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
6820 CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
6821 }
6822
6823 IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
6824 CAM_INTF_META_LENS_SHADING_MAP, metadata) {
6825 size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
6826 CAM_MAX_SHADING_MAP_HEIGHT);
6827 size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
6828 CAM_MAX_SHADING_MAP_WIDTH);
6829 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
6830 lensShadingMap->lens_shading, 4U * map_width * map_height);
6831 }
6832
6833 IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
6834 uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
6835 camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
6836 }
6837
6838 IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
6839 //Populate CAM_INTF_META_TONEMAP_CURVES
6840 /* ch0 = G, ch 1 = B, ch 2 = R*/
6841 if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
6842 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
6843 tonemap->tonemap_points_cnt,
6844 CAM_MAX_TONEMAP_CURVE_SIZE);
6845 tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
6846 }
6847
6848 camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
6849 &tonemap->curves[0].tonemap_points[0][0],
6850 tonemap->tonemap_points_cnt * 2);
6851
6852 camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
6853 &tonemap->curves[1].tonemap_points[0][0],
6854 tonemap->tonemap_points_cnt * 2);
6855
6856 camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
6857 &tonemap->curves[2].tonemap_points[0][0],
6858 tonemap->tonemap_points_cnt * 2);
6859 }
6860
6861 IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
6862 CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
6863 camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
6864 CC_GAIN_MAX);
6865 }
6866
6867 IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
6868 CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
6869 camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
6870 (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
6871 CC_MATRIX_COLS * CC_MATRIX_ROWS);
6872 }
6873
6874 IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
6875 CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
6876 if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
6877 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
6878 toneCurve->tonemap_points_cnt,
6879 CAM_MAX_TONEMAP_CURVE_SIZE);
6880 toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
6881 }
6882 camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
6883 (float*)toneCurve->curve.tonemap_points,
6884 toneCurve->tonemap_points_cnt * 2);
6885 }
6886
6887 IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
6888 CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
6889 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
6890 predColorCorrectionGains->gains, 4);
6891 }
6892
6893 IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
6894 CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
6895 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
6896 (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
6897 CC_MATRIX_ROWS * CC_MATRIX_COLS);
6898 }
6899
6900 IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
6901 camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
6902 }
6903
6904 IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
6905 uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
6906 camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
6907 }
6908
6909 IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
6910 uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
6911 camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
6912 }
6913
6914 IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
6915 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
6916 *effectMode);
6917 if (NAME_NOT_FOUND != val) {
6918 uint8_t fwk_effectMode = (uint8_t)val;
6919 camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
6920 }
6921 }
6922
6923 IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
6924 CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
6925 int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
6926 METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
6927 if (NAME_NOT_FOUND != fwk_testPatternMode) {
6928 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
6929 }
6930 int32_t fwk_testPatternData[4];
6931 fwk_testPatternData[0] = testPatternData->r;
6932 fwk_testPatternData[3] = testPatternData->b;
6933 switch (gCamCapability[mCameraId]->color_arrangement) {
6934 case CAM_FILTER_ARRANGEMENT_RGGB:
6935 case CAM_FILTER_ARRANGEMENT_GRBG:
6936 fwk_testPatternData[1] = testPatternData->gr;
6937 fwk_testPatternData[2] = testPatternData->gb;
6938 break;
6939 case CAM_FILTER_ARRANGEMENT_GBRG:
6940 case CAM_FILTER_ARRANGEMENT_BGGR:
6941 fwk_testPatternData[2] = testPatternData->gr;
6942 fwk_testPatternData[1] = testPatternData->gb;
6943 break;
6944 default:
6945 LOGE("color arrangement %d is not supported",
6946 gCamCapability[mCameraId]->color_arrangement);
6947 break;
6948 }
6949 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
6950 }
6951
6952 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
6953 camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
6954 }
6955
6956 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
6957 String8 str((const char *)gps_methods);
6958 camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
6959 }
6960
6961 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
6962 camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
6963 }
6964
6965 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
6966 camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
6967 }
6968
6969 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
6970 uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
6971 camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
6972 }
6973
6974 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
6975 uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
6976 camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
6977 }
6978
6979 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
6980 int32_t fwk_thumb_size[2];
6981 fwk_thumb_size[0] = thumb_size->width;
6982 fwk_thumb_size[1] = thumb_size->height;
6983 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
6984 }
6985
6986 IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
6987 camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
6988 privateData,
6989 MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
6990 }
6991
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006992 IF_META_AVAILABLE(int32_t, meteringMode, CAM_INTF_PARM_AEC_ALGO_TYPE, metadata) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08006993 camMetadata.update(QCAMERA3_EXPOSURE_METER,
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006994 meteringMode, 1);
6995 }
6996
Thierry Strudel54dc9782017-02-15 12:12:10 -08006997 IF_META_AVAILABLE(cam_asd_hdr_scene_data_t, hdr_scene_data,
6998 CAM_INTF_META_ASD_HDR_SCENE_DATA, metadata) {
6999 LOGD("hdr_scene_data: %d %f\n",
7000 hdr_scene_data->is_hdr_scene, hdr_scene_data->hdr_confidence);
7001 uint8_t isHdr = hdr_scene_data->is_hdr_scene;
7002 float isHdrConfidence = hdr_scene_data->hdr_confidence;
7003 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE,
7004 &isHdr, 1);
7005 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE,
7006 &isHdrConfidence, 1);
7007 }
7008
7009
7010
Thierry Strudel3d639192016-09-09 11:52:26 -07007011 if (metadata->is_tuning_params_valid) {
7012 uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
7013 uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
7014 metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
7015
7016
7017 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
7018 sizeof(uint32_t));
7019 data += sizeof(uint32_t);
7020
7021 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
7022 sizeof(uint32_t));
7023 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
7024 data += sizeof(uint32_t);
7025
7026 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
7027 sizeof(uint32_t));
7028 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
7029 data += sizeof(uint32_t);
7030
7031 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
7032 sizeof(uint32_t));
7033 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
7034 data += sizeof(uint32_t);
7035
7036 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
7037 sizeof(uint32_t));
7038 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
7039 data += sizeof(uint32_t);
7040
7041 metadata->tuning_params.tuning_mod3_data_size = 0;
7042 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
7043 sizeof(uint32_t));
7044 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
7045 data += sizeof(uint32_t);
7046
7047 size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
7048 TUNING_SENSOR_DATA_MAX);
7049 memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
7050 count);
7051 data += count;
7052
7053 count = MIN(metadata->tuning_params.tuning_vfe_data_size,
7054 TUNING_VFE_DATA_MAX);
7055 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
7056 count);
7057 data += count;
7058
7059 count = MIN(metadata->tuning_params.tuning_cpp_data_size,
7060 TUNING_CPP_DATA_MAX);
7061 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
7062 count);
7063 data += count;
7064
7065 count = MIN(metadata->tuning_params.tuning_cac_data_size,
7066 TUNING_CAC_DATA_MAX);
7067 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
7068 count);
7069 data += count;
7070
7071 camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
7072 (int32_t *)(void *)tuning_meta_data_blob,
7073 (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
7074 }
7075
7076 IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
7077 CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
7078 camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
7079 (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
7080 NEUTRAL_COL_POINTS);
7081 }
7082
7083 IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
7084 uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
7085 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
7086 }
7087
7088 IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
7089 int32_t aeRegions[REGIONS_TUPLE_COUNT];
7090 // Adjust crop region from sensor output coordinate system to active
7091 // array coordinate system.
7092 mCropRegionMapper.toActiveArray(hAeRegions->rect.left, hAeRegions->rect.top,
7093 hAeRegions->rect.width, hAeRegions->rect.height);
7094
7095 convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
7096 camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
7097 REGIONS_TUPLE_COUNT);
7098 LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
7099 aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
7100 hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width,
7101 hAeRegions->rect.height);
7102 }
7103
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007104 IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
7105 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
7106 if (NAME_NOT_FOUND != val) {
7107 uint8_t fwkAfMode = (uint8_t)val;
7108 camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
7109 LOGD("Metadata : ANDROID_CONTROL_AF_MODE %d", val);
7110 } else {
7111 LOGH("Metadata not found : ANDROID_CONTROL_AF_MODE %d",
7112 val);
7113 }
7114 }
7115
Thierry Strudel3d639192016-09-09 11:52:26 -07007116 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
7117 uint8_t fwk_afState = (uint8_t) *afState;
7118 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007119 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
Thierry Strudel3d639192016-09-09 11:52:26 -07007120 }
7121
7122 IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
7123 camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
7124 }
7125
7126 IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
7127 camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
7128 }
7129
7130 IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
7131 uint8_t fwk_lensState = *lensState;
7132 camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
7133 }
7134
7135 IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
7136 /*af regions*/
7137 int32_t afRegions[REGIONS_TUPLE_COUNT];
7138 // Adjust crop region from sensor output coordinate system to active
7139 // array coordinate system.
7140 mCropRegionMapper.toActiveArray(hAfRegions->rect.left, hAfRegions->rect.top,
7141 hAfRegions->rect.width, hAfRegions->rect.height);
7142
7143 convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
7144 camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
7145 REGIONS_TUPLE_COUNT);
7146 LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
7147 afRegions[0], afRegions[1], afRegions[2], afRegions[3],
7148 hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width,
7149 hAfRegions->rect.height);
7150 }
7151
7152 IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007153 uint32_t ab_mode = *hal_ab_mode;
7154 if (ab_mode == CAM_ANTIBANDING_MODE_AUTO_60HZ ||
7155 ab_mode == CAM_ANTIBANDING_MODE_AUTO_50HZ) {
7156 ab_mode = CAM_ANTIBANDING_MODE_AUTO;
7157 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007158 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007159 ab_mode);
Thierry Strudel3d639192016-09-09 11:52:26 -07007160 if (NAME_NOT_FOUND != val) {
7161 uint8_t fwk_ab_mode = (uint8_t)val;
7162 camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
7163 }
7164 }
7165
7166 IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
7167 int val = lookupFwkName(SCENE_MODES_MAP,
7168 METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
7169 if (NAME_NOT_FOUND != val) {
7170 uint8_t fwkBestshotMode = (uint8_t)val;
7171 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
7172 LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
7173 } else {
7174 LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
7175 }
7176 }
7177
7178 IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
7179 uint8_t fwk_mode = (uint8_t) *mode;
7180 camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
7181 }
7182
7183 /* Constant metadata values to be update*/
7184 uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
7185 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
7186
7187 uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
7188 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
7189
7190 int32_t hotPixelMap[2];
7191 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
7192
7193 // CDS
7194 IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
7195 camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
7196 }
7197
Thierry Strudel04e026f2016-10-10 11:27:36 -07007198 IF_META_AVAILABLE(cam_sensor_hdr_type_t, vhdr, CAM_INTF_PARM_SENSOR_HDR, metadata) {
7199 int32_t fwk_hdr;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007200 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007201 if(*vhdr == CAM_SENSOR_HDR_OFF) {
7202 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_OFF;
7203 } else {
7204 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_ON;
7205 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007206
7207 if(fwk_hdr != curr_hdr_state) {
7208 LOGH("PROFILE_META_HDR_TOGGLED value=%d", fwk_hdr);
7209 if(fwk_hdr)
7210 mCurrFeatureState |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7211 else
7212 mCurrFeatureState &= ~CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7213 }
Thierry Strudel04e026f2016-10-10 11:27:36 -07007214 camMetadata.update(QCAMERA3_VIDEO_HDR_MODE, &fwk_hdr, 1);
7215 }
7216
Thierry Strudel54dc9782017-02-15 12:12:10 -08007217 //binning correction
7218 IF_META_AVAILABLE(cam_binning_correction_mode_t, bin_correction,
7219 CAM_INTF_META_BINNING_CORRECTION_MODE, metadata) {
7220 int32_t fwk_bin_mode = (int32_t) *bin_correction;
7221 camMetadata.update(QCAMERA3_BINNING_CORRECTION_MODE, &fwk_bin_mode, 1);
7222 }
7223
Thierry Strudel04e026f2016-10-10 11:27:36 -07007224 IF_META_AVAILABLE(cam_ir_mode_type_t, ir, CAM_INTF_META_IR_MODE, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007225 int32_t fwk_ir = (int32_t) *ir;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007226 int8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR ) != 0);
7227 int8_t is_ir_on = 0;
7228
7229 (fwk_ir > 0) ? (is_ir_on = 1) : (is_ir_on = 0) ;
7230 if(is_ir_on != curr_ir_state) {
7231 LOGH("PROFILE_META_IR_TOGGLED value=%d", fwk_ir);
7232 if(is_ir_on)
7233 mCurrFeatureState |= CAM_QCOM_FEATURE_IR;
7234 else
7235 mCurrFeatureState &= ~CAM_QCOM_FEATURE_IR;
7236 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007237 camMetadata.update(QCAMERA3_IR_MODE, &fwk_ir, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007238 }
7239
Thierry Strudel269c81a2016-10-12 12:13:59 -07007240 // AEC SPEED
7241 IF_META_AVAILABLE(float, aec, CAM_INTF_META_AEC_CONVERGENCE_SPEED, metadata) {
7242 camMetadata.update(QCAMERA3_AEC_CONVERGENCE_SPEED, aec, 1);
7243 }
7244
7245 // AWB SPEED
7246 IF_META_AVAILABLE(float, awb, CAM_INTF_META_AWB_CONVERGENCE_SPEED, metadata) {
7247 camMetadata.update(QCAMERA3_AWB_CONVERGENCE_SPEED, awb, 1);
7248 }
7249
Thierry Strudel3d639192016-09-09 11:52:26 -07007250 // TNR
7251 IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
7252 uint8_t tnr_enable = tnr->denoise_enable;
7253 int32_t tnr_process_type = (int32_t)tnr->process_plates;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007254 int8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0) ;
7255 int8_t is_tnr_on = 0;
7256
7257 (tnr_enable > 0) ? (is_tnr_on = 1) : (is_tnr_on = 0);
7258 if(is_tnr_on != curr_tnr_state) {
7259 LOGH("PROFILE_META_TNR_TOGGLED value=%d", tnr_enable);
7260 if(is_tnr_on)
7261 mCurrFeatureState |= CAM_QTI_FEATURE_SW_TNR;
7262 else
7263 mCurrFeatureState &= ~CAM_QTI_FEATURE_SW_TNR;
7264 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007265
7266 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
7267 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
7268 }
7269
7270 // Reprocess crop data
7271 IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
7272 uint8_t cnt = crop_data->num_of_streams;
7273 if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
7274 // mm-qcamera-daemon only posts crop_data for streams
7275 // not linked to pproc. So no valid crop metadata is not
7276 // necessarily an error case.
7277 LOGD("No valid crop metadata entries");
7278 } else {
7279 uint32_t reproc_stream_id;
7280 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7281 LOGD("No reprocessible stream found, ignore crop data");
7282 } else {
7283 int rc = NO_ERROR;
7284 Vector<int32_t> roi_map;
7285 int32_t *crop = new int32_t[cnt*4];
7286 if (NULL == crop) {
7287 rc = NO_MEMORY;
7288 }
7289 if (NO_ERROR == rc) {
7290 int32_t streams_found = 0;
7291 for (size_t i = 0; i < cnt; i++) {
7292 if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
7293 if (pprocDone) {
7294 // HAL already does internal reprocessing,
7295 // either via reprocessing before JPEG encoding,
7296 // or offline postprocessing for pproc bypass case.
7297 crop[0] = 0;
7298 crop[1] = 0;
7299 crop[2] = mInputStreamInfo.dim.width;
7300 crop[3] = mInputStreamInfo.dim.height;
7301 } else {
7302 crop[0] = crop_data->crop_info[i].crop.left;
7303 crop[1] = crop_data->crop_info[i].crop.top;
7304 crop[2] = crop_data->crop_info[i].crop.width;
7305 crop[3] = crop_data->crop_info[i].crop.height;
7306 }
7307 roi_map.add(crop_data->crop_info[i].roi_map.left);
7308 roi_map.add(crop_data->crop_info[i].roi_map.top);
7309 roi_map.add(crop_data->crop_info[i].roi_map.width);
7310 roi_map.add(crop_data->crop_info[i].roi_map.height);
7311 streams_found++;
7312 LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
7313 crop[0], crop[1], crop[2], crop[3]);
7314 LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
7315 crop_data->crop_info[i].roi_map.left,
7316 crop_data->crop_info[i].roi_map.top,
7317 crop_data->crop_info[i].roi_map.width,
7318 crop_data->crop_info[i].roi_map.height);
7319 break;
7320
7321 }
7322 }
7323 camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
7324 &streams_found, 1);
7325 camMetadata.update(QCAMERA3_CROP_REPROCESS,
7326 crop, (size_t)(streams_found * 4));
7327 if (roi_map.array()) {
7328 camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
7329 roi_map.array(), roi_map.size());
7330 }
7331 }
7332 if (crop) {
7333 delete [] crop;
7334 }
7335 }
7336 }
7337 }
7338
7339 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
7340 // Regardless of CAC supports or not, CTS is expecting the CAC result to be non NULL and
7341 // so hardcoding the CAC result to OFF mode.
7342 uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7343 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
7344 } else {
7345 IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
7346 int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
7347 *cacMode);
7348 if (NAME_NOT_FOUND != val) {
7349 uint8_t resultCacMode = (uint8_t)val;
7350 // check whether CAC result from CB is equal to Framework set CAC mode
7351 // If not equal then set the CAC mode came in corresponding request
7352 if (fwk_cacMode != resultCacMode) {
7353 resultCacMode = fwk_cacMode;
7354 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007355 //Check if CAC is disabled by property
7356 if (m_cacModeDisabled) {
7357 resultCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7358 }
7359
Thierry Strudel3d639192016-09-09 11:52:26 -07007360 LOGD("fwk_cacMode=%d resultCacMode=%d", fwk_cacMode, resultCacMode);
7361 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
7362 } else {
7363 LOGE("Invalid CAC camera parameter: %d", *cacMode);
7364 }
7365 }
7366 }
7367
7368 // Post blob of cam_cds_data through vendor tag.
7369 IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
7370 uint8_t cnt = cdsInfo->num_of_streams;
7371 cam_cds_data_t cdsDataOverride;
7372 memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
7373 cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
7374 cdsDataOverride.num_of_streams = 1;
7375 if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
7376 uint32_t reproc_stream_id;
7377 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7378 LOGD("No reprocessible stream found, ignore cds data");
7379 } else {
7380 for (size_t i = 0; i < cnt; i++) {
7381 if (cdsInfo->cds_info[i].stream_id ==
7382 reproc_stream_id) {
7383 cdsDataOverride.cds_info[0].cds_enable =
7384 cdsInfo->cds_info[i].cds_enable;
7385 break;
7386 }
7387 }
7388 }
7389 } else {
7390 LOGD("Invalid stream count %d in CDS_DATA", cnt);
7391 }
7392 camMetadata.update(QCAMERA3_CDS_INFO,
7393 (uint8_t *)&cdsDataOverride,
7394 sizeof(cam_cds_data_t));
7395 }
7396
7397 // Ldaf calibration data
7398 if (!mLdafCalibExist) {
7399 IF_META_AVAILABLE(uint32_t, ldafCalib,
7400 CAM_INTF_META_LDAF_EXIF, metadata) {
7401 mLdafCalibExist = true;
7402 mLdafCalib[0] = ldafCalib[0];
7403 mLdafCalib[1] = ldafCalib[1];
7404 LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
7405 ldafCalib[0], ldafCalib[1]);
7406 }
7407 }
7408
Thierry Strudel54dc9782017-02-15 12:12:10 -08007409 // EXIF debug data through vendor tag
7410 /*
7411 * Mobicat Mask can assume 3 values:
7412 * 1 refers to Mobicat data,
7413 * 2 refers to Stats Debug and Exif Debug Data
7414 * 3 refers to Mobicat and Stats Debug Data
7415 * We want to make sure that we are sending Exif debug data
7416 * only when Mobicat Mask is 2.
7417 */
7418 if ((mExifParams.debug_params != NULL) && (getMobicatMask() == 2)) {
7419 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB,
7420 (uint8_t *)(void *)mExifParams.debug_params,
7421 sizeof(mm_jpeg_debug_exif_params_t));
7422 }
7423
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007424 // Reprocess and DDM debug data through vendor tag
7425 cam_reprocess_info_t repro_info;
7426 memset(&repro_info, 0, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007427 IF_META_AVAILABLE(cam_stream_crop_info_t, sensorCropInfo,
7428 CAM_INTF_META_SNAP_CROP_INFO_SENSOR, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007429 memcpy(&(repro_info.sensor_crop_info), sensorCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007430 }
7431 IF_META_AVAILABLE(cam_stream_crop_info_t, camifCropInfo,
7432 CAM_INTF_META_SNAP_CROP_INFO_CAMIF, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007433 memcpy(&(repro_info.camif_crop_info), camifCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007434 }
7435 IF_META_AVAILABLE(cam_stream_crop_info_t, ispCropInfo,
7436 CAM_INTF_META_SNAP_CROP_INFO_ISP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007437 memcpy(&(repro_info.isp_crop_info), ispCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007438 }
7439 IF_META_AVAILABLE(cam_stream_crop_info_t, cppCropInfo,
7440 CAM_INTF_META_SNAP_CROP_INFO_CPP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007441 memcpy(&(repro_info.cpp_crop_info), cppCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007442 }
7443 IF_META_AVAILABLE(cam_focal_length_ratio_t, ratio,
7444 CAM_INTF_META_AF_FOCAL_LENGTH_RATIO, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007445 memcpy(&(repro_info.af_focal_length_ratio), ratio, sizeof(cam_focal_length_ratio_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007446 }
7447 IF_META_AVAILABLE(int32_t, flip, CAM_INTF_PARM_FLIP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007448 memcpy(&(repro_info.pipeline_flip), flip, sizeof(int32_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007449 }
7450 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
7451 CAM_INTF_PARM_ROTATION, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007452 memcpy(&(repro_info.rotation_info), rotationInfo, sizeof(cam_rotation_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007453 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007454 IF_META_AVAILABLE(cam_area_t, afRoi, CAM_INTF_META_AF_ROI, metadata) {
7455 memcpy(&(repro_info.af_roi), afRoi, sizeof(cam_area_t));
7456 }
7457 IF_META_AVAILABLE(cam_dyn_img_data_t, dynMask, CAM_INTF_META_IMG_DYN_FEAT, metadata) {
7458 memcpy(&(repro_info.dyn_mask), dynMask, sizeof(cam_dyn_img_data_t));
7459 }
7460 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB,
7461 (uint8_t *)&repro_info, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007462
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007463 // INSTANT AEC MODE
7464 IF_META_AVAILABLE(uint8_t, instant_aec_mode,
7465 CAM_INTF_PARM_INSTANT_AEC, metadata) {
7466 camMetadata.update(QCAMERA3_INSTANT_AEC_MODE, instant_aec_mode, 1);
7467 }
7468
Shuzhen Wange763e802016-03-31 10:24:29 -07007469 // AF scene change
7470 IF_META_AVAILABLE(uint8_t, afSceneChange, CAM_INTF_META_AF_SCENE_CHANGE, metadata) {
7471 camMetadata.update(NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE, afSceneChange, 1);
7472 }
7473
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007474 /* In batch mode, cache the first metadata in the batch */
7475 if (mBatchSize && firstMetadataInBatch) {
7476 mCachedMetadata.clear();
7477 mCachedMetadata = camMetadata;
7478 }
7479
Thierry Strudel3d639192016-09-09 11:52:26 -07007480 resultMetadata = camMetadata.release();
7481 return resultMetadata;
7482}
7483
7484/*===========================================================================
7485 * FUNCTION : saveExifParams
7486 *
7487 * DESCRIPTION:
7488 *
7489 * PARAMETERS :
7490 * @metadata : metadata information from callback
7491 *
7492 * RETURN : none
7493 *
7494 *==========================================================================*/
7495void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
7496{
7497 IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
7498 CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
7499 if (mExifParams.debug_params) {
7500 mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
7501 mExifParams.debug_params->ae_debug_params_valid = TRUE;
7502 }
7503 }
7504 IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
7505 CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
7506 if (mExifParams.debug_params) {
7507 mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
7508 mExifParams.debug_params->awb_debug_params_valid = TRUE;
7509 }
7510 }
7511 IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
7512 CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
7513 if (mExifParams.debug_params) {
7514 mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
7515 mExifParams.debug_params->af_debug_params_valid = TRUE;
7516 }
7517 }
7518 IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
7519 CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
7520 if (mExifParams.debug_params) {
7521 mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
7522 mExifParams.debug_params->asd_debug_params_valid = TRUE;
7523 }
7524 }
7525 IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
7526 CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
7527 if (mExifParams.debug_params) {
7528 mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
7529 mExifParams.debug_params->stats_debug_params_valid = TRUE;
7530 }
7531 }
7532 IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t,bestats_exif_debug_params,
7533 CAM_INTF_META_EXIF_DEBUG_BESTATS, metadata) {
7534 if (mExifParams.debug_params) {
7535 mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
7536 mExifParams.debug_params->bestats_debug_params_valid = TRUE;
7537 }
7538 }
7539 IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
7540 CAM_INTF_META_EXIF_DEBUG_BHIST, metadata) {
7541 if (mExifParams.debug_params) {
7542 mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
7543 mExifParams.debug_params->bhist_debug_params_valid = TRUE;
7544 }
7545 }
7546 IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
7547 CAM_INTF_META_EXIF_DEBUG_3A_TUNING, metadata) {
7548 if (mExifParams.debug_params) {
7549 mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
7550 mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
7551 }
7552 }
7553}
7554
7555/*===========================================================================
7556 * FUNCTION : get3AExifParams
7557 *
7558 * DESCRIPTION:
7559 *
7560 * PARAMETERS : none
7561 *
7562 *
7563 * RETURN : mm_jpeg_exif_params_t
7564 *
7565 *==========================================================================*/
7566mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
7567{
7568 return mExifParams;
7569}
7570
/*===========================================================================
 * FUNCTION   : translateCbUrgentMetadataToResultMetadata
 *
 * DESCRIPTION: Translates the "urgent" (partial/early) subset of a vendor
 *              metadata callback into framework result metadata: 3A states
 *              (AWB/AE), AE/AF triggers and ids, AWB mode, and a derived
 *              ANDROID_CONTROL_AE_MODE. Also advances the instant-AEC
 *              frame bookkeeping when instant AEC is active.
 *
 * PARAMETERS :
 *   @metadata : metadata information from callback
 *
 * RETURN     : camera_metadata_t*
 *              metadata in a format specified by fwk; ownership of the
 *              released buffer passes to the caller
 *==========================================================================*/
camera_metadata_t*
QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
                                 (metadata_buffer_t *metadata)
{
    CameraMetadata camMetadata;
    camera_metadata_t *resultMetadata;


    // AWB state: vendor value is cast straight to the framework's uint8 enum.
    IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
        uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
        camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
        LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
    }

    // AE precapture trigger: both the trigger value and its id are echoed back.
    IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
        camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
                &aecTrigger->trigger, 1);
        camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
                &aecTrigger->trigger_id, 1);
        LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
                 aecTrigger->trigger);
        LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
                aecTrigger->trigger_id);
    }

    // AE state, cast to the framework's uint8 enum.
    IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
        uint8_t fwk_ae_state = (uint8_t) *ae_state;
        camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
        LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
    }

    // AF trigger and its id.
    IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
        camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
                &af_trigger->trigger, 1);
        LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
                 af_trigger->trigger);
        camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &af_trigger->trigger_id, 1);
        LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
                af_trigger->trigger_id);
    }

    // AWB mode: vendor enum mapped to framework enum via lookup table; an
    // unmapped value is logged and the tag is simply not published.
    IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
        int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
                METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
        if (NAME_NOT_FOUND != val) {
            uint8_t fwkWhiteBalanceMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
            LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
        } else {
            LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
        }
    }

    // ANDROID_CONTROL_AE_MODE is not reported directly by the backend; it is
    // deduced from three inputs gathered below, checked in priority order:
    // redeye reduction first, then flash mode, then the plain AE mode.
    uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
    uint32_t aeMode = CAM_AE_MODE_MAX;
    int32_t flashMode = CAM_FLASH_MODE_MAX;
    int32_t redeye = -1;
    IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
        aeMode = *pAeMode;
    }
    IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
        flashMode = *pFlashMode;
    }
    IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
        redeye = *pRedeye;
    }

    if (1 == redeye) {
        // Redeye reduction active implies ON_AUTO_FLASH_REDEYE.
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
        // Flash auto/on maps through AE_FLASH_MODE_MAP.
        int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
                flashMode);
        if (NAME_NOT_FOUND != val) {
            fwk_aeMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
        } else {
            LOGE("Unsupported flash mode %d", flashMode);
        }
    } else if (aeMode == CAM_AE_MODE_ON) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else if (aeMode == CAM_AE_MODE_OFF) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else {
        // None of the three inputs was conclusive; AE_MODE is not published.
        LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
              "flashMode:%d, aeMode:%u!!!",
                 redeye, flashMode, aeMode);
    }
    if (mInstantAEC) {
        // Increment frame Idx count untill a bound reached for instant AEC.
        mInstantAecFrameIdxCount++;
        IF_META_AVAILABLE(cam_3a_params_t, ae_params,
                CAM_INTF_META_AEC_INFO, metadata) {
            LOGH("ae_params->settled = %d",ae_params->settled);
            // If AEC settled, or if number of frames reached bound value,
            // should reset instant AEC.
            if (ae_params->settled ||
                    (mInstantAecFrameIdxCount > mAecSkipDisplayFrameBound)) {
                LOGH("AEC settled or Frames reached instantAEC bound, resetting instantAEC");
                mInstantAEC = false;
                mResetInstantAEC = true;
                mInstantAecFrameIdxCount = 0;
            }
        }
    }
    resultMetadata = camMetadata.release();
    return resultMetadata;
}
7692
7693/*===========================================================================
7694 * FUNCTION : dumpMetadataToFile
7695 *
7696 * DESCRIPTION: Dumps tuning metadata to file system
7697 *
7698 * PARAMETERS :
7699 * @meta : tuning metadata
7700 * @dumpFrameCount : current dump frame count
7701 * @enabled : Enable mask
7702 *
7703 *==========================================================================*/
7704void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
7705 uint32_t &dumpFrameCount,
7706 bool enabled,
7707 const char *type,
7708 uint32_t frameNumber)
7709{
7710 //Some sanity checks
7711 if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
7712 LOGE("Tuning sensor data size bigger than expected %d: %d",
7713 meta.tuning_sensor_data_size,
7714 TUNING_SENSOR_DATA_MAX);
7715 return;
7716 }
7717
7718 if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
7719 LOGE("Tuning VFE data size bigger than expected %d: %d",
7720 meta.tuning_vfe_data_size,
7721 TUNING_VFE_DATA_MAX);
7722 return;
7723 }
7724
7725 if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
7726 LOGE("Tuning CPP data size bigger than expected %d: %d",
7727 meta.tuning_cpp_data_size,
7728 TUNING_CPP_DATA_MAX);
7729 return;
7730 }
7731
7732 if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
7733 LOGE("Tuning CAC data size bigger than expected %d: %d",
7734 meta.tuning_cac_data_size,
7735 TUNING_CAC_DATA_MAX);
7736 return;
7737 }
7738 //
7739
7740 if(enabled){
7741 char timeBuf[FILENAME_MAX];
7742 char buf[FILENAME_MAX];
7743 memset(buf, 0, sizeof(buf));
7744 memset(timeBuf, 0, sizeof(timeBuf));
7745 time_t current_time;
7746 struct tm * timeinfo;
7747 time (&current_time);
7748 timeinfo = localtime (&current_time);
7749 if (timeinfo != NULL) {
7750 strftime (timeBuf, sizeof(timeBuf),
7751 QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
7752 }
7753 String8 filePath(timeBuf);
7754 snprintf(buf,
7755 sizeof(buf),
7756 "%dm_%s_%d.bin",
7757 dumpFrameCount,
7758 type,
7759 frameNumber);
7760 filePath.append(buf);
7761 int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
7762 if (file_fd >= 0) {
7763 ssize_t written_len = 0;
7764 meta.tuning_data_version = TUNING_DATA_VERSION;
7765 void *data = (void *)((uint8_t *)&meta.tuning_data_version);
7766 written_len += write(file_fd, data, sizeof(uint32_t));
7767 data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
7768 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
7769 written_len += write(file_fd, data, sizeof(uint32_t));
7770 data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
7771 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
7772 written_len += write(file_fd, data, sizeof(uint32_t));
7773 data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
7774 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
7775 written_len += write(file_fd, data, sizeof(uint32_t));
7776 data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
7777 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
7778 written_len += write(file_fd, data, sizeof(uint32_t));
7779 meta.tuning_mod3_data_size = 0;
7780 data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
7781 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
7782 written_len += write(file_fd, data, sizeof(uint32_t));
7783 size_t total_size = meta.tuning_sensor_data_size;
7784 data = (void *)((uint8_t *)&meta.data);
7785 written_len += write(file_fd, data, total_size);
7786 total_size = meta.tuning_vfe_data_size;
7787 data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
7788 written_len += write(file_fd, data, total_size);
7789 total_size = meta.tuning_cpp_data_size;
7790 data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
7791 written_len += write(file_fd, data, total_size);
7792 total_size = meta.tuning_cac_data_size;
7793 data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
7794 written_len += write(file_fd, data, total_size);
7795 close(file_fd);
7796 }else {
7797 LOGE("fail to open file for metadata dumping");
7798 }
7799 }
7800}
7801
7802/*===========================================================================
7803 * FUNCTION : cleanAndSortStreamInfo
7804 *
7805 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
7806 * and sort them such that raw stream is at the end of the list
7807 * This is a workaround for camera daemon constraint.
7808 *
7809 * PARAMETERS : None
7810 *
7811 *==========================================================================*/
7812void QCamera3HardwareInterface::cleanAndSortStreamInfo()
7813{
7814 List<stream_info_t *> newStreamInfo;
7815
7816 /*clean up invalid streams*/
7817 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
7818 it != mStreamInfo.end();) {
7819 if(((*it)->status) == INVALID){
7820 QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
7821 delete channel;
7822 free(*it);
7823 it = mStreamInfo.erase(it);
7824 } else {
7825 it++;
7826 }
7827 }
7828
7829 // Move preview/video/callback/snapshot streams into newList
7830 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
7831 it != mStreamInfo.end();) {
7832 if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
7833 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
7834 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
7835 newStreamInfo.push_back(*it);
7836 it = mStreamInfo.erase(it);
7837 } else
7838 it++;
7839 }
7840 // Move raw streams into newList
7841 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
7842 it != mStreamInfo.end();) {
7843 newStreamInfo.push_back(*it);
7844 it = mStreamInfo.erase(it);
7845 }
7846
7847 mStreamInfo = newStreamInfo;
7848}
7849
7850/*===========================================================================
7851 * FUNCTION : extractJpegMetadata
7852 *
7853 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
7854 * JPEG metadata is cached in HAL, and return as part of capture
7855 * result when metadata is returned from camera daemon.
7856 *
7857 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
7858 * @request: capture request
7859 *
7860 *==========================================================================*/
7861void QCamera3HardwareInterface::extractJpegMetadata(
7862 CameraMetadata& jpegMetadata,
7863 const camera3_capture_request_t *request)
7864{
7865 CameraMetadata frame_settings;
7866 frame_settings = request->settings;
7867
7868 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
7869 jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
7870 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
7871 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
7872
7873 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
7874 jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
7875 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
7876 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
7877
7878 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
7879 jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
7880 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
7881 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
7882
7883 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
7884 jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
7885 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
7886 frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
7887
7888 if (frame_settings.exists(ANDROID_JPEG_QUALITY))
7889 jpegMetadata.update(ANDROID_JPEG_QUALITY,
7890 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
7891 frame_settings.find(ANDROID_JPEG_QUALITY).count);
7892
7893 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
7894 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
7895 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
7896 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
7897
7898 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
7899 int32_t thumbnail_size[2];
7900 thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
7901 thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
7902 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
7903 int32_t orientation =
7904 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007905 if ((!needJpegExifRotation()) && ((orientation == 90) || (orientation == 270))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07007906 //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
7907 int32_t temp;
7908 temp = thumbnail_size[0];
7909 thumbnail_size[0] = thumbnail_size[1];
7910 thumbnail_size[1] = temp;
7911 }
7912 }
7913 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
7914 thumbnail_size,
7915 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
7916 }
7917
7918}
7919
7920/*===========================================================================
7921 * FUNCTION : convertToRegions
7922 *
7923 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
7924 *
7925 * PARAMETERS :
7926 * @rect : cam_rect_t struct to convert
7927 * @region : int32_t destination array
7928 * @weight : if we are converting from cam_area_t, weight is valid
7929 * else weight = -1
7930 *
7931 *==========================================================================*/
7932void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
7933 int32_t *region, int weight)
7934{
7935 region[0] = rect.left;
7936 region[1] = rect.top;
7937 region[2] = rect.left + rect.width;
7938 region[3] = rect.top + rect.height;
7939 if (weight > -1) {
7940 region[4] = weight;
7941 }
7942}
7943
7944/*===========================================================================
7945 * FUNCTION : convertFromRegions
7946 *
7947 * DESCRIPTION: helper method to convert from array to cam_rect_t
7948 *
7949 * PARAMETERS :
7950 * @rect : cam_rect_t struct to convert
7951 * @region : int32_t destination array
7952 * @weight : if we are converting from cam_area_t, weight is valid
7953 * else weight = -1
7954 *
7955 *==========================================================================*/
7956void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
Chien-Yu Chen92724a82017-01-06 11:50:30 -08007957 const CameraMetadata &frame_settings, uint32_t tag)
Thierry Strudel3d639192016-09-09 11:52:26 -07007958{
Thierry Strudel3d639192016-09-09 11:52:26 -07007959 int32_t x_min = frame_settings.find(tag).data.i32[0];
7960 int32_t y_min = frame_settings.find(tag).data.i32[1];
7961 int32_t x_max = frame_settings.find(tag).data.i32[2];
7962 int32_t y_max = frame_settings.find(tag).data.i32[3];
7963 roi.weight = frame_settings.find(tag).data.i32[4];
7964 roi.rect.left = x_min;
7965 roi.rect.top = y_min;
7966 roi.rect.width = x_max - x_min;
7967 roi.rect.height = y_max - y_min;
7968}
7969
7970/*===========================================================================
7971 * FUNCTION : resetIfNeededROI
7972 *
7973 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
7974 * crop region
7975 *
7976 * PARAMETERS :
7977 * @roi : cam_area_t struct to resize
7978 * @scalerCropRegion : cam_crop_region_t region to compare against
7979 *
7980 *
7981 *==========================================================================*/
7982bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
7983 const cam_crop_region_t* scalerCropRegion)
7984{
7985 int32_t roi_x_max = roi->rect.width + roi->rect.left;
7986 int32_t roi_y_max = roi->rect.height + roi->rect.top;
7987 int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
7988 int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
7989
7990 /* According to spec weight = 0 is used to indicate roi needs to be disabled
7991 * without having this check the calculations below to validate if the roi
7992 * is inside scalar crop region will fail resulting in the roi not being
7993 * reset causing algorithm to continue to use stale roi window
7994 */
7995 if (roi->weight == 0) {
7996 return true;
7997 }
7998
7999 if ((roi_x_max < scalerCropRegion->left) ||
8000 // right edge of roi window is left of scalar crop's left edge
8001 (roi_y_max < scalerCropRegion->top) ||
8002 // bottom edge of roi window is above scalar crop's top edge
8003 (roi->rect.left > crop_x_max) ||
8004 // left edge of roi window is beyond(right) of scalar crop's right edge
8005 (roi->rect.top > crop_y_max)){
8006 // top edge of roi windo is above scalar crop's top edge
8007 return false;
8008 }
8009 if (roi->rect.left < scalerCropRegion->left) {
8010 roi->rect.left = scalerCropRegion->left;
8011 }
8012 if (roi->rect.top < scalerCropRegion->top) {
8013 roi->rect.top = scalerCropRegion->top;
8014 }
8015 if (roi_x_max > crop_x_max) {
8016 roi_x_max = crop_x_max;
8017 }
8018 if (roi_y_max > crop_y_max) {
8019 roi_y_max = crop_y_max;
8020 }
8021 roi->rect.width = roi_x_max - roi->rect.left;
8022 roi->rect.height = roi_y_max - roi->rect.top;
8023 return true;
8024}
8025
8026/*===========================================================================
8027 * FUNCTION : convertLandmarks
8028 *
8029 * DESCRIPTION: helper method to extract the landmarks from face detection info
8030 *
8031 * PARAMETERS :
8032 * @landmark_data : input landmark data to be converted
8033 * @landmarks : int32_t destination array
8034 *
8035 *
8036 *==========================================================================*/
8037void QCamera3HardwareInterface::convertLandmarks(
8038 cam_face_landmarks_info_t landmark_data,
8039 int32_t *landmarks)
8040{
Thierry Strudel04e026f2016-10-10 11:27:36 -07008041 if (landmark_data.is_left_eye_valid) {
8042 landmarks[LEFT_EYE_X] = (int32_t)landmark_data.left_eye_center.x;
8043 landmarks[LEFT_EYE_Y] = (int32_t)landmark_data.left_eye_center.y;
8044 } else {
8045 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8046 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8047 }
8048
8049 if (landmark_data.is_right_eye_valid) {
8050 landmarks[RIGHT_EYE_X] = (int32_t)landmark_data.right_eye_center.x;
8051 landmarks[RIGHT_EYE_Y] = (int32_t)landmark_data.right_eye_center.y;
8052 } else {
8053 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8054 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8055 }
8056
8057 if (landmark_data.is_mouth_valid) {
8058 landmarks[MOUTH_X] = (int32_t)landmark_data.mouth_center.x;
8059 landmarks[MOUTH_Y] = (int32_t)landmark_data.mouth_center.y;
8060 } else {
8061 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8062 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
8063 }
8064}
8065
8066/*===========================================================================
8067 * FUNCTION : setInvalidLandmarks
8068 *
8069 * DESCRIPTION: helper method to set invalid landmarks
8070 *
8071 * PARAMETERS :
8072 * @landmarks : int32_t destination array
8073 *
8074 *
8075 *==========================================================================*/
8076void QCamera3HardwareInterface::setInvalidLandmarks(
8077 int32_t *landmarks)
8078{
8079 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8080 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8081 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8082 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8083 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8084 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
Thierry Strudel3d639192016-09-09 11:52:26 -07008085}
8086
// Shorthand for dereferencing a heap-memory object's buffer at INDEX.
#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )

/*===========================================================================
 * FUNCTION   : getCapabilities
 *
 * DESCRIPTION: query camera capability from back-end. Allocates a shared
 *              heap buffer, maps it to the backend, asks the backend to fill
 *              it, and returns a heap-allocated copy to the caller (the
 *              mapping and heap are always torn down before returning).
 *
 * PARAMETERS :
 *   @ops        : mm-interface ops structure
 *   @cam_handle : camera handle for which we need capability
 *
 * RETURN     : ptr type of capability structure
 *              capability for success (caller owns and must free())
 *              NULL for failure
 *==========================================================================*/
cam_capability_t *QCamera3HardwareInterface::getCapabilities(mm_camera_ops_t *ops,
        uint32_t cam_handle)
{
    int rc = NO_ERROR;
    QCamera3HeapMemory *capabilityHeap = NULL;
    cam_capability_t *cap_ptr = NULL;

    if (ops == NULL) {
        LOGE("Invalid arguments");
        return NULL;
    }

    capabilityHeap = new QCamera3HeapMemory(1);
    if (capabilityHeap == NULL) {
        LOGE("creation of capabilityHeap failed");
        return NULL;
    }

    /* Allocate memory for capability buffer */
    rc = capabilityHeap->allocate(sizeof(cam_capability_t));
    if(rc != OK) {
        LOGE("No memory for cappability");
        goto allocate_failed;
    }

    /* Map memory for capability buffer */
    memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));

    rc = ops->map_buf(cam_handle,
            CAM_MAPPING_BUF_TYPE_CAPABILITY, capabilityHeap->getFd(0),
            sizeof(cam_capability_t), capabilityHeap->getPtr(0));
    if(rc < 0) {
        LOGE("failed to map capability buffer");
        rc = FAILED_TRANSACTION;
        goto map_failed;
    }

    /* Query Capability: backend fills the mapped buffer in place. */
    rc = ops->query_capability(cam_handle);
    if(rc < 0) {
        LOGE("failed to query capability");
        rc = FAILED_TRANSACTION;
        goto query_failed;
    }

    // Copy out of the shared buffer into a plain heap block the caller owns;
    // the shared buffer is unmapped/freed below regardless of outcome.
    cap_ptr = (cam_capability_t *)malloc(sizeof(cam_capability_t));
    if (cap_ptr == NULL) {
        LOGE("out of memory");
        rc = NO_MEMORY;
        goto query_failed;
    }

    memset(cap_ptr, 0, sizeof(cam_capability_t));
    memcpy(cap_ptr, DATA_PTR(capabilityHeap, 0), sizeof(cam_capability_t));

    // Zero out analysis padding offsets for every analysis stream slot.
    int index;
    for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
        cam_analysis_info_t *p_analysis_info = &cap_ptr->analysis_info[index];
        p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
        p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
    }

    // Unwind labels: each failure point jumps past the cleanup steps that
    // were not yet performed (unmap -> deallocate -> delete, in that order).
query_failed:
    ops->unmap_buf(cam_handle, CAM_MAPPING_BUF_TYPE_CAPABILITY);
map_failed:
    capabilityHeap->deallocate();
allocate_failed:
    delete capabilityHeap;

    if (rc != NO_ERROR) {
        return NULL;
    } else {
        return cap_ptr;
    }
}
8177
Thierry Strudel3d639192016-09-09 11:52:26 -07008178/*===========================================================================
8179 * FUNCTION : initCapabilities
8180 *
8181 * DESCRIPTION: initialize camera capabilities in static data struct
8182 *
8183 * PARAMETERS :
8184 * @cameraId : camera Id
8185 *
8186 * RETURN : int32_t type of status
8187 * NO_ERROR -- success
8188 * none-zero failure code
8189 *==========================================================================*/
8190int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
8191{
8192 int rc = 0;
8193 mm_camera_vtbl_t *cameraHandle = NULL;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008194 uint32_t handle = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07008195
8196 rc = camera_open((uint8_t)cameraId, &cameraHandle);
8197 if (rc) {
8198 LOGE("camera_open failed. rc = %d", rc);
8199 goto open_failed;
8200 }
8201 if (!cameraHandle) {
8202 LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
8203 goto open_failed;
8204 }
8205
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008206 handle = get_main_camera_handle(cameraHandle->camera_handle);
8207 gCamCapability[cameraId] = getCapabilities(cameraHandle->ops, handle);
8208 if (gCamCapability[cameraId] == NULL) {
8209 rc = FAILED_TRANSACTION;
8210 goto failed_op;
Thierry Strudel3d639192016-09-09 11:52:26 -07008211 }
8212
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008213 gCamCapability[cameraId]->camera_index = cameraId;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008214 if (is_dual_camera_by_idx(cameraId)) {
8215 handle = get_aux_camera_handle(cameraHandle->camera_handle);
8216 gCamCapability[cameraId]->aux_cam_cap =
8217 getCapabilities(cameraHandle->ops, handle);
8218 if (gCamCapability[cameraId]->aux_cam_cap == NULL) {
8219 rc = FAILED_TRANSACTION;
8220 free(gCamCapability[cameraId]);
8221 goto failed_op;
8222 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08008223
8224 // Copy the main camera capability to main_cam_cap struct
8225 gCamCapability[cameraId]->main_cam_cap =
8226 (cam_capability_t *)malloc(sizeof(cam_capability_t));
8227 if (gCamCapability[cameraId]->main_cam_cap == NULL) {
8228 LOGE("out of memory");
8229 rc = NO_MEMORY;
8230 goto failed_op;
8231 }
8232 memcpy(gCamCapability[cameraId]->main_cam_cap, gCamCapability[cameraId],
8233 sizeof(cam_capability_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008234 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008235failed_op:
Thierry Strudel3d639192016-09-09 11:52:26 -07008236 cameraHandle->ops->close_camera(cameraHandle->camera_handle);
8237 cameraHandle = NULL;
8238open_failed:
8239 return rc;
8240}
8241
8242/*==========================================================================
8243 * FUNCTION : get3Aversion
8244 *
8245 * DESCRIPTION: get the Q3A S/W version
8246 *
8247 * PARAMETERS :
8248 * @sw_version: Reference of Q3A structure which will hold version info upon
8249 * return
8250 *
8251 * RETURN : None
8252 *
8253 *==========================================================================*/
8254void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
8255{
8256 if(gCamCapability[mCameraId])
8257 sw_version = gCamCapability[mCameraId]->q3a_version;
8258 else
8259 LOGE("Capability structure NULL!");
8260}
8261
8262
8263/*===========================================================================
8264 * FUNCTION : initParameters
8265 *
8266 * DESCRIPTION: initialize camera parameters
8267 *
8268 * PARAMETERS :
8269 *
8270 * RETURN : int32_t type of status
8271 * NO_ERROR -- success
8272 * none-zero failure code
8273 *==========================================================================*/
8274int QCamera3HardwareInterface::initParameters()
8275{
8276 int rc = 0;
8277
8278 //Allocate Set Param Buffer
8279 mParamHeap = new QCamera3HeapMemory(1);
8280 rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
8281 if(rc != OK) {
8282 rc = NO_MEMORY;
8283 LOGE("Failed to allocate SETPARM Heap memory");
8284 delete mParamHeap;
8285 mParamHeap = NULL;
8286 return rc;
8287 }
8288
8289 //Map memory for parameters buffer
8290 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
8291 CAM_MAPPING_BUF_TYPE_PARM_BUF,
8292 mParamHeap->getFd(0),
8293 sizeof(metadata_buffer_t),
8294 (metadata_buffer_t *) DATA_PTR(mParamHeap,0));
8295 if(rc < 0) {
8296 LOGE("failed to map SETPARM buffer");
8297 rc = FAILED_TRANSACTION;
8298 mParamHeap->deallocate();
8299 delete mParamHeap;
8300 mParamHeap = NULL;
8301 return rc;
8302 }
8303
8304 mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
8305
8306 mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
8307 return rc;
8308}
8309
/*===========================================================================
 * FUNCTION   : deinitParameters
 *
 * DESCRIPTION: de-initialize camera parameters; undoes initParameters() by
 *              unmapping the parameter buffer from the backend, releasing
 *              the heap, and freeing the cached previous-parameters copy.
 *              Assumes initParameters() succeeded (members are non-NULL).
 *
 * PARAMETERS :
 *
 * RETURN     : NONE
 *==========================================================================*/
void QCamera3HardwareInterface::deinitParameters()
{
    // Unmap from the backend before tearing down the backing heap.
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_PARM_BUF);

    mParamHeap->deallocate();
    delete mParamHeap;
    mParamHeap = NULL;

    // mParameters pointed into mParamHeap's storage, which is gone now.
    mParameters = NULL;

    free(mPrevParameters);
    mPrevParameters = NULL;
}
8333
8334/*===========================================================================
8335 * FUNCTION : calcMaxJpegSize
8336 *
8337 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
8338 *
8339 * PARAMETERS :
8340 *
8341 * RETURN : max_jpeg_size
8342 *==========================================================================*/
8343size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
8344{
8345 size_t max_jpeg_size = 0;
8346 size_t temp_width, temp_height;
8347 size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
8348 MAX_SIZES_CNT);
8349 for (size_t i = 0; i < count; i++) {
8350 temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
8351 temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
8352 if (temp_width * temp_height > max_jpeg_size ) {
8353 max_jpeg_size = temp_width * temp_height;
8354 }
8355 }
8356 max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
8357 return max_jpeg_size;
8358}
8359
8360/*===========================================================================
8361 * FUNCTION : getMaxRawSize
8362 *
8363 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
8364 *
8365 * PARAMETERS :
8366 *
8367 * RETURN : Largest supported Raw Dimension
8368 *==========================================================================*/
8369cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
8370{
8371 int max_width = 0;
8372 cam_dimension_t maxRawSize;
8373
8374 memset(&maxRawSize, 0, sizeof(cam_dimension_t));
8375 for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
8376 if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
8377 max_width = gCamCapability[camera_id]->raw_dim[i].width;
8378 maxRawSize = gCamCapability[camera_id]->raw_dim[i];
8379 }
8380 }
8381 return maxRawSize;
8382}
8383
8384
8385/*===========================================================================
8386 * FUNCTION : calcMaxJpegDim
8387 *
8388 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
8389 *
8390 * PARAMETERS :
8391 *
8392 * RETURN : max_jpeg_dim
8393 *==========================================================================*/
8394cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
8395{
8396 cam_dimension_t max_jpeg_dim;
8397 cam_dimension_t curr_jpeg_dim;
8398 max_jpeg_dim.width = 0;
8399 max_jpeg_dim.height = 0;
8400 curr_jpeg_dim.width = 0;
8401 curr_jpeg_dim.height = 0;
8402 for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
8403 curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
8404 curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
8405 if (curr_jpeg_dim.width * curr_jpeg_dim.height >
8406 max_jpeg_dim.width * max_jpeg_dim.height ) {
8407 max_jpeg_dim.width = curr_jpeg_dim.width;
8408 max_jpeg_dim.height = curr_jpeg_dim.height;
8409 }
8410 }
8411 return max_jpeg_dim;
8412}
8413
8414/*===========================================================================
8415 * FUNCTION : addStreamConfig
8416 *
8417 * DESCRIPTION: adds the stream configuration to the array
8418 *
8419 * PARAMETERS :
8420 * @available_stream_configs : pointer to stream configuration array
8421 * @scalar_format : scalar format
8422 * @dim : configuration dimension
8423 * @config_type : input or output configuration type
8424 *
8425 * RETURN : NONE
8426 *==========================================================================*/
8427void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
8428 int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
8429{
8430 available_stream_configs.add(scalar_format);
8431 available_stream_configs.add(dim.width);
8432 available_stream_configs.add(dim.height);
8433 available_stream_configs.add(config_type);
8434}
8435
8436/*===========================================================================
8437 * FUNCTION : suppportBurstCapture
8438 *
8439 * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
8440 *
8441 * PARAMETERS :
8442 * @cameraId : camera Id
8443 *
8444 * RETURN : true if camera supports BURST_CAPTURE
8445 * false otherwise
8446 *==========================================================================*/
8447bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
8448{
8449 const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
8450 const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
8451 const int32_t highResWidth = 3264;
8452 const int32_t highResHeight = 2448;
8453
8454 if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
8455 // Maximum resolution images cannot be captured at >= 10fps
8456 // -> not supporting BURST_CAPTURE
8457 return false;
8458 }
8459
8460 if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
8461 // Maximum resolution images can be captured at >= 20fps
8462 // --> supporting BURST_CAPTURE
8463 return true;
8464 }
8465
8466 // Find the smallest highRes resolution, or largest resolution if there is none
8467 size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
8468 MAX_SIZES_CNT);
8469 size_t highRes = 0;
8470 while ((highRes + 1 < totalCnt) &&
8471 (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
8472 gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
8473 highResWidth * highResHeight)) {
8474 highRes++;
8475 }
8476 if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
8477 return true;
8478 } else {
8479 return false;
8480 }
8481}
8482
8483/*===========================================================================
8484 * FUNCTION : initStaticMetadata
8485 *
8486 * DESCRIPTION: initialize the static metadata
8487 *
8488 * PARAMETERS :
8489 * @cameraId : camera Id
8490 *
8491 * RETURN : int32_t type of status
8492 * 0 -- success
8493 * non-zero failure code
8494 *==========================================================================*/
8495int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
8496{
8497 int rc = 0;
8498 CameraMetadata staticInfo;
8499 size_t count = 0;
8500 bool limitedDevice = false;
8501 char prop[PROPERTY_VALUE_MAX];
8502 bool supportBurst = false;
8503
8504 supportBurst = supportBurstCapture(cameraId);
8505
8506 /* If sensor is YUV sensor (no raw support) or if per-frame control is not
8507 * guaranteed or if min fps of max resolution is less than 20 fps, its
8508 * advertised as limited device*/
8509 limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
8510 (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
8511 (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
8512 !supportBurst;
8513
8514 uint8_t supportedHwLvl = limitedDevice ?
8515 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008516#ifndef USE_HAL_3_3
8517 // LEVEL_3 - This device will support level 3.
8518 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
8519#else
Thierry Strudel3d639192016-09-09 11:52:26 -07008520 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008521#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07008522
8523 staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
8524 &supportedHwLvl, 1);
8525
8526 bool facingBack = false;
8527 if ((gCamCapability[cameraId]->position == CAM_POSITION_BACK) ||
8528 (gCamCapability[cameraId]->position == CAM_POSITION_BACK_AUX)) {
8529 facingBack = true;
8530 }
8531 /*HAL 3 only*/
8532 staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
8533 &gCamCapability[cameraId]->min_focus_distance, 1);
8534
8535 staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
8536 &gCamCapability[cameraId]->hyper_focal_distance, 1);
8537
8538 /*should be using focal lengths but sensor doesn't provide that info now*/
8539 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
8540 &gCamCapability[cameraId]->focal_length,
8541 1);
8542
8543 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
8544 gCamCapability[cameraId]->apertures,
8545 MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
8546
8547 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
8548 gCamCapability[cameraId]->filter_densities,
8549 MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
8550
8551
8552 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
8553 (uint8_t *)gCamCapability[cameraId]->optical_stab_modes,
8554 MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count));
8555
8556 int32_t lens_shading_map_size[] = {
8557 MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
8558 MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
8559 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
8560 lens_shading_map_size,
8561 sizeof(lens_shading_map_size)/sizeof(int32_t));
8562
8563 staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
8564 gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
8565
8566 staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
8567 gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
8568
8569 staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
8570 &gCamCapability[cameraId]->max_frame_duration, 1);
8571
8572 camera_metadata_rational baseGainFactor = {
8573 gCamCapability[cameraId]->base_gain_factor.numerator,
8574 gCamCapability[cameraId]->base_gain_factor.denominator};
8575 staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
8576 &baseGainFactor, 1);
8577
8578 staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
8579 (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
8580
8581 int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
8582 gCamCapability[cameraId]->pixel_array_size.height};
8583 staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
8584 pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
8585
8586 int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
8587 gCamCapability[cameraId]->active_array_size.top,
8588 gCamCapability[cameraId]->active_array_size.width,
8589 gCamCapability[cameraId]->active_array_size.height};
8590 staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
8591 active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
8592
8593 staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
8594 &gCamCapability[cameraId]->white_level, 1);
8595
Shuzhen Wanga5da1022016-07-13 20:18:42 -07008596 int32_t adjusted_bl_per_cfa[BLACK_LEVEL_PATTERN_CNT];
8597 adjustBlackLevelForCFA(gCamCapability[cameraId]->black_level_pattern, adjusted_bl_per_cfa,
8598 gCamCapability[cameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07008599 staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
Shuzhen Wanga5da1022016-07-13 20:18:42 -07008600 adjusted_bl_per_cfa, BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel3d639192016-09-09 11:52:26 -07008601
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008602#ifndef USE_HAL_3_3
8603 bool hasBlackRegions = false;
8604 if (gCamCapability[cameraId]->optical_black_region_count > MAX_OPTICAL_BLACK_REGIONS) {
8605 LOGW("black_region_count: %d is bounded to %d",
8606 gCamCapability[cameraId]->optical_black_region_count, MAX_OPTICAL_BLACK_REGIONS);
8607 gCamCapability[cameraId]->optical_black_region_count = MAX_OPTICAL_BLACK_REGIONS;
8608 }
8609 if (gCamCapability[cameraId]->optical_black_region_count != 0) {
8610 int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
8611 for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i++) {
8612 opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
8613 }
8614 staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
8615 opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
8616 hasBlackRegions = true;
8617 }
8618#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07008619 staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
8620 &gCamCapability[cameraId]->flash_charge_duration, 1);
8621
8622 staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
8623 &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
8624
Shuzhen Wang98d5efb2016-09-07 18:08:22 -07008625 uint8_t timestampSource = (gCamCapability[cameraId]->timestamp_calibrated ?
8626 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME :
8627 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN);
Thierry Strudel3d639192016-09-09 11:52:26 -07008628 staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
8629 &timestampSource, 1);
8630
Thierry Strudel54dc9782017-02-15 12:12:10 -08008631 //update histogram vendor data
8632 staticInfo.update(QCAMERA3_HISTOGRAM_BUCKETS,
Thierry Strudel3d639192016-09-09 11:52:26 -07008633 &gCamCapability[cameraId]->histogram_size, 1);
8634
Thierry Strudel54dc9782017-02-15 12:12:10 -08008635 staticInfo.update(QCAMERA3_HISTOGRAM_MAX_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -07008636 &gCamCapability[cameraId]->max_histogram_count, 1);
8637
8638 int32_t sharpness_map_size[] = {
8639 gCamCapability[cameraId]->sharpness_map_size.width,
8640 gCamCapability[cameraId]->sharpness_map_size.height};
8641
8642 staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
8643 sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
8644
8645 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
8646 &gCamCapability[cameraId]->max_sharpness_map_value, 1);
8647
8648 int32_t scalar_formats[] = {
8649 ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
8650 ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
8651 ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
8652 ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
8653 HAL_PIXEL_FORMAT_RAW10,
8654 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
8655 size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(int32_t);
8656 staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
8657 scalar_formats,
8658 scalar_formats_count);
8659
8660 int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
8661 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
8662 makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
8663 count, MAX_SIZES_CNT, available_processed_sizes);
8664 staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
8665 available_processed_sizes, count * 2);
8666
8667 int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
8668 count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
8669 makeTable(gCamCapability[cameraId]->raw_dim,
8670 count, MAX_SIZES_CNT, available_raw_sizes);
8671 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
8672 available_raw_sizes, count * 2);
8673
8674 int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
8675 count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
8676 makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
8677 count, MAX_SIZES_CNT, available_fps_ranges);
8678 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
8679 available_fps_ranges, count * 2);
8680
8681 camera_metadata_rational exposureCompensationStep = {
8682 gCamCapability[cameraId]->exp_compensation_step.numerator,
8683 gCamCapability[cameraId]->exp_compensation_step.denominator};
8684 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
8685 &exposureCompensationStep, 1);
8686
8687 Vector<uint8_t> availableVstabModes;
8688 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
8689 char eis_prop[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008690 bool eisSupported = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07008691 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008692 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07008693 uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008694 count = IS_TYPE_MAX;
8695 count = MIN(gCamCapability[cameraId]->supported_is_types_cnt, count);
8696 for (size_t i = 0; i < count; i++) {
8697 if ((gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
8698 (gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
8699 eisSupported = true;
8700 break;
8701 }
8702 }
8703 if (facingBack && eis_prop_set && eisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07008704 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
8705 }
8706 staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
8707 availableVstabModes.array(), availableVstabModes.size());
8708
8709 /*HAL 1 and HAL 3 common*/
8710 uint32_t zoomSteps = gCamCapability[cameraId]->zoom_ratio_tbl_cnt;
8711 uint32_t maxZoomStep = gCamCapability[cameraId]->zoom_ratio_tbl[zoomSteps - 1];
8712 uint32_t minZoomStep = 100; //as per HAL1/API1 spec
8713 float maxZoom = maxZoomStep/minZoomStep;
8714 staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
8715 &maxZoom, 1);
8716
8717 uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
8718 staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
8719
8720 int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
8721 if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
8722 max3aRegions[2] = 0; /* AF not supported */
8723 staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
8724 max3aRegions, 3);
8725
8726 /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
8727 memset(prop, 0, sizeof(prop));
8728 property_get("persist.camera.facedetect", prop, "1");
8729 uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
8730 LOGD("Support face detection mode: %d",
8731 supportedFaceDetectMode);
8732
8733 int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
Thierry Strudel04e026f2016-10-10 11:27:36 -07008734 /* support mode should be OFF if max number of face is 0 */
8735 if (maxFaces <= 0) {
8736 supportedFaceDetectMode = 0;
8737 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008738 Vector<uint8_t> availableFaceDetectModes;
8739 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
8740 if (supportedFaceDetectMode == 1) {
8741 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
8742 } else if (supportedFaceDetectMode == 2) {
8743 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
8744 } else if (supportedFaceDetectMode == 3) {
8745 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
8746 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
8747 } else {
8748 maxFaces = 0;
8749 }
8750 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
8751 availableFaceDetectModes.array(),
8752 availableFaceDetectModes.size());
8753 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
8754 (int32_t *)&maxFaces, 1);
Thierry Strudel54dc9782017-02-15 12:12:10 -08008755 uint8_t face_bsgc = gCamCapability[cameraId]->face_bsgc;
8756 staticInfo.update(QCAMERA3_STATS_BSGC_AVAILABLE,
8757 &face_bsgc, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07008758
Emilian Peev7650c122017-01-19 08:24:33 -08008759#ifdef SUPPORT_DEPTH_DATA
8760 //TODO: Update depth size accordingly, currently we use active array
8761 // as reference.
8762 int32_t depthWidth = gCamCapability[cameraId]->active_array_size.width;
8763 int32_t depthHeight = gCamCapability[cameraId]->active_array_size.height;
8764 //As per spec. depth cloud should be sample count / 16
8765 int32_t depthSamplesCount = depthWidth * depthHeight / 16;
8766 assert(0 < depthSamplesCount);
8767 staticInfo.update(ANDROID_DEPTH_MAX_DEPTH_SAMPLES, &depthSamplesCount, 1);
8768
8769 int32_t depthConfigs[] = {HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1,
8770 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT };
8771 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
8772 depthConfigs, sizeof(depthConfigs)/sizeof(depthConfigs[0]));
8773
8774 int64_t depthMinDuration[] = {HAL_PIXEL_FORMAT_BLOB, depthSamplesCount,
8775 1, 1 };
8776 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
8777 depthMinDuration,
8778 sizeof(depthMinDuration) / sizeof(depthMinDuration[0]));
8779
8780 int64_t depthStallDuration[] = {HAL_PIXEL_FORMAT_BLOB, depthSamplesCount,
8781 1, 0 };
8782 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
8783 depthStallDuration,
8784 sizeof(depthStallDuration) / sizeof(depthStallDuration[0]));
8785
8786 uint8_t depthExclusive = ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE_FALSE;
8787 staticInfo.update(ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE, &depthExclusive, 1);
8788#endif
8789
Thierry Strudel3d639192016-09-09 11:52:26 -07008790 int32_t exposureCompensationRange[] = {
8791 gCamCapability[cameraId]->exposure_compensation_min,
8792 gCamCapability[cameraId]->exposure_compensation_max};
8793 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
8794 exposureCompensationRange,
8795 sizeof(exposureCompensationRange)/sizeof(int32_t));
8796
8797 uint8_t lensFacing = (facingBack) ?
8798 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
8799 staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
8800
8801 staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
8802 available_thumbnail_sizes,
8803 sizeof(available_thumbnail_sizes)/sizeof(int32_t));
8804
8805 /*all sizes will be clubbed into this tag*/
8806 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
8807 /*android.scaler.availableStreamConfigurations*/
8808 Vector<int32_t> available_stream_configs;
8809 cam_dimension_t active_array_dim;
8810 active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
8811 active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
8812 /* Add input/output stream configurations for each scalar formats*/
8813 for (size_t j = 0; j < scalar_formats_count; j++) {
8814 switch (scalar_formats[j]) {
8815 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
8816 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
8817 case HAL_PIXEL_FORMAT_RAW10:
8818 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
8819 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
8820 addStreamConfig(available_stream_configs, scalar_formats[j],
8821 gCamCapability[cameraId]->raw_dim[i],
8822 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
8823 }
8824 break;
8825 case HAL_PIXEL_FORMAT_BLOB:
8826 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
8827 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
8828 addStreamConfig(available_stream_configs, scalar_formats[j],
8829 gCamCapability[cameraId]->picture_sizes_tbl[i],
8830 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
8831 }
8832 break;
8833 case HAL_PIXEL_FORMAT_YCbCr_420_888:
8834 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
8835 default:
8836 cam_dimension_t largest_picture_size;
8837 memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
8838 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
8839 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
8840 addStreamConfig(available_stream_configs, scalar_formats[j],
8841 gCamCapability[cameraId]->picture_sizes_tbl[i],
8842 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
8843 /* Book keep largest */
8844 if (gCamCapability[cameraId]->picture_sizes_tbl[i].width
8845 >= largest_picture_size.width &&
8846 gCamCapability[cameraId]->picture_sizes_tbl[i].height
8847 >= largest_picture_size.height)
8848 largest_picture_size = gCamCapability[cameraId]->picture_sizes_tbl[i];
8849 }
8850 /*For below 2 formats we also support i/p streams for reprocessing advertise those*/
8851 if (scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
8852 scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) {
8853 addStreamConfig(available_stream_configs, scalar_formats[j],
8854 largest_picture_size,
8855 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
8856 }
8857 break;
8858 }
8859 }
8860
8861 staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
8862 available_stream_configs.array(), available_stream_configs.size());
8863 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
8864 staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
8865
8866 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
8867 staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
8868
8869 /* android.scaler.availableMinFrameDurations */
8870 Vector<int64_t> available_min_durations;
8871 for (size_t j = 0; j < scalar_formats_count; j++) {
8872 switch (scalar_formats[j]) {
8873 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
8874 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
8875 case HAL_PIXEL_FORMAT_RAW10:
8876 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
8877 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
8878 available_min_durations.add(scalar_formats[j]);
8879 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
8880 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
8881 available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
8882 }
8883 break;
8884 default:
8885 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
8886 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
8887 available_min_durations.add(scalar_formats[j]);
8888 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
8889 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
8890 available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
8891 }
8892 break;
8893 }
8894 }
8895 staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
8896 available_min_durations.array(), available_min_durations.size());
8897
8898 Vector<int32_t> available_hfr_configs;
8899 for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
8900 int32_t fps = 0;
8901 switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
8902 case CAM_HFR_MODE_60FPS:
8903 fps = 60;
8904 break;
8905 case CAM_HFR_MODE_90FPS:
8906 fps = 90;
8907 break;
8908 case CAM_HFR_MODE_120FPS:
8909 fps = 120;
8910 break;
8911 case CAM_HFR_MODE_150FPS:
8912 fps = 150;
8913 break;
8914 case CAM_HFR_MODE_180FPS:
8915 fps = 180;
8916 break;
8917 case CAM_HFR_MODE_210FPS:
8918 fps = 210;
8919 break;
8920 case CAM_HFR_MODE_240FPS:
8921 fps = 240;
8922 break;
8923 case CAM_HFR_MODE_480FPS:
8924 fps = 480;
8925 break;
8926 case CAM_HFR_MODE_OFF:
8927 case CAM_HFR_MODE_MAX:
8928 default:
8929 break;
8930 }
8931
8932 /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
8933 if (fps >= MIN_FPS_FOR_BATCH_MODE) {
8934 /* For each HFR frame rate, need to advertise one variable fps range
8935 * and one fixed fps range per dimension. Eg: for 120 FPS, advertise [30, 120]
8936 * and [120, 120]. While camcorder preview alone is running [30, 120] is
8937 * set by the app. When video recording is started, [120, 120] is
8938 * set. This way sensor configuration does not change when recording
8939 * is started */
8940
8941 /* (width, height, fps_min, fps_max, batch_size_max) */
8942 for (size_t j = 0; j < gCamCapability[cameraId]->hfr_tbl[i].dim_cnt &&
8943 j < MAX_SIZES_CNT; j++) {
8944 available_hfr_configs.add(
8945 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
8946 available_hfr_configs.add(
8947 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
8948 available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
8949 available_hfr_configs.add(fps);
8950 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
8951
8952 /* (width, height, fps_min, fps_max, batch_size_max) */
8953 available_hfr_configs.add(
8954 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
8955 available_hfr_configs.add(
8956 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
8957 available_hfr_configs.add(fps);
8958 available_hfr_configs.add(fps);
8959 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
8960 }
8961 }
8962 }
8963 //Advertise HFR capability only if the property is set
8964 memset(prop, 0, sizeof(prop));
8965 property_get("persist.camera.hal3hfr.enable", prop, "1");
8966 uint8_t hfrEnable = (uint8_t)atoi(prop);
8967
8968 if(hfrEnable && available_hfr_configs.array()) {
8969 staticInfo.update(
8970 ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
8971 available_hfr_configs.array(), available_hfr_configs.size());
8972 }
8973
8974 int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
8975 staticInfo.update(ANDROID_JPEG_MAX_SIZE,
8976 &max_jpeg_size, 1);
8977
8978 uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
8979 size_t size = 0;
8980 count = CAM_EFFECT_MODE_MAX;
8981 count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
8982 for (size_t i = 0; i < count; i++) {
8983 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
8984 gCamCapability[cameraId]->supported_effects[i]);
8985 if (NAME_NOT_FOUND != val) {
8986 avail_effects[size] = (uint8_t)val;
8987 size++;
8988 }
8989 }
8990 staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
8991 avail_effects,
8992 size);
8993
8994 uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
8995 uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
8996 size_t supported_scene_modes_cnt = 0;
8997 count = CAM_SCENE_MODE_MAX;
8998 count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
8999 for (size_t i = 0; i < count; i++) {
9000 if (gCamCapability[cameraId]->supported_scene_modes[i] !=
9001 CAM_SCENE_MODE_OFF) {
9002 int val = lookupFwkName(SCENE_MODES_MAP,
9003 METADATA_MAP_SIZE(SCENE_MODES_MAP),
9004 gCamCapability[cameraId]->supported_scene_modes[i]);
Mansoor Aftab58465fa2017-01-26 15:02:44 -08009005
Thierry Strudel3d639192016-09-09 11:52:26 -07009006 if (NAME_NOT_FOUND != val) {
9007 avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
9008 supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
9009 supported_scene_modes_cnt++;
9010 }
9011 }
9012 }
9013 staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
9014 avail_scene_modes,
9015 supported_scene_modes_cnt);
9016
9017 uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
9018 makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
9019 supported_scene_modes_cnt,
9020 CAM_SCENE_MODE_MAX,
9021 scene_mode_overrides,
9022 supported_indexes,
9023 cameraId);
9024
9025 if (supported_scene_modes_cnt == 0) {
9026 supported_scene_modes_cnt = 1;
9027 avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
9028 }
9029
9030 staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
9031 scene_mode_overrides, supported_scene_modes_cnt * 3);
9032
9033 uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
9034 ANDROID_CONTROL_MODE_AUTO,
9035 ANDROID_CONTROL_MODE_USE_SCENE_MODE};
9036 staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
9037 available_control_modes,
9038 3);
9039
9040 uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
9041 size = 0;
9042 count = CAM_ANTIBANDING_MODE_MAX;
9043 count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
9044 for (size_t i = 0; i < count; i++) {
9045 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
9046 gCamCapability[cameraId]->supported_antibandings[i]);
9047 if (NAME_NOT_FOUND != val) {
9048 avail_antibanding_modes[size] = (uint8_t)val;
9049 size++;
9050 }
9051
9052 }
9053 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
9054 avail_antibanding_modes,
9055 size);
9056
9057 uint8_t avail_abberation_modes[] = {
9058 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
9059 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
9060 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
9061 count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
9062 count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
9063 if (0 == count) {
9064 // If no aberration correction modes are available for a device, this advertise OFF mode
9065 size = 1;
9066 } else {
9067 // If count is not zero then atleast one among the FAST or HIGH quality is supported
9068 // So, advertize all 3 modes if atleast any one mode is supported as per the
9069 // new M requirement
9070 size = 3;
9071 }
9072 staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
9073 avail_abberation_modes,
9074 size);
9075
9076 uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
9077 size = 0;
9078 count = CAM_FOCUS_MODE_MAX;
9079 count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
9080 for (size_t i = 0; i < count; i++) {
9081 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
9082 gCamCapability[cameraId]->supported_focus_modes[i]);
9083 if (NAME_NOT_FOUND != val) {
9084 avail_af_modes[size] = (uint8_t)val;
9085 size++;
9086 }
9087 }
9088 staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
9089 avail_af_modes,
9090 size);
9091
9092 uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
9093 size = 0;
9094 count = CAM_WB_MODE_MAX;
9095 count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
9096 for (size_t i = 0; i < count; i++) {
9097 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
9098 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
9099 gCamCapability[cameraId]->supported_white_balances[i]);
9100 if (NAME_NOT_FOUND != val) {
9101 avail_awb_modes[size] = (uint8_t)val;
9102 size++;
9103 }
9104 }
9105 staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
9106 avail_awb_modes,
9107 size);
9108
9109 uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
9110 count = CAM_FLASH_FIRING_LEVEL_MAX;
9111 count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
9112 count);
9113 for (size_t i = 0; i < count; i++) {
9114 available_flash_levels[i] =
9115 gCamCapability[cameraId]->supported_firing_levels[i];
9116 }
9117 staticInfo.update(ANDROID_FLASH_FIRING_POWER,
9118 available_flash_levels, count);
9119
9120 uint8_t flashAvailable;
9121 if (gCamCapability[cameraId]->flash_available)
9122 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
9123 else
9124 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
9125 staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
9126 &flashAvailable, 1);
9127
9128 Vector<uint8_t> avail_ae_modes;
9129 count = CAM_AE_MODE_MAX;
9130 count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
9131 for (size_t i = 0; i < count; i++) {
9132 avail_ae_modes.add(gCamCapability[cameraId]->supported_ae_modes[i]);
9133 }
9134 if (flashAvailable) {
9135 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
9136 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
Thierry Strudel54dc9782017-02-15 12:12:10 -08009137 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE);
Thierry Strudel3d639192016-09-09 11:52:26 -07009138 }
9139 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
9140 avail_ae_modes.array(),
9141 avail_ae_modes.size());
9142
9143 int32_t sensitivity_range[2];
9144 sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
9145 sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
9146 staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
9147 sensitivity_range,
9148 sizeof(sensitivity_range) / sizeof(int32_t));
9149
9150 staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
9151 &gCamCapability[cameraId]->max_analog_sensitivity,
9152 1);
9153
9154 int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
9155 staticInfo.update(ANDROID_SENSOR_ORIENTATION,
9156 &sensor_orientation,
9157 1);
9158
9159 int32_t max_output_streams[] = {
9160 MAX_STALLING_STREAMS,
9161 MAX_PROCESSED_STREAMS,
9162 MAX_RAW_STREAMS};
9163 staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
9164 max_output_streams,
9165 sizeof(max_output_streams)/sizeof(max_output_streams[0]));
9166
9167 uint8_t avail_leds = 0;
9168 staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
9169 &avail_leds, 0);
9170
9171 uint8_t focus_dist_calibrated;
9172 int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
9173 gCamCapability[cameraId]->focus_dist_calibrated);
9174 if (NAME_NOT_FOUND != val) {
9175 focus_dist_calibrated = (uint8_t)val;
9176 staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
9177 &focus_dist_calibrated, 1);
9178 }
9179
9180 int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
9181 size = 0;
9182 count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
9183 MAX_TEST_PATTERN_CNT);
9184 for (size_t i = 0; i < count; i++) {
9185 int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
9186 gCamCapability[cameraId]->supported_test_pattern_modes[i]);
9187 if (NAME_NOT_FOUND != testpatternMode) {
9188 avail_testpattern_modes[size] = testpatternMode;
9189 size++;
9190 }
9191 }
9192 staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
9193 avail_testpattern_modes,
9194 size);
9195
9196 uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
9197 staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
9198 &max_pipeline_depth,
9199 1);
9200
9201 int32_t partial_result_count = PARTIAL_RESULT_COUNT;
9202 staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
9203 &partial_result_count,
9204 1);
9205
9206 int32_t max_stall_duration = MAX_REPROCESS_STALL;
9207 staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
9208
9209 Vector<uint8_t> available_capabilities;
9210 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
9211 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
9212 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
9213 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
9214 if (supportBurst) {
9215 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
9216 }
9217 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
9218 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
9219 if (hfrEnable && available_hfr_configs.array()) {
9220 available_capabilities.add(
9221 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
9222 }
9223
9224 if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
9225 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
9226 }
9227 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
9228 available_capabilities.array(),
9229 available_capabilities.size());
9230
9231 //aeLockAvailable to be set to true if capabilities has MANUAL_SENSOR or BURST_CAPTURE
9232 //Assumption is that all bayer cameras support MANUAL_SENSOR.
9233 uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
9234 ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
9235
9236 staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
9237 &aeLockAvailable, 1);
9238
9239 //awbLockAvailable to be set to true if capabilities has MANUAL_POST_PROCESSING or
9240 //BURST_CAPTURE. Assumption is that all bayer cameras support MANUAL_POST_PROCESSING.
9241 uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
9242 ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
9243
9244 staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
9245 &awbLockAvailable, 1);
9246
9247 int32_t max_input_streams = 1;
9248 staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
9249 &max_input_streams,
9250 1);
9251
9252 /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
9253 int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
9254 HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
9255 HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
9256 HAL_PIXEL_FORMAT_YCbCr_420_888};
9257 staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
9258 io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));
9259
9260 int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
9261 staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
9262 &max_latency,
9263 1);
9264
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009265#ifndef USE_HAL_3_3
9266 int32_t isp_sensitivity_range[2];
9267 isp_sensitivity_range[0] =
9268 gCamCapability[cameraId]->isp_sensitivity_range.min_sensitivity;
9269 isp_sensitivity_range[1] =
9270 gCamCapability[cameraId]->isp_sensitivity_range.max_sensitivity;
9271 staticInfo.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
9272 isp_sensitivity_range,
9273 sizeof(isp_sensitivity_range) / sizeof(isp_sensitivity_range[0]));
9274#endif
9275
Thierry Strudel3d639192016-09-09 11:52:26 -07009276 uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
9277 ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
9278 staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
9279 available_hot_pixel_modes,
9280 sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));
9281
9282 uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
9283 ANDROID_SHADING_MODE_FAST,
9284 ANDROID_SHADING_MODE_HIGH_QUALITY};
9285 staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
9286 available_shading_modes,
9287 3);
9288
9289 uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
9290 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
9291 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
9292 available_lens_shading_map_modes,
9293 2);
9294
9295 uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
9296 ANDROID_EDGE_MODE_FAST,
9297 ANDROID_EDGE_MODE_HIGH_QUALITY,
9298 ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
9299 staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
9300 available_edge_modes,
9301 sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));
9302
9303 uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
9304 ANDROID_NOISE_REDUCTION_MODE_FAST,
9305 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
9306 ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
9307 ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
9308 staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
9309 available_noise_red_modes,
9310 sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));
9311
9312 uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
9313 ANDROID_TONEMAP_MODE_FAST,
9314 ANDROID_TONEMAP_MODE_HIGH_QUALITY};
9315 staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
9316 available_tonemap_modes,
9317 sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));
9318
9319 uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
9320 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
9321 available_hot_pixel_map_modes,
9322 sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
9323
9324 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
9325 gCamCapability[cameraId]->reference_illuminant1);
9326 if (NAME_NOT_FOUND != val) {
9327 uint8_t fwkReferenceIlluminant = (uint8_t)val;
9328 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
9329 }
9330
9331 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
9332 gCamCapability[cameraId]->reference_illuminant2);
9333 if (NAME_NOT_FOUND != val) {
9334 uint8_t fwkReferenceIlluminant = (uint8_t)val;
9335 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
9336 }
9337
9338 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
9339 (void *)gCamCapability[cameraId]->forward_matrix1,
9340 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
9341
9342 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
9343 (void *)gCamCapability[cameraId]->forward_matrix2,
9344 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
9345
9346 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
9347 (void *)gCamCapability[cameraId]->color_transform1,
9348 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
9349
9350 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
9351 (void *)gCamCapability[cameraId]->color_transform2,
9352 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
9353
9354 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
9355 (void *)gCamCapability[cameraId]->calibration_transform1,
9356 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
9357
9358 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
9359 (void *)gCamCapability[cameraId]->calibration_transform2,
9360 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
9361
9362 int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
9363 ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
9364 ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
9365 ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
9366 ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
9367 ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
9368 ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
9369 ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
9370 ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
9371 ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
9372 ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
9373 ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
9374 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
9375 ANDROID_JPEG_GPS_COORDINATES,
9376 ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
9377 ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
9378 ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
9379 ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
9380 ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
9381 ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
9382 ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
9383 ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
9384 ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
9385 ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009386#ifndef USE_HAL_3_3
9387 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
9388#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009389 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -08009390 ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -07009391 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
9392 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07009393 ANDROID_BLACK_LEVEL_LOCK, NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Samuel Ha68ba5172016-12-15 18:41:12 -08009394 /* DevCamDebug metadata request_keys_basic */
9395 DEVCAMDEBUG_META_ENABLE,
9396 /* DevCamDebug metadata end */
9397 };
Thierry Strudel3d639192016-09-09 11:52:26 -07009398
9399 size_t request_keys_cnt =
9400 sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
9401 Vector<int32_t> available_request_keys;
9402 available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
9403 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
9404 available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
9405 }
9406
9407 staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
9408 available_request_keys.array(), available_request_keys.size());
9409
9410 int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
9411 ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
9412 ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
9413 ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
9414 ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
9415 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
9416 ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
9417 ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
9418 ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
9419 ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
9420 ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
9421 ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
9422 ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
9423 ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
9424 ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
9425 ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
9426 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -08009427 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -07009428 ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
9429 ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
9430 ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009431 ANDROID_STATISTICS_FACE_SCORES,
9432#ifndef USE_HAL_3_3
9433 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
9434#endif
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07009435 NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Shuzhen Wange763e802016-03-31 10:24:29 -07009436 NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE,
Samuel Ha68ba5172016-12-15 18:41:12 -08009437 // DevCamDebug metadata result_keys_basic
9438 DEVCAMDEBUG_META_ENABLE,
9439 // DevCamDebug metadata result_keys AF
9440 DEVCAMDEBUG_AF_LENS_POSITION,
9441 DEVCAMDEBUG_AF_TOF_CONFIDENCE,
9442 DEVCAMDEBUG_AF_TOF_DISTANCE,
9443 DEVCAMDEBUG_AF_LUMA,
9444 DEVCAMDEBUG_AF_HAF_STATE,
9445 DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
9446 DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
9447 DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
9448 DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
9449 DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
9450 DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
9451 DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
9452 DEVCAMDEBUG_AF_MONITOR_REFOCUS,
9453 DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
9454 DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
9455 DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
9456 DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
9457 DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
9458 DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
9459 DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
9460 DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
9461 DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
9462 DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
9463 DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
9464 DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
9465 DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
9466 DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
9467 // DevCamDebug metadata result_keys AEC
9468 DEVCAMDEBUG_AEC_TARGET_LUMA,
9469 DEVCAMDEBUG_AEC_COMP_LUMA,
9470 DEVCAMDEBUG_AEC_AVG_LUMA,
9471 DEVCAMDEBUG_AEC_CUR_LUMA,
9472 DEVCAMDEBUG_AEC_LINECOUNT,
9473 DEVCAMDEBUG_AEC_REAL_GAIN,
9474 DEVCAMDEBUG_AEC_EXP_INDEX,
9475 DEVCAMDEBUG_AEC_LUX_IDX,
9476 // DevCamDebug metadata result_keys AWB
9477 DEVCAMDEBUG_AWB_R_GAIN,
9478 DEVCAMDEBUG_AWB_G_GAIN,
9479 DEVCAMDEBUG_AWB_B_GAIN,
9480 DEVCAMDEBUG_AWB_CCT,
9481 DEVCAMDEBUG_AWB_DECISION,
9482 /* DevCamDebug metadata end */
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009483 };
9484
Thierry Strudel3d639192016-09-09 11:52:26 -07009485 size_t result_keys_cnt =
9486 sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
9487
9488 Vector<int32_t> available_result_keys;
9489 available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
9490 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
9491 available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
9492 }
9493 if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
9494 available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
9495 available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
9496 }
9497 if (supportedFaceDetectMode == 1) {
9498 available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
9499 available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
9500 } else if ((supportedFaceDetectMode == 2) ||
9501 (supportedFaceDetectMode == 3)) {
9502 available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
9503 available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
9504 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009505#ifndef USE_HAL_3_3
9506 if (hasBlackRegions) {
9507 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
9508 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL);
9509 }
9510#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009511 staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
9512 available_result_keys.array(), available_result_keys.size());
9513
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009514 int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
Thierry Strudel3d639192016-09-09 11:52:26 -07009515 ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
9516 ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
9517 ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
9518 ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
9519 ANDROID_SCALER_CROPPING_TYPE,
9520 ANDROID_SYNC_MAX_LATENCY,
9521 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
9522 ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
9523 ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
9524 ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
9525 ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
9526 ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
9527 ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
9528 ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
9529 ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
9530 ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
9531 ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
9532 ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
9533 ANDROID_LENS_FACING,
9534 ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
9535 ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
9536 ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
9537 ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
9538 ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
9539 ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
9540 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
9541 /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
9542 ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
9543 ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
9544 ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
9545 ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
9546 ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
9547 ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
9548 ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
9549 ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
9550 ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
9551 ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
9552 ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
9553 ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
Thierry Strudel54dc9782017-02-15 12:12:10 -08009554 ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -07009555 ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
9556 ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
9557 ANDROID_EDGE_AVAILABLE_EDGE_MODES,
9558 ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
9559 ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
9560 ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
9561 ANDROID_TONEMAP_MAX_CURVE_POINTS,
9562 ANDROID_CONTROL_AVAILABLE_MODES,
9563 ANDROID_CONTROL_AE_LOCK_AVAILABLE,
9564 ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
9565 ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
9566 ANDROID_SHADING_AVAILABLE_MODES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009567 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
Emilian Peev7650c122017-01-19 08:24:33 -08009568#ifdef SUPPORT_DEPTH_DATA
9569 ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
9570 ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
9571 ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
9572 ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
9573 ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE,
9574#endif
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009575#ifndef USE_HAL_3_3
9576 ANDROID_SENSOR_OPAQUE_RAW_SIZE,
9577 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
9578#endif
9579 };
9580
9581 Vector<int32_t> available_characteristics_keys;
9582 available_characteristics_keys.appendArray(characteristics_keys_basic,
9583 sizeof(characteristics_keys_basic)/sizeof(int32_t));
9584#ifndef USE_HAL_3_3
9585 if (hasBlackRegions) {
9586 available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
9587 }
9588#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009589 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009590 available_characteristics_keys.array(),
9591 available_characteristics_keys.size());
Thierry Strudel3d639192016-09-09 11:52:26 -07009592
9593 /*available stall durations depend on the hw + sw and will be different for different devices */
9594 /*have to add for raw after implementation*/
9595 int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
9596 size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
9597
9598 Vector<int64_t> available_stall_durations;
9599 for (uint32_t j = 0; j < stall_formats_count; j++) {
9600 if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
9601 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
9602 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9603 available_stall_durations.add(stall_formats[j]);
9604 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
9605 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
9606 available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
9607 }
9608 } else {
9609 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
9610 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9611 available_stall_durations.add(stall_formats[j]);
9612 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
9613 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
9614 available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
9615 }
9616 }
9617 }
9618 staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
9619 available_stall_durations.array(),
9620 available_stall_durations.size());
9621
9622 //QCAMERA3_OPAQUE_RAW
9623 uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
9624 cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
9625 switch (gCamCapability[cameraId]->opaque_raw_fmt) {
9626 case LEGACY_RAW:
9627 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
9628 fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
9629 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
9630 fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
9631 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
9632 fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
9633 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
9634 break;
9635 case MIPI_RAW:
9636 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
9637 fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
9638 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
9639 fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
9640 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
9641 fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
9642 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
9643 break;
9644 default:
9645 LOGE("unknown opaque_raw_format %d",
9646 gCamCapability[cameraId]->opaque_raw_fmt);
9647 break;
9648 }
9649 staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
9650
9651 Vector<int32_t> strides;
9652 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9653 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9654 cam_stream_buf_plane_info_t buf_planes;
9655 strides.add(gCamCapability[cameraId]->raw_dim[i].width);
9656 strides.add(gCamCapability[cameraId]->raw_dim[i].height);
9657 mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
9658 &gCamCapability[cameraId]->padding_info, &buf_planes);
9659 strides.add(buf_planes.plane_info.mp[0].stride);
9660 }
9661 staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
9662 strides.size());
9663
Mansoor Aftab58465fa2017-01-26 15:02:44 -08009664 //TBD: remove the following line once backend advertises zzHDR in feature mask
9665 gCamCapability[cameraId]->qcom_supported_feature_mask |= CAM_QCOM_FEATURE_ZIGZAG_HDR;
Thierry Strudel04e026f2016-10-10 11:27:36 -07009666 //Video HDR default
9667 if ((gCamCapability[cameraId]->qcom_supported_feature_mask) &
9668 (CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR |
Mansoor Aftab58465fa2017-01-26 15:02:44 -08009669 CAM_QCOM_FEATURE_ZIGZAG_HDR | CAM_QCOM_FEATURE_SENSOR_HDR)) {
Thierry Strudel04e026f2016-10-10 11:27:36 -07009670 int32_t vhdr_mode[] = {
9671 QCAMERA3_VIDEO_HDR_MODE_OFF,
9672 QCAMERA3_VIDEO_HDR_MODE_ON};
9673
9674 size_t vhdr_mode_count = sizeof(vhdr_mode) / sizeof(int32_t);
9675 staticInfo.update(QCAMERA3_AVAILABLE_VIDEO_HDR_MODES,
9676 vhdr_mode, vhdr_mode_count);
9677 }
9678
Thierry Strudel3d639192016-09-09 11:52:26 -07009679 staticInfo.update(QCAMERA3_DUALCAM_CALIB_META_DATA_BLOB,
9680 (const uint8_t*)&gCamCapability[cameraId]->related_cam_calibration,
9681 sizeof(gCamCapability[cameraId]->related_cam_calibration));
9682
9683 uint8_t isMonoOnly =
9684 (gCamCapability[cameraId]->color_arrangement == CAM_FILTER_ARRANGEMENT_Y);
9685 staticInfo.update(QCAMERA3_SENSOR_IS_MONO_ONLY,
9686 &isMonoOnly, 1);
9687
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009688#ifndef USE_HAL_3_3
9689 Vector<int32_t> opaque_size;
9690 for (size_t j = 0; j < scalar_formats_count; j++) {
9691 if (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE) {
9692 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9693 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9694 cam_stream_buf_plane_info_t buf_planes;
9695
9696 rc = mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
9697 &gCamCapability[cameraId]->padding_info, &buf_planes);
9698
9699 if (rc == 0) {
9700 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].width);
9701 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].height);
9702 opaque_size.add(buf_planes.plane_info.frame_len);
9703 }else {
9704 LOGE("raw frame calculation failed!");
9705 }
9706 }
9707 }
9708 }
9709
9710 if ((opaque_size.size() > 0) &&
9711 (opaque_size.size() % PER_CONFIGURATION_SIZE_3 == 0))
9712 staticInfo.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, opaque_size.array(), opaque_size.size());
9713 else
9714 LOGW("Warning: ANDROID_SENSOR_OPAQUE_RAW_SIZE is using rough estimation(2 bytes/pixel)");
9715#endif
9716
Thierry Strudel04e026f2016-10-10 11:27:36 -07009717 if (gCamCapability[cameraId]->supported_ir_mode_cnt > 0) {
9718 int32_t avail_ir_modes[CAM_IR_MODE_MAX];
9719 size = 0;
9720 count = CAM_IR_MODE_MAX;
9721 count = MIN(gCamCapability[cameraId]->supported_ir_mode_cnt, count);
9722 for (size_t i = 0; i < count; i++) {
9723 int val = lookupFwkName(IR_MODES_MAP, METADATA_MAP_SIZE(IR_MODES_MAP),
9724 gCamCapability[cameraId]->supported_ir_modes[i]);
9725 if (NAME_NOT_FOUND != val) {
9726 avail_ir_modes[size] = (int32_t)val;
9727 size++;
9728 }
9729 }
9730 staticInfo.update(QCAMERA3_IR_AVAILABLE_MODES,
9731 avail_ir_modes, size);
9732 }
9733
Thierry Strudel295a0ca2016-11-03 18:38:47 -07009734 if (gCamCapability[cameraId]->supported_instant_aec_modes_cnt > 0) {
9735 int32_t available_instant_aec_modes[CAM_AEC_CONVERGENCE_MAX];
9736 size = 0;
9737 count = CAM_AEC_CONVERGENCE_MAX;
9738 count = MIN(gCamCapability[cameraId]->supported_instant_aec_modes_cnt, count);
9739 for (size_t i = 0; i < count; i++) {
9740 int val = lookupFwkName(INSTANT_AEC_MODES_MAP, METADATA_MAP_SIZE(INSTANT_AEC_MODES_MAP),
9741 gCamCapability[cameraId]->supported_instant_aec_modes[i]);
9742 if (NAME_NOT_FOUND != val) {
9743 available_instant_aec_modes[size] = (int32_t)val;
9744 size++;
9745 }
9746 }
9747 staticInfo.update(QCAMERA3_INSTANT_AEC_AVAILABLE_MODES,
9748 available_instant_aec_modes, size);
9749 }
9750
Thierry Strudel54dc9782017-02-15 12:12:10 -08009751 int32_t sharpness_range[] = {
9752 gCamCapability[cameraId]->sharpness_ctrl.min_value,
9753 gCamCapability[cameraId]->sharpness_ctrl.max_value};
9754 staticInfo.update(QCAMERA3_SHARPNESS_RANGE, sharpness_range, 2);
9755
9756 if (gCamCapability[cameraId]->supported_binning_correction_mode_cnt > 0) {
9757 int32_t avail_binning_modes[CAM_BINNING_CORRECTION_MODE_MAX];
9758 size = 0;
9759 count = CAM_BINNING_CORRECTION_MODE_MAX;
9760 count = MIN(gCamCapability[cameraId]->supported_binning_correction_mode_cnt, count);
9761 for (size_t i = 0; i < count; i++) {
9762 int val = lookupFwkName(BINNING_CORRECTION_MODES_MAP,
9763 METADATA_MAP_SIZE(BINNING_CORRECTION_MODES_MAP),
9764 gCamCapability[cameraId]->supported_binning_modes[i]);
9765 if (NAME_NOT_FOUND != val) {
9766 avail_binning_modes[size] = (int32_t)val;
9767 size++;
9768 }
9769 }
9770 staticInfo.update(QCAMERA3_AVAILABLE_BINNING_CORRECTION_MODES,
9771 avail_binning_modes, size);
9772 }
9773
9774 if (gCamCapability[cameraId]->supported_aec_modes_cnt > 0) {
9775 int32_t available_aec_modes[CAM_AEC_MODE_MAX];
9776 size = 0;
9777 count = MIN(gCamCapability[cameraId]->supported_aec_modes_cnt, CAM_AEC_MODE_MAX);
9778 for (size_t i = 0; i < count; i++) {
9779 int32_t val = lookupFwkName(AEC_MODES_MAP, METADATA_MAP_SIZE(AEC_MODES_MAP),
9780 gCamCapability[cameraId]->supported_aec_modes[i]);
9781 if (NAME_NOT_FOUND != val)
9782 available_aec_modes[size++] = val;
9783 }
9784 staticInfo.update(QCAMERA3_EXPOSURE_METER_AVAILABLE_MODES,
9785 available_aec_modes, size);
9786 }
9787
9788 if (gCamCapability[cameraId]->supported_iso_modes_cnt > 0) {
9789 int32_t available_iso_modes[CAM_ISO_MODE_MAX];
9790 size = 0;
9791 count = MIN(gCamCapability[cameraId]->supported_iso_modes_cnt, CAM_ISO_MODE_MAX);
9792 for (size_t i = 0; i < count; i++) {
9793 int32_t val = lookupFwkName(ISO_MODES_MAP, METADATA_MAP_SIZE(ISO_MODES_MAP),
9794 gCamCapability[cameraId]->supported_iso_modes[i]);
9795 if (NAME_NOT_FOUND != val)
9796 available_iso_modes[size++] = val;
9797 }
9798 staticInfo.update(QCAMERA3_ISO_AVAILABLE_MODES,
9799 available_iso_modes, size);
9800 }
9801
9802 int64_t available_exp_time_range[EXPOSURE_TIME_RANGE_CNT];
9803 for (size_t i = 0; i < count; i++)
9804 available_exp_time_range[i] = gCamCapability[cameraId]->exposure_time_range[i];
9805 staticInfo.update(QCAMERA3_EXP_TIME_RANGE,
9806 available_exp_time_range, EXPOSURE_TIME_RANGE_CNT);
9807
9808 int32_t available_saturation_range[4];
9809 available_saturation_range[0] = gCamCapability[cameraId]->saturation_ctrl.min_value;
9810 available_saturation_range[1] = gCamCapability[cameraId]->saturation_ctrl.max_value;
9811 available_saturation_range[2] = gCamCapability[cameraId]->saturation_ctrl.def_value;
9812 available_saturation_range[3] = gCamCapability[cameraId]->saturation_ctrl.step;
9813 staticInfo.update(QCAMERA3_SATURATION_RANGE,
9814 available_saturation_range, 4);
9815
9816 uint8_t is_hdr_values[2];
9817 is_hdr_values[0] = 0;
9818 is_hdr_values[1] = 1;
9819 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_VALUES,
9820 is_hdr_values, 2);
9821
9822 float is_hdr_confidence_range[2];
9823 is_hdr_confidence_range[0] = 0.0;
9824 is_hdr_confidence_range[1] = 1.0;
9825 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE_RANGE,
9826 is_hdr_confidence_range, 2);
9827
Thierry Strudel3d639192016-09-09 11:52:26 -07009828 gStaticMetadata[cameraId] = staticInfo.release();
9829 return rc;
9830}
9831
9832/*===========================================================================
9833 * FUNCTION : makeTable
9834 *
9835 * DESCRIPTION: make a table of sizes
9836 *
9837 * PARAMETERS :
9838 *
9839 *
9840 *==========================================================================*/
9841void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
9842 size_t max_size, int32_t *sizeTable)
9843{
9844 size_t j = 0;
9845 if (size > max_size) {
9846 size = max_size;
9847 }
9848 for (size_t i = 0; i < size; i++) {
9849 sizeTable[j] = dimTable[i].width;
9850 sizeTable[j+1] = dimTable[i].height;
9851 j+=2;
9852 }
9853}
9854
9855/*===========================================================================
9856 * FUNCTION : makeFPSTable
9857 *
9858 * DESCRIPTION: make a table of fps ranges
9859 *
9860 * PARAMETERS :
9861 *
9862 *==========================================================================*/
9863void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
9864 size_t max_size, int32_t *fpsRangesTable)
9865{
9866 size_t j = 0;
9867 if (size > max_size) {
9868 size = max_size;
9869 }
9870 for (size_t i = 0; i < size; i++) {
9871 fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
9872 fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
9873 j+=2;
9874 }
9875}
9876
9877/*===========================================================================
9878 * FUNCTION : makeOverridesList
9879 *
9880 * DESCRIPTION: make a list of scene mode overrides
9881 *
9882 * PARAMETERS :
9883 *
9884 *
9885 *==========================================================================*/
9886void QCamera3HardwareInterface::makeOverridesList(
9887 cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
9888 uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
9889{
9890 /*daemon will give a list of overrides for all scene modes.
9891 However we should send the fwk only the overrides for the scene modes
9892 supported by the framework*/
9893 size_t j = 0;
9894 if (size > max_size) {
9895 size = max_size;
9896 }
9897 size_t focus_count = CAM_FOCUS_MODE_MAX;
9898 focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
9899 focus_count);
9900 for (size_t i = 0; i < size; i++) {
9901 bool supt = false;
9902 size_t index = supported_indexes[i];
9903 overridesList[j] = gCamCapability[camera_id]->flash_available ?
9904 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
9905 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
9906 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
9907 overridesTable[index].awb_mode);
9908 if (NAME_NOT_FOUND != val) {
9909 overridesList[j+1] = (uint8_t)val;
9910 }
9911 uint8_t focus_override = overridesTable[index].af_mode;
9912 for (size_t k = 0; k < focus_count; k++) {
9913 if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
9914 supt = true;
9915 break;
9916 }
9917 }
9918 if (supt) {
9919 val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
9920 focus_override);
9921 if (NAME_NOT_FOUND != val) {
9922 overridesList[j+2] = (uint8_t)val;
9923 }
9924 } else {
9925 overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
9926 }
9927 j+=3;
9928 }
9929}
9930
9931/*===========================================================================
9932 * FUNCTION : filterJpegSizes
9933 *
9934 * DESCRIPTION: Returns the supported jpeg sizes based on the max dimension that
9935 * could be downscaled to
9936 *
9937 * PARAMETERS :
9938 *
9939 * RETURN : length of jpegSizes array
9940 *==========================================================================*/
9941
9942size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
9943 size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
9944 uint8_t downscale_factor)
9945{
9946 if (0 == downscale_factor) {
9947 downscale_factor = 1;
9948 }
9949
9950 int32_t min_width = active_array_size.width / downscale_factor;
9951 int32_t min_height = active_array_size.height / downscale_factor;
9952 size_t jpegSizesCnt = 0;
9953 if (processedSizesCnt > maxCount) {
9954 processedSizesCnt = maxCount;
9955 }
9956 for (size_t i = 0; i < processedSizesCnt; i+=2) {
9957 if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
9958 jpegSizes[jpegSizesCnt] = processedSizes[i];
9959 jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
9960 jpegSizesCnt += 2;
9961 }
9962 }
9963 return jpegSizesCnt;
9964}
9965
9966/*===========================================================================
9967 * FUNCTION : computeNoiseModelEntryS
9968 *
9969 * DESCRIPTION: function to map a given sensitivity to the S noise
9970 * model parameters in the DNG noise model.
9971 *
9972 * PARAMETERS : sens : the sensor sensitivity
9973 *
9974 ** RETURN : S (sensor amplification) noise
9975 *
9976 *==========================================================================*/
9977double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
9978 double s = gCamCapability[mCameraId]->gradient_S * sens +
9979 gCamCapability[mCameraId]->offset_S;
9980 return ((s < 0.0) ? 0.0 : s);
9981}
9982
9983/*===========================================================================
9984 * FUNCTION : computeNoiseModelEntryO
9985 *
9986 * DESCRIPTION: function to map a given sensitivity to the O noise
9987 * model parameters in the DNG noise model.
9988 *
9989 * PARAMETERS : sens : the sensor sensitivity
9990 *
9991 ** RETURN : O (sensor readout) noise
9992 *
9993 *==========================================================================*/
9994double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
9995 int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
9996 double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
9997 1.0 : (1.0 * sens / max_analog_sens);
9998 double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
9999 gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
10000 return ((o < 0.0) ? 0.0 : o);
10001}
10002
10003/*===========================================================================
10004 * FUNCTION : getSensorSensitivity
10005 *
10006 * DESCRIPTION: convert iso_mode to an integer value
10007 *
10008 * PARAMETERS : iso_mode : the iso_mode supported by sensor
10009 *
10010 ** RETURN : sensitivity supported by sensor
10011 *
10012 *==========================================================================*/
10013int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
10014{
10015 int32_t sensitivity;
10016
10017 switch (iso_mode) {
10018 case CAM_ISO_MODE_100:
10019 sensitivity = 100;
10020 break;
10021 case CAM_ISO_MODE_200:
10022 sensitivity = 200;
10023 break;
10024 case CAM_ISO_MODE_400:
10025 sensitivity = 400;
10026 break;
10027 case CAM_ISO_MODE_800:
10028 sensitivity = 800;
10029 break;
10030 case CAM_ISO_MODE_1600:
10031 sensitivity = 1600;
10032 break;
10033 default:
10034 sensitivity = -1;
10035 break;
10036 }
10037 return sensitivity;
10038}
10039
10040/*===========================================================================
10041 * FUNCTION : getCamInfo
10042 *
10043 * DESCRIPTION: query camera capabilities
10044 *
10045 * PARAMETERS :
10046 * @cameraId : camera Id
10047 * @info : camera info struct to be filled in with camera capabilities
10048 *
10049 * RETURN : int type of status
10050 * NO_ERROR -- success
10051 * none-zero failure code
10052 *==========================================================================*/
int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
        struct camera_info *info)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_GET_CAM_INFO);
    int rc = 0;

    // gCamLock serializes lazy initialization of the per-camera capability
    // and static-metadata caches across concurrent getCamInfo callers.
    pthread_mutex_lock(&gCamLock);
    if (NULL == gCamCapability[cameraId]) {
        rc = initCapabilities(cameraId);
        if (rc < 0) {
            // Must drop the lock on every early-return path.
            pthread_mutex_unlock(&gCamLock);
            return rc;
        }
    }

    if (NULL == gStaticMetadata[cameraId]) {
        rc = initStaticMetadata(cameraId);
        if (rc < 0) {
            pthread_mutex_unlock(&gCamLock);
            return rc;
        }
    }

    // Translate the backend mount position into the framework facing value;
    // aux sensors report the same facing as their main counterpart.
    switch(gCamCapability[cameraId]->position) {
    case CAM_POSITION_BACK:
    case CAM_POSITION_BACK_AUX:
        info->facing = CAMERA_FACING_BACK;
        break;

    case CAM_POSITION_FRONT:
    case CAM_POSITION_FRONT_AUX:
        info->facing = CAMERA_FACING_FRONT;
        break;

    default:
        // Note: rc is set but the remaining fields are still populated
        // before returning the error.
        LOGE("Unknown position type %d for camera id:%d",
                gCamCapability[cameraId]->position, cameraId);
        rc = -1;
        break;
    }


    info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
// Advertised HAL device version is selected at build time.
#ifndef USE_HAL_3_3
    info->device_version = CAMERA_DEVICE_API_VERSION_3_4;
#else
    info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
#endif
    info->static_camera_characteristics = gStaticMetadata[cameraId];

    //For now assume both cameras can operate independently.
    info->conflicting_devices = NULL;
    info->conflicting_devices_length = 0;

    //resource cost is 100 * MIN(1.0, m/M),
    //where m is throughput requirement with maximum stream configuration
    //and M is CPP maximum throughput.
    float max_fps = 0.0;
    for (uint32_t i = 0;
            i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
        if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
            max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
    }
    float ratio = 1.0 * MAX_PROCESSED_STREAMS *
            gCamCapability[cameraId]->active_array_size.width *
            gCamCapability[cameraId]->active_array_size.height * max_fps /
            gCamCapability[cameraId]->max_pixel_bandwidth;
    info->resource_cost = 100 * MIN(1.0, ratio);
    LOGI("camera %d resource cost is %d", cameraId,
            info->resource_cost);

    pthread_mutex_unlock(&gCamLock);
    return rc;
}
10127
10128/*===========================================================================
10129 * FUNCTION : translateCapabilityToMetadata
10130 *
10131 * DESCRIPTION: translate the capability into camera_metadata_t
10132 *
10133 * PARAMETERS : type of the request
10134 *
10135 *
10136 * RETURN : success: camera_metadata_t*
10137 * failure: NULL
10138 *
10139 *==========================================================================*/
camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
{
    // Templates are built once per type and cached for the session.
    if (mDefaultMetadata[type] != NULL) {
        return mDefaultMetadata[type];
    }
    //first time we are handling this request
    //fill up the metadata structure using the wrapper class
    CameraMetadata settings;
    //translate from cam_capability_t to camera_metadata_tag_t
    static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
    settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
    int32_t defaultRequestID = 0;
    settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);

    /* OIS disable */
    char ois_prop[PROPERTY_VALUE_MAX];
    memset(ois_prop, 0, sizeof(ois_prop));
    property_get("persist.camera.ois.disable", ois_prop, "0");
    uint8_t ois_disable = (uint8_t)atoi(ois_prop);

    /* Force video to use OIS */
    char videoOisProp[PROPERTY_VALUE_MAX];
    memset(videoOisProp, 0, sizeof(videoOisProp));
    property_get("persist.camera.ois.video", videoOisProp, "1");
    uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);

    // Hybrid AE enable/disable
    char hybrid_ae_prop[PROPERTY_VALUE_MAX];
    memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
    property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
    const uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);

    uint8_t controlIntent = 0;
    uint8_t focusMode;
    uint8_t vsMode;
    uint8_t optStabMode;
    uint8_t cacMode;
    uint8_t edge_mode;
    uint8_t noise_red_mode;
    uint8_t tonemap_mode;
    bool highQualityModeEntryAvailable = FALSE;
    bool fastModeEntryAvailable = FALSE;
    vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
    optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;

    // Per-template 3A, stabilization and post-processing defaults.
    switch (type) {
      case CAMERA3_TEMPLATE_PREVIEW:
        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
        edge_mode = ANDROID_EDGE_MODE_FAST;
        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
        break;
      case CAMERA3_TEMPLATE_STILL_CAPTURE:
        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
        edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
        tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
        // Order of priority for default CAC is HIGH Quality -> FAST -> OFF
        for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
            if (gCamCapability[mCameraId]->aberration_modes[i] ==
                    CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
                highQualityModeEntryAvailable = TRUE;
            } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
                    CAM_COLOR_CORRECTION_ABERRATION_FAST) {
                fastModeEntryAvailable = TRUE;
            }
        }
        if (highQualityModeEntryAvailable) {
            cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
        } else if (fastModeEntryAvailable) {
            cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
        }
        break;
      case CAMERA3_TEMPLATE_VIDEO_RECORD:
        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
        edge_mode = ANDROID_EDGE_MODE_FAST;
        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
        // persist.camera.ois.video can force OIS on for video templates.
        if (forceVideoOis)
            optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
        break;
      case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
        edge_mode = ANDROID_EDGE_MODE_FAST;
        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
        if (forceVideoOis)
            optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
        break;
      case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
        edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
        break;
      case CAMERA3_TEMPLATE_MANUAL:
        edge_mode = ANDROID_EDGE_MODE_FAST;
        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
        focusMode = ANDROID_CONTROL_AF_MODE_OFF;
        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
        break;
      default:
        edge_mode = ANDROID_EDGE_MODE_FAST;
        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
        break;
    }
    // Set CAC to OFF if underlying device doesn't support
    if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
    }
    settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
    settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
    settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
    // A single supported focus mode implies a fixed-focus sensor -> AF OFF.
    if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
        focusMode = ANDROID_CONTROL_AF_MODE_OFF;
    }
    settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);

    // Reconcile the template's OIS choice with the sensor capability and the
    // persist.camera.ois.disable override.
    if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
            gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
    else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
            gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
            || ois_disable)
        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
    settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);

    settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
            &gCamCapability[mCameraId]->exposure_compensation_default, 1);

    static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
    settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);

    static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
    settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);

    static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
    settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);

    static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
    settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);

    static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
    settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);

    static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
    settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);

    static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
    settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);

    /*flash*/
    static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
    settings.update(ANDROID_FLASH_MODE, &flashMode, 1);

    static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
    settings.update(ANDROID_FLASH_FIRING_POWER,
            &flashFiringLevel, 1);

    /* lens */
    float default_aperture = gCamCapability[mCameraId]->apertures[0];
    settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);

    if (gCamCapability[mCameraId]->filter_densities_count) {
        float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
        settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
                gCamCapability[mCameraId]->filter_densities_count);
    }

    float default_focal_length = gCamCapability[mCameraId]->focal_length;
    settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);

    if (focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
        float default_focus_distance = 0;
        settings.update(ANDROID_LENS_FOCUS_DISTANCE, &default_focus_distance, 1);
    }

    static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
    settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);

    static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
    settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);

    static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
    settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);

    /* face detection (default to OFF) */
    static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
    settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);

    static const uint8_t histogramMode = QCAMERA3_HISTOGRAM_MODE_OFF;
    settings.update(QCAMERA3_HISTOGRAM_MODE, &histogramMode, 1);

    static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
    settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);

    static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
    settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);

    static const uint8_t lensShadingMode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
    settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &lensShadingMode, 1);

    static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
    settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);

    /* Exposure time(Update the Min Exposure Time)*/
    int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
    settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);

    /* frame duration */
    static const int64_t default_frame_duration = NSEC_PER_33MSEC;
    settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);

    /* sensitivity */
    static const int32_t default_sensitivity = 100;
    settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
#ifndef USE_HAL_3_3
    // POST_RAW_SENSITIVITY_BOOST exists only from HAL 3.4 onward.
    static const int32_t default_isp_sensitivity =
            gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
    settings.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &default_isp_sensitivity, 1);
#endif

    /*edge mode*/
    settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);

    /*noise reduction mode*/
    settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);

    /*color correction mode*/
    static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
    settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);

    /*transform matrix mode*/
    settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);

    // Default crop region is the full active pixel array (no zoom).
    int32_t scaler_crop_region[4];
    scaler_crop_region[0] = 0;
    scaler_crop_region[1] = 0;
    scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
    scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
    settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);

    static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
    settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);

    /*focus distance*/
    float focus_distance = 0.0;
    settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);

    /*target fps range: use maximum range for picture, and maximum fixed range for video*/
    /* Restrict template max_fps to 30 */
    float max_range = 0.0;
    float max_fixed_fps = 0.0;
    int32_t fps_range[2] = {0, 0};
    for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
            i++) {
        if (gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps >
                TEMPLATE_MAX_PREVIEW_FPS) {
            continue;
        }
        float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
            gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
        if (type == CAMERA3_TEMPLATE_PREVIEW ||
                type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
                type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
            // Picture-style templates: pick the widest (most flexible) range.
            if (range > max_range) {
                fps_range[0] =
                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
                fps_range[1] =
                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
                max_range = range;
            }
        } else {
            // Video-style templates: pick the highest fixed (min==max) range.
            if (range < 0.01 && max_fixed_fps <
                    gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
                fps_range[0] =
                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
                fps_range[1] =
                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
                max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
            }
        }
    }
    settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);

    /*precapture trigger*/
    uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
    settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);

    /*af trigger*/
    uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
    settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);

    /* ae & af regions */
    // Default metering/focus region is the full active array with weight 0.
    int32_t active_region[] = {
            gCamCapability[mCameraId]->active_array_size.left,
            gCamCapability[mCameraId]->active_array_size.top,
            gCamCapability[mCameraId]->active_array_size.left +
                    gCamCapability[mCameraId]->active_array_size.width,
            gCamCapability[mCameraId]->active_array_size.top +
                    gCamCapability[mCameraId]->active_array_size.height,
            0};
    settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
            sizeof(active_region) / sizeof(active_region[0]));
    settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
            sizeof(active_region) / sizeof(active_region[0]));

    /* black level lock */
    uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
    settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);

    /* lens shading map mode */
    uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
    if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
        shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
    }
    settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);

    //special defaults for manual template
    if (type == CAMERA3_TEMPLATE_MANUAL) {
        static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
        settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);

        static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
        settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);

        static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
        settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);

        static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
        settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);

        static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
        settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);

        static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
        settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
    }


    /* TNR
     * We'll use this location to determine which modes TNR will be set.
     * We will enable TNR to be on if either of the Preview/Video stream requires TNR
     * This is not to be confused with linking on a per stream basis that decision
     * is still on per-session basis and will be handled as part of config stream
     */
    uint8_t tnr_enable = 0;

    if (m_bTnrPreview || m_bTnrVideo) {

        switch (type) {
            case CAMERA3_TEMPLATE_VIDEO_RECORD:
                    tnr_enable = 1;
                    break;

            default:
                    tnr_enable = 0;
                    break;
        }

        int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
        settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
        settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);

        LOGD("TNR:%d with process plate %d for template:%d",
                             tnr_enable, tnr_process_type, type);
    }

    //Update Link tags to default
    int32_t sync_type = CAM_TYPE_STANDALONE;
    settings.update(QCAMERA3_DUALCAM_LINK_ENABLE, &sync_type, 1);

    int32_t is_main = 0; //this doesn't matter as app should overwrite
    settings.update(QCAMERA3_DUALCAM_LINK_IS_MAIN, &is_main, 1);

    settings.update(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID, &is_main, 1);

    /* CDS default */
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.CDS", prop, "Auto");
    cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
    cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
    if (CAM_CDS_MODE_MAX == cds_mode) {
        cds_mode = CAM_CDS_MODE_AUTO;
    }

    /* Disabling CDS in templates which have TNR enabled*/
    if (tnr_enable)
        cds_mode = CAM_CDS_MODE_OFF;

    int32_t mode = cds_mode;
    settings.update(QCAMERA3_CDS_MODE, &mode, 1);

    /* Manual Convergence AEC Speed is disabled by default*/
    float default_aec_speed = 0;
    settings.update(QCAMERA3_AEC_CONVERGENCE_SPEED, &default_aec_speed, 1);

    /* Manual Convergence AWB Speed is disabled by default*/
    float default_awb_speed = 0;
    settings.update(QCAMERA3_AWB_CONVERGENCE_SPEED, &default_awb_speed, 1);

    // Set instant AEC to normal convergence by default
    int32_t instant_aec_mode = (int32_t)QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE;
    settings.update(QCAMERA3_INSTANT_AEC_MODE, &instant_aec_mode, 1);

    /* hybrid ae */
    settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);

    // Cache the built template; later calls return this without rebuilding.
    mDefaultMetadata[type] = settings.release();

    return mDefaultMetadata[type];
}
10576
10577/*===========================================================================
10578 * FUNCTION : setFrameParameters
10579 *
10580 * DESCRIPTION: set parameters per frame as requested in the metadata from
10581 * framework
10582 *
10583 * PARAMETERS :
10584 * @request : request that needs to be serviced
Thierry Strudelc2ee3302016-11-17 12:33:12 -080010585 * @streamsArray : Stream ID of all the requested streams
Thierry Strudel3d639192016-09-09 11:52:26 -070010586 * @blob_request: Whether this request is a blob request or not
10587 *
 * RETURN     : success: NO_ERROR
 *              failure: BAD_VALUE if any parameter could not be set
10590 *==========================================================================*/
10591int QCamera3HardwareInterface::setFrameParameters(
10592 camera3_capture_request_t *request,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080010593 cam_stream_ID_t streamsArray,
Thierry Strudel3d639192016-09-09 11:52:26 -070010594 int blob_request,
10595 uint32_t snapshotStreamId)
10596{
10597 /*translate from camera_metadata_t type to parm_type_t*/
10598 int rc = 0;
10599 int32_t hal_version = CAM_HAL_V3;
10600
10601 clear_metadata_buffer(mParameters);
10602 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
10603 LOGE("Failed to set hal version in the parameters");
10604 return BAD_VALUE;
10605 }
10606
10607 /*we need to update the frame number in the parameters*/
10608 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
10609 request->frame_number)) {
10610 LOGE("Failed to set the frame number in the parameters");
10611 return BAD_VALUE;
10612 }
10613
10614 /* Update stream id of all the requested buffers */
Thierry Strudelc2ee3302016-11-17 12:33:12 -080010615 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070010616 LOGE("Failed to set stream type mask in the parameters");
10617 return BAD_VALUE;
10618 }
10619
10620 if (mUpdateDebugLevel) {
10621 uint32_t dummyDebugLevel = 0;
10622 /* The value of dummyDebugLevel is irrelavent. On
10623 * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, read debug property */
10624 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
10625 dummyDebugLevel)) {
10626 LOGE("Failed to set UPDATE_DEBUG_LEVEL");
10627 return BAD_VALUE;
10628 }
10629 mUpdateDebugLevel = false;
10630 }
10631
10632 if(request->settings != NULL){
10633 rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
10634 if (blob_request)
10635 memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
10636 }
10637
10638 return rc;
10639}
10640
10641/*===========================================================================
10642 * FUNCTION : setReprocParameters
10643 *
10644 * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
10645 * return it.
10646 *
10647 * PARAMETERS :
10648 * @request : request that needs to be serviced
10649 *
 * RETURN     : success: NO_ERROR
 *              failure: BAD_VALUE on invalid input or parameter-set failure
10652 *==========================================================================*/
10653int32_t QCamera3HardwareInterface::setReprocParameters(
10654 camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
10655 uint32_t snapshotStreamId)
10656{
10657 /*translate from camera_metadata_t type to parm_type_t*/
10658 int rc = 0;
10659
10660 if (NULL == request->settings){
10661 LOGE("Reprocess settings cannot be NULL");
10662 return BAD_VALUE;
10663 }
10664
10665 if (NULL == reprocParam) {
10666 LOGE("Invalid reprocessing metadata buffer");
10667 return BAD_VALUE;
10668 }
10669 clear_metadata_buffer(reprocParam);
10670
10671 /*we need to update the frame number in the parameters*/
10672 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
10673 request->frame_number)) {
10674 LOGE("Failed to set the frame number in the parameters");
10675 return BAD_VALUE;
10676 }
10677
10678 rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
10679 if (rc < 0) {
10680 LOGE("Failed to translate reproc request");
10681 return rc;
10682 }
10683
10684 CameraMetadata frame_settings;
10685 frame_settings = request->settings;
10686 if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
10687 frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
10688 int32_t *crop_count =
10689 frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
10690 int32_t *crop_data =
10691 frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
10692 int32_t *roi_map =
10693 frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
10694 if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
10695 cam_crop_data_t crop_meta;
10696 memset(&crop_meta, 0, sizeof(cam_crop_data_t));
10697 crop_meta.num_of_streams = 1;
10698 crop_meta.crop_info[0].crop.left = crop_data[0];
10699 crop_meta.crop_info[0].crop.top = crop_data[1];
10700 crop_meta.crop_info[0].crop.width = crop_data[2];
10701 crop_meta.crop_info[0].crop.height = crop_data[3];
10702
10703 crop_meta.crop_info[0].roi_map.left =
10704 roi_map[0];
10705 crop_meta.crop_info[0].roi_map.top =
10706 roi_map[1];
10707 crop_meta.crop_info[0].roi_map.width =
10708 roi_map[2];
10709 crop_meta.crop_info[0].roi_map.height =
10710 roi_map[3];
10711
10712 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
10713 rc = BAD_VALUE;
10714 }
10715 LOGD("Found reprocess crop data for stream %p %dx%d, %dx%d",
10716 request->input_buffer->stream,
10717 crop_meta.crop_info[0].crop.left,
10718 crop_meta.crop_info[0].crop.top,
10719 crop_meta.crop_info[0].crop.width,
10720 crop_meta.crop_info[0].crop.height);
10721 LOGD("Found reprocess roi map data for stream %p %dx%d, %dx%d",
10722 request->input_buffer->stream,
10723 crop_meta.crop_info[0].roi_map.left,
10724 crop_meta.crop_info[0].roi_map.top,
10725 crop_meta.crop_info[0].roi_map.width,
10726 crop_meta.crop_info[0].roi_map.height);
10727 } else {
10728 LOGE("Invalid reprocess crop count %d!", *crop_count);
10729 }
10730 } else {
10731 LOGE("No crop data from matching output stream");
10732 }
10733
10734 /* These settings are not needed for regular requests so handle them specially for
10735 reprocess requests; information needed for EXIF tags */
10736 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
10737 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
10738 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
10739 if (NAME_NOT_FOUND != val) {
10740 uint32_t flashMode = (uint32_t)val;
10741 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
10742 rc = BAD_VALUE;
10743 }
10744 } else {
10745 LOGE("Could not map fwk flash mode %d to correct hal flash mode",
10746 frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
10747 }
10748 } else {
10749 LOGH("No flash mode in reprocess settings");
10750 }
10751
10752 if (frame_settings.exists(ANDROID_FLASH_STATE)) {
10753 int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
10754 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
10755 rc = BAD_VALUE;
10756 }
10757 } else {
10758 LOGH("No flash state in reprocess settings");
10759 }
10760
10761 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS)) {
10762 uint8_t *reprocessFlags =
10763 frame_settings.find(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS).data.u8;
10764 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_REPROCESS_FLAGS,
10765 *reprocessFlags)) {
10766 rc = BAD_VALUE;
10767 }
10768 }
10769
Thierry Strudel54dc9782017-02-15 12:12:10 -080010770 // Add exif debug data to internal metadata
10771 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB)) {
10772 mm_jpeg_debug_exif_params_t *debug_params =
10773 (mm_jpeg_debug_exif_params_t *)frame_settings.find
10774 (QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB).data.u8;
10775 // AE
10776 if (debug_params->ae_debug_params_valid == TRUE) {
10777 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AE,
10778 debug_params->ae_debug_params);
10779 }
10780 // AWB
10781 if (debug_params->awb_debug_params_valid == TRUE) {
10782 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AWB,
10783 debug_params->awb_debug_params);
10784 }
10785 // AF
10786 if (debug_params->af_debug_params_valid == TRUE) {
10787 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AF,
10788 debug_params->af_debug_params);
10789 }
10790 // ASD
10791 if (debug_params->asd_debug_params_valid == TRUE) {
10792 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_ASD,
10793 debug_params->asd_debug_params);
10794 }
10795 // Stats
10796 if (debug_params->stats_debug_params_valid == TRUE) {
10797 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_STATS,
10798 debug_params->stats_debug_params);
10799 }
10800 // BE Stats
10801 if (debug_params->bestats_debug_params_valid == TRUE) {
10802 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BESTATS,
10803 debug_params->bestats_debug_params);
10804 }
10805 // BHIST
10806 if (debug_params->bhist_debug_params_valid == TRUE) {
10807 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BHIST,
10808 debug_params->bhist_debug_params);
10809 }
10810 // 3A Tuning
10811 if (debug_params->q3a_tuning_debug_params_valid == TRUE) {
10812 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_3A_TUNING,
10813 debug_params->q3a_tuning_debug_params);
10814 }
10815 }
10816
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070010817 // Add metadata which reprocess needs
10818 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB)) {
10819 cam_reprocess_info_t *repro_info =
10820 (cam_reprocess_info_t *)frame_settings.find
10821 (QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB).data.u8;
Thierry Strudel3d639192016-09-09 11:52:26 -070010822 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_SENSOR,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070010823 repro_info->sensor_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070010824 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CAMIF,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070010825 repro_info->camif_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070010826 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_ISP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070010827 repro_info->isp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070010828 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CPP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070010829 repro_info->cpp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070010830 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_FOCAL_LENGTH_RATIO,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070010831 repro_info->af_focal_length_ratio);
Thierry Strudel3d639192016-09-09 11:52:26 -070010832 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_FLIP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070010833 repro_info->pipeline_flip);
10834 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_ROI,
10835 repro_info->af_roi);
10836 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_IMG_DYN_FEAT,
10837 repro_info->dyn_mask);
Thierry Strudel3d639192016-09-09 11:52:26 -070010838 /* If there is ANDROID_JPEG_ORIENTATION in frame setting,
10839 CAM_INTF_PARM_ROTATION metadata then has been added in
10840 translateToHalMetadata. HAL need to keep this new rotation
10841 metadata. Otherwise, the old rotation info saved in the vendor tag
10842 would be used */
10843 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
10844 CAM_INTF_PARM_ROTATION, reprocParam) {
10845 LOGD("CAM_INTF_PARM_ROTATION metadata is added in translateToHalMetadata");
10846 } else {
10847 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_ROTATION,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070010848 repro_info->rotation_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070010849 }
Thierry Strudel3d639192016-09-09 11:52:26 -070010850 }
10851
10852 /* Add additional JPEG cropping information. App add QCAMERA3_JPEG_ENCODE_CROP_RECT
10853 to ask for cropping and use ROI for downscale/upscale during HW JPEG encoding.
10854 roi.width and roi.height would be the final JPEG size.
10855 For now, HAL only checks this for reprocess request */
10856 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ENABLE) &&
10857 frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_RECT)) {
10858 uint8_t *enable =
10859 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ENABLE).data.u8;
10860 if (*enable == TRUE) {
10861 int32_t *crop_data =
10862 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_RECT).data.i32;
10863 cam_stream_crop_info_t crop_meta;
10864 memset(&crop_meta, 0, sizeof(cam_stream_crop_info_t));
10865 crop_meta.stream_id = 0;
10866 crop_meta.crop.left = crop_data[0];
10867 crop_meta.crop.top = crop_data[1];
10868 crop_meta.crop.width = crop_data[2];
10869 crop_meta.crop.height = crop_data[3];
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010870 // The JPEG crop roi should match cpp output size
10871 IF_META_AVAILABLE(cam_stream_crop_info_t, cpp_crop,
10872 CAM_INTF_META_SNAP_CROP_INFO_CPP, reprocParam) {
10873 crop_meta.roi_map.left = 0;
10874 crop_meta.roi_map.top = 0;
10875 crop_meta.roi_map.width = cpp_crop->crop.width;
10876 crop_meta.roi_map.height = cpp_crop->crop.height;
Thierry Strudel3d639192016-09-09 11:52:26 -070010877 }
10878 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_ENCODE_CROP,
10879 crop_meta);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010880 LOGH("Add JPEG encode crop left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070010881 crop_meta.crop.left, crop_meta.crop.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010882 crop_meta.crop.width, crop_meta.crop.height, mCameraId);
10883 LOGH("Add JPEG encode crop ROI left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070010884 crop_meta.roi_map.left, crop_meta.roi_map.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010885 crop_meta.roi_map.width, crop_meta.roi_map.height, mCameraId);
10886
10887 // Add JPEG scale information
10888 cam_dimension_t scale_dim;
10889 memset(&scale_dim, 0, sizeof(cam_dimension_t));
10890 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ROI)) {
10891 int32_t *roi =
10892 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ROI).data.i32;
10893 scale_dim.width = roi[2];
10894 scale_dim.height = roi[3];
10895 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_SCALE_DIMENSION,
10896 scale_dim);
10897 LOGH("Add JPEG encode scale width %d, height %d, mCameraId %d",
10898 scale_dim.width, scale_dim.height, mCameraId);
10899 }
Thierry Strudel3d639192016-09-09 11:52:26 -070010900 }
10901 }
10902
10903 return rc;
10904}
10905
10906/*===========================================================================
10907 * FUNCTION : saveRequestSettings
10908 *
10909 * DESCRIPTION: Add any settings that might have changed to the request settings
10910 * and save the settings to be applied on the frame
10911 *
10912 * PARAMETERS :
10913 * @jpegMetadata : the extracted and/or modified jpeg metadata
10914 * @request : request with initial settings
10915 *
10916 * RETURN :
10917 * camera_metadata_t* : pointer to the saved request settings
10918 *==========================================================================*/
10919camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
10920 const CameraMetadata &jpegMetadata,
10921 camera3_capture_request_t *request)
10922{
10923 camera_metadata_t *resultMetadata;
10924 CameraMetadata camMetadata;
10925 camMetadata = request->settings;
10926
10927 if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
10928 int32_t thumbnail_size[2];
10929 thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
10930 thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
10931 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
10932 jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
10933 }
10934
10935 if (request->input_buffer != NULL) {
10936 uint8_t reprocessFlags = 1;
10937 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
10938 (uint8_t*)&reprocessFlags,
10939 sizeof(reprocessFlags));
10940 }
10941
10942 resultMetadata = camMetadata.release();
10943 return resultMetadata;
10944}
10945
10946/*===========================================================================
10947 * FUNCTION : setHalFpsRange
10948 *
10949 * DESCRIPTION: set FPS range parameter
10950 *
10951 *
10952 * PARAMETERS :
10953 * @settings : Metadata from framework
10954 * @hal_metadata: Metadata buffer
10955 *
10956 *
10957 * RETURN : success: NO_ERROR
10958 * failure:
10959 *==========================================================================*/
int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
        metadata_buffer_t *hal_metadata)
{
    int32_t rc = NO_ERROR;
    cam_fps_range_t fps_range;
    // NOTE(review): data.i32 is dereferenced without an exists() check here;
    // the visible caller (translateFwkMetadataToHalMetadata) only invokes
    // this when ANDROID_CONTROL_AE_TARGET_FPS_RANGE exists — any new caller
    // must guarantee the same.
    fps_range.min_fps = (float)
        settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
    fps_range.max_fps = (float)
        settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
    // Default the sensor (video) range to the AE target range; overridden
    // below in constrained high-speed mode.
    fps_range.video_min_fps = fps_range.min_fps;
    fps_range.video_max_fps = fps_range.max_fps;

    LOGD("aeTargetFpsRange fps: [%f %f]",
            fps_range.min_fps, fps_range.max_fps);
    /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
     * follows:
     * ---------------------------------------------------------------|
     *      Video stream is absent in configure_streams               |
     *    (Camcorder preview before the first video record            |
     * ---------------------------------------------------------------|
     * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
     *                   |             |             | vid_min/max_fps|
     * ---------------------------------------------------------------|
     *        NO         |  [ 30, 240] |     240     |  [240, 240]    |
     *                   |-------------|-------------|----------------|
     *                   |  [240, 240] |     240     |  [240, 240]    |
     * ---------------------------------------------------------------|
     *     Video stream is present in configure_streams               |
     * ---------------------------------------------------------------|
     * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
     *                   |             |             | vid_min/max_fps|
     * ---------------------------------------------------------------|
     *        NO         |  [ 30, 240] |     240     |  [240, 240]    |
     * (camcorder prev   |-------------|-------------|----------------|
     *  after video rec  |  [240, 240] |     240     |  [240, 240]    |
     *  is stopped)      |             |             |                |
     * ---------------------------------------------------------------|
     *        YES        |  [ 30, 240] |     240     |  [240, 240]    |
     *                   |-------------|-------------|----------------|
     *                   |  [240, 240] |     240     |  [240, 240]    |
     * ---------------------------------------------------------------|
     * When Video stream is absent in configure_streams,
     * preview fps = sensor_fps / batchsize
     * Eg: for 240fps at batchSize 4, preview = 60fps
     *     for 120fps at batchSize 4, preview = 30fps
     *
     * When video stream is present in configure_streams, preview fps is as per
     * the ratio of preview buffers to video buffers requested in process
     * capture request
     */
    // Side effect: mBatchSize is recomputed on every call; it stays 0 unless
    // a batch-capable HFR rate is selected below.
    mBatchSize = 0;
    if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
        // Run the sensor flat-out at the max rate in constrained HFR mode.
        fps_range.min_fps = fps_range.video_max_fps;
        fps_range.video_min_fps = fps_range.video_max_fps;
        int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
                fps_range.max_fps);
        if (NAME_NOT_FOUND != val) {
            cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
                return BAD_VALUE;
            }

            if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
                /* If batchmode is currently in progress and the fps changes,
                 * set the flag to restart the sensor */
                if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
                        (mHFRVideoFps != fps_range.max_fps)) {
                    mNeedSensorRestart = true;
                }
                mHFRVideoFps = fps_range.max_fps;
                // Batch enough frames per request to keep preview at
                // PREVIEW_FPS_FOR_HFR, capped at the HW limit.
                mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
                if (mBatchSize > MAX_HFR_BATCH_SIZE) {
                    mBatchSize = MAX_HFR_BATCH_SIZE;
                }
            }
            LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);

        }
    } else {
        /* HFR mode is session param in backend/ISP. This should be reset when
         * in non-HFR mode */
        cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
            return BAD_VALUE;
        }
    }
    if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
        return BAD_VALUE;
    }
    LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
            fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
    return rc;
}
11053
11054/*===========================================================================
11055 * FUNCTION : translateToHalMetadata
11056 *
11057 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
11058 *
11059 *
 * PARAMETERS :
 *   @request          : request sent from framework
 *   @hal_metadata     : HAL metadata buffer to populate from the request
 *   @snapshotStreamId : stream ID of the snapshot stream, if any
 *
 * RETURN     : success: NO_ERROR
 *              failure: BAD_VALUE
11066 *==========================================================================*/
11067int QCamera3HardwareInterface::translateToHalMetadata
11068 (const camera3_capture_request_t *request,
11069 metadata_buffer_t *hal_metadata,
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011070 uint32_t snapshotStreamId) {
11071 if (request == nullptr || hal_metadata == nullptr) {
11072 return BAD_VALUE;
11073 }
11074
11075 int64_t minFrameDuration = getMinFrameDuration(request);
11076
11077 return translateFwkMetadataToHalMetadata(request->settings, hal_metadata, snapshotStreamId,
11078 minFrameDuration);
11079}
11080
11081int QCamera3HardwareInterface::translateFwkMetadataToHalMetadata(
11082 const camera_metadata_t *frameworkMetadata, metadata_buffer_t *hal_metadata,
11083 uint32_t snapshotStreamId, int64_t minFrameDuration) {
11084
Thierry Strudel3d639192016-09-09 11:52:26 -070011085 int rc = 0;
11086 CameraMetadata frame_settings;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011087 frame_settings = frameworkMetadata;
Thierry Strudel3d639192016-09-09 11:52:26 -070011088
11089 /* Do not change the order of the following list unless you know what you are
11090 * doing.
11091 * The order is laid out in such a way that parameters in the front of the table
11092 * may be used to override the parameters later in the table. Examples are:
11093 * 1. META_MODE should precede AEC/AWB/AF MODE
11094 * 2. AEC MODE should preced EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
11095 * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
11096 * 4. Any mode should precede it's corresponding settings
11097 */
11098 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
11099 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
11100 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
11101 rc = BAD_VALUE;
11102 }
11103 rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
11104 if (rc != NO_ERROR) {
11105 LOGE("extractSceneMode failed");
11106 }
11107 }
11108
11109 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
11110 uint8_t fwk_aeMode =
11111 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
11112 uint8_t aeMode;
11113 int32_t redeye;
11114
11115 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
11116 aeMode = CAM_AE_MODE_OFF;
11117 } else {
11118 aeMode = CAM_AE_MODE_ON;
11119 }
11120 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
11121 redeye = 1;
11122 } else {
11123 redeye = 0;
11124 }
11125
11126 int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
11127 fwk_aeMode);
11128 if (NAME_NOT_FOUND != val) {
11129 int32_t flashMode = (int32_t)val;
11130 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
11131 }
11132
11133 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
11134 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
11135 rc = BAD_VALUE;
11136 }
11137 }
11138
11139 if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
11140 uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
11141 int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
11142 fwk_whiteLevel);
11143 if (NAME_NOT_FOUND != val) {
11144 uint8_t whiteLevel = (uint8_t)val;
11145 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
11146 rc = BAD_VALUE;
11147 }
11148 }
11149 }
11150
11151 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
11152 uint8_t fwk_cacMode =
11153 frame_settings.find(
11154 ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
11155 int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
11156 fwk_cacMode);
11157 if (NAME_NOT_FOUND != val) {
11158 cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
11159 bool entryAvailable = FALSE;
11160 // Check whether Frameworks set CAC mode is supported in device or not
11161 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
11162 if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
11163 entryAvailable = TRUE;
11164 break;
11165 }
11166 }
11167 LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
11168 // If entry not found then set the device supported mode instead of frameworks mode i.e,
11169 // Only HW ISP CAC + NO SW CAC : Advertise all 3 with High doing same as fast by ISP
11170 // NO HW ISP CAC + Only SW CAC : Advertise all 3 with Fast doing the same as OFF
11171 if (entryAvailable == FALSE) {
11172 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
11173 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11174 } else {
11175 if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
11176 // High is not supported and so set the FAST as spec say's underlying
11177 // device implementation can be the same for both modes.
11178 cacMode = CAM_COLOR_CORRECTION_ABERRATION_FAST;
11179 } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
11180 // Fast is not supported and so we cannot set HIGH or FAST but choose OFF
11181 // in order to avoid the fps drop due to high quality
11182 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11183 } else {
11184 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11185 }
11186 }
11187 }
11188 LOGD("Final cacMode is %d", cacMode);
11189 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
11190 rc = BAD_VALUE;
11191 }
11192 } else {
11193 LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
11194 }
11195 }
11196
11197 if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
11198 uint8_t fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
11199 int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
11200 fwk_focusMode);
11201 if (NAME_NOT_FOUND != val) {
11202 uint8_t focusMode = (uint8_t)val;
11203 LOGD("set focus mode %d", focusMode);
11204 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
11205 rc = BAD_VALUE;
11206 }
11207 }
11208 }
11209
11210 if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) {
11211 float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
11212 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
11213 focalDistance)) {
11214 rc = BAD_VALUE;
11215 }
11216 }
11217
11218 if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
11219 uint8_t fwk_antibandingMode =
11220 frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
11221 int val = lookupHalName(ANTIBANDING_MODES_MAP,
11222 METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
11223 if (NAME_NOT_FOUND != val) {
11224 uint32_t hal_antibandingMode = (uint32_t)val;
Shuzhen Wangf6890e02016-08-12 14:28:54 -070011225 if (hal_antibandingMode == CAM_ANTIBANDING_MODE_AUTO) {
11226 if (m60HzZone) {
11227 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_60HZ;
11228 } else {
11229 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_50HZ;
11230 }
11231 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011232 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
11233 hal_antibandingMode)) {
11234 rc = BAD_VALUE;
11235 }
11236 }
11237 }
11238
11239 if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
11240 int32_t expCompensation = frame_settings.find(
11241 ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
11242 if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
11243 expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
11244 if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
11245 expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
Thierry Strudel54dc9782017-02-15 12:12:10 -080011246 LOGD("Setting compensation:%d", expCompensation);
Thierry Strudel3d639192016-09-09 11:52:26 -070011247 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
11248 expCompensation)) {
11249 rc = BAD_VALUE;
11250 }
11251 }
11252
11253 if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
11254 uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
11255 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
11256 rc = BAD_VALUE;
11257 }
11258 }
11259 if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
11260 rc = setHalFpsRange(frame_settings, hal_metadata);
11261 if (rc != NO_ERROR) {
11262 LOGE("setHalFpsRange failed");
11263 }
11264 }
11265
11266 if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
11267 uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
11268 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
11269 rc = BAD_VALUE;
11270 }
11271 }
11272
11273 if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
11274 uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
11275 int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
11276 fwk_effectMode);
11277 if (NAME_NOT_FOUND != val) {
11278 uint8_t effectMode = (uint8_t)val;
11279 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
11280 rc = BAD_VALUE;
11281 }
11282 }
11283 }
11284
11285 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
11286 uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
11287 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
11288 colorCorrectMode)) {
11289 rc = BAD_VALUE;
11290 }
11291 }
11292
11293 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
11294 cam_color_correct_gains_t colorCorrectGains;
11295 for (size_t i = 0; i < CC_GAIN_MAX; i++) {
11296 colorCorrectGains.gains[i] =
11297 frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
11298 }
11299 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
11300 colorCorrectGains)) {
11301 rc = BAD_VALUE;
11302 }
11303 }
11304
11305 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
11306 cam_color_correct_matrix_t colorCorrectTransform;
11307 cam_rational_type_t transform_elem;
11308 size_t num = 0;
11309 for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
11310 for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
11311 transform_elem.numerator =
11312 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
11313 transform_elem.denominator =
11314 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
11315 colorCorrectTransform.transform_matrix[i][j] = transform_elem;
11316 num++;
11317 }
11318 }
11319 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
11320 colorCorrectTransform)) {
11321 rc = BAD_VALUE;
11322 }
11323 }
11324
11325 cam_trigger_t aecTrigger;
11326 aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
11327 aecTrigger.trigger_id = -1;
11328 if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
11329 frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
11330 aecTrigger.trigger =
11331 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
11332 aecTrigger.trigger_id =
11333 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
11334 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
11335 aecTrigger)) {
11336 rc = BAD_VALUE;
11337 }
11338 LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
11339 aecTrigger.trigger, aecTrigger.trigger_id);
11340 }
11341
11342 /*af_trigger must come with a trigger id*/
11343 if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
11344 frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
11345 cam_trigger_t af_trigger;
11346 af_trigger.trigger =
11347 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
11348 af_trigger.trigger_id =
11349 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
11350 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
11351 rc = BAD_VALUE;
11352 }
11353 LOGD("AfTrigger: %d AfTriggerID: %d",
11354 af_trigger.trigger, af_trigger.trigger_id);
11355 }
11356
11357 if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
11358 int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
11359 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
11360 rc = BAD_VALUE;
11361 }
11362 }
11363 if (frame_settings.exists(ANDROID_EDGE_MODE)) {
11364 cam_edge_application_t edge_application;
11365 edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080011366
Thierry Strudel3d639192016-09-09 11:52:26 -070011367 if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
11368 edge_application.sharpness = 0;
11369 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080011370 edge_application.sharpness =
11371 gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
11372 if (frame_settings.exists(QCAMERA3_SHARPNESS_STRENGTH)) {
11373 int32_t sharpness =
11374 frame_settings.find(QCAMERA3_SHARPNESS_STRENGTH).data.i32[0];
11375 if (sharpness >= gCamCapability[mCameraId]->sharpness_ctrl.min_value &&
11376 sharpness <= gCamCapability[mCameraId]->sharpness_ctrl.max_value) {
11377 LOGD("Setting edge mode sharpness %d", sharpness);
11378 edge_application.sharpness = sharpness;
11379 }
11380 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011381 }
11382 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
11383 rc = BAD_VALUE;
11384 }
11385 }
11386
11387 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
11388 int32_t respectFlashMode = 1;
11389 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
11390 uint8_t fwk_aeMode =
11391 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
11392 if (fwk_aeMode > ANDROID_CONTROL_AE_MODE_ON) {
11393 respectFlashMode = 0;
11394 LOGH("AE Mode controls flash, ignore android.flash.mode");
11395 }
11396 }
11397 if (respectFlashMode) {
11398 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
11399 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11400 LOGH("flash mode after mapping %d", val);
11401 // To check: CAM_INTF_META_FLASH_MODE usage
11402 if (NAME_NOT_FOUND != val) {
11403 uint8_t flashMode = (uint8_t)val;
11404 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
11405 rc = BAD_VALUE;
11406 }
11407 }
11408 }
11409 }
11410
11411 if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
11412 uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
11413 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
11414 rc = BAD_VALUE;
11415 }
11416 }
11417
11418 if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
11419 int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
11420 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
11421 flashFiringTime)) {
11422 rc = BAD_VALUE;
11423 }
11424 }
11425
11426 if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
11427 uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
11428 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
11429 hotPixelMode)) {
11430 rc = BAD_VALUE;
11431 }
11432 }
11433
11434 if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
11435 float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
11436 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
11437 lensAperture)) {
11438 rc = BAD_VALUE;
11439 }
11440 }
11441
11442 if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
11443 float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
11444 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
11445 filterDensity)) {
11446 rc = BAD_VALUE;
11447 }
11448 }
11449
11450 if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
11451 float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
11452 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
11453 focalLength)) {
11454 rc = BAD_VALUE;
11455 }
11456 }
11457
11458 if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
11459 uint8_t optStabMode =
11460 frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
11461 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
11462 optStabMode)) {
11463 rc = BAD_VALUE;
11464 }
11465 }
11466
11467 if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
11468 uint8_t videoStabMode =
11469 frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
11470 LOGD("videoStabMode from APP = %d", videoStabMode);
11471 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_VIDEO_STAB_MODE,
11472 videoStabMode)) {
11473 rc = BAD_VALUE;
11474 }
11475 }
11476
11477
11478 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
11479 uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
11480 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
11481 noiseRedMode)) {
11482 rc = BAD_VALUE;
11483 }
11484 }
11485
11486 if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
11487 float reprocessEffectiveExposureFactor =
11488 frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
11489 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
11490 reprocessEffectiveExposureFactor)) {
11491 rc = BAD_VALUE;
11492 }
11493 }
11494
11495 cam_crop_region_t scalerCropRegion;
11496 bool scalerCropSet = false;
11497 if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
11498 scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
11499 scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
11500 scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
11501 scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
11502
11503 // Map coordinate system from active array to sensor output.
11504 mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
11505 scalerCropRegion.width, scalerCropRegion.height);
11506
11507 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
11508 scalerCropRegion)) {
11509 rc = BAD_VALUE;
11510 }
11511 scalerCropSet = true;
11512 }
11513
11514 if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
11515 int64_t sensorExpTime =
11516 frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
11517 LOGD("setting sensorExpTime %lld", sensorExpTime);
11518 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
11519 sensorExpTime)) {
11520 rc = BAD_VALUE;
11521 }
11522 }
11523
11524 if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
11525 int64_t sensorFrameDuration =
11526 frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070011527 sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
11528 if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
11529 sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
11530 LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
11531 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
11532 sensorFrameDuration)) {
11533 rc = BAD_VALUE;
11534 }
11535 }
11536
11537 if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
11538 int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
11539 if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
11540 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
11541 if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
11542 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
11543 LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
11544 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
11545 sensorSensitivity)) {
11546 rc = BAD_VALUE;
11547 }
11548 }
11549
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011550#ifndef USE_HAL_3_3
11551 if (frame_settings.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
11552 int32_t ispSensitivity =
11553 frame_settings.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST).data.i32[0];
11554 if (ispSensitivity <
11555 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity) {
11556 ispSensitivity =
11557 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
11558 LOGD("clamp ispSensitivity to %d", ispSensitivity);
11559 }
11560 if (ispSensitivity >
11561 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity) {
11562 ispSensitivity =
11563 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity;
11564 LOGD("clamp ispSensitivity to %d", ispSensitivity);
11565 }
11566 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_ISP_SENSITIVITY,
11567 ispSensitivity)) {
11568 rc = BAD_VALUE;
11569 }
11570 }
11571#endif
11572
Thierry Strudel3d639192016-09-09 11:52:26 -070011573 if (frame_settings.exists(ANDROID_SHADING_MODE)) {
11574 uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
11575 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
11576 rc = BAD_VALUE;
11577 }
11578 }
11579
11580 if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
11581 uint8_t fwk_facedetectMode =
11582 frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
11583
11584 int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
11585 fwk_facedetectMode);
11586
11587 if (NAME_NOT_FOUND != val) {
11588 uint8_t facedetectMode = (uint8_t)val;
11589 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
11590 facedetectMode)) {
11591 rc = BAD_VALUE;
11592 }
11593 }
11594 }
11595
Thierry Strudel54dc9782017-02-15 12:12:10 -080011596 if (frame_settings.exists(QCAMERA3_HISTOGRAM_MODE)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011597 uint8_t histogramMode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080011598 frame_settings.find(QCAMERA3_HISTOGRAM_MODE).data.u8[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070011599 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
11600 histogramMode)) {
11601 rc = BAD_VALUE;
11602 }
11603 }
11604
11605 if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
11606 uint8_t sharpnessMapMode =
11607 frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
11608 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
11609 sharpnessMapMode)) {
11610 rc = BAD_VALUE;
11611 }
11612 }
11613
11614 if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
11615 uint8_t tonemapMode =
11616 frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
11617 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
11618 rc = BAD_VALUE;
11619 }
11620 }
11621 /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
11622 /*All tonemap channels will have the same number of points*/
11623 if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
11624 frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
11625 frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
11626 cam_rgb_tonemap_curves tonemapCurves;
11627 tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
11628 if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
11629 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
11630 tonemapCurves.tonemap_points_cnt,
11631 CAM_MAX_TONEMAP_CURVE_SIZE);
11632 tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
11633 }
11634
11635 /* ch0 = G*/
11636 size_t point = 0;
11637 cam_tonemap_curve_t tonemapCurveGreen;
11638 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
11639 for (size_t j = 0; j < 2; j++) {
11640 tonemapCurveGreen.tonemap_points[i][j] =
11641 frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
11642 point++;
11643 }
11644 }
11645 tonemapCurves.curves[0] = tonemapCurveGreen;
11646
11647 /* ch 1 = B */
11648 point = 0;
11649 cam_tonemap_curve_t tonemapCurveBlue;
11650 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
11651 for (size_t j = 0; j < 2; j++) {
11652 tonemapCurveBlue.tonemap_points[i][j] =
11653 frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
11654 point++;
11655 }
11656 }
11657 tonemapCurves.curves[1] = tonemapCurveBlue;
11658
11659 /* ch 2 = R */
11660 point = 0;
11661 cam_tonemap_curve_t tonemapCurveRed;
11662 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
11663 for (size_t j = 0; j < 2; j++) {
11664 tonemapCurveRed.tonemap_points[i][j] =
11665 frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
11666 point++;
11667 }
11668 }
11669 tonemapCurves.curves[2] = tonemapCurveRed;
11670
11671 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
11672 tonemapCurves)) {
11673 rc = BAD_VALUE;
11674 }
11675 }
11676
11677 if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
11678 uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
11679 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
11680 captureIntent)) {
11681 rc = BAD_VALUE;
11682 }
11683 }
11684
11685 if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
11686 uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
11687 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
11688 blackLevelLock)) {
11689 rc = BAD_VALUE;
11690 }
11691 }
11692
11693 if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
11694 uint8_t lensShadingMapMode =
11695 frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
11696 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
11697 lensShadingMapMode)) {
11698 rc = BAD_VALUE;
11699 }
11700 }
11701
11702 if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
11703 cam_area_t roi;
11704 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011705 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AE_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070011706
11707 // Map coordinate system from active array to sensor output.
11708 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
11709 roi.rect.height);
11710
11711 if (scalerCropSet) {
11712 reset = resetIfNeededROI(&roi, &scalerCropRegion);
11713 }
11714 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
11715 rc = BAD_VALUE;
11716 }
11717 }
11718
11719 if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
11720 cam_area_t roi;
11721 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011722 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AF_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070011723
11724 // Map coordinate system from active array to sensor output.
11725 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
11726 roi.rect.height);
11727
11728 if (scalerCropSet) {
11729 reset = resetIfNeededROI(&roi, &scalerCropRegion);
11730 }
11731 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
11732 rc = BAD_VALUE;
11733 }
11734 }
11735
11736 // CDS for non-HFR non-video mode
11737 if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
11738 !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
11739 int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
11740 if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
11741 LOGE("Invalid CDS mode %d!", *fwk_cds);
11742 } else {
11743 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11744 CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
11745 rc = BAD_VALUE;
11746 }
11747 }
11748 }
11749
Thierry Strudel04e026f2016-10-10 11:27:36 -070011750 // Video HDR
Mansoor Aftab93a66e52017-01-26 14:58:25 -080011751 cam_video_hdr_mode_t vhdr = CAM_VIDEO_HDR_MODE_OFF;
Thierry Strudel04e026f2016-10-10 11:27:36 -070011752 if (frame_settings.exists(QCAMERA3_VIDEO_HDR_MODE)) {
Mansoor Aftab93a66e52017-01-26 14:58:25 -080011753 vhdr = (cam_video_hdr_mode_t) frame_settings.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
11754 }
11755 if (m_bVideoHdrEnabled)
11756 vhdr = CAM_VIDEO_HDR_MODE_ON;
11757
Thierry Strudel54dc9782017-02-15 12:12:10 -080011758 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
11759
11760 if(vhdr != curr_hdr_state)
11761 LOGH("PROFILE_SET_HDR_MODE %d" ,vhdr);
11762
Mansoor Aftab93a66e52017-01-26 14:58:25 -080011763 rc = setVideoHdrMode(mParameters, vhdr);
11764 if (rc != NO_ERROR) {
11765 LOGE("setVideoHDR is failed");
Thierry Strudel04e026f2016-10-10 11:27:36 -070011766 }
11767
11768 //IR
11769 if(frame_settings.exists(QCAMERA3_IR_MODE)) {
11770 cam_ir_mode_type_t fwk_ir = (cam_ir_mode_type_t)
11771 frame_settings.find(QCAMERA3_IR_MODE).data.i32[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080011772 uint8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR) != 0);
11773 uint8_t isIRon = 0;
11774
11775 (fwk_ir >0) ? (isIRon = 1) : (isIRon = 0) ;
Thierry Strudel04e026f2016-10-10 11:27:36 -070011776 if ((CAM_IR_MODE_MAX <= fwk_ir) || (0 > fwk_ir)) {
11777 LOGE("Invalid IR mode %d!", fwk_ir);
11778 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080011779 if(isIRon != curr_ir_state )
11780 LOGH("PROFILE_SET_IR_MODE %d" ,isIRon);
11781
Thierry Strudel04e026f2016-10-10 11:27:36 -070011782 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11783 CAM_INTF_META_IR_MODE, fwk_ir)) {
11784 rc = BAD_VALUE;
11785 }
11786 }
11787 }
11788
Thierry Strudel54dc9782017-02-15 12:12:10 -080011789 //Binning Correction Mode
11790 if(frame_settings.exists(QCAMERA3_BINNING_CORRECTION_MODE)) {
11791 cam_binning_correction_mode_t fwk_binning_correction = (cam_binning_correction_mode_t)
11792 frame_settings.find(QCAMERA3_BINNING_CORRECTION_MODE).data.i32[0];
11793 if ((CAM_BINNING_CORRECTION_MODE_MAX <= fwk_binning_correction)
11794 || (0 > fwk_binning_correction)) {
11795 LOGE("Invalid binning correction mode %d!", fwk_binning_correction);
11796 } else {
11797 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11798 CAM_INTF_META_BINNING_CORRECTION_MODE, fwk_binning_correction)) {
11799 rc = BAD_VALUE;
11800 }
11801 }
11802 }
11803
Thierry Strudel269c81a2016-10-12 12:13:59 -070011804 if (frame_settings.exists(QCAMERA3_AEC_CONVERGENCE_SPEED)) {
11805 float aec_speed;
11806 aec_speed = frame_settings.find(QCAMERA3_AEC_CONVERGENCE_SPEED).data.f[0];
11807 LOGD("AEC Speed :%f", aec_speed);
11808 if ( aec_speed < 0 ) {
11809 LOGE("Invalid AEC mode %f!", aec_speed);
11810 } else {
11811 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_CONVERGENCE_SPEED,
11812 aec_speed)) {
11813 rc = BAD_VALUE;
11814 }
11815 }
11816 }
11817
11818 if (frame_settings.exists(QCAMERA3_AWB_CONVERGENCE_SPEED)) {
11819 float awb_speed;
11820 awb_speed = frame_settings.find(QCAMERA3_AWB_CONVERGENCE_SPEED).data.f[0];
11821 LOGD("AWB Speed :%f", awb_speed);
11822 if ( awb_speed < 0 ) {
11823 LOGE("Invalid AWB mode %f!", awb_speed);
11824 } else {
11825 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AWB_CONVERGENCE_SPEED,
11826 awb_speed)) {
11827 rc = BAD_VALUE;
11828 }
11829 }
11830 }
11831
Thierry Strudel3d639192016-09-09 11:52:26 -070011832 // TNR
11833 if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
11834 frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
11835 uint8_t b_TnrRequested = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -080011836 uint8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0);
Thierry Strudel3d639192016-09-09 11:52:26 -070011837 cam_denoise_param_t tnr;
11838 tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
11839 tnr.process_plates =
11840 (cam_denoise_process_type_t)frame_settings.find(
11841 QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
11842 b_TnrRequested = tnr.denoise_enable;
Thierry Strudel54dc9782017-02-15 12:12:10 -080011843
11844 if(b_TnrRequested != curr_tnr_state)
11845 LOGH("PROFILE_SET_TNR_MODE %d" ,b_TnrRequested);
11846
Thierry Strudel3d639192016-09-09 11:52:26 -070011847 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
11848 rc = BAD_VALUE;
11849 }
11850 }
11851
Thierry Strudel54dc9782017-02-15 12:12:10 -080011852 if (frame_settings.exists(QCAMERA3_EXPOSURE_METER)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -070011853 int32_t* exposure_metering_mode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080011854 frame_settings.find(QCAMERA3_EXPOSURE_METER).data.i32;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070011855 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_ALGO_TYPE,
11856 *exposure_metering_mode)) {
11857 rc = BAD_VALUE;
11858 }
11859 }
11860
Thierry Strudel3d639192016-09-09 11:52:26 -070011861 if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
11862 int32_t fwk_testPatternMode =
11863 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
11864 int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
11865 METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
11866
11867 if (NAME_NOT_FOUND != testPatternMode) {
11868 cam_test_pattern_data_t testPatternData;
11869 memset(&testPatternData, 0, sizeof(testPatternData));
11870 testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
11871 if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
11872 frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
11873 int32_t *fwk_testPatternData =
11874 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
11875 testPatternData.r = fwk_testPatternData[0];
11876 testPatternData.b = fwk_testPatternData[3];
11877 switch (gCamCapability[mCameraId]->color_arrangement) {
11878 case CAM_FILTER_ARRANGEMENT_RGGB:
11879 case CAM_FILTER_ARRANGEMENT_GRBG:
11880 testPatternData.gr = fwk_testPatternData[1];
11881 testPatternData.gb = fwk_testPatternData[2];
11882 break;
11883 case CAM_FILTER_ARRANGEMENT_GBRG:
11884 case CAM_FILTER_ARRANGEMENT_BGGR:
11885 testPatternData.gr = fwk_testPatternData[2];
11886 testPatternData.gb = fwk_testPatternData[1];
11887 break;
11888 default:
11889 LOGE("color arrangement %d is not supported",
11890 gCamCapability[mCameraId]->color_arrangement);
11891 break;
11892 }
11893 }
11894 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
11895 testPatternData)) {
11896 rc = BAD_VALUE;
11897 }
11898 } else {
11899 LOGE("Invalid framework sensor test pattern mode %d",
11900 fwk_testPatternMode);
11901 }
11902 }
11903
11904 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
11905 size_t count = 0;
11906 camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
11907 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
11908 gps_coords.data.d, gps_coords.count, count);
11909 if (gps_coords.count != count) {
11910 rc = BAD_VALUE;
11911 }
11912 }
11913
11914 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
11915 char gps_methods[GPS_PROCESSING_METHOD_SIZE];
11916 size_t count = 0;
11917 const char *gps_methods_src = (const char *)
11918 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
11919 memset(gps_methods, '\0', sizeof(gps_methods));
11920 strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
11921 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
11922 gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
11923 if (GPS_PROCESSING_METHOD_SIZE != count) {
11924 rc = BAD_VALUE;
11925 }
11926 }
11927
11928 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
11929 int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
11930 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
11931 gps_timestamp)) {
11932 rc = BAD_VALUE;
11933 }
11934 }
11935
11936 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
11937 int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
11938 cam_rotation_info_t rotation_info;
11939 if (orientation == 0) {
11940 rotation_info.rotation = ROTATE_0;
11941 } else if (orientation == 90) {
11942 rotation_info.rotation = ROTATE_90;
11943 } else if (orientation == 180) {
11944 rotation_info.rotation = ROTATE_180;
11945 } else if (orientation == 270) {
11946 rotation_info.rotation = ROTATE_270;
11947 }
Shuzhen Wang6ec8eac2016-07-28 23:09:23 -070011948 rotation_info.device_rotation = ROTATE_0;
Thierry Strudel3d639192016-09-09 11:52:26 -070011949 rotation_info.streamId = snapshotStreamId;
11950 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
11951 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
11952 rc = BAD_VALUE;
11953 }
11954 }
11955
11956 if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
11957 uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
11958 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
11959 rc = BAD_VALUE;
11960 }
11961 }
11962
11963 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
11964 uint32_t thumb_quality = (uint32_t)
11965 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
11966 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
11967 thumb_quality)) {
11968 rc = BAD_VALUE;
11969 }
11970 }
11971
11972 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
11973 cam_dimension_t dim;
11974 dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
11975 dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
11976 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
11977 rc = BAD_VALUE;
11978 }
11979 }
11980
11981 // Internal metadata
11982 if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
11983 size_t count = 0;
11984 camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
11985 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
11986 privatedata.data.i32, privatedata.count, count);
11987 if (privatedata.count != count) {
11988 rc = BAD_VALUE;
11989 }
11990 }
11991
Thierry Strudel295a0ca2016-11-03 18:38:47 -070011992 // ISO/Exposure Priority
11993 if (frame_settings.exists(QCAMERA3_USE_ISO_EXP_PRIORITY) &&
11994 frame_settings.exists(QCAMERA3_SELECT_PRIORITY)) {
11995 cam_priority_mode_t mode =
11996 (cam_priority_mode_t)frame_settings.find(QCAMERA3_SELECT_PRIORITY).data.i32[0];
11997 if((CAM_ISO_PRIORITY == mode) || (CAM_EXP_PRIORITY == mode)) {
11998 cam_intf_parm_manual_3a_t use_iso_exp_pty;
11999 use_iso_exp_pty.previewOnly = FALSE;
12000 uint64_t* ptr = (uint64_t*)frame_settings.find(QCAMERA3_USE_ISO_EXP_PRIORITY).data.i64;
12001 use_iso_exp_pty.value = *ptr;
12002
12003 if(CAM_ISO_PRIORITY == mode) {
12004 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ISO,
12005 use_iso_exp_pty)) {
12006 rc = BAD_VALUE;
12007 }
12008 }
12009 else {
12010 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_TIME,
12011 use_iso_exp_pty)) {
12012 rc = BAD_VALUE;
12013 }
12014 }
Thierry Strudel54dc9782017-02-15 12:12:10 -080012015
12016 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 1)) {
12017 rc = BAD_VALUE;
12018 }
12019 }
12020 } else {
12021 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 0)) {
12022 rc = BAD_VALUE;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012023 }
12024 }
12025
12026 // Saturation
12027 if (frame_settings.exists(QCAMERA3_USE_SATURATION)) {
12028 int32_t* use_saturation =
12029 frame_settings.find(QCAMERA3_USE_SATURATION).data.i32;
12030 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_SATURATION, *use_saturation)) {
12031 rc = BAD_VALUE;
12032 }
12033 }
12034
Thierry Strudel3d639192016-09-09 11:52:26 -070012035 // EV step
12036 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
12037 gCamCapability[mCameraId]->exp_compensation_step)) {
12038 rc = BAD_VALUE;
12039 }
12040
12041 // CDS info
12042 if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
12043 cam_cds_data_t *cdsData = (cam_cds_data_t *)
12044 frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
12045
12046 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12047 CAM_INTF_META_CDS_DATA, *cdsData)) {
12048 rc = BAD_VALUE;
12049 }
12050 }
12051
Shuzhen Wang19463d72016-03-08 11:09:52 -080012052 // Hybrid AE
12053 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
12054 uint8_t *hybrid_ae = (uint8_t *)
12055 frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;
12056
12057 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12058 CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
12059 rc = BAD_VALUE;
12060 }
12061 }
12062
Thierry Strudel3d639192016-09-09 11:52:26 -070012063 return rc;
12064}
12065
12066/*===========================================================================
12067 * FUNCTION : captureResultCb
12068 *
12069 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
12070 *
12071 * PARAMETERS :
12072 * @frame : frame information from mm-camera-interface
12073 * @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
12074 * @userdata: userdata
12075 *
12076 * RETURN : NONE
12077 *==========================================================================*/
12078void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
12079 camera3_stream_buffer_t *buffer,
12080 uint32_t frame_number, bool isInputBuffer, void *userdata)
12081{
12082 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
12083 if (hw == NULL) {
12084 LOGE("Invalid hw %p", hw);
12085 return;
12086 }
12087
12088 hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
12089 return;
12090}
12091
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012092/*===========================================================================
12093 * FUNCTION : setBufferErrorStatus
12094 *
12095 * DESCRIPTION: Callback handler for channels to report any buffer errors
12096 *
12097 * PARAMETERS :
12098 * @ch : Channel on which buffer error is reported from
12099 * @frame_number : frame number on which buffer error is reported on
12100 * @buffer_status : buffer error status
12101 * @userdata: userdata
12102 *
12103 * RETURN : NONE
12104 *==========================================================================*/
12105void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
12106 uint32_t frame_number, camera3_buffer_status_t err, void *userdata)
12107{
12108 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
12109 if (hw == NULL) {
12110 LOGE("Invalid hw %p", hw);
12111 return;
12112 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012113
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012114 hw->setBufferErrorStatus(ch, frame_number, err);
12115 return;
12116}
12117
12118void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
12119 uint32_t frameNumber, camera3_buffer_status_t err)
12120{
12121 LOGD("channel: %p, frame# %d, buf err: %d", ch, frameNumber, err);
12122 pthread_mutex_lock(&mMutex);
12123
12124 for (auto& req : mPendingBuffersMap.mPendingBuffersInRequest) {
12125 if (req.frame_number != frameNumber)
12126 continue;
12127 for (auto& k : req.mPendingBufferList) {
12128 if(k.stream->priv == ch) {
12129 k.bufStatus = CAMERA3_BUFFER_STATUS_ERROR;
12130 }
12131 }
12132 }
12133
12134 pthread_mutex_unlock(&mMutex);
12135 return;
12136}
Thierry Strudel3d639192016-09-09 11:52:26 -070012137/*===========================================================================
12138 * FUNCTION : initialize
12139 *
12140 * DESCRIPTION: Pass framework callback pointers to HAL
12141 *
12142 * PARAMETERS :
12143 *
12144 *
12145 * RETURN : Success : 0
12146 * Failure: -ENODEV
12147 *==========================================================================*/
12148
12149int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
12150 const camera3_callback_ops_t *callback_ops)
12151{
12152 LOGD("E");
12153 QCamera3HardwareInterface *hw =
12154 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12155 if (!hw) {
12156 LOGE("NULL camera device");
12157 return -ENODEV;
12158 }
12159
12160 int rc = hw->initialize(callback_ops);
12161 LOGD("X");
12162 return rc;
12163}
12164
12165/*===========================================================================
12166 * FUNCTION : configure_streams
12167 *
12168 * DESCRIPTION:
12169 *
12170 * PARAMETERS :
12171 *
12172 *
12173 * RETURN : Success: 0
12174 * Failure: -EINVAL (if stream configuration is invalid)
12175 * -ENODEV (fatal error)
12176 *==========================================================================*/
12177
12178int QCamera3HardwareInterface::configure_streams(
12179 const struct camera3_device *device,
12180 camera3_stream_configuration_t *stream_list)
12181{
12182 LOGD("E");
12183 QCamera3HardwareInterface *hw =
12184 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12185 if (!hw) {
12186 LOGE("NULL camera device");
12187 return -ENODEV;
12188 }
12189 int rc = hw->configureStreams(stream_list);
12190 LOGD("X");
12191 return rc;
12192}
12193
12194/*===========================================================================
12195 * FUNCTION : construct_default_request_settings
12196 *
12197 * DESCRIPTION: Configure a settings buffer to meet the required use case
12198 *
12199 * PARAMETERS :
12200 *
12201 *
12202 * RETURN : Success: Return valid metadata
12203 * Failure: Return NULL
12204 *==========================================================================*/
12205const camera_metadata_t* QCamera3HardwareInterface::
12206 construct_default_request_settings(const struct camera3_device *device,
12207 int type)
12208{
12209
12210 LOGD("E");
12211 camera_metadata_t* fwk_metadata = NULL;
12212 QCamera3HardwareInterface *hw =
12213 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12214 if (!hw) {
12215 LOGE("NULL camera device");
12216 return NULL;
12217 }
12218
12219 fwk_metadata = hw->translateCapabilityToMetadata(type);
12220
12221 LOGD("X");
12222 return fwk_metadata;
12223}
12224
12225/*===========================================================================
12226 * FUNCTION : process_capture_request
12227 *
12228 * DESCRIPTION:
12229 *
12230 * PARAMETERS :
12231 *
12232 *
12233 * RETURN :
12234 *==========================================================================*/
12235int QCamera3HardwareInterface::process_capture_request(
12236 const struct camera3_device *device,
12237 camera3_capture_request_t *request)
12238{
12239 LOGD("E");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012240 CAMSCOPE_UPDATE_FLAGS(CAMSCOPE_SECTION_HAL, kpi_camscope_flags);
Thierry Strudel3d639192016-09-09 11:52:26 -070012241 QCamera3HardwareInterface *hw =
12242 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12243 if (!hw) {
12244 LOGE("NULL camera device");
12245 return -EINVAL;
12246 }
12247
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012248 int rc = hw->orchestrateRequest(request);
Thierry Strudel3d639192016-09-09 11:52:26 -070012249 LOGD("X");
12250 return rc;
12251}
12252
12253/*===========================================================================
12254 * FUNCTION : dump
12255 *
12256 * DESCRIPTION:
12257 *
12258 * PARAMETERS :
12259 *
12260 *
12261 * RETURN :
12262 *==========================================================================*/
12263
12264void QCamera3HardwareInterface::dump(
12265 const struct camera3_device *device, int fd)
12266{
12267 /* Log level property is read when "adb shell dumpsys media.camera" is
12268 called so that the log level can be controlled without restarting
12269 the media server */
12270 getLogLevel();
12271
12272 LOGD("E");
12273 QCamera3HardwareInterface *hw =
12274 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12275 if (!hw) {
12276 LOGE("NULL camera device");
12277 return;
12278 }
12279
12280 hw->dump(fd);
12281 LOGD("X");
12282 return;
12283}
12284
12285/*===========================================================================
12286 * FUNCTION : flush
12287 *
12288 * DESCRIPTION:
12289 *
12290 * PARAMETERS :
12291 *
12292 *
12293 * RETURN :
12294 *==========================================================================*/
12295
12296int QCamera3HardwareInterface::flush(
12297 const struct camera3_device *device)
12298{
12299 int rc;
12300 LOGD("E");
12301 QCamera3HardwareInterface *hw =
12302 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12303 if (!hw) {
12304 LOGE("NULL camera device");
12305 return -EINVAL;
12306 }
12307
12308 pthread_mutex_lock(&hw->mMutex);
12309 // Validate current state
12310 switch (hw->mState) {
12311 case STARTED:
12312 /* valid state */
12313 break;
12314
12315 case ERROR:
12316 pthread_mutex_unlock(&hw->mMutex);
12317 hw->handleCameraDeviceError();
12318 return -ENODEV;
12319
12320 default:
12321 LOGI("Flush returned during state %d", hw->mState);
12322 pthread_mutex_unlock(&hw->mMutex);
12323 return 0;
12324 }
12325 pthread_mutex_unlock(&hw->mMutex);
12326
12327 rc = hw->flush(true /* restart channels */ );
12328 LOGD("X");
12329 return rc;
12330}
12331
12332/*===========================================================================
12333 * FUNCTION : close_camera_device
12334 *
12335 * DESCRIPTION:
12336 *
12337 * PARAMETERS :
12338 *
12339 *
12340 * RETURN :
12341 *==========================================================================*/
12342int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
12343{
12344 int ret = NO_ERROR;
12345 QCamera3HardwareInterface *hw =
12346 reinterpret_cast<QCamera3HardwareInterface *>(
12347 reinterpret_cast<camera3_device_t *>(device)->priv);
12348 if (!hw) {
12349 LOGE("NULL camera device");
12350 return BAD_VALUE;
12351 }
12352
12353 LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
12354 delete hw;
12355 LOGI("[KPI Perf]: X");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012356 CAMSCOPE_DESTROY(CAMSCOPE_SECTION_HAL);
Thierry Strudel3d639192016-09-09 11:52:26 -070012357 return ret;
12358}
12359
12360/*===========================================================================
12361 * FUNCTION : getWaveletDenoiseProcessPlate
12362 *
12363 * DESCRIPTION: query wavelet denoise process plate
12364 *
12365 * PARAMETERS : None
12366 *
12367 * RETURN : WNR prcocess plate value
12368 *==========================================================================*/
12369cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
12370{
12371 char prop[PROPERTY_VALUE_MAX];
12372 memset(prop, 0, sizeof(prop));
12373 property_get("persist.denoise.process.plates", prop, "0");
12374 int processPlate = atoi(prop);
12375 switch(processPlate) {
12376 case 0:
12377 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
12378 case 1:
12379 return CAM_WAVELET_DENOISE_CBCR_ONLY;
12380 case 2:
12381 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
12382 case 3:
12383 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
12384 default:
12385 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
12386 }
12387}
12388
12389
12390/*===========================================================================
12391 * FUNCTION : getTemporalDenoiseProcessPlate
12392 *
12393 * DESCRIPTION: query temporal denoise process plate
12394 *
12395 * PARAMETERS : None
12396 *
12397 * RETURN : TNR prcocess plate value
12398 *==========================================================================*/
12399cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
12400{
12401 char prop[PROPERTY_VALUE_MAX];
12402 memset(prop, 0, sizeof(prop));
12403 property_get("persist.tnr.process.plates", prop, "0");
12404 int processPlate = atoi(prop);
12405 switch(processPlate) {
12406 case 0:
12407 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
12408 case 1:
12409 return CAM_WAVELET_DENOISE_CBCR_ONLY;
12410 case 2:
12411 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
12412 case 3:
12413 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
12414 default:
12415 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
12416 }
12417}
12418
12419
12420/*===========================================================================
12421 * FUNCTION : extractSceneMode
12422 *
12423 * DESCRIPTION: Extract scene mode from frameworks set metadata
12424 *
12425 * PARAMETERS :
12426 * @frame_settings: CameraMetadata reference
12427 * @metaMode: ANDROID_CONTORL_MODE
12428 * @hal_metadata: hal metadata structure
12429 *
12430 * RETURN : None
12431 *==========================================================================*/
12432int32_t QCamera3HardwareInterface::extractSceneMode(
12433 const CameraMetadata &frame_settings, uint8_t metaMode,
12434 metadata_buffer_t *hal_metadata)
12435{
12436 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080012437 uint8_t sceneMode = CAM_SCENE_MODE_OFF;
12438
12439 if (ANDROID_CONTROL_MODE_OFF_KEEP_STATE == metaMode) {
12440 LOGD("Ignoring control mode OFF_KEEP_STATE");
12441 return NO_ERROR;
12442 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012443
12444 if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
12445 camera_metadata_ro_entry entry =
12446 frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
12447 if (0 == entry.count)
12448 return rc;
12449
12450 uint8_t fwk_sceneMode = entry.data.u8[0];
12451
12452 int val = lookupHalName(SCENE_MODES_MAP,
12453 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
12454 fwk_sceneMode);
12455 if (NAME_NOT_FOUND != val) {
Mansoor Aftab58465fa2017-01-26 15:02:44 -080012456 sceneMode = (uint8_t)val;
Thierry Strudel3d639192016-09-09 11:52:26 -070012457 LOGD("sceneMode: %d", sceneMode);
Thierry Strudel3d639192016-09-09 11:52:26 -070012458 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080012459 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012460
Mansoor Aftab58465fa2017-01-26 15:02:44 -080012461 if ((sceneMode == CAM_SCENE_MODE_HDR) || m_bSensorHDREnabled) {
12462 rc = setSensorHDR(hal_metadata, (sceneMode == CAM_SCENE_MODE_HDR));
12463 }
12464
12465 if ((rc == NO_ERROR) && !m_bSensorHDREnabled) {
12466 if (sceneMode == ANDROID_CONTROL_SCENE_MODE_HDR) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012467 cam_hdr_param_t hdr_params;
12468 hdr_params.hdr_enable = 1;
12469 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
12470 hdr_params.hdr_need_1x = false;
12471 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12472 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
12473 rc = BAD_VALUE;
12474 }
12475 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080012476
Thierry Strudel3d639192016-09-09 11:52:26 -070012477 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12478 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
12479 rc = BAD_VALUE;
12480 }
12481 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012482
12483 if (mForceHdrSnapshot) {
12484 cam_hdr_param_t hdr_params;
12485 hdr_params.hdr_enable = 1;
12486 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
12487 hdr_params.hdr_need_1x = false;
12488 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12489 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
12490 rc = BAD_VALUE;
12491 }
12492 }
12493
Thierry Strudel3d639192016-09-09 11:52:26 -070012494 return rc;
12495}
12496
12497/*===========================================================================
Thierry Strudel04e026f2016-10-10 11:27:36 -070012498 * FUNCTION : setVideoHdrMode
12499 *
12500 * DESCRIPTION: Set Video HDR mode from frameworks set metadata
12501 *
12502 * PARAMETERS :
12503 * @hal_metadata: hal metadata structure
12504 * @metaMode: QCAMERA3_VIDEO_HDR_MODE
12505 *
12506 * RETURN : None
12507 *==========================================================================*/
12508int32_t QCamera3HardwareInterface::setVideoHdrMode(
12509 metadata_buffer_t *hal_metadata, cam_video_hdr_mode_t vhdr)
12510{
Mansoor Aftab58465fa2017-01-26 15:02:44 -080012511 if ( (vhdr >= CAM_VIDEO_HDR_MODE_OFF) && (vhdr < CAM_VIDEO_HDR_MODE_MAX)) {
12512 return setSensorHDR(hal_metadata, (vhdr == CAM_VIDEO_HDR_MODE_ON), true);
12513 }
12514
12515 LOGE("Invalid Video HDR mode %d!", vhdr);
12516 return BAD_VALUE;
12517}
12518
12519/*===========================================================================
12520 * FUNCTION : setSensorHDR
12521 *
12522 * DESCRIPTION: Enable/disable sensor HDR.
12523 *
12524 * PARAMETERS :
12525 * @hal_metadata: hal metadata structure
12526 * @enable: boolean whether to enable/disable sensor HDR
12527 *
12528 * RETURN : None
12529 *==========================================================================*/
12530int32_t QCamera3HardwareInterface::setSensorHDR(
12531 metadata_buffer_t *hal_metadata, bool enable, bool isVideoHdrEnable)
12532{
Thierry Strudel04e026f2016-10-10 11:27:36 -070012533 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080012534 cam_sensor_hdr_type_t sensor_hdr = CAM_SENSOR_HDR_OFF;
12535
12536 if (enable) {
12537 char sensor_hdr_prop[PROPERTY_VALUE_MAX];
12538 memset(sensor_hdr_prop, 0, sizeof(sensor_hdr_prop));
12539 #ifdef _LE_CAMERA_
12540 //Default to staggered HDR for IOT
12541 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "3");
12542 #else
12543 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "0");
12544 #endif
12545 sensor_hdr = (cam_sensor_hdr_type_t) atoi(sensor_hdr_prop);
12546 }
12547
12548 bool isSupported = false;
12549 switch (sensor_hdr) {
12550 case CAM_SENSOR_HDR_IN_SENSOR:
12551 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
12552 CAM_QCOM_FEATURE_SENSOR_HDR) {
12553 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012554 LOGD("Setting HDR mode In Sensor");
Thierry Strudel04e026f2016-10-10 11:27:36 -070012555 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080012556 break;
12557 case CAM_SENSOR_HDR_ZIGZAG:
12558 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
12559 CAM_QCOM_FEATURE_ZIGZAG_HDR) {
12560 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012561 LOGD("Setting HDR mode Zigzag");
Thierry Strudel04e026f2016-10-10 11:27:36 -070012562 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080012563 break;
12564 case CAM_SENSOR_HDR_STAGGERED:
12565 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
12566 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
12567 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012568 LOGD("Setting HDR mode Staggered");
Thierry Strudel04e026f2016-10-10 11:27:36 -070012569 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080012570 break;
12571 case CAM_SENSOR_HDR_OFF:
12572 isSupported = true;
12573 LOGD("Turning off sensor HDR");
12574 break;
12575 default:
12576 LOGE("HDR mode %d not supported", sensor_hdr);
12577 rc = BAD_VALUE;
12578 break;
12579 }
12580
12581 if(isSupported) {
12582 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12583 CAM_INTF_PARM_SENSOR_HDR, sensor_hdr)) {
12584 rc = BAD_VALUE;
12585 } else {
12586 if(!isVideoHdrEnable)
12587 m_bSensorHDREnabled = (sensor_hdr != CAM_SENSOR_HDR_OFF);
Thierry Strudel04e026f2016-10-10 11:27:36 -070012588 }
12589 }
12590 return rc;
12591}
12592
12593/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070012594 * FUNCTION : needRotationReprocess
12595 *
12596 * DESCRIPTION: if rotation needs to be done by reprocess in pp
12597 *
12598 * PARAMETERS : none
12599 *
12600 * RETURN : true: needed
12601 * false: no need
12602 *==========================================================================*/
12603bool QCamera3HardwareInterface::needRotationReprocess()
12604{
12605 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
12606 // current rotation is not zero, and pp has the capability to process rotation
12607 LOGH("need do reprocess for rotation");
12608 return true;
12609 }
12610
12611 return false;
12612}
12613
12614/*===========================================================================
12615 * FUNCTION : needReprocess
12616 *
12617 * DESCRIPTION: if reprocess in needed
12618 *
12619 * PARAMETERS : none
12620 *
12621 * RETURN : true: needed
12622 * false: no need
12623 *==========================================================================*/
12624bool QCamera3HardwareInterface::needReprocess(cam_feature_mask_t postprocess_mask)
12625{
12626 if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
12627 // TODO: add for ZSL HDR later
12628 // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
12629 if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
12630 LOGH("need do reprocess for ZSL WNR or min PP reprocess");
12631 return true;
12632 } else {
12633 LOGH("already post processed frame");
12634 return false;
12635 }
12636 }
12637 return needRotationReprocess();
12638}
12639
12640/*===========================================================================
12641 * FUNCTION : needJpegExifRotation
12642 *
12643 * DESCRIPTION: if rotation from jpeg is needed
12644 *
12645 * PARAMETERS : none
12646 *
12647 * RETURN : true: needed
12648 * false: no need
12649 *==========================================================================*/
12650bool QCamera3HardwareInterface::needJpegExifRotation()
12651{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012652 /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
Thierry Strudel3d639192016-09-09 11:52:26 -070012653 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
12654 LOGD("Need use Jpeg EXIF Rotation");
12655 return true;
12656 }
12657 return false;
12658}
12659
12660/*===========================================================================
12661 * FUNCTION : addOfflineReprocChannel
12662 *
12663 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
12664 * coming from input channel
12665 *
12666 * PARAMETERS :
12667 * @config : reprocess configuration
12668 * @inputChHandle : pointer to the input (source) channel
12669 *
12670 *
12671 * RETURN : Ptr to the newly created channel obj. NULL if failed.
12672 *==========================================================================*/
12673QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
12674 const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
12675{
12676 int32_t rc = NO_ERROR;
12677 QCamera3ReprocessChannel *pChannel = NULL;
12678
12679 pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012680 mChannelHandle, mCameraHandle->ops, captureResultCb, setBufferErrorStatus,
12681 config.padding, CAM_QCOM_FEATURE_NONE, this, inputChHandle);
Thierry Strudel3d639192016-09-09 11:52:26 -070012682 if (NULL == pChannel) {
12683 LOGE("no mem for reprocess channel");
12684 return NULL;
12685 }
12686
12687 rc = pChannel->initialize(IS_TYPE_NONE);
12688 if (rc != NO_ERROR) {
12689 LOGE("init reprocess channel failed, ret = %d", rc);
12690 delete pChannel;
12691 return NULL;
12692 }
12693
12694 // pp feature config
12695 cam_pp_feature_config_t pp_config;
12696 memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
12697
12698 pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
12699 if (gCamCapability[mCameraId]->qcom_supported_feature_mask
12700 & CAM_QCOM_FEATURE_DSDN) {
12701 //Use CPP CDS incase h/w supports it.
12702 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
12703 pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
12704 }
12705 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
12706 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
12707 }
12708
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012709 if (config.hdr_param.hdr_enable) {
12710 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
12711 pp_config.hdr_param = config.hdr_param;
12712 }
12713
12714 if (mForceHdrSnapshot) {
12715 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
12716 pp_config.hdr_param.hdr_enable = 1;
12717 pp_config.hdr_param.hdr_need_1x = 0;
12718 pp_config.hdr_param.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
12719 }
12720
Thierry Strudel3d639192016-09-09 11:52:26 -070012721 rc = pChannel->addReprocStreamsFromSource(pp_config,
12722 config,
12723 IS_TYPE_NONE,
12724 mMetadataChannel);
12725
12726 if (rc != NO_ERROR) {
12727 delete pChannel;
12728 return NULL;
12729 }
12730 return pChannel;
12731}
12732
12733/*===========================================================================
12734 * FUNCTION : getMobicatMask
12735 *
12736 * DESCRIPTION: returns mobicat mask
12737 *
12738 * PARAMETERS : none
12739 *
12740 * RETURN : mobicat mask
12741 *
12742 *==========================================================================*/
12743uint8_t QCamera3HardwareInterface::getMobicatMask()
12744{
12745 return m_MobicatMask;
12746}
12747
12748/*===========================================================================
12749 * FUNCTION : setMobicat
12750 *
12751 * DESCRIPTION: set Mobicat on/off.
12752 *
12753 * PARAMETERS :
12754 * @params : none
12755 *
12756 * RETURN : int32_t type of status
12757 * NO_ERROR -- success
12758 * none-zero failure code
12759 *==========================================================================*/
12760int32_t QCamera3HardwareInterface::setMobicat()
12761{
12762 char value [PROPERTY_VALUE_MAX];
12763 property_get("persist.camera.mobicat", value, "0");
12764 int32_t ret = NO_ERROR;
12765 uint8_t enableMobi = (uint8_t)atoi(value);
12766
12767 if (enableMobi) {
12768 tune_cmd_t tune_cmd;
12769 tune_cmd.type = SET_RELOAD_CHROMATIX;
12770 tune_cmd.module = MODULE_ALL;
12771 tune_cmd.value = TRUE;
12772 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
12773 CAM_INTF_PARM_SET_VFE_COMMAND,
12774 tune_cmd);
12775
12776 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
12777 CAM_INTF_PARM_SET_PP_COMMAND,
12778 tune_cmd);
12779 }
12780 m_MobicatMask = enableMobi;
12781
12782 return ret;
12783}
12784
12785/*===========================================================================
12786* FUNCTION : getLogLevel
12787*
12788* DESCRIPTION: Reads the log level property into a variable
12789*
12790* PARAMETERS :
12791* None
12792*
12793* RETURN :
12794* None
12795*==========================================================================*/
12796void QCamera3HardwareInterface::getLogLevel()
12797{
12798 char prop[PROPERTY_VALUE_MAX];
12799 uint32_t globalLogLevel = 0;
12800
12801 property_get("persist.camera.hal.debug", prop, "0");
12802 int val = atoi(prop);
12803 if (0 <= val) {
12804 gCamHal3LogLevel = (uint32_t)val;
12805 }
12806
Thierry Strudel9ec39c62016-12-28 11:30:05 -080012807 property_get("persist.camera.kpi.debug", prop, "0");
Thierry Strudel3d639192016-09-09 11:52:26 -070012808 gKpiDebugLevel = atoi(prop);
12809
12810 property_get("persist.camera.global.debug", prop, "0");
12811 val = atoi(prop);
12812 if (0 <= val) {
12813 globalLogLevel = (uint32_t)val;
12814 }
12815
12816 /* Highest log level among hal.logs and global.logs is selected */
12817 if (gCamHal3LogLevel < globalLogLevel)
12818 gCamHal3LogLevel = globalLogLevel;
12819
12820 return;
12821}
12822
12823/*===========================================================================
12824 * FUNCTION : validateStreamRotations
12825 *
12826 * DESCRIPTION: Check if the rotations requested are supported
12827 *
12828 * PARAMETERS :
12829 * @stream_list : streams to be configured
12830 *
12831 * RETURN : NO_ERROR on success
12832 * -EINVAL on failure
12833 *
12834 *==========================================================================*/
12835int QCamera3HardwareInterface::validateStreamRotations(
12836 camera3_stream_configuration_t *streamList)
12837{
12838 int rc = NO_ERROR;
12839
12840 /*
12841 * Loop through all streams requested in configuration
12842 * Check if unsupported rotations have been requested on any of them
12843 */
12844 for (size_t j = 0; j < streamList->num_streams; j++){
12845 camera3_stream_t *newStream = streamList->streams[j];
12846
12847 bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
12848 bool isImplDef = (newStream->format ==
12849 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
12850 bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
12851 isImplDef);
12852
12853 if (isRotated && (!isImplDef || isZsl)) {
12854 LOGE("Error: Unsupported rotation of %d requested for stream"
12855 "type:%d and stream format:%d",
12856 newStream->rotation, newStream->stream_type,
12857 newStream->format);
12858 rc = -EINVAL;
12859 break;
12860 }
12861 }
12862
12863 return rc;
12864}
12865
12866/*===========================================================================
12867* FUNCTION : getFlashInfo
12868*
12869* DESCRIPTION: Retrieve information about whether the device has a flash.
12870*
12871* PARAMETERS :
12872* @cameraId : Camera id to query
12873* @hasFlash : Boolean indicating whether there is a flash device
12874* associated with given camera
12875* @flashNode : If a flash device exists, this will be its device node.
12876*
12877* RETURN :
12878* None
12879*==========================================================================*/
12880void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
12881 bool& hasFlash,
12882 char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
12883{
12884 cam_capability_t* camCapability = gCamCapability[cameraId];
12885 if (NULL == camCapability) {
12886 hasFlash = false;
12887 flashNode[0] = '\0';
12888 } else {
12889 hasFlash = camCapability->flash_available;
12890 strlcpy(flashNode,
12891 (char*)camCapability->flash_dev_name,
12892 QCAMERA_MAX_FILEPATH_LENGTH);
12893 }
12894}
12895
12896/*===========================================================================
12897* FUNCTION : getEepromVersionInfo
12898*
12899* DESCRIPTION: Retrieve version info of the sensor EEPROM data
12900*
12901* PARAMETERS : None
12902*
12903* RETURN : string describing EEPROM version
12904* "\0" if no such info available
12905*==========================================================================*/
12906const char *QCamera3HardwareInterface::getEepromVersionInfo()
12907{
12908 return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
12909}
12910
12911/*===========================================================================
12912* FUNCTION : getLdafCalib
12913*
12914* DESCRIPTION: Retrieve Laser AF calibration data
12915*
12916* PARAMETERS : None
12917*
12918* RETURN : Two uint32_t describing laser AF calibration data
12919* NULL if none is available.
12920*==========================================================================*/
12921const uint32_t *QCamera3HardwareInterface::getLdafCalib()
12922{
12923 if (mLdafCalibExist) {
12924 return &mLdafCalib[0];
12925 } else {
12926 return NULL;
12927 }
12928}
12929
12930/*===========================================================================
12931 * FUNCTION : dynamicUpdateMetaStreamInfo
12932 *
12933 * DESCRIPTION: This function:
12934 * (1) stops all the channels
12935 * (2) returns error on pending requests and buffers
12936 * (3) sends metastream_info in setparams
12937 * (4) starts all channels
12938 * This is useful when sensor has to be restarted to apply any
12939 * settings such as frame rate from a different sensor mode
12940 *
12941 * PARAMETERS : None
12942 *
12943 * RETURN : NO_ERROR on success
12944 * Error codes on failure
12945 *
12946 *==========================================================================*/
12947int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
12948{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012949 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_DYN_UPDATE_META_STRM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070012950 int rc = NO_ERROR;
12951
12952 LOGD("E");
12953
12954 rc = stopAllChannels();
12955 if (rc < 0) {
12956 LOGE("stopAllChannels failed");
12957 return rc;
12958 }
12959
12960 rc = notifyErrorForPendingRequests();
12961 if (rc < 0) {
12962 LOGE("notifyErrorForPendingRequests failed");
12963 return rc;
12964 }
12965
12966 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
12967 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x"
12968 "Format:%d",
12969 mStreamConfigInfo.type[i],
12970 mStreamConfigInfo.stream_sizes[i].width,
12971 mStreamConfigInfo.stream_sizes[i].height,
12972 mStreamConfigInfo.postprocess_mask[i],
12973 mStreamConfigInfo.format[i]);
12974 }
12975
12976 /* Send meta stream info once again so that ISP can start */
12977 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
12978 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
12979 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
12980 mParameters);
12981 if (rc < 0) {
12982 LOGE("set Metastreaminfo failed. Sensor mode does not change");
12983 }
12984
12985 rc = startAllChannels();
12986 if (rc < 0) {
12987 LOGE("startAllChannels failed");
12988 return rc;
12989 }
12990
12991 LOGD("X");
12992 return rc;
12993}
12994
12995/*===========================================================================
12996 * FUNCTION : stopAllChannels
12997 *
12998 * DESCRIPTION: This function stops (equivalent to stream-off) all channels
12999 *
13000 * PARAMETERS : None
13001 *
13002 * RETURN : NO_ERROR on success
13003 * Error codes on failure
13004 *
13005 *==========================================================================*/
13006int32_t QCamera3HardwareInterface::stopAllChannels()
13007{
13008 int32_t rc = NO_ERROR;
13009
13010 LOGD("Stopping all channels");
13011 // Stop the Streams/Channels
13012 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13013 it != mStreamInfo.end(); it++) {
13014 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13015 if (channel) {
13016 channel->stop();
13017 }
13018 (*it)->status = INVALID;
13019 }
13020
13021 if (mSupportChannel) {
13022 mSupportChannel->stop();
13023 }
13024 if (mAnalysisChannel) {
13025 mAnalysisChannel->stop();
13026 }
13027 if (mRawDumpChannel) {
13028 mRawDumpChannel->stop();
13029 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013030 if (mHdrPlusRawSrcChannel) {
13031 mHdrPlusRawSrcChannel->stop();
13032 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013033 if (mMetadataChannel) {
13034 /* If content of mStreamInfo is not 0, there is metadata stream */
13035 mMetadataChannel->stop();
13036 }
13037
13038 LOGD("All channels stopped");
13039 return rc;
13040}
13041
13042/*===========================================================================
13043 * FUNCTION : startAllChannels
13044 *
13045 * DESCRIPTION: This function starts (equivalent to stream-on) all channels
13046 *
13047 * PARAMETERS : None
13048 *
13049 * RETURN : NO_ERROR on success
13050 * Error codes on failure
13051 *
13052 *==========================================================================*/
13053int32_t QCamera3HardwareInterface::startAllChannels()
13054{
13055 int32_t rc = NO_ERROR;
13056
13057 LOGD("Start all channels ");
13058 // Start the Streams/Channels
13059 if (mMetadataChannel) {
13060 /* If content of mStreamInfo is not 0, there is metadata stream */
13061 rc = mMetadataChannel->start();
13062 if (rc < 0) {
13063 LOGE("META channel start failed");
13064 return rc;
13065 }
13066 }
13067 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13068 it != mStreamInfo.end(); it++) {
13069 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13070 if (channel) {
13071 rc = channel->start();
13072 if (rc < 0) {
13073 LOGE("channel start failed");
13074 return rc;
13075 }
13076 }
13077 }
13078 if (mAnalysisChannel) {
13079 mAnalysisChannel->start();
13080 }
13081 if (mSupportChannel) {
13082 rc = mSupportChannel->start();
13083 if (rc < 0) {
13084 LOGE("Support channel start failed");
13085 return rc;
13086 }
13087 }
13088 if (mRawDumpChannel) {
13089 rc = mRawDumpChannel->start();
13090 if (rc < 0) {
13091 LOGE("RAW dump channel start failed");
13092 return rc;
13093 }
13094 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013095 if (mHdrPlusRawSrcChannel) {
13096 rc = mHdrPlusRawSrcChannel->start();
13097 if (rc < 0) {
13098 LOGE("HDR+ RAW channel start failed");
13099 return rc;
13100 }
13101 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013102
13103 LOGD("All channels started");
13104 return rc;
13105}
13106
13107/*===========================================================================
13108 * FUNCTION : notifyErrorForPendingRequests
13109 *
13110 * DESCRIPTION: This function sends error for all the pending requests/buffers
13111 *
13112 * PARAMETERS : None
13113 *
13114 * RETURN : Error codes
13115 * NO_ERROR on success
13116 *
13117 *==========================================================================*/
int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
{
    int32_t rc = NO_ERROR;
    unsigned int frameNum = 0;
    camera3_capture_result_t result;
    camera3_stream_buffer_t *pStream_Buf = NULL;

    memset(&result, 0, sizeof(camera3_capture_result_t));

    // frameNum is the oldest frame number that still has a pending *request*
    // (i.e. its metadata has not been sent). Buffers belonging to frames
    // older than this already had their metadata delivered.
    if (mPendingRequestsList.size() > 0) {
        pendingRequestIterator i = mPendingRequestsList.begin();
        frameNum = i->frame_number;
    } else {
        /* There might still be pending buffers even though there are
         no pending requests. Setting the frameNum to MAX so that
         all the buffers with smaller frame numbers are returned */
        frameNum = UINT_MAX;
    }

    LOGH("Oldest frame num on mPendingRequestsList = %u",
            frameNum);

    // Depth data is tracked separately from regular stream buffers.
    notifyErrorFoPendingDepthData(mDepthChannel);

    // Walk every request that still owns buffers. Two cases:
    //  - frame_number < frameNum: metadata already sent, so each buffer gets
    //    an individual ERROR_BUFFER notify.
    //  - otherwise: nothing sent yet, so the whole request gets one
    //    ERROR_REQUEST notify and its entry is erased from
    //    mPendingRequestsList as well.
    for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
            req != mPendingBuffersMap.mPendingBuffersInRequest.end(); ) {

        if (req->frame_number < frameNum) {
            // Send Error notify to frameworks for each buffer for which
            // metadata buffer is already sent
            LOGH("Sending ERROR BUFFER for frame %d for %d buffer(s)",
                    req->frame_number, req->mPendingBufferList.size());

            // NOTE(review): operator new[] throws rather than returning NULL
            // in standard C++, so this check is dead unless the build uses a
            // non-throwing allocator — confirm build flags.
            pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
            if (NULL == pStream_Buf) {
                LOGE("No memory for pending buffers array");
                return NO_MEMORY;
            }
            memset(pStream_Buf, 0,
                    sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());
            result.result = NULL;
            result.frame_number = req->frame_number;
            result.num_output_buffers = req->mPendingBufferList.size();
            result.output_buffers = pStream_Buf;

            size_t index = 0;
            // One ERROR_BUFFER notify per buffer; buffers are returned with
            // CAMERA3_BUFFER_STATUS_ERROR in a single capture result below.
            for (auto info = req->mPendingBufferList.begin();
                    info != req->mPendingBufferList.end(); ) {

                camera3_notify_msg_t notify_msg;
                memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
                notify_msg.type = CAMERA3_MSG_ERROR;
                notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
                notify_msg.message.error.error_stream = info->stream;
                notify_msg.message.error.frame_number = req->frame_number;
                pStream_Buf[index].acquire_fence = -1;
                pStream_Buf[index].release_fence = -1;
                pStream_Buf[index].buffer = info->buffer;
                pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
                pStream_Buf[index].stream = info->stream;
                orchestrateNotify(&notify_msg);
                index++;
                // Remove buffer from list
                info = req->mPendingBufferList.erase(info);
            }

            // Remove this request from Map
            LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
                    req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
            // erase() advances req to the next request (loop has no req++).
            req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);

            orchestrateResult(&result);

            delete [] pStream_Buf;
        } else {

            // Go through the pending requests info and send error request to framework
            // NOTE(review): this branch assumes mPendingRequestsList is
            // non-empty (it dereferences i below); that holds because
            // frameNum is UINT_MAX when the list is empty, which routes all
            // requests into the branch above — verify frame_number can never
            // equal UINT_MAX.
            pendingRequestIterator i = mPendingRequestsList.begin(); //make sure i is at the beginning

            LOGH("Sending ERROR REQUEST for frame %d", req->frame_number);

            // Send error notify to frameworks
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
            notify_msg.type = CAMERA3_MSG_ERROR;
            notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
            notify_msg.message.error.error_stream = NULL;
            notify_msg.message.error.frame_number = req->frame_number;
            orchestrateNotify(&notify_msg);

            pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
            if (NULL == pStream_Buf) {
                LOGE("No memory for pending buffers array");
                return NO_MEMORY;
            }
            memset(pStream_Buf, 0, sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());

            result.result = NULL;
            result.frame_number = req->frame_number;
            result.input_buffer = i->input_buffer;
            result.num_output_buffers = req->mPendingBufferList.size();
            result.output_buffers = pStream_Buf;

            size_t index = 0;
            // Return all buffers with ERROR status; no per-buffer notify is
            // needed since ERROR_REQUEST covers the whole request.
            for (auto info = req->mPendingBufferList.begin();
                    info != req->mPendingBufferList.end(); ) {
                pStream_Buf[index].acquire_fence = -1;
                pStream_Buf[index].release_fence = -1;
                pStream_Buf[index].buffer = info->buffer;
                pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
                pStream_Buf[index].stream = info->stream;
                index++;
                // Remove buffer from list
                info = req->mPendingBufferList.erase(info);
            }

            // Remove this request from Map
            LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
                    req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
            req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);

            orchestrateResult(&result);
            delete [] pStream_Buf;
            i = erasePendingRequest(i);
        }
    }

    /* Reset pending frame Drop list and requests list */
    mPendingFrameDropList.clear();

    // Defensive: the loop above should have drained everything already.
    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        req.mPendingBufferList.clear();
    }
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    LOGH("Cleared all the pending buffers ");

    return rc;
}
13256
13257bool QCamera3HardwareInterface::isOnEncoder(
13258 const cam_dimension_t max_viewfinder_size,
13259 uint32_t width, uint32_t height)
13260{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013261 return ((width > (uint32_t)max_viewfinder_size.width) ||
13262 (height > (uint32_t)max_viewfinder_size.height) ||
13263 (width > (uint32_t)VIDEO_4K_WIDTH) ||
13264 (height > (uint32_t)VIDEO_4K_HEIGHT));
Thierry Strudel3d639192016-09-09 11:52:26 -070013265}
13266
13267/*===========================================================================
13268 * FUNCTION : setBundleInfo
13269 *
13270 * DESCRIPTION: Set bundle info for all streams that are bundle.
13271 *
13272 * PARAMETERS : None
13273 *
13274 * RETURN : NO_ERROR on success
13275 * Error codes on failure
13276 *==========================================================================*/
13277int32_t QCamera3HardwareInterface::setBundleInfo()
13278{
13279 int32_t rc = NO_ERROR;
13280
13281 if (mChannelHandle) {
13282 cam_bundle_config_t bundleInfo;
13283 memset(&bundleInfo, 0, sizeof(bundleInfo));
13284 rc = mCameraHandle->ops->get_bundle_info(
13285 mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
13286 if (rc != NO_ERROR) {
13287 LOGE("get_bundle_info failed");
13288 return rc;
13289 }
13290 if (mAnalysisChannel) {
13291 mAnalysisChannel->setBundleInfo(bundleInfo);
13292 }
13293 if (mSupportChannel) {
13294 mSupportChannel->setBundleInfo(bundleInfo);
13295 }
13296 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13297 it != mStreamInfo.end(); it++) {
13298 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13299 channel->setBundleInfo(bundleInfo);
13300 }
13301 if (mRawDumpChannel) {
13302 mRawDumpChannel->setBundleInfo(bundleInfo);
13303 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013304 if (mHdrPlusRawSrcChannel) {
13305 mHdrPlusRawSrcChannel->setBundleInfo(bundleInfo);
13306 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013307 }
13308
13309 return rc;
13310}
13311
13312/*===========================================================================
Thierry Strudel295a0ca2016-11-03 18:38:47 -070013313 * FUNCTION : setInstantAEC
13314 *
13315 * DESCRIPTION: Set Instant AEC related params.
13316 *
13317 * PARAMETERS :
13318 * @meta: CameraMetadata reference
13319 *
13320 * RETURN : NO_ERROR on success
13321 * Error codes on failure
13322 *==========================================================================*/
13323int32_t QCamera3HardwareInterface::setInstantAEC(const CameraMetadata &meta)
13324{
13325 int32_t rc = NO_ERROR;
13326 uint8_t val = 0;
13327 char prop[PROPERTY_VALUE_MAX];
13328
13329 // First try to configure instant AEC from framework metadata
13330 if (meta.exists(QCAMERA3_INSTANT_AEC_MODE)) {
13331 val = (uint8_t)meta.find(QCAMERA3_INSTANT_AEC_MODE).data.i32[0];
13332 }
13333
13334 // If framework did not set this value, try to read from set prop.
13335 if (val == 0) {
13336 memset(prop, 0, sizeof(prop));
13337 property_get("persist.camera.instant.aec", prop, "0");
13338 val = (uint8_t)atoi(prop);
13339 }
13340
13341 if ((val >= (uint8_t)CAM_AEC_NORMAL_CONVERGENCE) &&
13342 ( val < (uint8_t)CAM_AEC_CONVERGENCE_MAX)) {
13343 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_INSTANT_AEC, val);
13344 mInstantAEC = val;
13345 mInstantAECSettledFrameNumber = 0;
13346 mInstantAecFrameIdxCount = 0;
13347 LOGH("instantAEC value set %d",val);
13348 if (mInstantAEC) {
13349 memset(prop, 0, sizeof(prop));
13350 property_get("persist.camera.ae.instant.bound", prop, "10");
13351 int32_t aec_frame_skip_cnt = atoi(prop);
13352 if (aec_frame_skip_cnt >= 0) {
13353 mAecSkipDisplayFrameBound = (uint8_t)aec_frame_skip_cnt;
13354 } else {
13355 LOGE("Invalid prop for aec frame bound %d", aec_frame_skip_cnt);
13356 rc = BAD_VALUE;
13357 }
13358 }
13359 } else {
13360 LOGE("Bad instant aec value set %d", val);
13361 rc = BAD_VALUE;
13362 }
13363 return rc;
13364}
13365
13366/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070013367 * FUNCTION : get_num_overall_buffers
13368 *
13369 * DESCRIPTION: Estimate number of pending buffers across all requests.
13370 *
13371 * PARAMETERS : None
13372 *
13373 * RETURN : Number of overall pending buffers
13374 *
13375 *==========================================================================*/
13376uint32_t PendingBuffersMap::get_num_overall_buffers()
13377{
13378 uint32_t sum_buffers = 0;
13379 for (auto &req : mPendingBuffersInRequest) {
13380 sum_buffers += req.mPendingBufferList.size();
13381 }
13382 return sum_buffers;
13383}
13384
13385/*===========================================================================
13386 * FUNCTION : removeBuf
13387 *
13388 * DESCRIPTION: Remove a matching buffer from tracker.
13389 *
13390 * PARAMETERS : @buffer: image buffer for the callback
13391 *
13392 * RETURN : None
13393 *
13394 *==========================================================================*/
void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
{
    bool buffer_found = false;
    // Search every pending request for the given buffer handle.
    for (auto req = mPendingBuffersInRequest.begin();
            req != mPendingBuffersInRequest.end(); req++) {
        for (auto k = req->mPendingBufferList.begin();
                k != req->mPendingBufferList.end(); k++ ) {
            if (k->buffer == buffer) {
                LOGD("Frame %d: Found Frame buffer %p, take it out from mPendingBufferList",
                        req->frame_number, buffer);
                k = req->mPendingBufferList.erase(k);
                // If that was the request's last buffer, drop the whole
                // request entry. erase() invalidates req, but both loops
                // break out immediately below, so no stale iterator is used.
                if (req->mPendingBufferList.empty()) {
                    // Remove this request from Map
                    req = mPendingBuffersInRequest.erase(req);
                }
                buffer_found = true;
                break;
            }
        }
        if (buffer_found) {
            break;
        }
    }
    LOGD("mPendingBuffersMap.num_overall_buffers = %d",
            get_num_overall_buffers());
}
13421
13422/*===========================================================================
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013423 * FUNCTION : getBufErrStatus
13424 *
13425 * DESCRIPTION: get buffer error status
13426 *
13427 * PARAMETERS : @buffer: buffer handle
13428 *
13429 * RETURN : Error status
13430 *
13431 *==========================================================================*/
13432int32_t PendingBuffersMap::getBufErrStatus(buffer_handle_t *buffer)
13433{
13434 for (auto& req : mPendingBuffersInRequest) {
13435 for (auto& k : req.mPendingBufferList) {
13436 if (k.buffer == buffer)
13437 return k.bufStatus;
13438 }
13439 }
13440 return CAMERA3_BUFFER_STATUS_OK;
13441}
13442
13443/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070013444 * FUNCTION : setPAAFSupport
13445 *
13446 * DESCRIPTION: Set the preview-assisted auto focus support bit in
13447 * feature mask according to stream type and filter
13448 * arrangement
13449 *
13450 * PARAMETERS : @feature_mask: current feature mask, which may be modified
13451 * @stream_type: stream type
13452 * @filter_arrangement: filter arrangement
13453 *
13454 * RETURN : None
13455 *==========================================================================*/
13456void QCamera3HardwareInterface::setPAAFSupport(
13457 cam_feature_mask_t& feature_mask,
13458 cam_stream_type_t stream_type,
13459 cam_color_filter_arrangement_t filter_arrangement)
13460{
13461 LOGD("feature_mask=0x%llx; stream_type=%d, filter_arrangement=%d",
13462 feature_mask, stream_type, filter_arrangement);
13463
13464 switch (filter_arrangement) {
13465 case CAM_FILTER_ARRANGEMENT_RGGB:
13466 case CAM_FILTER_ARRANGEMENT_GRBG:
13467 case CAM_FILTER_ARRANGEMENT_GBRG:
13468 case CAM_FILTER_ARRANGEMENT_BGGR:
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013469 if ((stream_type == CAM_STREAM_TYPE_PREVIEW) ||
13470 (stream_type == CAM_STREAM_TYPE_ANALYSIS) ||
Thierry Strudel3d639192016-09-09 11:52:26 -070013471 (stream_type == CAM_STREAM_TYPE_VIDEO)) {
13472 feature_mask |= CAM_QCOM_FEATURE_PAAF;
13473 }
13474 break;
13475 case CAM_FILTER_ARRANGEMENT_Y:
13476 if (stream_type == CAM_STREAM_TYPE_ANALYSIS) {
13477 feature_mask |= CAM_QCOM_FEATURE_PAAF;
13478 }
13479 break;
13480 default:
13481 break;
13482 }
13483}
13484
13485/*===========================================================================
13486* FUNCTION : getSensorMountAngle
13487*
13488* DESCRIPTION: Retrieve sensor mount angle
13489*
13490* PARAMETERS : None
13491*
13492* RETURN : sensor mount angle in uint32_t
13493*==========================================================================*/
13494uint32_t QCamera3HardwareInterface::getSensorMountAngle()
13495{
13496 return gCamCapability[mCameraId]->sensor_mount_angle;
13497}
13498
13499/*===========================================================================
13500* FUNCTION : getRelatedCalibrationData
13501*
13502* DESCRIPTION: Retrieve related system calibration data
13503*
13504* PARAMETERS : None
13505*
13506* RETURN : Pointer of related system calibration data
13507*==========================================================================*/
13508const cam_related_system_calibration_data_t *QCamera3HardwareInterface::getRelatedCalibrationData()
13509{
13510 return (const cam_related_system_calibration_data_t *)
13511 &(gCamCapability[mCameraId]->related_cam_calibration);
13512}
Shuzhen Wangf6890e02016-08-12 14:28:54 -070013513
13514/*===========================================================================
13515 * FUNCTION : is60HzZone
13516 *
13517 * DESCRIPTION: Whether the phone is in zone with 60hz electricity frequency
13518 *
13519 * PARAMETERS : None
13520 *
13521 * RETURN : True if in 60Hz zone, False otherwise
13522 *==========================================================================*/
13523bool QCamera3HardwareInterface::is60HzZone()
13524{
13525 time_t t = time(NULL);
13526 struct tm lt;
13527
13528 struct tm* r = localtime_r(&t, &lt);
13529
13530 if (r == NULL || lt.tm_gmtoff <= -2*60*60 || lt.tm_gmtoff >= 8*60*60)
13531 return true;
13532 else
13533 return false;
13534}
Shuzhen Wanga5da1022016-07-13 20:18:42 -070013535
13536/*===========================================================================
13537 * FUNCTION : adjustBlackLevelForCFA
13538 *
13539 * DESCRIPTION: Adjust the black level pattern in the order of RGGB to the order
13540 * of bayer CFA (Color Filter Array).
13541 *
13542 * PARAMETERS : @input: black level pattern in the order of RGGB
13543 * @output: black level pattern in the order of CFA
13544 * @color_arrangement: CFA color arrangement
13545 *
13546 * RETURN : None
13547 *==========================================================================*/
13548template<typename T>
13549void QCamera3HardwareInterface::adjustBlackLevelForCFA(
13550 T input[BLACK_LEVEL_PATTERN_CNT],
13551 T output[BLACK_LEVEL_PATTERN_CNT],
13552 cam_color_filter_arrangement_t color_arrangement)
13553{
13554 switch (color_arrangement) {
13555 case CAM_FILTER_ARRANGEMENT_GRBG:
13556 output[0] = input[1];
13557 output[1] = input[0];
13558 output[2] = input[3];
13559 output[3] = input[2];
13560 break;
13561 case CAM_FILTER_ARRANGEMENT_GBRG:
13562 output[0] = input[2];
13563 output[1] = input[3];
13564 output[2] = input[0];
13565 output[3] = input[1];
13566 break;
13567 case CAM_FILTER_ARRANGEMENT_BGGR:
13568 output[0] = input[3];
13569 output[1] = input[2];
13570 output[2] = input[1];
13571 output[3] = input[0];
13572 break;
13573 case CAM_FILTER_ARRANGEMENT_RGGB:
13574 output[0] = input[0];
13575 output[1] = input[1];
13576 output[2] = input[2];
13577 output[3] = input[3];
13578 break;
13579 default:
13580 LOGE("Invalid color arrangement to derive dynamic blacklevel");
13581 break;
13582 }
13583}
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013584
Chien-Yu Chen92724a82017-01-06 11:50:30 -080013585void QCamera3HardwareInterface::updateHdrPlusResultMetadata(
13586 CameraMetadata &resultMetadata,
13587 std::shared_ptr<metadata_buffer_t> settings)
13588{
13589 if (settings == nullptr) {
13590 ALOGE("%s: settings is nullptr.", __FUNCTION__);
13591 return;
13592 }
13593
13594 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, settings) {
13595 resultMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
13596 }
13597
13598 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, settings) {
13599 String8 str((const char *)gps_methods);
13600 resultMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
13601 }
13602
13603 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, settings) {
13604 resultMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
13605 }
13606
13607 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, settings) {
13608 resultMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
13609 }
13610
13611 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, settings) {
13612 uint8_t fwk_jpeg_quality = static_cast<uint8_t>(*jpeg_quality);
13613 resultMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
13614 }
13615
13616 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, settings) {
13617 uint8_t fwk_thumb_quality = static_cast<uint8_t>(*thumb_quality);
13618 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
13619 }
13620
13621 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, settings) {
13622 int32_t fwk_thumb_size[2];
13623 fwk_thumb_size[0] = thumb_size->width;
13624 fwk_thumb_size[1] = thumb_size->height;
13625 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
13626 }
13627
13628 IF_META_AVAILABLE(uint32_t, intent, CAM_INTF_META_CAPTURE_INTENT, settings) {
13629 uint8_t fwk_intent = intent[0];
13630 resultMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &fwk_intent, 1);
13631 }
13632}
13633
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080013634bool QCamera3HardwareInterface::trySubmittingHdrPlusRequest(HdrPlusPendingRequest *hdrPlusRequest,
13635 const camera3_capture_request_t &request, const CameraMetadata &metadata)
13636{
13637 if (hdrPlusRequest == nullptr) return false;
13638
13639 // Check noise reduction mode is high quality.
13640 if (!metadata.exists(ANDROID_NOISE_REDUCTION_MODE) ||
13641 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0] !=
13642 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY) {
Chien-Yu Chenee335912017-02-09 17:53:20 -080013643 ALOGD("%s: Not an HDR+ request: ANDROID_NOISE_REDUCTION_MODE is not HQ: %d", __FUNCTION__,
13644 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0]);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080013645 return false;
13646 }
13647
13648 // Check edge mode is high quality.
13649 if (!metadata.exists(ANDROID_EDGE_MODE) ||
13650 metadata.find(ANDROID_EDGE_MODE).data.u8[0] != ANDROID_EDGE_MODE_HIGH_QUALITY) {
13651 ALOGD("%s: Not an HDR+ request: ANDROID_EDGE_MODE is not HQ.", __FUNCTION__);
13652 return false;
13653 }
13654
13655 if (request.num_output_buffers != 1 ||
13656 request.output_buffers[0].stream->format != HAL_PIXEL_FORMAT_BLOB) {
13657 ALOGD("%s: Not an HDR+ request: Only Jpeg output is supported.", __FUNCTION__);
Chien-Yu Chenee335912017-02-09 17:53:20 -080013658 for (uint32_t i = 0; i < request.num_output_buffers; i++) {
13659 ALOGD("%s: output_buffers[%u]: %dx%d format %d", __FUNCTION__, i,
13660 request.output_buffers[0].stream->width,
13661 request.output_buffers[0].stream->height,
13662 request.output_buffers[0].stream->format);
13663 }
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080013664 return false;
13665 }
13666
13667 // Get a YUV buffer from pic channel.
13668 QCamera3PicChannel *picChannel = (QCamera3PicChannel*)request.output_buffers[0].stream->priv;
13669 auto yuvBuffer = std::make_shared<mm_camera_buf_def_t>();
13670 status_t res = picChannel->getYuvBufferForRequest(yuvBuffer.get(), request.frame_number);
13671 if (res != OK) {
13672 ALOGE("%s: Getting an available YUV buffer from pic channel failed: %s (%d)",
13673 __FUNCTION__, strerror(-res), res);
13674 return false;
13675 }
13676
13677 pbcamera::StreamBuffer buffer;
13678 buffer.streamId = kPbYuvOutputStreamId;
13679 buffer.data = yuvBuffer->buffer;
13680 buffer.dataSize = yuvBuffer->frame_len;
13681
13682 pbcamera::CaptureRequest pbRequest;
13683 pbRequest.id = request.frame_number;
13684 pbRequest.outputBuffers.push_back(buffer);
13685
13686 // Submit an HDR+ capture request to HDR+ service.
13687 res = mHdrPlusClient->submitCaptureRequest(&pbRequest);
13688 if (res != OK) {
13689 ALOGE("%s: %d: Submitting a capture request failed: %s (%d)", __FUNCTION__, __LINE__,
13690 strerror(-res), res);
13691 return false;
13692 }
13693
13694 hdrPlusRequest->yuvBuffer = yuvBuffer;
13695 hdrPlusRequest->frameworkOutputBuffers.push_back(request.output_buffers[0]);
13696
13697 return true;
13698}
13699
Chien-Yu Chenee335912017-02-09 17:53:20 -080013700status_t QCamera3HardwareInterface::enableHdrPlusModeLocked()
13701{
13702 if (mHdrPlusClient == nullptr) {
13703 ALOGD("%s: HDR+ client is not created.", __FUNCTION__);
13704 return -ENODEV;
13705 }
13706
13707 // Connect to HDR+ service
13708 status_t res = mHdrPlusClient->connect(this);
13709 if (res != OK) {
13710 LOGE("%s: Failed to connect to HDR+ client: %s (%d).", __FUNCTION__,
13711 strerror(-res), res);
13712 return res;
13713 }
13714
13715 // Set static metadata.
13716 res = mHdrPlusClient->setStaticMetadata(*gStaticMetadata[mCameraId]);
13717 if (res != OK) {
13718 LOGE("%s: Failed set static metadata in HDR+ client: %s (%d).", __FUNCTION__,
13719 strerror(-res), res);
13720 mHdrPlusClient->disconnect();
13721 return res;
13722 }
13723
13724 // Configure stream for HDR+.
13725 res = configureHdrPlusStreamsLocked();
13726 if (res != OK) {
13727 LOGE("%s: Failed to configure HDR+ streams: %s (%d)", __FUNCTION__, strerror(-res), res);
13728 mHdrPlusClient->disconnect();
13729 return res;
13730 }
13731
13732 mHdrPlusModeEnabled = true;
13733 ALOGD("%s: HDR+ mode enabled", __FUNCTION__);
13734
13735 return OK;
13736}
13737
13738void QCamera3HardwareInterface::disableHdrPlusModeLocked()
13739{
13740 // Disconnect from HDR+ service.
13741 if (mHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
13742 mHdrPlusClient->disconnect();
13743 }
13744
13745 mHdrPlusModeEnabled = false;
13746 ALOGD("%s: HDR+ mode disabled", __FUNCTION__);
13747}
13748
13749status_t QCamera3HardwareInterface::configureHdrPlusStreamsLocked()
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080013750{
13751 pbcamera::InputConfiguration inputConfig;
13752 std::vector<pbcamera::StreamConfiguration> outputStreamConfigs;
13753 status_t res = OK;
13754
13755 // Configure HDR+ client streams.
13756 // Get input config.
13757 if (mHdrPlusRawSrcChannel) {
13758 // HDR+ input buffers will be provided by HAL.
13759 res = fillPbStreamConfig(&inputConfig.streamConfig, kPbRaw10InputStreamId,
13760 HAL_PIXEL_FORMAT_RAW10, mHdrPlusRawSrcChannel, /*stream index*/0);
13761 if (res != OK) {
13762 LOGE("%s: Failed to get fill stream config for HDR+ raw src stream: %s (%d)",
13763 __FUNCTION__, strerror(-res), res);
13764 return res;
13765 }
13766
13767 inputConfig.isSensorInput = false;
13768 } else {
13769 // Sensor MIPI will send data to Easel.
13770 inputConfig.isSensorInput = true;
Chien-Yu Chenee335912017-02-09 17:53:20 -080013771 inputConfig.sensorMode.pixelArrayWidth = mSensorModeInfo.pixel_array_size.width;
13772 inputConfig.sensorMode.pixelArrayHeight = mSensorModeInfo.pixel_array_size.height;
13773 inputConfig.sensorMode.activeArrayWidth = mSensorModeInfo.active_array_size.width;
13774 inputConfig.sensorMode.activeArrayHeight = mSensorModeInfo.active_array_size.height;
13775 inputConfig.sensorMode.outputPixelClkHz = mSensorModeInfo.op_pixel_clk;
13776 if (mSensorModeInfo.num_raw_bits != 10) {
13777 ALOGE("%s: Only RAW10 is supported but this sensor mode has %d raw bits.", __FUNCTION__,
13778 mSensorModeInfo.num_raw_bits);
13779 return BAD_VALUE;
13780 }
13781
13782 inputConfig.sensorMode.format = HAL_PIXEL_FORMAT_RAW10;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080013783 }
13784
13785 // Get output configurations.
13786 // Easel may need to output RAW16 buffers if mRawChannel was created.
Chien-Yu Chenee335912017-02-09 17:53:20 -080013787 // TODO: handle RAW16 outputs.
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080013788
13789 // Easel may need to output YUV output buffers if mPictureChannel was created.
13790 pbcamera::StreamConfiguration yuvOutputConfig;
13791 if (mPictureChannel != nullptr) {
13792 res = fillPbStreamConfig(&yuvOutputConfig, kPbYuvOutputStreamId,
13793 HAL_PIXEL_FORMAT_YCrCb_420_SP, mPictureChannel, /*stream index*/0);
13794 if (res != OK) {
13795 LOGE("%s: Failed to get fill stream config for YUV stream: %s (%d)",
13796 __FUNCTION__, strerror(-res), res);
13797
13798 return res;
13799 }
13800
13801 outputStreamConfigs.push_back(yuvOutputConfig);
13802 }
13803
13804 // TODO: consider other channels for YUV output buffers.
13805
13806 res = mHdrPlusClient->configureStreams(inputConfig, outputStreamConfigs);
13807 if (res != OK) {
13808 LOGE("%d: Failed to configure streams with HDR+ client: %s (%d)", __FUNCTION__,
13809 strerror(-res), res);
13810 return res;
13811 }
13812
13813 return OK;
13814}
13815
Chien-Yu Chene687bd02016-12-07 18:30:26 -080013816void QCamera3HardwareInterface::onCaptureResult(pbcamera::CaptureResult *result,
13817 const camera_metadata_t &resultMetadata) {
13818 if (result != nullptr) {
13819 if (result->outputBuffers.size() != 1) {
13820 ALOGE("%s: Number of output buffers (%u) is not supported.", __FUNCTION__,
13821 result->outputBuffers.size());
13822 return;
13823 }
13824
13825 if (result->outputBuffers[0].streamId != kPbYuvOutputStreamId) {
13826 ALOGE("%s: Only YUV output stream is supported. (stream id %d).", __FUNCTION__,
13827 result->outputBuffers[0].streamId);
13828 return;
13829 }
13830
Chien-Yu Chen92724a82017-01-06 11:50:30 -080013831 // Find the pending HDR+ request.
Chien-Yu Chene687bd02016-12-07 18:30:26 -080013832 HdrPlusPendingRequest pendingRequest;
13833 {
13834 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
13835 auto req = mHdrPlusPendingRequests.find(result->requestId);
13836 pendingRequest = req->second;
13837 }
13838
Chien-Yu Chen92724a82017-01-06 11:50:30 -080013839 // Update the result metadata with the settings of the HDR+ still capture request because
13840 // the result metadata belongs to a ZSL buffer.
13841 CameraMetadata metadata;
13842 metadata = &resultMetadata;
13843 updateHdrPlusResultMetadata(metadata, pendingRequest.settings);
13844 camera_metadata_t* updatedResultMetadata = metadata.release();
13845
13846 QCamera3PicChannel *picChannel =
13847 (QCamera3PicChannel*)pendingRequest.frameworkOutputBuffers[0].stream->priv;
13848
Chien-Yu Chene687bd02016-12-07 18:30:26 -080013849 // Check if dumping HDR+ YUV output is enabled.
13850 char prop[PROPERTY_VALUE_MAX];
13851 property_get("persist.camera.hdrplus.dump_yuv", prop, "0");
13852 bool dumpYuvOutput = atoi(prop);
13853
13854 if (dumpYuvOutput) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -080013855 // Dump yuv buffer to a ppm file.
13856 pbcamera::StreamConfiguration outputConfig;
13857 status_t rc = fillPbStreamConfig(&outputConfig, kPbYuvOutputStreamId,
13858 HAL_PIXEL_FORMAT_YCrCb_420_SP, picChannel, /*stream index*/0);
13859 if (rc == OK) {
13860 char buf[FILENAME_MAX] = {};
13861 snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"s_%d_%d_%dx%d.ppm",
13862 result->requestId, result->outputBuffers[0].streamId,
13863 outputConfig.image.width, outputConfig.image.height);
13864
13865 hdrplus_client_utils::writePpm(buf, outputConfig, result->outputBuffers[0]);
13866 } else {
13867 LOGW("%s: Couldn't dump YUV buffer because getting stream config failed: %s (%d).",
13868 __FUNCTION__, strerror(-rc), rc);
13869 }
13870 }
13871
Chien-Yu Chen92724a82017-01-06 11:50:30 -080013872 uint32_t halStreamId = picChannel->getStreamID(picChannel->getStreamTypeMask());
13873 auto halMetadata = std::make_shared<metadata_buffer_t>();
13874 clear_metadata_buffer(halMetadata.get());
13875
13876 // Convert updated result metadata to HAL metadata and return the yuv buffer for Jpeg
13877 // encoding.
13878 status_t res = translateFwkMetadataToHalMetadata(updatedResultMetadata, halMetadata.get(),
13879 halStreamId, /*minFrameDuration*/0);
13880 if (res == OK) {
13881 // Return the buffer to pic channel for encoding.
13882 picChannel->returnYuvBufferAndEncode(pendingRequest.yuvBuffer.get(),
13883 pendingRequest.frameworkOutputBuffers[0].buffer, result->requestId,
13884 halMetadata);
13885 } else {
13886 // Return the buffer without encoding.
13887 // TODO: This should not happen but we may want to report an error buffer to camera
13888 // service.
13889 picChannel->returnYuvBuffer(pendingRequest.yuvBuffer.get());
13890 ALOGE("%s: Translate framework metadata to HAL metadata failed: %s (%d).", __FUNCTION__,
13891 strerror(-res), res);
13892 }
13893
13894 // Send HDR+ metadata to framework.
13895 {
13896 pthread_mutex_lock(&mMutex);
13897
13898 // updatedResultMetadata will be freed in handlePendingResultsWithLock.
13899 handlePendingResultsWithLock(result->requestId, updatedResultMetadata);
13900 pthread_mutex_unlock(&mMutex);
13901 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -080013902
13903 // Remove the HDR+ pending request.
13904 {
13905 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
13906 auto req = mHdrPlusPendingRequests.find(result->requestId);
13907 mHdrPlusPendingRequests.erase(req);
13908 }
13909 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013910}
13911
Chien-Yu Chene687bd02016-12-07 18:30:26 -080013912void QCamera3HardwareInterface::onFailedCaptureResult(pbcamera::CaptureResult *failedResult) {
13913 // TODO: Handle HDR+ capture failures and send the failure to framework.
13914 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
13915 auto pendingRequest = mHdrPlusPendingRequests.find(failedResult->requestId);
13916
13917 // Return the buffer to pic channel.
13918 QCamera3PicChannel *picChannel =
13919 (QCamera3PicChannel*)pendingRequest->second.frameworkOutputBuffers[0].stream->priv;
13920 picChannel->returnYuvBuffer(pendingRequest->second.yuvBuffer.get());
13921
13922 mHdrPlusPendingRequests.erase(pendingRequest);
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013923}
13924
Thierry Strudel3d639192016-09-09 11:52:26 -070013925}; //end namespace qcamera