/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*     * Redistributions of source code must retain the above copyright
*       notice, this list of conditions and the following disclaimer.
*     * Redistributions in binary form must reproduce the above
*       copyright notice, this list of conditions and the following
*       disclaimer in the documentation and/or other materials provided
*       with the distribution.
*     * Neither the name of The Linux Foundation nor the names of its
*       contributors may be used to endorse or promote products derived
*       from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/

#define LOG_TAG "QCamera3HWI"
//#define LOG_NDEBUG 0

#define __STDC_LIMIT_MACROS

// To remove
#include <cutils/properties.h>

// System dependencies
#include <dlfcn.h>
#include <fcntl.h>
#include <stdio.h>
#include <stdlib.h>
#include "utils/Timers.h"
#include "sys/ioctl.h"
#include <time.h>
#include <sync/sync.h>
#include "gralloc_priv.h"
#include <map>

// Display dependencies
#include "qdMetaData.h"

// Camera dependencies
#include "android/QCamera3External.h"
#include "util/QCameraFlash.h"
#include "QCamera3HWI.h"
#include "QCamera3VendorTags.h"
#include "QCameraTrace.h"

#include "HdrPlusClientUtils.h"

extern "C" {
#include "mm_camera_dbg.h"
}
#include "cam_cond.h"

using namespace android;

namespace qcamera {

#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )

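// Framework-facing pipeline characteristics: PARTIAL_RESULT_COUNT is the number
// of partial metadata results delivered per capture (reported as
// android.request.partialResultCount), and EMPTY_PIPELINE_DELAY is the expected
// frame delay before an empty pipeline returns its first result.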
#define EMPTY_PIPELINE_DELAY 2
#define PARTIAL_RESULT_COUNT 2
#define FRAME_SKIP_DELAY 0

#define MAX_VALUE_8BIT ((1<<8)-1)
#define MAX_VALUE_10BIT ((1<<10)-1)
#define MAX_VALUE_12BIT ((1<<12)-1)

#define VIDEO_4K_WIDTH 3840
#define VIDEO_4K_HEIGHT 2160

#define MAX_EIS_WIDTH 1920
#define MAX_EIS_HEIGHT 1080

#define MAX_RAW_STREAMS 1
#define MAX_STALLING_STREAMS 1
#define MAX_PROCESSED_STREAMS 3
/* Batch mode is enabled only if FPS set is equal to or greater than this */
#define MIN_FPS_FOR_BATCH_MODE (120)
#define PREVIEW_FPS_FOR_HFR (30)
#define DEFAULT_VIDEO_FPS (30.0)
#define TEMPLATE_MAX_PREVIEW_FPS (30.0)
#define MAX_HFR_BATCH_SIZE (8)
#define REGIONS_TUPLE_COUNT 5
#define HDR_PLUS_PERF_TIME_OUT (7000) // milliseconds
// Threshold for detection of missing buffers, in seconds
#define MISSING_REQUEST_BUF_TIMEOUT 3
#define MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT 30
#define FLUSH_TIMEOUT 3
#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))
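// Illustrative lookup sketch (assumes QCameraMap exposes the fwk_name/hal_name
// fields used elsewhere in this HAL):
//     for (size_t i = 0; i < METADATA_MAP_SIZE(EFFECT_MODES_MAP); i++)
//         if (EFFECT_MODES_MAP[i].fwk_name == fwkMode) { /* use hal_name */ }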

#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3 ( CAM_QCOM_FEATURE_DENOISE2D |\
                                            CAM_QCOM_FEATURE_CROP |\
                                            CAM_QCOM_FEATURE_ROTATION |\
                                            CAM_QCOM_FEATURE_SHARPNESS |\
                                            CAM_QCOM_FEATURE_SCALE |\
                                            CAM_QCOM_FEATURE_CAC |\
                                            CAM_QCOM_FEATURE_CDS )
/* Per configuration size for static metadata length */
#define PER_CONFIGURATION_SIZE_3 (3)

#define TIMEOUT_NEVER -1

/* Face landmarks indices */
#define LEFT_EYE_X 0
#define LEFT_EYE_Y 1
#define RIGHT_EYE_X 2
#define RIGHT_EYE_Y 3
#define MOUTH_X 4
#define MOUTH_Y 5
#define TOTAL_LANDMARK_INDICES 6
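// Packing order of face-landmark coordinates in the flat array reported to the
// framework: (x, y) for the left eye, right eye and mouth, giving
// TOTAL_LANDMARK_INDICES values per detected face.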

cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
extern pthread_mutex_t gCamLock;
volatile uint32_t gCamHal3LogLevel = 1;
extern uint8_t gNumCameraSessions;

const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
    {"On",  CAM_CDS_MODE_ON},
    {"Off", CAM_CDS_MODE_OFF},
    {"Auto",CAM_CDS_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_video_hdr_mode_t,
        cam_video_hdr_mode_t> QCamera3HardwareInterface::VIDEO_HDR_MODES_MAP[] = {
    { QCAMERA3_VIDEO_HDR_MODE_OFF, CAM_VIDEO_HDR_MODE_OFF },
    { QCAMERA3_VIDEO_HDR_MODE_ON, CAM_VIDEO_HDR_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_binning_correction_mode_t,
        cam_binning_correction_mode_t> QCamera3HardwareInterface::BINNING_CORRECTION_MODES_MAP[] = {
    { QCAMERA3_BINNING_CORRECTION_MODE_OFF, CAM_BINNING_CORRECTION_MODE_OFF },
    { QCAMERA3_BINNING_CORRECTION_MODE_ON, CAM_BINNING_CORRECTION_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_ir_mode_t,
        cam_ir_mode_type_t> QCamera3HardwareInterface::IR_MODES_MAP [] = {
    {QCAMERA3_IR_MODE_OFF, CAM_IR_MODE_OFF},
    {QCAMERA3_IR_MODE_ON, CAM_IR_MODE_ON},
    {QCAMERA3_IR_MODE_AUTO, CAM_IR_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_effect_mode_t,
        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF, CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO, CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE, CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE, CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA, CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE, CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA, CAM_EFFECT_MODE_AQUA }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_awb_mode_t,
        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF, CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO, CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT, CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT, CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT, CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT, CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT, CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE, CAM_WB_MODE_SHADE }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_scene_mode_t,
        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY, CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION, CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT, CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE, CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT, CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE, CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH, CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW, CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET, CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO, CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS, CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS, CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY, CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT, CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE, CAM_SCENE_MODE_BARCODE},
    { ANDROID_CONTROL_SCENE_MODE_HDR, CAM_SCENE_MODE_HDR}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_af_mode_t,
        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF, CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF, CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO, CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO, CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF, CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO, CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_color_correction_aberration_mode_t,
        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_antibanding_mode_t,
        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF, CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF, CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON, CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH, CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH, CAM_FLASH_MODE_ON },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_flash_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF, CAM_FLASH_MODE_OFF },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH, CAM_FLASH_MODE_TORCH }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_statistics_face_detect_mode_t,
        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF, CAM_FACE_DETECT_MODE_OFF },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL, CAM_FACE_DETECT_MODE_FULL }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
            CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
            CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
            CAM_FOCUS_CALIBRATED }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_state_t,
        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
    { ANDROID_LENS_STATE_STATIONARY, CAM_AF_LENS_STATE_STATIONARY},
    { ANDROID_LENS_STATE_MOVING, CAM_AF_LENS_STATE_MOVING}
};

const int32_t available_thumbnail_sizes[] = {0, 0,
        176, 144,
        240, 144,
        256, 144,
        240, 160,
        256, 154,
        240, 240,
        320, 240};
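// Flat list of (width, height) thumbnail sizes; the leading {0, 0} entry means
// "no thumbnail" and is required by ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES.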

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_test_pattern_mode_t,
        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF, CAM_TEST_PATTERN_OFF },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR, CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS, CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9, CAM_TEST_PATTERN_PN9 },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1, CAM_TEST_PATTERN_CUSTOM1},
};

/* Since there is no mapping for every option, some Android enums are not listed.
 * Also, the order in this list is important: when mapping from HAL to Android the
 * code traverses from lower to higher index, so for HAL values that map to several
 * Android values the first match wins.
 */
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_reference_illuminant1_t,
        cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT, CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55, CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN, CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO},
};

const QCamera3HardwareInterface::QCameraMap<
        int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
    { 60, CAM_HFR_MODE_60FPS},
    { 90, CAM_HFR_MODE_90FPS},
    { 120, CAM_HFR_MODE_120FPS},
    { 150, CAM_HFR_MODE_150FPS},
    { 180, CAM_HFR_MODE_180FPS},
    { 210, CAM_HFR_MODE_210FPS},
    { 240, CAM_HFR_MODE_240FPS},
    { 480, CAM_HFR_MODE_480FPS},
};
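// Maps requested video fps to the HFR sensor mode; per MIN_FPS_FOR_BATCH_MODE
// above, request batching applies only at 120 fps and higher.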

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_instant_aec_mode_t,
        cam_aec_convergence_type> QCamera3HardwareInterface::INSTANT_AEC_MODES_MAP[] = {
    { QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE, CAM_AEC_NORMAL_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_AGGRESSIVE_CONVERGENCE, CAM_AEC_AGGRESSIVE_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_FAST_CONVERGENCE, CAM_AEC_FAST_CONVERGENCE},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_exposure_meter_mode_t,
        cam_auto_exposure_mode_type> QCamera3HardwareInterface::AEC_MODES_MAP[] = {
    { QCAMERA3_EXP_METER_MODE_FRAME_AVERAGE, CAM_AEC_MODE_FRAME_AVERAGE },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED, CAM_AEC_MODE_CENTER_WEIGHTED },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING, CAM_AEC_MODE_SPOT_METERING },
    { QCAMERA3_EXP_METER_MODE_SMART_METERING, CAM_AEC_MODE_SMART_METERING },
    { QCAMERA3_EXP_METER_MODE_USER_METERING, CAM_AEC_MODE_USER_METERING },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING_ADV, CAM_AEC_MODE_SPOT_METERING_ADV },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED_ADV, CAM_AEC_MODE_CENTER_WEIGHTED_ADV },
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_iso_mode_t,
        cam_iso_mode_type> QCamera3HardwareInterface::ISO_MODES_MAP[] = {
    { QCAMERA3_ISO_MODE_AUTO, CAM_ISO_MODE_AUTO },
    { QCAMERA3_ISO_MODE_DEBLUR, CAM_ISO_MODE_DEBLUR },
    { QCAMERA3_ISO_MODE_100, CAM_ISO_MODE_100 },
    { QCAMERA3_ISO_MODE_200, CAM_ISO_MODE_200 },
    { QCAMERA3_ISO_MODE_400, CAM_ISO_MODE_400 },
    { QCAMERA3_ISO_MODE_800, CAM_ISO_MODE_800 },
    { QCAMERA3_ISO_MODE_1600, CAM_ISO_MODE_1600 },
    { QCAMERA3_ISO_MODE_3200, CAM_ISO_MODE_3200 },
};

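// Dispatch table handed to the camera framework. register_stream_buffers and
// get_metadata_vendor_tag_ops are deprecated at this HAL version and are
// intentionally left NULL.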
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    .initialize = QCamera3HardwareInterface::initialize,
    .configure_streams = QCamera3HardwareInterface::configure_streams,
    .register_stream_buffers = NULL,
    .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
    .process_capture_request = QCamera3HardwareInterface::process_capture_request,
    .get_metadata_vendor_tag_ops = NULL,
    .dump = QCamera3HardwareInterface::dump,
    .flush = QCamera3HardwareInterface::flush,
    .reserved = {0},
};

// initialise to some default value
uint32_t QCamera3HardwareInterface::sessionId[] = {0xDEADBEEF, 0xDEADBEEF, 0xDEADBEEF};
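// 0xDEADBEEF doubles as the "no active session" sentinel: openCamera() stores the
// real session id used for dual-camera linking, and closeCamera() resets the slot.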

/*===========================================================================
 * FUNCTION   : QCamera3HardwareInterface
 *
 * DESCRIPTION: constructor of QCamera3HardwareInterface
 *
 * PARAMETERS :
 *   @cameraId : camera ID
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
        const camera_module_callbacks_t *callbacks)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mSupportChannel(NULL),
      mAnalysisChannel(NULL),
      mRawDumpChannel(NULL),
      mHdrPlusRawSrcChannel(NULL),
      mDummyBatchChannel(NULL),
      mPerfLockMgr(),
      mCommon(),
      mChannelHandle(0),
      mFirstConfiguration(true),
      mFlush(false),
      mFlushPerf(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mPrevParameters(NULL),
      m_bIsVideo(false),
      m_bIs4KVideo(false),
      m_bEisSupportedSize(false),
      m_bEisEnable(false),
      m_MobicatMask(0),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      mMetaFrameCount(0U),
      mUpdateDebugLevel(false),
      mCallbacks(callbacks),
      mCaptureIntent(0),
      mCacMode(0),
      mHybridAeEnable(0),
      /* DevCamDebug metadata internal m control*/
      mDevCamDebugMetaEnable(0),
      /* DevCamDebug metadata end */
      mBatchSize(0),
      mToBeQueuedVidBufs(0),
      mHFRVideoFps(DEFAULT_VIDEO_FPS),
      mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
      mStreamConfig(false),
      mFirstFrameNumberInBatch(0),
      mNeedSensorRestart(false),
      mPreviewStarted(false),
      mMinInFlightRequests(MIN_INFLIGHT_REQUESTS),
      mMaxInFlightRequests(MAX_INFLIGHT_REQUESTS),
      mInstantAEC(false),
      mResetInstantAEC(false),
      mInstantAECSettledFrameNumber(0),
      mAecSkipDisplayFrameBound(0),
      mInstantAecFrameIdxCount(0),
      mCurrFeatureState(0),
      mLdafCalibExist(false),
      mLastCustIntentFrmNum(-1),
      mState(CLOSED),
      mIsDeviceLinked(false),
      mIsMainCamera(true),
      mLinkedCameraId(0),
      m_pDualCamCmdHeap(NULL),
      m_pDualCamCmdPtr(NULL),
      m_bSensorHDREnabled(false)
{
    getLogLevel();
    mCommon.init(gCamCapability[cameraId]);
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
#ifndef USE_HAL_3_3
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_4;
#else
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
#endif
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    PTHREAD_COND_INIT(&mBuffersCond);

    PTHREAD_COND_INIT(&mRequestCond);
    mPendingLiveRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

    // Getting system props of different kinds
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.raw.dump", prop, "0");
    mEnableRawDump = atoi(prop);
    property_get("persist.camera.hal3.force.hdr", prop, "0");
    mForceHdrSnapshot = atoi(prop);

    if (mEnableRawDump)
        LOGD("Raw dump from Camera HAL enabled");

    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
    memset(mLdafCalib, 0, sizeof(mLdafCalib));

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.preview", prop, "0");
    m_bTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.swtnr.preview", prop, "1");
    m_bSwTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.video", prop, "0");
    m_bTnrVideo = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.avtimer.debug", prop, "0");
    m_debug_avtimer = (uint8_t)atoi(prop);
    LOGI("AV timer enabled: %d", m_debug_avtimer);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.cacmode.disable", prop, "0");
    m_cacModeDisabled = (uint8_t)atoi(prop);

    //Load and read GPU library.
    lib_surface_utils = NULL;
    LINK_get_surface_pixel_alignment = NULL;
    mSurfaceStridePadding = CAM_PAD_TO_32;
    lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
    if (lib_surface_utils) {
        *(void **)&LINK_get_surface_pixel_alignment =
                dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
        if (LINK_get_surface_pixel_alignment) {
            mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
        }
        dlclose(lib_surface_utils);
    }

    m60HzZone = is60HzZone();
}

/*===========================================================================
 * FUNCTION   : ~QCamera3HardwareInterface
 *
 * DESCRIPTION: destructor of QCamera3HardwareInterface
 *
 * PARAMETERS : none
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    LOGD("E");

    int32_t rc = 0;

    // Disable power hint and enable the perf lock for close camera
    mPerfLockMgr.releasePerfLock(PERF_LOCK_POWERHINT_ENCODE);
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_CLOSE_CAMERA);

    // unlink of dualcam during close camera
    if (mIsDeviceLinked) {
        cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
                &m_pDualCamCmdPtr->bundle_info;
        m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
        m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
        pthread_mutex_lock(&gCamLock);

        if (mIsMainCamera == 1) {
            m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            // related session id should be session id of linked session
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        } else {
            m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        }
        pthread_mutex_unlock(&gCamLock);

        rc = mCameraHandle->ops->set_dual_cam_cmd(
                mCameraHandle->camera_handle);
        if (rc < 0) {
            LOGE("Dualcam: Unlink failed, but still proceed to close");
        }
    }

    /* We need to stop all streams before deleting any stream */
    if (mRawDumpChannel) {
        mRawDumpChannel->stop();
    }

    if (mHdrPlusRawSrcChannel) {
        mHdrPlusRawSrcChannel->stop();
    }

    // NOTE: 'camera3_stream_t *' objects are already freed at
    //       this stage by the framework
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel) {
            channel->stop();
        }
    }
    if (mSupportChannel)
        mSupportChannel->stop();

    if (mAnalysisChannel) {
        mAnalysisChannel->stop();
    }
    if (mMetadataChannel) {
        mMetadataChannel->stop();
    }
    if (mChannelHandle) {
        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGD("stopping channel %d", mChannelHandle);
    }

    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel)
            delete channel;
        free (*it);
    }
    if (mSupportChannel) {
        delete mSupportChannel;
        mSupportChannel = NULL;
    }

    if (mAnalysisChannel) {
        delete mAnalysisChannel;
        mAnalysisChannel = NULL;
    }
    if (mRawDumpChannel) {
        delete mRawDumpChannel;
        mRawDumpChannel = NULL;
    }
    if (mHdrPlusRawSrcChannel) {
        delete mHdrPlusRawSrcChannel;
        mHdrPlusRawSrcChannel = NULL;
    }
    if (mDummyBatchChannel) {
        delete mDummyBatchChannel;
        mDummyBatchChannel = NULL;
    }

    mPictureChannel = NULL;

    if (mMetadataChannel) {
        delete mMetadataChannel;
        mMetadataChannel = NULL;
    }

    /* Clean up all channels */
    if (mCameraInitialized) {
        if(!mFirstConfiguration){
            //send the last unconfigure
            cam_stream_size_info_t stream_config_info;
            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
            stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
            stream_config_info.buffer_info.max_buffers =
                    m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
            clear_metadata_buffer(mParameters);
            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
                    stream_config_info);
            int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
            if (rc < 0) {
                LOGE("set_parms failed for unconfigure");
            }
        }
        deinitParameters();
    }

    if (mChannelHandle) {
        mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGH("deleting channel %d", mChannelHandle);
        mChannelHandle = 0;
    }

    if (mState != CLOSED)
        closeCamera();

    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        req.mPendingBufferList.clear();
    }
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end();) {
        i = erasePendingRequest(i);
    }
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    mPerfLockMgr.releasePerfLock(PERF_LOCK_CLOSE_CAMERA);

    pthread_cond_destroy(&mRequestCond);

    pthread_cond_destroy(&mBuffersCond);

    pthread_mutex_destroy(&mMutex);
    LOGD("X");
}

/*===========================================================================
 * FUNCTION   : erasePendingRequest
 *
 * DESCRIPTION: function to erase a desired pending request after freeing any
 *              allocated memory
 *
 * PARAMETERS :
 *   @i : iterator pointing to pending request to be erased
 *
 * RETURN     : iterator pointing to the next request
 *==========================================================================*/
QCamera3HardwareInterface::pendingRequestIterator
        QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
{
    if (i->input_buffer != NULL) {
        free(i->input_buffer);
        i->input_buffer = NULL;
    }
    if (i->settings != NULL)
        free_camera_metadata((camera_metadata_t*)i->settings);
    return mPendingRequestsList.erase(i);
}

/*===========================================================================
 * FUNCTION   : camEvtHandle
 *
 * DESCRIPTION: Function registered to mm-camera-interface to handle events
 *
 * PARAMETERS :
 *   @camera_handle : interface layer camera handle
 *   @evt           : ptr to event
 *   @user_data     : user data ptr
 *
 * RETURN     : none
 *==========================================================================*/
void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
        mm_camera_event_t *evt,
        void *user_data)
{
    QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
    if (obj && evt) {
        switch(evt->server_event_type) {
            case CAM_EVENT_TYPE_DAEMON_DIED:
                pthread_mutex_lock(&obj->mMutex);
                obj->mState = ERROR;
                pthread_mutex_unlock(&obj->mMutex);
                LOGE("Fatal, camera daemon died");
                break;

            case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
                LOGD("HAL got request pull from Daemon");
                pthread_mutex_lock(&obj->mMutex);
                obj->mWokenUpByDaemon = true;
                obj->unblockRequestIfNecessary();
                pthread_mutex_unlock(&obj->mMutex);
                break;

            default:
                LOGW("Warning: Unhandled event %d",
                        evt->server_event_type);
                break;
        }
    } else {
        LOGE("NULL user_data/evt");
    }
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS :
 *   @hw_device : double ptr for camera device struct
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
{
    int rc = 0;
    if (mState != CLOSED) {
        *hw_device = NULL;
        return PERMISSION_DENIED;
    }

    mPerfLockMgr.acquirePerfLock(PERF_LOCK_OPEN_CAMERA);
    LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
            mCameraId);

    rc = openCamera();
    if (rc == 0) {
        *hw_device = &mCameraDevice.common;
    } else {
        *hw_device = NULL;
    }

    LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    if (rc == NO_ERROR) {
        mState = OPENED;
    }
    return rc;
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera()
{
    int rc = 0;
    char value[PROPERTY_VALUE_MAX];

    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_OPENCAMERA);
    if (mCameraHandle) {
        LOGE("Failure: Camera already opened");
        return ALREADY_EXISTS;
    }

    rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
    if (rc < 0) {
        LOGE("Failed to reserve flash for camera id: %d",
                mCameraId);
        return UNKNOWN_ERROR;
    }

    rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
    if (rc) {
        LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
        return rc;
    }

    if (!mCameraHandle) {
        LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
        return -ENODEV;
    }

    rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
            camEvtHandle, (void *)this);

    if (rc < 0) {
        LOGE("Error, failed to register event callback");
        /* Not closing camera here since it is already handled in destructor */
        return FAILED_TRANSACTION;
    }

    mExifParams.debug_params =
            (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
    if (mExifParams.debug_params) {
        memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
    } else {
        LOGE("Out of Memory. Allocation failed for 3A debug exif params");
        return NO_MEMORY;
    }
    mFirstConfiguration = true;

    //Notify display HAL that a camera session is active.
    //But avoid calling the same during bootup because camera service might open/close
    //cameras at boot time during its initialization and display service will also internally
    //wait for camera service to initialize first while calling this display API, resulting in a
    //deadlock situation. Since boot time camera open/close calls are made only to fetch
    //capabilities, no need of this display bw optimization.
    //Use "service.bootanim.exit" property to know boot status.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (gNumCameraSessions++ == 0) {
            setCameraLaunchStatus(true);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    //fill the session id needed while linking dual cam
    pthread_mutex_lock(&gCamLock);
    rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
            &sessionId[mCameraId]);
    pthread_mutex_unlock(&gCamLock);

    if (rc < 0) {
        LOGE("Error, failed to get session id");
        return UNKNOWN_ERROR;
    } else {
        //Allocate related cam sync buffer
        //this is needed for the payload that goes along with bundling cmd for related
        //camera use cases
        m_pDualCamCmdHeap = new QCamera3HeapMemory(1);
        rc = m_pDualCamCmdHeap->allocate(sizeof(cam_dual_camera_cmd_info_t));
        if(rc != OK) {
            rc = NO_MEMORY;
            LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
            return NO_MEMORY;
        }

        //Map memory for related cam sync buffer
        rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
                CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF,
                m_pDualCamCmdHeap->getFd(0),
                sizeof(cam_dual_camera_cmd_info_t),
                m_pDualCamCmdHeap->getPtr(0));
        if(rc < 0) {
            LOGE("Dualcam: failed to map Related cam sync buffer");
            rc = FAILED_TRANSACTION;
            return NO_MEMORY;
        }
        m_pDualCamCmdPtr =
                (cam_dual_camera_cmd_info_t*) DATA_PTR(m_pDualCamCmdHeap,0);
    }

    LOGH("mCameraId=%d",mCameraId);

    return NO_ERROR;
}

/*===========================================================================
 * FUNCTION   : closeCamera
 *
 * DESCRIPTION: close camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::closeCamera()
{
    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CLOSECAMERA);
    int rc = NO_ERROR;
    char value[PROPERTY_VALUE_MAX];

    LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
            mCameraId);

    // unmap memory for related cam sync buffer
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF);
    if (NULL != m_pDualCamCmdHeap) {
        m_pDualCamCmdHeap->deallocate();
        delete m_pDualCamCmdHeap;
        m_pDualCamCmdHeap = NULL;
        m_pDualCamCmdPtr = NULL;
    }

    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
    mCameraHandle = NULL;

    // Disconnect from HDR+ client.
    if (mHdrPlusClient != nullptr) {
        mHdrPlusClient->disconnect();
        mHdrPlusClient = nullptr;
    }

    //reset session id to some invalid id
    pthread_mutex_lock(&gCamLock);
    sessionId[mCameraId] = 0xDEADBEEF;
    pthread_mutex_unlock(&gCamLock);

    //Notify display HAL that there is no active camera session
    //but avoid calling the same during bootup. Refer to openCamera
    //for more details.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (--gNumCameraSessions == 0) {
            setCameraLaunchStatus(false);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    if (mExifParams.debug_params) {
        free(mExifParams.debug_params);
        mExifParams.debug_params = NULL;
    }
    if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
        LOGW("Failed to release flash for camera id: %d",
                mCameraId);
    }
    mState = CLOSED;
    LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
            mCameraId, rc);
    return rc;
}

/*===========================================================================
 * FUNCTION   : initialize
 *
 * DESCRIPTION: Initialize framework callback functions
 *
 * PARAMETERS :
 *   @callback_ops : callback function to frameworks
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::initialize(
        const struct camera3_callback_ops *callback_ops)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_INIT);
    int rc;

    LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
    pthread_mutex_lock(&mMutex);

    // Validate current state
    switch (mState) {
        case OPENED:
            /* valid state */
            break;
        default:
            LOGE("Invalid state %d", mState);
            rc = -ENODEV;
            goto err1;
    }

    rc = initParameters();
    if (rc < 0) {
        LOGE("initParameters failed %d", rc);
        goto err1;
    }
    mCallbackOps = callback_ops;

    mChannelHandle = mCameraHandle->ops->add_channel(
            mCameraHandle->camera_handle, NULL, NULL, this);
    if (mChannelHandle == 0) {
        LOGE("add_channel failed");
        rc = -ENOMEM;
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    pthread_mutex_unlock(&mMutex);
    mCameraInitialized = true;
    mState = INITIALIZED;
    LOGI("X");
    return 0;

err1:
    pthread_mutex_unlock(&mMutex);
    return rc;
}

/*===========================================================================
 * FUNCTION   : validateStreamDimensions
 *
 * DESCRIPTION: Check if the requested stream sizes are among those advertised
 *
 * PARAMETERS :
 *   @stream_list : streams to be configured
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::validateStreamDimensions(
        camera3_stream_configuration_t *streamList)
{
    int rc = NO_ERROR;
    size_t count = 0;

    camera3_stream_t *inputStream = NULL;
    /*
     * Loop through all streams to find input stream if it exists
     */
    for (size_t i = 0; i < streamList->num_streams; i++) {
        if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
            if (inputStream != NULL) {
                LOGE("Error, Multiple input streams requested");
                return -EINVAL;
            }
            inputStream = streamList->streams[i];
        }
    }
    /*
     * Loop through all streams requested in configuration
     * Check if unsupported sizes have been requested on any of them
     */
    for (size_t j = 0; j < streamList->num_streams; j++) {
        bool sizeFound = false;
        camera3_stream_t *newStream = streamList->streams[j];

        uint32_t rotatedHeight = newStream->height;
        uint32_t rotatedWidth = newStream->width;
        if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
                (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
            rotatedHeight = newStream->width;
            rotatedWidth = newStream->height;
        }

        /*
         * Sizes are different for each type of stream format; check against
         * the appropriate table.
         */
        switch (newStream->format) {
            case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
            case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
            case HAL_PIXEL_FORMAT_RAW10:
                count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
                for (size_t i = 0; i < count; i++) {
                    if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
                            (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
                        sizeFound = true;
                        break;
                    }
                }
                break;
            case HAL_PIXEL_FORMAT_BLOB:
                count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
                /* Verify set size against generated sizes table */
                for (size_t i = 0; i < count; i++) {
                    if (((int32_t)rotatedWidth ==
                            gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                            ((int32_t)rotatedHeight ==
                            gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                        sizeFound = true;
                        break;
                    }
                }
                break;
            case HAL_PIXEL_FORMAT_YCbCr_420_888:
            case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
            default:
                if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
                        || newStream->stream_type == CAMERA3_STREAM_INPUT
                        || IS_USAGE_ZSL(newStream->usage)) {
                    if (((int32_t)rotatedWidth ==
                            gCamCapability[mCameraId]->active_array_size.width) &&
                            ((int32_t)rotatedHeight ==
                            gCamCapability[mCameraId]->active_array_size.height)) {
                        sizeFound = true;
                        break;
                    }
                    /* We could potentially break here to enforce that a ZSL stream
                     * set from the framework is always full active array size, but
                     * it is not clear from the spec whether the framework will
                     * always follow that. We also have logic to override to full
                     * array size, so keep this check lenient for the moment.
                     */
                }
                count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
                        MAX_SIZES_CNT);
                for (size_t i = 0; i < count; i++) {
                    if (((int32_t)rotatedWidth ==
                            gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                            ((int32_t)rotatedHeight ==
                            gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                        sizeFound = true;
                        break;
                    }
                }
                break;
        } /* End of switch(newStream->format) */

        /* We error out even if a single stream has an unsupported size set */
        if (!sizeFound) {
            LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
                    rotatedWidth, rotatedHeight, newStream->format,
                    gCamCapability[mCameraId]->active_array_size.width,
                    gCamCapability[mCameraId]->active_array_size.height);
            rc = -EINVAL;
            break;
        }
    } /* End of for each stream */
    return rc;
}

/*==============================================================================
 * FUNCTION   : isSupportChannelNeeded
 *
 * DESCRIPTION: Simple heuristic to determine whether a support channel is needed
 *
 * PARAMETERS :
 *   @stream_list        : streams to be configured
 *   @stream_config_info : the config info for streams to be configured
 *
 * RETURN     : Boolean true/false decision
 *
 *==========================================================================*/
bool QCamera3HardwareInterface::isSupportChannelNeeded(
        camera3_stream_configuration_t *streamList,
        cam_stream_size_info_t stream_config_info)
{
    uint32_t i;
    bool pprocRequested = false;
    /* Check for conditions where PProc pipeline does not have any streams */
    for (i = 0; i < stream_config_info.num_streams; i++) {
        if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
                stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
            pprocRequested = true;
            break;
        }
    }

    if (pprocRequested == false)
        return true;

    /* Dummy stream needed if only raw or jpeg streams present */
    for (i = 0; i < streamList->num_streams; i++) {
        switch(streamList->streams[i]->format) {
            case HAL_PIXEL_FORMAT_RAW_OPAQUE:
            case HAL_PIXEL_FORMAT_RAW10:
            case HAL_PIXEL_FORMAT_RAW16:
            case HAL_PIXEL_FORMAT_BLOB:
                break;
            default:
                return false;
        }
    }
    return true;
}
1231
1232/*==============================================================================
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001233 * FUNCTION : sensor_mode_info
Thierry Strudel3d639192016-09-09 11:52:26 -07001234 *
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001235 * DESCRIPTION: Get sensor mode information based on current stream configuratoin
Thierry Strudel3d639192016-09-09 11:52:26 -07001236 *
1237 * PARAMETERS :
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001238 * @sensor_mode_info : sensor mode information (output)
Thierry Strudel3d639192016-09-09 11:52:26 -07001239 *
1240 * RETURN : int32_t type of status
1241 * NO_ERROR -- success
1242 * none-zero failure code
1243 *
1244 *==========================================================================*/
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001245int32_t QCamera3HardwareInterface::getSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
Thierry Strudel3d639192016-09-09 11:52:26 -07001246{
1247 int32_t rc = NO_ERROR;
1248
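    // Find the largest width and height across all configured streams; the
    // sensor mode queried below must be able to cover this maximum dimension.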
    cam_dimension_t max_dim = {0, 0};
    for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
        if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
            max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
        if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
            max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
    }

    clear_metadata_buffer(mParameters);

    rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
            max_dim);
    if (rc != NO_ERROR) {
        LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
        return rc;
    }

    rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
    if (rc != NO_ERROR) {
        LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
        return rc;
    }

    clear_metadata_buffer(mParameters);
    ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO);

    rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
            mParameters);
    if (rc != NO_ERROR) {
        LOGE("Failed to get CAM_INTF_PARM_SENSOR_MODE_INFO");
        return rc;
    }

    READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO, sensorModeInfo);
    LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u", __FUNCTION__,
            sensorModeInfo.active_array_size.width, sensorModeInfo.active_array_size.height,
            sensorModeInfo.pixel_array_size.width, sensorModeInfo.pixel_array_size.height,
            sensorModeInfo.op_pixel_clk);

    return rc;
}

/*==============================================================================
 * FUNCTION   : addToPPFeatureMask
 *
 * DESCRIPTION: add additional features to pp feature mask based on
 *              stream type and usecase
 *
 * PARAMETERS :
 *   @stream_format : stream type for feature mask
 *   @stream_idx    : stream idx within postprocess_mask list to change
 *
 * RETURN     : None
 *
 *==========================================================================*/
void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
        uint32_t stream_idx)
{
    char feature_mask_value[PROPERTY_VALUE_MAX];
    cam_feature_mask_t feature_mask;
    int args_converted;
    int property_len;

    /* Get feature mask from property */
#ifdef _LE_CAMERA_
    char swtnr_feature_mask_value[PROPERTY_VALUE_MAX];
    snprintf(swtnr_feature_mask_value, PROPERTY_VALUE_MAX, "%lld", CAM_QTI_FEATURE_SW_TNR);
    property_len = property_get("persist.camera.hal3.feature",
            feature_mask_value, swtnr_feature_mask_value);
#else
    property_len = property_get("persist.camera.hal3.feature",
            feature_mask_value, "0");
#endif
    if ((property_len > 2) && (feature_mask_value[0] == '0') &&
            (feature_mask_value[1] == 'x')) {
        args_converted = sscanf(feature_mask_value, "0x%llx", &feature_mask);
    } else {
        args_converted = sscanf(feature_mask_value, "%lld", &feature_mask);
    }
    if (1 != args_converted) {
        feature_mask = 0;
        LOGE("Wrong feature mask %s", feature_mask_value);
        return;
    }

    switch (stream_format) {
    case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
        /* Add LLVD to pp feature mask only if video hint is enabled */
        if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
            mStreamConfigInfo.postprocess_mask[stream_idx]
                    |= CAM_QTI_FEATURE_SW_TNR;
            LOGH("Added SW TNR to pp feature mask");
        } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
            mStreamConfigInfo.postprocess_mask[stream_idx]
                    |= CAM_QCOM_FEATURE_LLVD;
            LOGH("Added LLVD SeeMore to pp feature mask");
        }
        if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
                CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
            mStreamConfigInfo.postprocess_mask[stream_idx] |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
        }
        if ((m_bIsVideo) && (gCamCapability[mCameraId]->qcom_supported_feature_mask &
                CAM_QTI_FEATURE_BINNING_CORRECTION)) {
            mStreamConfigInfo.postprocess_mask[stream_idx] |=
                    CAM_QTI_FEATURE_BINNING_CORRECTION;
        }
        break;
    }
    default:
        break;
    }
    LOGD("PP feature mask %llx",
            mStreamConfigInfo.postprocess_mask[stream_idx]);
}

/*==============================================================================
 * FUNCTION   : updateFpsInPreviewBuffer
 *
 * DESCRIPTION: update FPS information in preview buffer.
 *
 * PARAMETERS :
 *   @metadata    : pointer to metadata buffer
 *   @frame_number: frame_number to look for in pending buffer list
 *
 * RETURN     : None
 *
 *==========================================================================*/
void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
        uint32_t frame_number)
{
    // Mark all pending buffers for this particular request
    // with corresponding framerate information
    for (List<PendingBuffersInRequest>::iterator req =
            mPendingBuffersMap.mPendingBuffersInRequest.begin();
            req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
        for(List<PendingBufferInfo>::iterator j =
                req->mPendingBufferList.begin();
                j != req->mPendingBufferList.end(); j++) {
            QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
            if ((req->frame_number == frame_number) &&
                (channel->getStreamTypeMask() &
                (1U << CAM_STREAM_TYPE_PREVIEW))) {
                IF_META_AVAILABLE(cam_fps_range_t, float_range,
                        CAM_INTF_PARM_FPS_RANGE, metadata) {
                    typeof (MetaData_t::refreshrate) cameraFps = float_range->max_fps;
                    struct private_handle_t *priv_handle =
                            (struct private_handle_t *)(*(j->buffer));
                    setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
                }
            }
        }
    }
}
1402
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001403/*==============================================================================
1404 * FUNCTION : updateTimeStampInPendingBuffers
1405 *
1406 * DESCRIPTION: update timestamp in display metadata for all pending buffers
1407 * of a frame number
1408 *
1409 * PARAMETERS :
 1410 * @frameNumber : frame number whose pending buffers will receive the timestamp
1411 * @timestamp : timestamp to be set
1412 *
1413 * RETURN : None
1414 *
1415 *==========================================================================*/
1416void QCamera3HardwareInterface::updateTimeStampInPendingBuffers(
1417 uint32_t frameNumber, nsecs_t timestamp)
1418{
1419 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
1420 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1421 if (req->frame_number != frameNumber)
1422 continue;
1423
1424 for (auto k = req->mPendingBufferList.begin();
1425 k != req->mPendingBufferList.end(); k++ ) {
1426 struct private_handle_t *priv_handle =
1427 (struct private_handle_t *) (*(k->buffer));
1428 setMetaData(priv_handle, SET_VT_TIMESTAMP, &timestamp);
1429 }
1430 }
1431 return;
1432}
1433
Thierry Strudel3d639192016-09-09 11:52:26 -07001434/*===========================================================================
1435 * FUNCTION : configureStreams
1436 *
1437 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1438 * and output streams.
1439 *
1440 * PARAMETERS :
1441 * @stream_list : streams to be configured
1442 *
1443 * RETURN :
1444 *
1445 *==========================================================================*/
1446int QCamera3HardwareInterface::configureStreams(
1447 camera3_stream_configuration_t *streamList)
1448{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001449 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS);
Thierry Strudel3d639192016-09-09 11:52:26 -07001450 int rc = 0;
1451
1452 // Acquire perfLock before configure streams
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001453 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001454 rc = configureStreamsPerfLocked(streamList);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001455 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001456
1457 return rc;
1458}
1459
1460/*===========================================================================
1461 * FUNCTION : configureStreamsPerfLocked
1462 *
1463 * DESCRIPTION: configureStreams while perfLock is held.
1464 *
1465 * PARAMETERS :
1466 * @stream_list : streams to be configured
1467 *
1468 * RETURN : int32_t type of status
1469 * NO_ERROR -- success
 1470 *              non-zero failure code
1471 *==========================================================================*/
1472int QCamera3HardwareInterface::configureStreamsPerfLocked(
1473 camera3_stream_configuration_t *streamList)
1474{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001475 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS_PERF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07001476 int rc = 0;
1477
1478 // Sanity check stream_list
1479 if (streamList == NULL) {
1480 LOGE("NULL stream configuration");
1481 return BAD_VALUE;
1482 }
1483 if (streamList->streams == NULL) {
1484 LOGE("NULL stream list");
1485 return BAD_VALUE;
1486 }
1487
1488 if (streamList->num_streams < 1) {
1489 LOGE("Bad number of streams requested: %d",
1490 streamList->num_streams);
1491 return BAD_VALUE;
1492 }
1493
1494 if (streamList->num_streams >= MAX_NUM_STREAMS) {
1495 LOGE("Maximum number of streams %d exceeded: %d",
1496 MAX_NUM_STREAMS, streamList->num_streams);
1497 return BAD_VALUE;
1498 }
1499
1500 mOpMode = streamList->operation_mode;
1501 LOGD("mOpMode: %d", mOpMode);
1502
1503 /* first invalidate all the steams in the mStreamList
1504 * if they appear again, they will be validated */
1505 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1506 it != mStreamInfo.end(); it++) {
1507 QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1508 if (channel) {
1509 channel->stop();
1510 }
1511 (*it)->status = INVALID;
1512 }
1513
1514 if (mRawDumpChannel) {
1515 mRawDumpChannel->stop();
1516 delete mRawDumpChannel;
1517 mRawDumpChannel = NULL;
1518 }
1519
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001520 if (mHdrPlusRawSrcChannel) {
1521 mHdrPlusRawSrcChannel->stop();
1522 delete mHdrPlusRawSrcChannel;
1523 mHdrPlusRawSrcChannel = NULL;
1524 }
1525
Thierry Strudel3d639192016-09-09 11:52:26 -07001526 if (mSupportChannel)
1527 mSupportChannel->stop();
1528
1529 if (mAnalysisChannel) {
1530 mAnalysisChannel->stop();
1531 }
1532 if (mMetadataChannel) {
1533 /* If content of mStreamInfo is not 0, there is metadata stream */
1534 mMetadataChannel->stop();
1535 }
1536 if (mChannelHandle) {
1537 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
1538 mChannelHandle);
1539 LOGD("stopping channel %d", mChannelHandle);
1540 }
1541
1542 pthread_mutex_lock(&mMutex);
1543
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001544 // Check if HDR+ is enabled.
Thierry Strudel54dc9782017-02-15 12:12:10 -08001545 char hdrp_prop[PROPERTY_VALUE_MAX];
1546 property_get("persist.camera.hdrplus", hdrp_prop, "0");
1547 bool enableHdrPlus = atoi(hdrp_prop);
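    // HDR+ is opt-in via this system property; e.g. (assumed usage)
    // `adb shell setprop persist.camera.hdrplus 1`. Any non-zero value takes the
    // connect path below, zero takes the disconnect path.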
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001548 if (enableHdrPlus) {
1549 ALOGD("%s: HDR+ in Camera HAL enabled.", __FUNCTION__);
1550 // Connect to HDR+ client if not yet.
1551 if (mHdrPlusClient == nullptr) {
1552 mHdrPlusClient = std::make_shared<HdrPlusClient>();
1553 rc = mHdrPlusClient->connect(this);
1554 if (rc < 0) {
1555 LOGE("%s: Failed to connect to HDR+ client: %s (%d).", __FUNCTION__,
1556 strerror(-rc), rc);
1557 pthread_mutex_unlock(&mMutex);
1558 return -ENODEV;
1559 }
1560
1561 // Set static metadata.
1562 rc = mHdrPlusClient->setStaticMetadata(*gStaticMetadata[mCameraId]);
1563 if (rc < 0) {
1564 LOGE("%s: Failed set static metadata in HDR+ client: %s (%d).", __FUNCTION__,
1565 strerror(-rc), rc);
1566 pthread_mutex_unlock(&mMutex);
1567 return -ENODEV;
1568 }
1569 }
1570 } else {
1571 ALOGD("%s: HDR+ in Camera HAL disabled.", __FUNCTION__);
1572 // Disconnect from HDR+ client if HDR+ is not enabled.
1573 if (mHdrPlusClient != nullptr) {
1574 mHdrPlusClient->disconnect();
1575 mHdrPlusClient = nullptr;
1576 }
1577 }
1578
Thierry Strudel3d639192016-09-09 11:52:26 -07001579 // Check state
1580 switch (mState) {
1581 case INITIALIZED:
1582 case CONFIGURED:
1583 case STARTED:
1584 /* valid state */
1585 break;
1586 default:
1587 LOGE("Invalid state %d", mState);
1588 pthread_mutex_unlock(&mMutex);
1589 return -ENODEV;
1590 }
1591
1592 /* Check whether we have video stream */
1593 m_bIs4KVideo = false;
1594 m_bIsVideo = false;
1595 m_bEisSupportedSize = false;
1596 m_bTnrEnabled = false;
Mansoor Aftab93a66e52017-01-26 14:58:25 -08001597 m_bVideoHdrEnabled = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001598 bool isZsl = false;
1599 uint32_t videoWidth = 0U;
1600 uint32_t videoHeight = 0U;
1601 size_t rawStreamCnt = 0;
1602 size_t stallStreamCnt = 0;
1603 size_t processedStreamCnt = 0;
1604 // Number of streams on ISP encoder path
1605 size_t numStreamsOnEncoder = 0;
1606 size_t numYuv888OnEncoder = 0;
1607 bool bYuv888OverrideJpeg = false;
1608 cam_dimension_t largeYuv888Size = {0, 0};
1609 cam_dimension_t maxViewfinderSize = {0, 0};
1610 bool bJpegExceeds4K = false;
1611 bool bJpegOnEncoder = false;
1612 bool bUseCommonFeatureMask = false;
1613 cam_feature_mask_t commonFeatureMask = 0;
1614 bool bSmallJpegSize = false;
1615 uint32_t width_ratio;
1616 uint32_t height_ratio;
1617 maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1618 camera3_stream_t *inputStream = NULL;
1619 bool isJpeg = false;
1620 cam_dimension_t jpegSize = {0, 0};
Thierry Strudel9ec39c62016-12-28 11:30:05 -08001621 cam_dimension_t previewSize = {0, 0};
Thierry Strudel3d639192016-09-09 11:52:26 -07001622
1623 cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
1624
1625 /*EIS configuration*/
Thierry Strudel3d639192016-09-09 11:52:26 -07001626 bool oisSupported = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001627 uint8_t eis_prop_set;
1628 uint32_t maxEisWidth = 0;
1629 uint32_t maxEisHeight = 0;
1630
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001631 // Initialize all instant AEC related variables
1632 mInstantAEC = false;
1633 mResetInstantAEC = false;
1634 mInstantAECSettledFrameNumber = 0;
1635 mAecSkipDisplayFrameBound = 0;
1636 mInstantAecFrameIdxCount = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08001637 mCurrFeatureState = 0;
1638 mStreamConfig = true;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001639
Thierry Strudel3d639192016-09-09 11:52:26 -07001640 memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1641
1642 size_t count = IS_TYPE_MAX;
1643 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1644 for (size_t i = 0; i < count; i++) {
1645 if ((gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001646 (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
1647 m_bEisSupported = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07001648 break;
1649 }
1650 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001651 count = CAM_OPT_STAB_MAX;
1652 count = MIN(gCamCapability[mCameraId]->optical_stab_modes_count, count);
1653 for (size_t i = 0; i < count; i++) {
1654 if (gCamCapability[mCameraId]->optical_stab_modes[i] == CAM_OPT_STAB_ON) {
1655 oisSupported = true;
1656 break;
1657 }
1658 }
1659
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001660 if (m_bEisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001661 maxEisWidth = MAX_EIS_WIDTH;
1662 maxEisHeight = MAX_EIS_HEIGHT;
1663 }
1664
1665 /* EIS setprop control */
1666 char eis_prop[PROPERTY_VALUE_MAX];
1667 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001668 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07001669 eis_prop_set = (uint8_t)atoi(eis_prop);
1670
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001671 m_bEisEnable = eis_prop_set && (!oisSupported && m_bEisSupported) &&
Thierry Strudel3d639192016-09-09 11:52:26 -07001672 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
1673
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001674 LOGD("m_bEisEnable: %d, eis_prop_set: %d, m_bEisSupported: %d, oisSupported:%d ",
1675 m_bEisEnable, eis_prop_set, m_bEisSupported, oisSupported);
1676
Thierry Strudel3d639192016-09-09 11:52:26 -07001677 /* stream configurations */
1678 for (size_t i = 0; i < streamList->num_streams; i++) {
1679 camera3_stream_t *newStream = streamList->streams[i];
1680 LOGI("stream[%d] type = %d, format = %d, width = %d, "
1681 "height = %d, rotation = %d, usage = 0x%x",
1682 i, newStream->stream_type, newStream->format,
1683 newStream->width, newStream->height, newStream->rotation,
1684 newStream->usage);
1685 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1686 newStream->stream_type == CAMERA3_STREAM_INPUT){
1687 isZsl = true;
1688 }
1689 if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1690 inputStream = newStream;
1691 }
1692
1693 if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
1694 isJpeg = true;
1695 jpegSize.width = newStream->width;
1696 jpegSize.height = newStream->height;
1697 if (newStream->width > VIDEO_4K_WIDTH ||
1698 newStream->height > VIDEO_4K_HEIGHT)
1699 bJpegExceeds4K = true;
1700 }
1701
1702 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1703 (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1704 m_bIsVideo = true;
1705 videoWidth = newStream->width;
1706 videoHeight = newStream->height;
1707 if ((VIDEO_4K_WIDTH <= newStream->width) &&
1708 (VIDEO_4K_HEIGHT <= newStream->height)) {
1709 m_bIs4KVideo = true;
1710 }
1711 m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
1712 (newStream->height <= maxEisHeight);
1713 }
1714 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1715 newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
1716 switch (newStream->format) {
1717 case HAL_PIXEL_FORMAT_BLOB:
1718 stallStreamCnt++;
1719 if (isOnEncoder(maxViewfinderSize, newStream->width,
1720 newStream->height)) {
1721 numStreamsOnEncoder++;
1722 bJpegOnEncoder = true;
1723 }
1724 width_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.width,
1725 newStream->width);
1726 height_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.height,
 1727                        newStream->height);
 1728                FATAL_IF(gCamCapability[mCameraId]->max_downscale_factor == 0,
 1729                        "FATAL: max_downscale_factor must not be zero");
1730 if ( (width_ratio > gCamCapability[mCameraId]->max_downscale_factor) ||
1731 (height_ratio > gCamCapability[mCameraId]->max_downscale_factor)) {
1732 LOGH("Setting small jpeg size flag to true");
1733 bSmallJpegSize = true;
1734 }
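                /* Illustration (assumed numbers): with a 4000x3000 active array, a
                 * 320x240 JPEG stream and max_downscale_factor of 8,
                 * width_ratio = ceil(4000/320) = 13 > 8, so bSmallJpegSize is set and
                 * the JPEG stream is routed through the PP superset path below. */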
1735 break;
1736 case HAL_PIXEL_FORMAT_RAW10:
1737 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1738 case HAL_PIXEL_FORMAT_RAW16:
1739 rawStreamCnt++;
1740 break;
1741 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1742 processedStreamCnt++;
1743 if (isOnEncoder(maxViewfinderSize, newStream->width,
1744 newStream->height)) {
1745 if (newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL &&
1746 !IS_USAGE_ZSL(newStream->usage)) {
1747 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1748 }
1749 numStreamsOnEncoder++;
1750 }
1751 break;
1752 case HAL_PIXEL_FORMAT_YCbCr_420_888:
1753 processedStreamCnt++;
1754 if (isOnEncoder(maxViewfinderSize, newStream->width,
1755 newStream->height)) {
 1756                        // If the Yuv888 size is not greater than 4K, set the feature mask
 1757                        // to SUPERSET so that it supports concurrent requests on
 1758                        // YUV and JPEG.
1759 if (newStream->width <= VIDEO_4K_WIDTH &&
1760 newStream->height <= VIDEO_4K_HEIGHT) {
1761 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1762 }
1763 numStreamsOnEncoder++;
1764 numYuv888OnEncoder++;
1765 largeYuv888Size.width = newStream->width;
1766 largeYuv888Size.height = newStream->height;
1767 }
1768 break;
1769 default:
1770 processedStreamCnt++;
1771 if (isOnEncoder(maxViewfinderSize, newStream->width,
1772 newStream->height)) {
1773 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1774 numStreamsOnEncoder++;
1775 }
1776 break;
1777 }
1778
1779 }
1780 }
1781
1782 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
1783 gCamCapability[mCameraId]->position == CAM_POSITION_FRONT_AUX ||
1784 !m_bIsVideo) {
1785 m_bEisEnable = false;
1786 }
1787
Thierry Strudel54dc9782017-02-15 12:12:10 -08001788 uint8_t forceEnableTnr = 0;
1789 char tnr_prop[PROPERTY_VALUE_MAX];
1790 memset(tnr_prop, 0, sizeof(tnr_prop));
1791 property_get("debug.camera.tnr.forceenable", tnr_prop, "0");
1792 forceEnableTnr = (uint8_t)atoi(tnr_prop);
1793
Thierry Strudel3d639192016-09-09 11:52:26 -07001794    /* Enable TNR only when TNR is requested (preview or video), the video size is 1080p or 720p, and we are not in constrained high-speed mode; or when forced via the debug property above */
1795 if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
1796 ((videoWidth == 1920 && videoHeight == 1080) ||
1797 (videoWidth == 1280 && videoHeight == 720)) &&
1798 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
1799 m_bTnrEnabled = true;
Thierry Strudel54dc9782017-02-15 12:12:10 -08001800 else if (forceEnableTnr)
1801 m_bTnrEnabled = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07001802
Mansoor Aftab93a66e52017-01-26 14:58:25 -08001803 char videoHdrProp[PROPERTY_VALUE_MAX];
1804 memset(videoHdrProp, 0, sizeof(videoHdrProp));
1805 property_get("persist.camera.hdr.video", videoHdrProp, "0");
1806 uint8_t hdr_mode_prop = (uint8_t)atoi(videoHdrProp);
1807
1808 if (hdr_mode_prop == 1 && m_bIsVideo &&
1809 mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
1810 m_bVideoHdrEnabled = true;
1811 else
1812 m_bVideoHdrEnabled = false;
1813
1814
Thierry Strudel3d639192016-09-09 11:52:26 -07001815 /* Check if num_streams is sane */
1816 if (stallStreamCnt > MAX_STALLING_STREAMS ||
1817 rawStreamCnt > MAX_RAW_STREAMS ||
1818 processedStreamCnt > MAX_PROCESSED_STREAMS) {
 1819        LOGE("Invalid stream config: stall: %zu, raw: %zu, processed: %zu",
1820 stallStreamCnt, rawStreamCnt, processedStreamCnt);
1821 pthread_mutex_unlock(&mMutex);
1822 return -EINVAL;
1823 }
1824 /* Check whether we have zsl stream or 4k video case */
Thierry Strudel9ec39c62016-12-28 11:30:05 -08001825 if (isZsl && m_bIs4KVideo) {
1826 LOGE("Currently invalid configuration ZSL & 4K Video!");
Thierry Strudel3d639192016-09-09 11:52:26 -07001827 pthread_mutex_unlock(&mMutex);
1828 return -EINVAL;
1829 }
1830 /* Check if stream sizes are sane */
1831 if (numStreamsOnEncoder > 2) {
1832 LOGE("Number of streams on ISP encoder path exceeds limits of 2");
1833 pthread_mutex_unlock(&mMutex);
1834 return -EINVAL;
1835 } else if (1 < numStreamsOnEncoder){
1836 bUseCommonFeatureMask = true;
1837 LOGH("Multiple streams above max viewfinder size, common mask needed");
1838 }
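    // When more than one output stream exceeds the max viewfinder size, those
    // streams share the ISP encoder path, so a single commonFeatureMask is applied
    // to all of them during channel setup below.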
1839
1840 /* Check if BLOB size is greater than 4k in 4k recording case */
1841 if (m_bIs4KVideo && bJpegExceeds4K) {
1842 LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
1843 pthread_mutex_unlock(&mMutex);
1844 return -EINVAL;
1845 }
1846
1847 // When JPEG and preview streams share VFE output, CPP will not apply CAC2
1848 // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
1849 // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
1850 // is not true. Otherwise testMandatoryOutputCombinations will fail with following
1851 // configurations:
1852 // {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
1853 // {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
1854 // (These two configurations will not have CAC2 enabled even in HQ modes.)
1855 if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
1856 ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
1857 __func__);
1858 pthread_mutex_unlock(&mMutex);
1859 return -EINVAL;
1860 }
1861
 1862    // If a jpeg stream is configured, a YUV 888 stream is on the encoder path, and
 1863    // the YUV stream's size is strictly greater than the JPEG size, let the YUV size
 1864    // override the JPEG stream's internal size so we can take advantage of postproc bypass.
1865 if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
1866 jpegSize.width, jpegSize.height) &&
1867 largeYuv888Size.width > jpegSize.width &&
1868 largeYuv888Size.height > jpegSize.height) {
1869 bYuv888OverrideJpeg = true;
1870 } else if (!isJpeg && numStreamsOnEncoder > 1) {
1871 commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1872 }
1873
1874 LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %llx",
1875 maxViewfinderSize.width, maxViewfinderSize.height, isZsl, bUseCommonFeatureMask,
1876 commonFeatureMask);
1877 LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
1878 numStreamsOnEncoder, processedStreamCnt, stallStreamCnt, bSmallJpegSize);
1879
1880 rc = validateStreamDimensions(streamList);
1881 if (rc == NO_ERROR) {
1882 rc = validateStreamRotations(streamList);
1883 }
1884 if (rc != NO_ERROR) {
1885 LOGE("Invalid stream configuration requested!");
1886 pthread_mutex_unlock(&mMutex);
1887 return rc;
1888 }
1889
1890 camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
1891 for (size_t i = 0; i < streamList->num_streams; i++) {
1892 camera3_stream_t *newStream = streamList->streams[i];
1893 LOGH("newStream type = %d, stream format = %d "
1894 "stream size : %d x %d, stream rotation = %d",
1895 newStream->stream_type, newStream->format,
1896 newStream->width, newStream->height, newStream->rotation);
1897 //if the stream is in the mStreamList validate it
1898 bool stream_exists = false;
1899 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
1900 it != mStreamInfo.end(); it++) {
1901 if ((*it)->stream == newStream) {
1902 QCamera3ProcessingChannel *channel =
1903 (QCamera3ProcessingChannel*)(*it)->stream->priv;
1904 stream_exists = true;
1905 if (channel)
1906 delete channel;
1907 (*it)->status = VALID;
1908 (*it)->stream->priv = NULL;
1909 (*it)->channel = NULL;
1910 }
1911 }
1912 if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
1913 //new stream
1914 stream_info_t* stream_info;
1915 stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
1916 if (!stream_info) {
1917 LOGE("Could not allocate stream info");
1918 rc = -ENOMEM;
1919 pthread_mutex_unlock(&mMutex);
1920 return rc;
1921 }
1922 stream_info->stream = newStream;
1923 stream_info->status = VALID;
1924 stream_info->channel = NULL;
1925 mStreamInfo.push_back(stream_info);
1926 }
1927 /* Covers Opaque ZSL and API1 F/W ZSL */
1928 if (IS_USAGE_ZSL(newStream->usage)
1929 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
1930 if (zslStream != NULL) {
1931 LOGE("Multiple input/reprocess streams requested!");
1932 pthread_mutex_unlock(&mMutex);
1933 return BAD_VALUE;
1934 }
1935 zslStream = newStream;
1936 }
1937 /* Covers YUV reprocess */
1938 if (inputStream != NULL) {
1939 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
1940 && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
1941 && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
1942 && inputStream->width == newStream->width
1943 && inputStream->height == newStream->height) {
1944 if (zslStream != NULL) {
 1945                        /* This scenario indicates that multiple YUV streams with the same
 1946                         * size as the input stream have been requested. Since the zsl stream
 1947                         * handle is used solely to override the size of streams that share
 1948                         * h/w streams, we simply guess here which of the streams is the ZSL
 1949                         * stream. This will be refactored once generic logic for streams
 1950                         * sharing encoder output is in place.
 1951                         */
 1952                    LOGH("Warning: multiple input/reprocess streams requested!");
1953 }
1954 zslStream = newStream;
1955 }
1956 }
1957 }
1958
1959 /* If a zsl stream is set, we know that we have configured at least one input or
1960 bidirectional stream */
1961 if (NULL != zslStream) {
1962 mInputStreamInfo.dim.width = (int32_t)zslStream->width;
1963 mInputStreamInfo.dim.height = (int32_t)zslStream->height;
1964 mInputStreamInfo.format = zslStream->format;
1965 mInputStreamInfo.usage = zslStream->usage;
1966 LOGD("Input stream configured! %d x %d, format %d, usage %d",
1967 mInputStreamInfo.dim.width,
1968 mInputStreamInfo.dim.height,
1969 mInputStreamInfo.format, mInputStreamInfo.usage);
1970 }
1971
1972 cleanAndSortStreamInfo();
1973 if (mMetadataChannel) {
1974 delete mMetadataChannel;
1975 mMetadataChannel = NULL;
1976 }
1977 if (mSupportChannel) {
1978 delete mSupportChannel;
1979 mSupportChannel = NULL;
1980 }
1981
1982 if (mAnalysisChannel) {
1983 delete mAnalysisChannel;
1984 mAnalysisChannel = NULL;
1985 }
1986
1987 if (mDummyBatchChannel) {
1988 delete mDummyBatchChannel;
1989 mDummyBatchChannel = NULL;
1990 }
1991
1992 //Create metadata channel and initialize it
1993 cam_feature_mask_t metadataFeatureMask = CAM_QCOM_FEATURE_NONE;
1994 setPAAFSupport(metadataFeatureMask, CAM_STREAM_TYPE_METADATA,
1995 gCamCapability[mCameraId]->color_arrangement);
1996 mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
1997 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001998 setBufferErrorStatus, &padding_info, metadataFeatureMask, this);
Thierry Strudel3d639192016-09-09 11:52:26 -07001999 if (mMetadataChannel == NULL) {
2000 LOGE("failed to allocate metadata channel");
2001 rc = -ENOMEM;
2002 pthread_mutex_unlock(&mMutex);
2003 return rc;
2004 }
2005 rc = mMetadataChannel->initialize(IS_TYPE_NONE);
2006 if (rc < 0) {
2007 LOGE("metadata channel initialization failed");
2008 delete mMetadataChannel;
2009 mMetadataChannel = NULL;
2010 pthread_mutex_unlock(&mMutex);
2011 return rc;
2012 }
2013
Thierry Strudel3d639192016-09-09 11:52:26 -07002014 bool isRawStreamRequested = false;
2015 memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
2016 /* Allocate channel objects for the requested streams */
2017 for (size_t i = 0; i < streamList->num_streams; i++) {
2018 camera3_stream_t *newStream = streamList->streams[i];
2019 uint32_t stream_usage = newStream->usage;
2020 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
2021 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
2022 struct camera_info *p_info = NULL;
2023 pthread_mutex_lock(&gCamLock);
2024 p_info = get_cam_info(mCameraId, &mStreamConfigInfo.sync_type);
2025 pthread_mutex_unlock(&gCamLock);
2026 if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
2027 || IS_USAGE_ZSL(newStream->usage)) &&
2028 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
2029 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
2030 if (bUseCommonFeatureMask) {
2031 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2032 commonFeatureMask;
2033 } else {
2034 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2035 CAM_QCOM_FEATURE_NONE;
2036 }
2037
2038 } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
2039 LOGH("Input stream configured, reprocess config");
2040 } else {
2041 //for non zsl streams find out the format
2042 switch (newStream->format) {
2043 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
2044 {
2045 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2046 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2047 /* add additional features to pp feature mask */
2048 addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
2049 mStreamConfigInfo.num_streams);
2050
2051 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
2052 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2053 CAM_STREAM_TYPE_VIDEO;
2054 if (m_bTnrEnabled && m_bTnrVideo) {
2055 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2056 CAM_QCOM_FEATURE_CPP_TNR;
2057 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2058 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2059 ~CAM_QCOM_FEATURE_CDS;
2060 }
2061 } else {
2062 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2063 CAM_STREAM_TYPE_PREVIEW;
2064 if (m_bTnrEnabled && m_bTnrPreview) {
2065 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2066 CAM_QCOM_FEATURE_CPP_TNR;
2067 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2068 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2069 ~CAM_QCOM_FEATURE_CDS;
2070 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002071 if(!m_bSwTnrPreview) {
2072 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2073 ~CAM_QTI_FEATURE_SW_TNR;
2074 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002075 padding_info.width_padding = mSurfaceStridePadding;
2076 padding_info.height_padding = CAM_PAD_TO_2;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002077 previewSize.width = (int32_t)newStream->width;
2078 previewSize.height = (int32_t)newStream->height;
Thierry Strudel3d639192016-09-09 11:52:26 -07002079 }
2080 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
2081 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
2082 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2083 newStream->height;
2084 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2085 newStream->width;
2086 }
2087 }
2088 break;
2089 case HAL_PIXEL_FORMAT_YCbCr_420_888:
2090 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
2091 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2092 if (bUseCommonFeatureMask)
2093 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2094 commonFeatureMask;
2095 else
2096 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2097 CAM_QCOM_FEATURE_NONE;
2098 } else {
2099 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2100 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2101 }
2102 break;
2103 case HAL_PIXEL_FORMAT_BLOB:
2104 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
2105 // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
2106 if ((m_bIs4KVideo && !isZsl) || (bSmallJpegSize && !isZsl)) {
2107 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2108 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2109 } else {
2110 if (bUseCommonFeatureMask &&
2111 isOnEncoder(maxViewfinderSize, newStream->width,
2112 newStream->height)) {
2113 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
2114 } else {
2115 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2116 }
2117 }
2118 if (isZsl) {
2119 if (zslStream) {
2120 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2121 (int32_t)zslStream->width;
2122 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2123 (int32_t)zslStream->height;
2124 } else {
2125 LOGE("Error, No ZSL stream identified");
2126 pthread_mutex_unlock(&mMutex);
2127 return -EINVAL;
2128 }
2129 } else if (m_bIs4KVideo) {
2130 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)videoWidth;
2131 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)videoHeight;
2132 } else if (bYuv888OverrideJpeg) {
2133 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2134 (int32_t)largeYuv888Size.width;
2135 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2136 (int32_t)largeYuv888Size.height;
2137 }
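                    /* The JPEG (BLOB) stream's internal YUV size therefore comes from one
                     * of three places: the ZSL stream, the video stream (4K recording), or
                     * the largest YUV888 stream when bYuv888OverrideJpeg is set; otherwise
                     * the requested BLOB dimensions set earlier are used as-is. */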
2138 break;
2139 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2140 case HAL_PIXEL_FORMAT_RAW16:
2141 case HAL_PIXEL_FORMAT_RAW10:
2142 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2143 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2144 isRawStreamRequested = true;
2145 break;
2146 default:
2147 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
2148 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2149 break;
2150 }
2151 }
2152
2153 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2154 (cam_stream_type_t) mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2155 gCamCapability[mCameraId]->color_arrangement);
2156
2157 if (newStream->priv == NULL) {
2158 //New stream, construct channel
2159 switch (newStream->stream_type) {
2160 case CAMERA3_STREAM_INPUT:
2161 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
 2162                newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE; // write access for in-place algorithms
2163 break;
2164 case CAMERA3_STREAM_BIDIRECTIONAL:
2165 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
2166 GRALLOC_USAGE_HW_CAMERA_WRITE;
2167 break;
2168 case CAMERA3_STREAM_OUTPUT:
 2169                /* For video encoding streams, set the read/write rarely
 2170                 * flags so that the buffers may be allocated un-cached */
2171 if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
2172 newStream->usage |=
2173 (GRALLOC_USAGE_SW_READ_RARELY |
2174 GRALLOC_USAGE_SW_WRITE_RARELY |
2175 GRALLOC_USAGE_HW_CAMERA_WRITE);
2176 else if (IS_USAGE_ZSL(newStream->usage))
2177 {
2178 LOGD("ZSL usage flag skipping");
2179 }
2180 else if (newStream == zslStream
2181 || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
2182 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
2183 } else
2184 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
2185 break;
2186 default:
2187 LOGE("Invalid stream_type %d", newStream->stream_type);
2188 break;
2189 }
2190
2191 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
2192 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
2193 QCamera3ProcessingChannel *channel = NULL;
2194 switch (newStream->format) {
2195 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2196 if ((newStream->usage &
2197 private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
2198 (streamList->operation_mode ==
2199 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2200 ) {
2201 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2202 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002203 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002204 this,
2205 newStream,
2206 (cam_stream_type_t)
2207 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2208 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2209 mMetadataChannel,
2210 0); //heap buffers are not required for HFR video channel
2211 if (channel == NULL) {
2212 LOGE("allocation of channel failed");
2213 pthread_mutex_unlock(&mMutex);
2214 return -ENOMEM;
2215 }
 2216                        //channel->getNumBuffers() will return 0 here so use
 2217                        //MAX_INFLIGHT_HFR_REQUESTS
2218 newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
2219 newStream->priv = channel;
2220 LOGI("num video buffers in HFR mode: %d",
2221 MAX_INFLIGHT_HFR_REQUESTS);
2222 } else {
2223 /* Copy stream contents in HFR preview only case to create
2224 * dummy batch channel so that sensor streaming is in
2225 * HFR mode */
2226 if (!m_bIsVideo && (streamList->operation_mode ==
2227 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
2228 mDummyBatchStream = *newStream;
2229 }
2230 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2231 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002232 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002233 this,
2234 newStream,
2235 (cam_stream_type_t)
2236 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2237 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2238 mMetadataChannel,
2239 MAX_INFLIGHT_REQUESTS);
2240 if (channel == NULL) {
2241 LOGE("allocation of channel failed");
2242 pthread_mutex_unlock(&mMutex);
2243 return -ENOMEM;
2244 }
2245 newStream->max_buffers = channel->getNumBuffers();
2246 newStream->priv = channel;
2247 }
2248 break;
2249 case HAL_PIXEL_FORMAT_YCbCr_420_888: {
2250 channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
2251 mChannelHandle,
2252 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002253 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002254 this,
2255 newStream,
2256 (cam_stream_type_t)
2257 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2258 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2259 mMetadataChannel);
2260 if (channel == NULL) {
2261 LOGE("allocation of YUV channel failed");
2262 pthread_mutex_unlock(&mMutex);
2263 return -ENOMEM;
2264 }
2265 newStream->max_buffers = channel->getNumBuffers();
2266 newStream->priv = channel;
2267 break;
2268 }
2269 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2270 case HAL_PIXEL_FORMAT_RAW16:
2271 case HAL_PIXEL_FORMAT_RAW10:
2272 mRawChannel = new QCamera3RawChannel(
2273 mCameraHandle->camera_handle, mChannelHandle,
2274 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002275 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002276 this, newStream,
2277 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2278 mMetadataChannel,
2279 (newStream->format == HAL_PIXEL_FORMAT_RAW16));
2280 if (mRawChannel == NULL) {
2281 LOGE("allocation of raw channel failed");
2282 pthread_mutex_unlock(&mMutex);
2283 return -ENOMEM;
2284 }
2285 newStream->max_buffers = mRawChannel->getNumBuffers();
2286 newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
2287 break;
2288 case HAL_PIXEL_FORMAT_BLOB:
2289 // Max live snapshot inflight buffer is 1. This is to mitigate
2290 // frame drop issues for video snapshot. The more buffers being
2291 // allocated, the more frame drops there are.
2292 mPictureChannel = new QCamera3PicChannel(
2293 mCameraHandle->camera_handle, mChannelHandle,
2294 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002295 setBufferErrorStatus, &padding_info, this, newStream,
Thierry Strudel3d639192016-09-09 11:52:26 -07002296 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2297 m_bIs4KVideo, isZsl, mMetadataChannel,
2298 (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
2299 if (mPictureChannel == NULL) {
2300 LOGE("allocation of channel failed");
2301 pthread_mutex_unlock(&mMutex);
2302 return -ENOMEM;
2303 }
2304 newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
2305 newStream->max_buffers = mPictureChannel->getNumBuffers();
2306 mPictureChannel->overrideYuvSize(
2307 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
2308 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
2309 break;
2310
2311 default:
2312 LOGE("not a supported format 0x%x", newStream->format);
2313 break;
2314 }
2315 } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
2316 newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
2317 } else {
2318 LOGE("Error, Unknown stream type");
2319 pthread_mutex_unlock(&mMutex);
2320 return -EINVAL;
2321 }
2322
2323 QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
2324 if (channel != NULL && channel->isUBWCEnabled()) {
2325 cam_format_t fmt = channel->getStreamDefaultFormat(
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07002326 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2327 newStream->width, newStream->height);
Thierry Strudel3d639192016-09-09 11:52:26 -07002328 if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
2329 newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
2330 }
2331 }
2332
2333 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2334 it != mStreamInfo.end(); it++) {
2335 if ((*it)->stream == newStream) {
2336 (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
2337 break;
2338 }
2339 }
2340 } else {
2341 // Channel already exists for this stream
2342 // Do nothing for now
2343 }
2344 padding_info = gCamCapability[mCameraId]->padding_info;
2345
2346 /* Do not add entries for input stream in metastream info
2347 * since there is no real stream associated with it
2348 */
2349 if (newStream->stream_type != CAMERA3_STREAM_INPUT)
2350 mStreamConfigInfo.num_streams++;
2351 }
2352
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002353 // Create analysis stream all the time, even when h/w support is not available
2354 {
2355 cam_feature_mask_t analysisFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2356 setPAAFSupport(analysisFeatureMask, CAM_STREAM_TYPE_ANALYSIS,
2357 gCamCapability[mCameraId]->color_arrangement);
2358 cam_analysis_info_t analysisInfo;
2359 int32_t ret = NO_ERROR;
2360 ret = mCommon.getAnalysisInfo(
2361 FALSE,
2362 analysisFeatureMask,
2363 &analysisInfo);
2364 if (ret == NO_ERROR) {
2365 cam_dimension_t analysisDim;
2366 analysisDim = mCommon.getMatchingDimension(previewSize,
2367 analysisInfo.analysis_recommended_res);
2368
2369 mAnalysisChannel = new QCamera3SupportChannel(
2370 mCameraHandle->camera_handle,
2371 mChannelHandle,
2372 mCameraHandle->ops,
2373 &analysisInfo.analysis_padding_info,
2374 analysisFeatureMask,
2375 CAM_STREAM_TYPE_ANALYSIS,
2376 &analysisDim,
2377 (analysisInfo.analysis_format
2378 == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
2379 : CAM_FORMAT_YUV_420_NV21),
2380 analysisInfo.hw_analysis_supported,
2381 gCamCapability[mCameraId]->color_arrangement,
2382 this,
2383 0); // force buffer count to 0
2384 } else {
2385 LOGW("getAnalysisInfo failed, ret = %d", ret);
2386 }
2387 if (!mAnalysisChannel) {
2388 LOGW("Analysis channel cannot be created");
2389 }
2390 }
2391
Thierry Strudel3d639192016-09-09 11:52:26 -07002392 //RAW DUMP channel
2393 if (mEnableRawDump && isRawStreamRequested == false){
2394 cam_dimension_t rawDumpSize;
2395 rawDumpSize = getMaxRawSize(mCameraId);
2396 cam_feature_mask_t rawDumpFeatureMask = CAM_QCOM_FEATURE_NONE;
2397 setPAAFSupport(rawDumpFeatureMask,
2398 CAM_STREAM_TYPE_RAW,
2399 gCamCapability[mCameraId]->color_arrangement);
2400 mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
2401 mChannelHandle,
2402 mCameraHandle->ops,
2403 rawDumpSize,
2404 &padding_info,
2405 this, rawDumpFeatureMask);
2406 if (!mRawDumpChannel) {
2407 LOGE("Raw Dump channel cannot be created");
2408 pthread_mutex_unlock(&mMutex);
2409 return -ENOMEM;
2410 }
2411 }
2412
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002413 // Initialize HDR+ Raw Source channel.
2414 if (mHdrPlusClient != nullptr) {
2415 if (isRawStreamRequested || mRawDumpChannel) {
2416 ALOGE("%s: Enabling HDR+ while RAW output stream is configured is not supported.",
2417 __FUNCTION__);
2418 mHdrPlusClient->disconnect();
2419 mHdrPlusClient = nullptr;
2420 } else {
2421 cam_dimension_t rawSize = getMaxRawSize(mCameraId);
2422 cam_feature_mask_t hdrPlusRawFeatureMask = CAM_QCOM_FEATURE_NONE;
2423 setPAAFSupport(hdrPlusRawFeatureMask,
2424 CAM_STREAM_TYPE_RAW,
2425 gCamCapability[mCameraId]->color_arrangement);
2426 mHdrPlusRawSrcChannel = new QCamera3HdrPlusRawSrcChannel(mCameraHandle->camera_handle,
2427 mChannelHandle,
2428 mCameraHandle->ops,
2429 rawSize,
2430 &padding_info,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08002431 this, hdrPlusRawFeatureMask,
2432 mHdrPlusClient,
2433 kPbRaw10InputStreamId);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002434 if (!mHdrPlusRawSrcChannel) {
2435 LOGE("HDR+ Raw Source channel cannot be created");
2436 pthread_mutex_unlock(&mMutex);
2437 return -ENOMEM;
2438 }
2439 }
2440 }
2441
Thierry Strudel3d639192016-09-09 11:52:26 -07002442
2443 if (mAnalysisChannel) {
2444 cam_analysis_info_t analysisInfo;
2445 memset(&analysisInfo, 0, sizeof(cam_analysis_info_t));
2446 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2447 CAM_STREAM_TYPE_ANALYSIS;
2448 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2449 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2450 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2451 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2452 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002453 rc = mCommon.getAnalysisInfo(FALSE,
Thierry Strudel3d639192016-09-09 11:52:26 -07002454 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2455 &analysisInfo);
2456 if (rc != NO_ERROR) {
2457 LOGE("getAnalysisInfo failed, ret = %d", rc);
2458 pthread_mutex_unlock(&mMutex);
2459 return rc;
2460 }
2461 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002462 mCommon.getMatchingDimension(previewSize,
2463 analysisInfo.analysis_recommended_res);
Thierry Strudel3d639192016-09-09 11:52:26 -07002464 mStreamConfigInfo.num_streams++;
2465 }
2466
2467 if (isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
2468 cam_analysis_info_t supportInfo;
2469 memset(&supportInfo, 0, sizeof(cam_analysis_info_t));
2470 cam_feature_mask_t callbackFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2471 setPAAFSupport(callbackFeatureMask,
2472 CAM_STREAM_TYPE_CALLBACK,
2473 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002474 int32_t ret = NO_ERROR;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002475 ret = mCommon.getAnalysisInfo(FALSE, callbackFeatureMask, &supportInfo);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002476 if (ret != NO_ERROR) {
2477 /* Ignore the error for Mono camera
2478 * because the PAAF bit mask is only set
2479 * for CAM_STREAM_TYPE_ANALYSIS stream type
2480 */
2481 if (gCamCapability[mCameraId]->color_arrangement != CAM_FILTER_ARRANGEMENT_Y) {
2482 LOGW("getAnalysisInfo failed, ret = %d", ret);
2483 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002484 }
2485 mSupportChannel = new QCamera3SupportChannel(
2486 mCameraHandle->camera_handle,
2487 mChannelHandle,
2488 mCameraHandle->ops,
2489 &gCamCapability[mCameraId]->padding_info,
2490 callbackFeatureMask,
2491 CAM_STREAM_TYPE_CALLBACK,
2492 &QCamera3SupportChannel::kDim,
2493 CAM_FORMAT_YUV_420_NV21,
2494 supportInfo.hw_analysis_supported,
2495 gCamCapability[mCameraId]->color_arrangement,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002496 this, 0);
Thierry Strudel3d639192016-09-09 11:52:26 -07002497 if (!mSupportChannel) {
2498 LOGE("dummy channel cannot be created");
2499 pthread_mutex_unlock(&mMutex);
2500 return -ENOMEM;
2501 }
2502 }
2503
2504 if (mSupportChannel) {
2505 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2506 QCamera3SupportChannel::kDim;
2507 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2508 CAM_STREAM_TYPE_CALLBACK;
2509 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2510 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2511 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2512 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2513 gCamCapability[mCameraId]->color_arrangement);
2514 mStreamConfigInfo.num_streams++;
2515 }
2516
2517 if (mRawDumpChannel) {
2518 cam_dimension_t rawSize;
2519 rawSize = getMaxRawSize(mCameraId);
2520 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2521 rawSize;
2522 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2523 CAM_STREAM_TYPE_RAW;
2524 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2525 CAM_QCOM_FEATURE_NONE;
2526 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2527 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2528 gCamCapability[mCameraId]->color_arrangement);
2529 mStreamConfigInfo.num_streams++;
2530 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002531
2532 if (mHdrPlusRawSrcChannel) {
2533 cam_dimension_t rawSize;
2534 rawSize = getMaxRawSize(mCameraId);
2535 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] = rawSize;
2536 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2537 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2538 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2539 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2540 gCamCapability[mCameraId]->color_arrangement);
2541 mStreamConfigInfo.num_streams++;
2542 }
2543
Thierry Strudel3d639192016-09-09 11:52:26 -07002544 /* In HFR mode, if video stream is not added, create a dummy channel so that
2545 * ISP can create a batch mode even for preview only case. This channel is
2546 * never 'start'ed (no stream-on), it is only 'initialized' */
2547 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2548 !m_bIsVideo) {
2549 cam_feature_mask_t dummyFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2550 setPAAFSupport(dummyFeatureMask,
2551 CAM_STREAM_TYPE_VIDEO,
2552 gCamCapability[mCameraId]->color_arrangement);
2553 mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2554 mChannelHandle,
2555 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002556 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002557 this,
2558 &mDummyBatchStream,
2559 CAM_STREAM_TYPE_VIDEO,
2560 dummyFeatureMask,
2561 mMetadataChannel);
2562 if (NULL == mDummyBatchChannel) {
 2563            LOGE("creation of mDummyBatchChannel failed. "
 2564                    "Preview will use non-HFR sensor mode");
2565 }
2566 }
2567 if (mDummyBatchChannel) {
2568 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2569 mDummyBatchStream.width;
2570 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2571 mDummyBatchStream.height;
2572 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2573 CAM_STREAM_TYPE_VIDEO;
2574 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2575 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2576 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2577 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2578 gCamCapability[mCameraId]->color_arrangement);
2579 mStreamConfigInfo.num_streams++;
2580 }
2581
2582 mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
2583 mStreamConfigInfo.buffer_info.max_buffers =
2584 m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
2585
2586 /* Initialize mPendingRequestInfo and mPendingBuffersMap */
2587 for (pendingRequestIterator i = mPendingRequestsList.begin();
2588 i != mPendingRequestsList.end();) {
2589 i = erasePendingRequest(i);
2590 }
2591 mPendingFrameDropList.clear();
2592 // Initialize/Reset the pending buffers list
2593 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
2594 req.mPendingBufferList.clear();
2595 }
2596 mPendingBuffersMap.mPendingBuffersInRequest.clear();
2597
Thierry Strudel3d639192016-09-09 11:52:26 -07002598 mCurJpegMeta.clear();
2599 //Get min frame duration for this streams configuration
2600 deriveMinFrameDuration();
2601
2602 // Update state
2603 mState = CONFIGURED;
2604
2605 pthread_mutex_unlock(&mMutex);
2606
2607 return rc;
2608}
2609
2610/*===========================================================================
2611 * FUNCTION : validateCaptureRequest
2612 *
2613 * DESCRIPTION: validate a capture request from camera service
2614 *
2615 * PARAMETERS :
2616 * @request : request from framework to process
2617 *
2618 * RETURN :
2619 *
2620 *==========================================================================*/
2621int QCamera3HardwareInterface::validateCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002622 camera3_capture_request_t *request,
2623 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07002624{
2625 ssize_t idx = 0;
2626 const camera3_stream_buffer_t *b;
2627 CameraMetadata meta;
2628
2629 /* Sanity check the request */
2630 if (request == NULL) {
2631 LOGE("NULL capture request");
2632 return BAD_VALUE;
2633 }
2634
2635 if ((request->settings == NULL) && (mState == CONFIGURED)) {
2636 /*settings cannot be null for the first request*/
2637 return BAD_VALUE;
2638 }
2639
2640 uint32_t frameNumber = request->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002641 if ((request->num_output_buffers < 1 || request->output_buffers == NULL)
2642 && (internallyRequestedStreams.size() == 0)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002643        LOGE("Request %d: No output buffers provided!",
 2644                frameNumber);
2645 return BAD_VALUE;
2646 }
2647 if (request->num_output_buffers >= MAX_NUM_STREAMS) {
 2648        LOGE("Number of buffers %d equals or exceeds the maximum number of streams %d!",
2649 request->num_output_buffers, MAX_NUM_STREAMS);
2650 return BAD_VALUE;
2651 }
2652 if (request->input_buffer != NULL) {
2653 b = request->input_buffer;
2654 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2655 LOGE("Request %d: Buffer %ld: Status not OK!",
2656 frameNumber, (long)idx);
2657 return BAD_VALUE;
2658 }
2659 if (b->release_fence != -1) {
2660 LOGE("Request %d: Buffer %ld: Has a release fence!",
2661 frameNumber, (long)idx);
2662 return BAD_VALUE;
2663 }
2664 if (b->buffer == NULL) {
2665 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
2666 frameNumber, (long)idx);
2667 return BAD_VALUE;
2668 }
2669 }
2670
2671 // Validate all buffers
2672 b = request->output_buffers;
Thierry Strudel54dc9782017-02-15 12:12:10 -08002673 if (b == NULL) {
2674 return BAD_VALUE;
2675 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002676 while (idx < (ssize_t)request->num_output_buffers) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002677 QCamera3ProcessingChannel *channel =
2678 static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
2679 if (channel == NULL) {
2680 LOGE("Request %d: Buffer %ld: Unconfigured stream!",
2681 frameNumber, (long)idx);
2682 return BAD_VALUE;
2683 }
2684 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2685 LOGE("Request %d: Buffer %ld: Status not OK!",
2686 frameNumber, (long)idx);
2687 return BAD_VALUE;
2688 }
2689 if (b->release_fence != -1) {
2690 LOGE("Request %d: Buffer %ld: Has a release fence!",
2691 frameNumber, (long)idx);
2692 return BAD_VALUE;
2693 }
2694 if (b->buffer == NULL) {
2695 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
2696 frameNumber, (long)idx);
2697 return BAD_VALUE;
2698 }
2699 if (*(b->buffer) == NULL) {
2700 LOGE("Request %d: Buffer %ld: NULL private handle!",
2701 frameNumber, (long)idx);
2702 return BAD_VALUE;
2703 }
2704 idx++;
2705 b = request->output_buffers + idx;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002706 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002707 return NO_ERROR;
2708}
2709
2710/*===========================================================================
2711 * FUNCTION : deriveMinFrameDuration
2712 *
 2713 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
2714 * on currently configured streams.
2715 *
2716 * PARAMETERS : NONE
2717 *
2718 * RETURN : NONE
2719 *
2720 *==========================================================================*/
2721void QCamera3HardwareInterface::deriveMinFrameDuration()
2722{
2723 int32_t maxJpegDim, maxProcessedDim, maxRawDim;
2724
2725 maxJpegDim = 0;
2726 maxProcessedDim = 0;
2727 maxRawDim = 0;
2728
2729 // Figure out maximum jpeg, processed, and raw dimensions
2730 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
2731 it != mStreamInfo.end(); it++) {
2732
2733 // Input stream doesn't have valid stream_type
2734 if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
2735 continue;
2736
2737 int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
2738 if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
2739 if (dimension > maxJpegDim)
2740 maxJpegDim = dimension;
2741 } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
2742 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
2743 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
2744 if (dimension > maxRawDim)
2745 maxRawDim = dimension;
2746 } else {
2747 if (dimension > maxProcessedDim)
2748 maxProcessedDim = dimension;
2749 }
2750 }
2751
2752 size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
2753 MAX_SIZES_CNT);
2754
2755 //Assume all jpeg dimensions are in processed dimensions.
2756 if (maxJpegDim > maxProcessedDim)
2757 maxProcessedDim = maxJpegDim;
2758 //Find the smallest raw dimension that is greater or equal to jpeg dimension
2759 if (maxProcessedDim > maxRawDim) {
2760 maxRawDim = INT32_MAX;
2761
2762 for (size_t i = 0; i < count; i++) {
2763 int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
2764 gCamCapability[mCameraId]->raw_dim[i].height;
2765 if (dimension >= maxProcessedDim && dimension < maxRawDim)
2766 maxRawDim = dimension;
2767 }
2768 }
2769
2770 //Find minimum durations for processed, jpeg, and raw
2771 for (size_t i = 0; i < count; i++) {
2772 if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
2773 gCamCapability[mCameraId]->raw_dim[i].height) {
2774 mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
2775 break;
2776 }
2777 }
2778 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
2779 for (size_t i = 0; i < count; i++) {
2780 if (maxProcessedDim ==
2781 gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
2782 gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
2783 mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
2784 mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
2785 break;
2786 }
2787 }
2788}
2789
2790/*===========================================================================
2791 * FUNCTION : getMinFrameDuration
2792 *
 2793 * DESCRIPTION: get the minimum frame duration based on the minimum frame durations
 2794 *              of the currently configured streams and the current request configuration.
 2795 *
 2796 * PARAMETERS : @request: request sent by the framework
 2797 *
 2798 * RETURN     : min frame duration for a particular request
2799 *
2800 *==========================================================================*/
2801int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
2802{
2803 bool hasJpegStream = false;
2804 bool hasRawStream = false;
2805 for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
2806 const camera3_stream_t *stream = request->output_buffers[i].stream;
2807 if (stream->format == HAL_PIXEL_FORMAT_BLOB)
2808 hasJpegStream = true;
2809 else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
2810 stream->format == HAL_PIXEL_FORMAT_RAW10 ||
2811 stream->format == HAL_PIXEL_FORMAT_RAW16)
2812 hasRawStream = true;
2813 }
2814
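    // Illustration (assumed durations): with mMinProcessedFrameDuration = 33.3ms,
    // mMinRawFrameDuration = 50ms and mMinJpegFrameDuration = 100ms, a request
    // without a JPEG buffer returns 50ms, while one that includes a JPEG buffer
    // returns 100ms.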
2815 if (!hasJpegStream)
2816 return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
2817 else
2818 return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
2819}
2820
2821/*===========================================================================
2822 * FUNCTION : handleBuffersDuringFlushLock
2823 *
2824 * DESCRIPTION: Account for buffers returned from back-end during flush
2825 * This function is executed while mMutex is held by the caller.
2826 *
2827 * PARAMETERS :
2828 * @buffer: image buffer for the callback
2829 *
2830 * RETURN :
2831 *==========================================================================*/
2832void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
2833{
2834 bool buffer_found = false;
2835 for (List<PendingBuffersInRequest>::iterator req =
2836 mPendingBuffersMap.mPendingBuffersInRequest.begin();
2837 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
2838 for (List<PendingBufferInfo>::iterator i =
2839 req->mPendingBufferList.begin();
2840 i != req->mPendingBufferList.end(); i++) {
2841 if (i->buffer == buffer->buffer) {
2842 mPendingBuffersMap.numPendingBufsAtFlush--;
2843 LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
2844 buffer->buffer, req->frame_number,
2845 mPendingBuffersMap.numPendingBufsAtFlush);
2846 buffer_found = true;
2847 break;
2848 }
2849 }
2850 if (buffer_found) {
2851 break;
2852 }
2853 }
2854 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
2855 //signal the flush()
2856 LOGD("All buffers returned to HAL. Continue flush");
2857 pthread_cond_signal(&mBuffersCond);
2858 }
2859}
2860
Thierry Strudel3d639192016-09-09 11:52:26 -07002861/*===========================================================================
2862 * FUNCTION : handleBatchMetadata
2863 *
2864 * DESCRIPTION: Handles metadata buffer callback in batch mode
2865 *
2866 * PARAMETERS : @metadata_buf: metadata buffer
2867 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
2868 * the meta buf in this method
2869 *
2870 * RETURN :
2871 *
2872 *==========================================================================*/
2873void QCamera3HardwareInterface::handleBatchMetadata(
2874 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
2875{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002876 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BATCH_METADATA);
Thierry Strudel3d639192016-09-09 11:52:26 -07002877
2878 if (NULL == metadata_buf) {
2879 LOGE("metadata_buf is NULL");
2880 return;
2881 }
2882    /* In batch mode, the metadata will contain the frame number and timestamp of
2883 * the last frame in the batch. Eg: a batch containing buffers from request
2884 * 5,6,7 and 8 will have frame number and timestamp corresponding to 8.
2885     * multiple process_capture_requests => 1 set_param => 1 handleBatchMetadata =>
2886 * multiple process_capture_results */
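    /* Example (illustrative, assuming a batch of 4 requests in HFR mode): if the batch
     * covered requests 5..8, the metadata reports last_frame_number = 8; the code below
     * derives first_frame_number = 5 from mPendingBatchMap and fans the single metadata
     * buffer out into four process_capture_results. */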
2887 metadata_buffer_t *metadata =
2888 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
2889 int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
2890 uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
2891 uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
2892 uint32_t frame_number = 0, urgent_frame_number = 0;
2893 int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
2894 bool invalid_metadata = false;
2895 size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
2896 size_t loopCount = 1;
Thierry Strudel54dc9782017-02-15 12:12:10 -08002897 bool is_metabuf_queued = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07002898
2899 int32_t *p_frame_number_valid =
2900 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
2901 uint32_t *p_frame_number =
2902 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
2903 int64_t *p_capture_time =
2904 POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
2905 int32_t *p_urgent_frame_number_valid =
2906 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
2907 uint32_t *p_urgent_frame_number =
2908 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
2909
2910 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
2911 (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
2912 (NULL == p_urgent_frame_number)) {
2913 LOGE("Invalid metadata");
2914 invalid_metadata = true;
2915 } else {
2916 frame_number_valid = *p_frame_number_valid;
2917 last_frame_number = *p_frame_number;
2918 last_frame_capture_time = *p_capture_time;
2919 urgent_frame_number_valid = *p_urgent_frame_number_valid;
2920 last_urgent_frame_number = *p_urgent_frame_number;
2921 }
2922
2923 /* In batchmode, when no video buffers are requested, set_parms are sent
2924 * for every capture_request. The difference between consecutive urgent
2925 * frame numbers and frame numbers should be used to interpolate the
2926 * corresponding frame numbers and time stamps */
2927 pthread_mutex_lock(&mMutex);
2928 if (urgent_frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07002929 ssize_t idx = mPendingBatchMap.indexOfKey(last_urgent_frame_number);
2930 if(idx < 0) {
2931 LOGE("Invalid urgent frame number received: %d. Irrecoverable error",
2932 last_urgent_frame_number);
2933 mState = ERROR;
2934 pthread_mutex_unlock(&mMutex);
2935 return;
2936 }
2937 first_urgent_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07002938 urgentFrameNumDiff = last_urgent_frame_number + 1 -
2939 first_urgent_frame_number;
2940
2941 LOGD("urgent_frm: valid: %d frm_num: %d - %d",
2942 urgent_frame_number_valid,
2943 first_urgent_frame_number, last_urgent_frame_number);
2944 }
2945
2946 if (frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07002947 ssize_t idx = mPendingBatchMap.indexOfKey(last_frame_number);
2948 if(idx < 0) {
2949 LOGE("Invalid frame number received: %d. Irrecoverable error",
2950 last_frame_number);
2951 mState = ERROR;
2952 pthread_mutex_unlock(&mMutex);
2953 return;
2954 }
2955 first_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07002956 frameNumDiff = last_frame_number + 1 -
2957 first_frame_number;
2958 mPendingBatchMap.removeItem(last_frame_number);
2959
2960 LOGD("frm: valid: %d frm_num: %d - %d",
2961 frame_number_valid,
2962 first_frame_number, last_frame_number);
2963
2964 }
2965 pthread_mutex_unlock(&mMutex);
2966
2967 if (urgent_frame_number_valid || frame_number_valid) {
2968 loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
2969 if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
2970 LOGE("urgentFrameNumDiff: %d urgentFrameNum: %d",
2971 urgentFrameNumDiff, last_urgent_frame_number);
2972 if (frameNumDiff > MAX_HFR_BATCH_SIZE)
2973 LOGE("frameNumDiff: %d frameNum: %d",
2974 frameNumDiff, last_frame_number);
2975 }
2976
2977 for (size_t i = 0; i < loopCount; i++) {
2978 /* handleMetadataWithLock is called even for invalid_metadata for
2979 * pipeline depth calculation */
2980 if (!invalid_metadata) {
2981 /* Infer frame number. Batch metadata contains frame number of the
2982 * last frame */
2983 if (urgent_frame_number_valid) {
2984 if (i < urgentFrameNumDiff) {
2985 urgent_frame_number =
2986 first_urgent_frame_number + i;
2987 LOGD("inferred urgent frame_number: %d",
2988 urgent_frame_number);
2989 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
2990 CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
2991 } else {
2992 /* This is to handle when urgentFrameNumDiff < frameNumDiff */
2993 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
2994 CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
2995 }
2996 }
2997
2998 /* Infer frame number. Batch metadata contains frame number of the
2999 * last frame */
3000 if (frame_number_valid) {
3001 if (i < frameNumDiff) {
3002 frame_number = first_frame_number + i;
3003 LOGD("inferred frame_number: %d", frame_number);
3004 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3005 CAM_INTF_META_FRAME_NUMBER, frame_number);
3006 } else {
3007 /* This is to handle when urgentFrameNumDiff > frameNumDiff */
3008 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3009 CAM_INTF_META_FRAME_NUMBER_VALID, 0);
3010 }
3011 }
3012
3013 if (last_frame_capture_time) {
3014 //Infer timestamp
3015 first_frame_capture_time = last_frame_capture_time -
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003016 (((loopCount - 1) * NSEC_PER_SEC) / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003017 capture_time =
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003018 first_frame_capture_time + (i * NSEC_PER_SEC / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003019 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3020 CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
3021 LOGD("batch capture_time: %lld, capture_time: %lld",
3022 last_frame_capture_time, capture_time);
3023 }
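            // Timestamp interpolation example (hypothetical values): at mHFRVideoFps = 120
            // the per-frame spacing is NSEC_PER_SEC/120 ~= 8.33ms, so with loopCount = 4
            // and a batch timestamp T the inferred timestamps are roughly T-25ms, T-16.7ms,
            // T-8.3ms and T.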
3024 }
3025 pthread_mutex_lock(&mMutex);
3026 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003027 false /* free_and_bufdone_meta_buf */,
Thierry Strudel54dc9782017-02-15 12:12:10 -08003028 (i == 0) /* first metadata in the batch metadata */,
3029                &is_metabuf_queued /* if metabuf is queued or not */);
Thierry Strudel3d639192016-09-09 11:52:26 -07003030 pthread_mutex_unlock(&mMutex);
3031 }
3032
3033 /* BufDone metadata buffer */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003034 if (free_and_bufdone_meta_buf && !is_metabuf_queued) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003035 mMetadataChannel->bufDone(metadata_buf);
3036 free(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003037 metadata_buf = NULL;
Thierry Strudel3d639192016-09-09 11:52:26 -07003038 }
3039}
3040
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003041void QCamera3HardwareInterface::notifyError(uint32_t frameNumber,
3042 camera3_error_msg_code_t errorCode)
3043{
3044 camera3_notify_msg_t notify_msg;
3045 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3046 notify_msg.type = CAMERA3_MSG_ERROR;
3047 notify_msg.message.error.error_code = errorCode;
3048 notify_msg.message.error.error_stream = NULL;
3049 notify_msg.message.error.frame_number = frameNumber;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003050 orchestrateNotify(&notify_msg);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003051
3052 return;
3053}
Thierry Strudel3d639192016-09-09 11:52:26 -07003054/*===========================================================================
3055 * FUNCTION : handleMetadataWithLock
3056 *
3057 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
3058 *
3059 * PARAMETERS : @metadata_buf: metadata buffer
3060 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3061 * the meta buf in this method
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003062 * @firstMetadataInBatch: Boolean to indicate whether this is the
3063 * first metadata in a batch. Valid only for batch mode
Thierry Strudel54dc9782017-02-15 12:12:10 -08003064 * @p_is_metabuf_queued: Pointer to Boolean to check if metadata
3065 * buffer is enqueued or not.
Thierry Strudel3d639192016-09-09 11:52:26 -07003066 *
3067 * RETURN :
3068 *
3069 *==========================================================================*/
3070void QCamera3HardwareInterface::handleMetadataWithLock(
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003071 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
Thierry Strudel54dc9782017-02-15 12:12:10 -08003072 bool firstMetadataInBatch, bool *p_is_metabuf_queued)
Thierry Strudel3d639192016-09-09 11:52:26 -07003073{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003074 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_METADATA_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003075 if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
3076 //during flush do not send metadata from this thread
3077 LOGD("not sending metadata during flush or when mState is error");
3078 if (free_and_bufdone_meta_buf) {
3079 mMetadataChannel->bufDone(metadata_buf);
3080 free(metadata_buf);
3081 }
3082 return;
3083 }
3084
3085 //not in flush
3086 metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3087 int32_t frame_number_valid, urgent_frame_number_valid;
3088 uint32_t frame_number, urgent_frame_number;
3089 int64_t capture_time;
3090 nsecs_t currentSysTime;
3091
3092 int32_t *p_frame_number_valid =
3093 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3094 uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3095 int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3096 int32_t *p_urgent_frame_number_valid =
3097 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3098 uint32_t *p_urgent_frame_number =
3099 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3100 IF_META_AVAILABLE(cam_stream_ID_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
3101 metadata) {
3102 LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
3103 *p_frame_number_valid, *p_frame_number);
3104 }
3105
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003106 camera_metadata_t *resultMetadata = nullptr;
3107
Thierry Strudel3d639192016-09-09 11:52:26 -07003108 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
3109 (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
3110 LOGE("Invalid metadata");
3111 if (free_and_bufdone_meta_buf) {
3112 mMetadataChannel->bufDone(metadata_buf);
3113 free(metadata_buf);
3114 }
3115 goto done_metadata;
3116 }
3117 frame_number_valid = *p_frame_number_valid;
3118 frame_number = *p_frame_number;
3119 capture_time = *p_capture_time;
3120 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3121 urgent_frame_number = *p_urgent_frame_number;
3122 currentSysTime = systemTime(CLOCK_MONOTONIC);
3123
3124 // Detect if buffers from any requests are overdue
3125 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003126 int64_t timeout;
3127 {
3128 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
3129 // If there is a pending HDR+ request, the following requests may be blocked until the
3130 // HDR+ request is done. So allow a longer timeout.
3131 timeout = (mHdrPlusPendingRequests.size() > 0) ?
3132 MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT : MISSING_REQUEST_BUF_TIMEOUT;
3133 }
3134
3135 if ( (currentSysTime - req.timestamp) > s2ns(timeout) ) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003136 for (auto &missed : req.mPendingBufferList) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003137 assert(missed.stream->priv);
3138 if (missed.stream->priv) {
3139 QCamera3Channel *ch = (QCamera3Channel *)(missed.stream->priv);
3140 assert(ch->mStreams[0]);
3141 if (ch->mStreams[0]) {
3142 LOGE("Cancel missing frame = %d, buffer = %p,"
3143 "stream type = %d, stream format = %d",
3144 req.frame_number, missed.buffer,
3145 ch->mStreams[0]->getMyType(), missed.stream->format);
3146 ch->timeoutFrame(req.frame_number);
3147 }
3148 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003149 }
3150 }
3151 }
3152 //Partial result on process_capture_result for timestamp
3153 if (urgent_frame_number_valid) {
3154 LOGD("valid urgent frame_number = %u, capture_time = %lld",
3155 urgent_frame_number, capture_time);
3156
3157        //Received an urgent frame number, handle it
3158 //using partial results
3159 for (pendingRequestIterator i =
3160 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3161 LOGD("Iterator Frame = %d urgent frame = %d",
3162 i->frame_number, urgent_frame_number);
3163
3164 if ((!i->input_buffer) && (i->frame_number < urgent_frame_number) &&
3165 (i->partial_result_cnt == 0)) {
3166 LOGE("Error: HAL missed urgent metadata for frame number %d",
3167 i->frame_number);
3168 }
3169
3170 if (i->frame_number == urgent_frame_number &&
3171 i->bUrgentReceived == 0) {
3172
3173 camera3_capture_result_t result;
3174 memset(&result, 0, sizeof(camera3_capture_result_t));
3175
3176 i->partial_result_cnt++;
3177 i->bUrgentReceived = 1;
3178 // Extract 3A metadata
3179 result.result =
3180 translateCbUrgentMetadataToResultMetadata(metadata);
3181 // Populate metadata result
3182 result.frame_number = urgent_frame_number;
3183 result.num_output_buffers = 0;
3184 result.output_buffers = NULL;
3185 result.partial_result = i->partial_result_cnt;
3186
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003187 if (mHdrPlusClient != nullptr) {
3188 // Notify HDR+ client about the partial metadata.
3189 mHdrPlusClient->notifyFrameMetadata(result.frame_number, *result.result,
3190 result.partial_result == PARTIAL_RESULT_COUNT);
3191 }
3192
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003193 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07003194 LOGD("urgent frame_number = %u, capture_time = %lld",
3195 result.frame_number, capture_time);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003196 if (mResetInstantAEC && mInstantAECSettledFrameNumber == 0) {
3197 // Instant AEC settled for this frame.
3198 LOGH("instant AEC settled for frame number %d", urgent_frame_number);
3199 mInstantAECSettledFrameNumber = urgent_frame_number;
3200 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003201 free_camera_metadata((camera_metadata_t *)result.result);
3202 break;
3203 }
3204 }
3205 }
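    // Note: the urgent (3A) metadata above is delivered as the first partial result; the
    // full result metadata for the same frame number is completed later in
    // handlePendingResultsWithLock, which is what PARTIAL_RESULT_COUNT accounts for.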
3206
3207 if (!frame_number_valid) {
3208 LOGD("Not a valid normal frame number, used as SOF only");
3209 if (free_and_bufdone_meta_buf) {
3210 mMetadataChannel->bufDone(metadata_buf);
3211 free(metadata_buf);
3212 }
3213 goto done_metadata;
3214 }
3215 LOGH("valid frame_number = %u, capture_time = %lld",
3216 frame_number, capture_time);
3217
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003218    // Check whether any stream buffer corresponding to this frame is dropped or not.
3219    // If dropped, send ERROR_BUFFER for the corresponding stream.
3220    // OR, if instant AEC is enabled, drop frames until AEC is settled.
3221 for (auto & pendingRequest : mPendingRequestsList) {
3222 if (p_cam_frame_drop || (mInstantAEC || pendingRequest.frame_number <
3223 mInstantAECSettledFrameNumber)) {
3224 camera3_notify_msg_t notify_msg = {};
3225 for (auto & buffer : pendingRequest.buffers) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003226 bool dropFrame = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003227 QCamera3ProcessingChannel *channel =
3228 (QCamera3ProcessingChannel *)buffer.stream->priv;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003229 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003230 if (p_cam_frame_drop) {
3231 for (uint32_t k = 0; k < p_cam_frame_drop->num_streams; k++) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003232 if (streamID == p_cam_frame_drop->stream_request[k].streamID) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003233 // Got the stream ID for drop frame.
3234 dropFrame = true;
3235 break;
3236 }
3237 }
3238 } else {
3239 // This is instant AEC case.
3240                    // This is the instant AEC case.
3241                    // For instant AEC, drop the stream until AEC is settled.
3242 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003243
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003244 if (dropFrame) {
3245 // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
3246 if (p_cam_frame_drop) {
3247 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003248 LOGE("Start of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003249 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003250 } else {
3251 // For instant AEC, inform frame drop and frame number
3252 LOGH("Start of reporting error frame#=%u for instant AEC, streamID=%u, "
3253 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003254 pendingRequest.frame_number, streamID,
3255 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003256 }
3257 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003258 notify_msg.message.error.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003259 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003260 notify_msg.message.error.error_stream = buffer.stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003261 orchestrateNotify(&notify_msg);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003262 if (p_cam_frame_drop) {
3263 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003264 LOGE("End of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003265 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003266 } else {
3267 // For instant AEC, inform frame drop and frame number
3268 LOGH("End of reporting error frame#=%u for instant AEC, streamID=%u, "
3269 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003270 pendingRequest.frame_number, streamID,
3271 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003272 }
3273 PendingFrameDropInfo PendingFrameDrop;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003274 PendingFrameDrop.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003275 PendingFrameDrop.stream_ID = streamID;
3276 // Add the Frame drop info to mPendingFrameDropList
3277 mPendingFrameDropList.push_back(PendingFrameDrop);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003278 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003279 }
3280 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003281 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003282
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003283 for (auto & pendingRequest : mPendingRequestsList) {
3284 // Find the pending request with the frame number.
3285 if (pendingRequest.frame_number == frame_number) {
3286 // Update the sensor timestamp.
3287 pendingRequest.timestamp = capture_time;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003288
Thierry Strudel3d639192016-09-09 11:52:26 -07003289
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003290 /* Set the timestamp in display metadata so that clients aware of
3291               private_handle such as VT can use these unmodified timestamps.
3292 Camera framework is unaware of this timestamp and cannot change this */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003293 updateTimeStampInPendingBuffers(pendingRequest.frame_number, pendingRequest.timestamp);
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003294
Thierry Strudel3d639192016-09-09 11:52:26 -07003295 // Find channel requiring metadata, meaning internal offline postprocess
3296 // is needed.
3297 //TODO: for now, we don't support two streams requiring metadata at the same time.
3298            // (because we are not making copies, and the metadata buffer is not reference counted.)
3299 bool internalPproc = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003300 for (pendingBufferIterator iter = pendingRequest.buffers.begin();
3301 iter != pendingRequest.buffers.end(); iter++) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003302 if (iter->need_metadata) {
3303 internalPproc = true;
3304 QCamera3ProcessingChannel *channel =
3305 (QCamera3ProcessingChannel *)iter->stream->priv;
3306 channel->queueReprocMetadata(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003307 if(p_is_metabuf_queued != NULL) {
3308 *p_is_metabuf_queued = true;
3309 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003310 break;
3311 }
3312 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003313 for (auto itr = pendingRequest.internalRequestList.begin();
3314 itr != pendingRequest.internalRequestList.end(); itr++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003315 if (itr->need_metadata) {
3316 internalPproc = true;
3317 QCamera3ProcessingChannel *channel =
3318 (QCamera3ProcessingChannel *)itr->stream->priv;
3319 channel->queueReprocMetadata(metadata_buf);
3320 break;
3321 }
3322 }
3323
Thierry Strudel54dc9782017-02-15 12:12:10 -08003324 saveExifParams(metadata);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003325 resultMetadata = translateFromHalMetadata(metadata,
3326 pendingRequest.timestamp, pendingRequest.request_id,
3327 pendingRequest.jpegMetadata, pendingRequest.pipeline_depth,
3328 pendingRequest.capture_intent,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07003329 pendingRequest.hybrid_ae_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08003330 /* DevCamDebug metadata translateFromHalMetadata function call*/
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003331 pendingRequest.DevCamDebug_meta_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08003332 /* DevCamDebug metadata end */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003333 internalPproc, pendingRequest.fwkCacMode,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003334 firstMetadataInBatch);
Thierry Strudel3d639192016-09-09 11:52:26 -07003335
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003336 updateFpsInPreviewBuffer(metadata, pendingRequest.frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003337
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003338 if (pendingRequest.blob_request) {
3339 //Dump tuning metadata if enabled and available
3340 char prop[PROPERTY_VALUE_MAX];
3341 memset(prop, 0, sizeof(prop));
3342 property_get("persist.camera.dumpmetadata", prop, "0");
3343 int32_t enabled = atoi(prop);
3344 if (enabled && metadata->is_tuning_params_valid) {
3345 dumpMetadataToFile(metadata->tuning_params,
3346 mMetaFrameCount,
3347 enabled,
3348 "Snapshot",
3349 frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003350 }
3351 }
3352
3353 if (!internalPproc) {
3354 LOGD("couldn't find need_metadata for this metadata");
3355 // Return metadata buffer
3356 if (free_and_bufdone_meta_buf) {
3357 mMetadataChannel->bufDone(metadata_buf);
3358 free(metadata_buf);
3359 }
3360 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003361
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003362 break;
Thierry Strudel3d639192016-09-09 11:52:26 -07003363 }
3364 }
3365
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003366 // Try to send out shutter callbacks and capture results.
3367 handlePendingResultsWithLock(frame_number, resultMetadata);
3368 return;
3369
Thierry Strudel3d639192016-09-09 11:52:26 -07003370done_metadata:
3371 for (pendingRequestIterator i = mPendingRequestsList.begin();
3372 i != mPendingRequestsList.end() ;i++) {
3373 i->pipeline_depth++;
3374 }
3375 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
3376 unblockRequestIfNecessary();
3377}
3378
3379/*===========================================================================
3380 * FUNCTION : hdrPlusPerfLock
3381 *
3382 * DESCRIPTION: perf lock for HDR+ using custom intent
3383 *
3384 * PARAMETERS : @metadata_buf: Metadata super_buf pointer
3385 *
3386 * RETURN : None
3387 *
3388 *==========================================================================*/
3389void QCamera3HardwareInterface::hdrPlusPerfLock(
3390 mm_camera_super_buf_t *metadata_buf)
3391{
3392 if (NULL == metadata_buf) {
3393 LOGE("metadata_buf is NULL");
3394 return;
3395 }
3396 metadata_buffer_t *metadata =
3397 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3398 int32_t *p_frame_number_valid =
3399 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3400 uint32_t *p_frame_number =
3401 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3402
3403 if (p_frame_number_valid == NULL || p_frame_number == NULL) {
3404 LOGE("%s: Invalid metadata", __func__);
3405 return;
3406 }
3407
3408 //acquire perf lock for 5 sec after the last HDR frame is captured
3409 if ((p_frame_number_valid != NULL) && *p_frame_number_valid) {
3410 if ((p_frame_number != NULL) &&
3411 (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003412 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT, HDR_PLUS_PERF_TIME_OUT);
Thierry Strudel3d639192016-09-09 11:52:26 -07003413 }
3414 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003415}
3416
3417/*===========================================================================
3418 * FUNCTION : handleInputBufferWithLock
3419 *
3420 * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
3421 *
3422 * PARAMETERS : @frame_number: frame number of the input buffer
3423 *
3424 * RETURN :
3425 *
3426 *==========================================================================*/
3427void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
3428{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003429 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_IN_BUF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003430 pendingRequestIterator i = mPendingRequestsList.begin();
3431 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
3432 i++;
3433 }
3434 if (i != mPendingRequestsList.end() && i->input_buffer) {
3435 //found the right request
3436 if (!i->shutter_notified) {
3437 CameraMetadata settings;
3438 camera3_notify_msg_t notify_msg;
3439 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3440 nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
3441 if(i->settings) {
3442 settings = i->settings;
3443 if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
3444 capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
3445 } else {
3446 LOGE("No timestamp in input settings! Using current one.");
3447 }
3448 } else {
3449 LOGE("Input settings missing!");
3450 }
3451
3452 notify_msg.type = CAMERA3_MSG_SHUTTER;
3453 notify_msg.message.shutter.frame_number = frame_number;
3454 notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003455 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07003456 i->shutter_notified = true;
3457 LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
3458 i->frame_number, notify_msg.message.shutter.timestamp);
3459 }
3460
3461 if (i->input_buffer->release_fence != -1) {
3462 int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
3463 close(i->input_buffer->release_fence);
3464 if (rc != OK) {
3465 LOGE("input buffer sync wait failed %d", rc);
3466 }
3467 }
3468
3469 camera3_capture_result result;
3470 memset(&result, 0, sizeof(camera3_capture_result));
3471 result.frame_number = frame_number;
3472 result.result = i->settings;
3473 result.input_buffer = i->input_buffer;
3474 result.partial_result = PARTIAL_RESULT_COUNT;
3475
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003476 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07003477 LOGD("Input request metadata and input buffer frame_number = %u",
3478 i->frame_number);
3479 i = erasePendingRequest(i);
3480 } else {
3481 LOGE("Could not find input request for frame number %d", frame_number);
3482 }
3483}
3484
3485/*===========================================================================
3486 * FUNCTION : handleBufferWithLock
3487 *
3488 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
3489 *
3490 * PARAMETERS : @buffer: image buffer for the callback
3491 * @frame_number: frame number of the image buffer
3492 *
3493 * RETURN :
3494 *
3495 *==========================================================================*/
3496void QCamera3HardwareInterface::handleBufferWithLock(
3497 camera3_stream_buffer_t *buffer, uint32_t frame_number)
3498{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003499 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BUF_LKD);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003500
3501 if (buffer->stream->format == HAL_PIXEL_FORMAT_BLOB) {
3502 mPerfLockMgr.releasePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
3503 }
3504
Thierry Strudel3d639192016-09-09 11:52:26 -07003505 /* Nothing to be done during error state */
3506 if ((ERROR == mState) || (DEINIT == mState)) {
3507 return;
3508 }
3509 if (mFlushPerf) {
3510 handleBuffersDuringFlushLock(buffer);
3511 return;
3512 }
3513 //not in flush
3514 // If the frame number doesn't exist in the pending request list,
3515 // directly send the buffer to the frameworks, and update pending buffers map
3516 // Otherwise, book-keep the buffer.
3517 pendingRequestIterator i = mPendingRequestsList.begin();
3518 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
3519 i++;
3520 }
3521 if (i == mPendingRequestsList.end()) {
3522 // Verify all pending requests frame_numbers are greater
3523 for (pendingRequestIterator j = mPendingRequestsList.begin();
3524 j != mPendingRequestsList.end(); j++) {
3525 if ((j->frame_number < frame_number) && !(j->input_buffer)) {
3526 LOGW("Error: pending live frame number %d is smaller than %d",
3527 j->frame_number, frame_number);
3528 }
3529 }
3530 camera3_capture_result_t result;
3531 memset(&result, 0, sizeof(camera3_capture_result_t));
3532 result.result = NULL;
3533 result.frame_number = frame_number;
3534 result.num_output_buffers = 1;
3535 result.partial_result = 0;
3536 for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
3537 m != mPendingFrameDropList.end(); m++) {
3538 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
3539 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
3540 if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
3541 buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
3542 LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
3543 frame_number, streamID);
3544 m = mPendingFrameDropList.erase(m);
3545 break;
3546 }
3547 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003548 buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -07003549 result.output_buffers = buffer;
3550 LOGH("result frame_number = %d, buffer = %p",
3551 frame_number, buffer->buffer);
3552
3553 mPendingBuffersMap.removeBuf(buffer->buffer);
3554
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003555 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07003556 } else {
3557 if (i->input_buffer) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003558 if (i->input_buffer->release_fence != -1) {
3559 int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
3560 close(i->input_buffer->release_fence);
3561 if (rc != OK) {
3562 LOGE("input buffer sync wait failed %d", rc);
3563 }
3564 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003565 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003566
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003567 // Put buffer into the pending request
3568 for (auto &requestedBuffer : i->buffers) {
3569 if (requestedBuffer.stream == buffer->stream) {
3570 if (requestedBuffer.buffer != nullptr) {
3571 LOGE("Error: buffer is already set");
3572 } else {
3573 requestedBuffer.buffer = (camera3_stream_buffer_t *)malloc(
3574 sizeof(camera3_stream_buffer_t));
3575 *(requestedBuffer.buffer) = *buffer;
3576 LOGH("cache buffer %p at result frame_number %u",
3577 buffer->buffer, frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003578 }
3579 }
3580 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003581
3582 if (i->input_buffer) {
3583 // For a reprocessing request, try to send out shutter callback and result metadata.
3584 handlePendingResultsWithLock(frame_number, nullptr);
3585 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003586 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003587
3588 if (mPreviewStarted == false) {
3589 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
3590 if ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask()) {
3591 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
3592 mPerfLockMgr.releasePerfLock(PERF_LOCK_OPEN_CAMERA);
3593 mPreviewStarted = true;
3594
3595 // Set power hint for preview
3596 mPerfLockMgr.acquirePerfLock(PERF_LOCK_POWERHINT_ENCODE, 0);
3597 }
3598 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003599}
3600
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003601void QCamera3HardwareInterface::handlePendingResultsWithLock(uint32_t frameNumber,
3602 const camera_metadata_t *resultMetadata)
3603{
3604 // Find the pending request for this result metadata.
3605 auto requestIter = mPendingRequestsList.begin();
3606 while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != frameNumber) {
3607 requestIter++;
3608 }
3609
3610 if (requestIter == mPendingRequestsList.end()) {
3611 ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, frameNumber);
3612 return;
3613 }
3614
3615 // Update the result metadata
3616 requestIter->resultMetadata = resultMetadata;
3617
3618 // Check what type of request this is.
3619 bool liveRequest = false;
3620 if (requestIter->hdrplus) {
3621 // HDR+ request doesn't have partial results.
3622 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
3623 } else if (requestIter->input_buffer != nullptr) {
3624 // Reprocessing request result is the same as settings.
3625 requestIter->resultMetadata = requestIter->settings;
3626 // Reprocessing request doesn't have partial results.
3627 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
3628 } else {
3629 liveRequest = true;
3630 requestIter->partial_result_cnt++;
3631 mPendingLiveRequest--;
3632
3633 // For a live request, send the metadata to HDR+ client.
3634 if (mHdrPlusClient != nullptr) {
3635 mHdrPlusClient->notifyFrameMetadata(frameNumber, *resultMetadata,
3636 requestIter->partial_result_cnt == PARTIAL_RESULT_COUNT);
3637 }
3638 }
3639
3640 // The pending requests are ordered by increasing frame numbers. The shutter callback and
3641 // result metadata are ready to be sent if all previous pending requests are ready to be sent.
3642 bool readyToSend = true;
3643
3644 // Iterate through the pending requests to send out shutter callbacks and results that are
3645 // ready. Also if this result metadata belongs to a live request, notify errors for previous
3646 // live requests that don't have result metadata yet.
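    // Illustrative example: if requests 10, 11 and 12 are pending and 12's metadata
    // arrives before 11's, request 12 is held back (readyToSend stays false past 11) so
    // that shutter callbacks and results still reach the framework in frame order.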
3647 auto iter = mPendingRequestsList.begin();
3648 while (iter != mPendingRequestsList.end()) {
3649 // Check if current pending request is ready. If it's not ready, the following pending
3650 // requests are also not ready.
3651 if (readyToSend && iter->resultMetadata == nullptr) {
3652 readyToSend = false;
3653 }
3654
3655 bool thisLiveRequest = iter->hdrplus == false && iter->input_buffer == nullptr;
3656
3657 std::vector<camera3_stream_buffer_t> outputBuffers;
3658
3659 camera3_capture_result_t result = {};
3660 result.frame_number = iter->frame_number;
3661 result.result = iter->resultMetadata;
3662 result.partial_result = iter->partial_result_cnt;
3663
3664 // If this pending buffer has result metadata, we may be able to send out shutter callback
3665 // and result metadata.
3666 if (iter->resultMetadata != nullptr) {
3667 if (!readyToSend) {
3668 // If any of the previous pending request is not ready, this pending request is
3669 // also not ready to send in order to keep shutter callbacks and result metadata
3670 // in order.
3671 iter++;
3672 continue;
3673 }
3674
3675 // Invoke shutter callback if not yet.
3676 if (!iter->shutter_notified) {
3677 int64_t timestamp = systemTime(CLOCK_MONOTONIC);
3678
3679 // Find the timestamp in HDR+ result metadata
3680 camera_metadata_ro_entry_t entry;
3681 status_t res = find_camera_metadata_ro_entry(iter->resultMetadata,
3682 ANDROID_SENSOR_TIMESTAMP, &entry);
3683 if (res != OK) {
3684 ALOGE("%s: Cannot find sensor timestamp for frame number %d: %s (%d)",
3685 __FUNCTION__, iter->frame_number, strerror(-res), res);
3686 } else {
3687 timestamp = entry.data.i64[0];
3688 }
3689
3690 camera3_notify_msg_t notify_msg = {};
3691 notify_msg.type = CAMERA3_MSG_SHUTTER;
3692 notify_msg.message.shutter.frame_number = iter->frame_number;
3693 notify_msg.message.shutter.timestamp = timestamp;
3694 orchestrateNotify(&notify_msg);
3695 iter->shutter_notified = true;
3696 }
3697
3698 result.input_buffer = iter->input_buffer;
3699
3700 // Prepare output buffer array
3701 for (auto bufferInfoIter = iter->buffers.begin();
3702 bufferInfoIter != iter->buffers.end(); bufferInfoIter++) {
3703 if (bufferInfoIter->buffer != nullptr) {
3704
3705 QCamera3Channel *channel =
3706 (QCamera3Channel *)bufferInfoIter->buffer->stream->priv;
3707 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
3708
3709 // Check if this buffer is a dropped frame.
3710 auto frameDropIter = mPendingFrameDropList.begin();
3711 while (frameDropIter != mPendingFrameDropList.end()) {
3712 if((frameDropIter->stream_ID == streamID) &&
3713 (frameDropIter->frame_number == frameNumber)) {
3714 bufferInfoIter->buffer->status = CAMERA3_BUFFER_STATUS_ERROR;
3715 LOGE("Stream STATUS_ERROR frame_number=%u, streamID=%u", frameNumber,
3716 streamID);
3717 mPendingFrameDropList.erase(frameDropIter);
3718 break;
3719 } else {
3720 frameDropIter++;
3721 }
3722 }
3723
3724 // Check buffer error status
3725 bufferInfoIter->buffer->status |= mPendingBuffersMap.getBufErrStatus(
3726 bufferInfoIter->buffer->buffer);
3727 mPendingBuffersMap.removeBuf(bufferInfoIter->buffer->buffer);
3728
3729 outputBuffers.push_back(*(bufferInfoIter->buffer));
3730 free(bufferInfoIter->buffer);
3731 bufferInfoIter->buffer = NULL;
3732 }
3733 }
3734
3735 result.output_buffers = outputBuffers.size() > 0 ? &outputBuffers[0] : nullptr;
3736 result.num_output_buffers = outputBuffers.size();
3737 } else if (iter->frame_number < frameNumber && liveRequest && thisLiveRequest) {
3738 // If the result metadata belongs to a live request, notify errors for previous pending
3739 // live requests.
3740 mPendingLiveRequest--;
3741
3742 CameraMetadata dummyMetadata;
3743 dummyMetadata.update(ANDROID_REQUEST_ID, &(iter->request_id), 1);
3744 result.result = dummyMetadata.release();
3745
3746 notifyError(iter->frame_number, CAMERA3_MSG_ERROR_RESULT);
3747 } else {
3748 iter++;
3749 continue;
3750 }
3751
3752 orchestrateResult(&result);
3753
3754 // For reprocessing, result metadata is the same as settings so do not free it here to
3755 // avoid double free.
3756 if (result.result != iter->settings) {
3757 free_camera_metadata((camera_metadata_t *)result.result);
3758 }
3759 iter->resultMetadata = nullptr;
3760 iter = erasePendingRequest(iter);
3761 }
3762
3763 if (liveRequest) {
3764 for (auto &iter : mPendingRequestsList) {
3765 // Increment pipeline depth for the following pending requests.
3766 if (iter.frame_number > frameNumber) {
3767 iter.pipeline_depth++;
3768 }
3769 }
3770 }
3771
3772 unblockRequestIfNecessary();
3773}
3774
Thierry Strudel3d639192016-09-09 11:52:26 -07003775/*===========================================================================
3776 * FUNCTION : unblockRequestIfNecessary
3777 *
3778 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
3779 * that mMutex is held when this function is called.
3780 *
3781 * PARAMETERS :
3782 *
3783 * RETURN :
3784 *
3785 *==========================================================================*/
3786void QCamera3HardwareInterface::unblockRequestIfNecessary()
3787{
3788 // Unblock process_capture_request
3789 pthread_cond_signal(&mRequestCond);
3790}
3791
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003792/*===========================================================================
3793 * FUNCTION : isHdrSnapshotRequest
3794 *
3795 * DESCRIPTION: Function to determine if the request is for a HDR snapshot
3796 *
3797 * PARAMETERS : camera3 request structure
3798 *
3799 * RETURN : boolean decision variable
3800 *
3801 *==========================================================================*/
3802bool QCamera3HardwareInterface::isHdrSnapshotRequest(camera3_capture_request *request)
3803{
3804 if (request == NULL) {
3805 LOGE("Invalid request handle");
3806 assert(0);
3807 return false;
3808 }
3809
3810 if (!mForceHdrSnapshot) {
3811 CameraMetadata frame_settings;
3812 frame_settings = request->settings;
3813
3814 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
3815 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
3816 if (metaMode != ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
3817 return false;
3818 }
3819 } else {
3820 return false;
3821 }
3822
3823 if (frame_settings.exists(ANDROID_CONTROL_SCENE_MODE)) {
3824 uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
3825 if (fwk_sceneMode != ANDROID_CONTROL_SCENE_MODE_HDR) {
3826 return false;
3827 }
3828 } else {
3829 return false;
3830 }
3831 }
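    // Reaching this point means HDR scene mode is selected (or HDR snapshot is forced);
    // the request is only treated as an HDR snapshot if it also contains a BLOB (JPEG)
    // output buffer, which the loop below checks.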
3832
3833 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
3834 if (request->output_buffers[i].stream->format
3835 == HAL_PIXEL_FORMAT_BLOB) {
3836 return true;
3837 }
3838 }
3839
3840 return false;
3841}
3842/*===========================================================================
3843 * FUNCTION : orchestrateRequest
3844 *
3845 * DESCRIPTION: Orchestrates a capture request from camera service
3846 *
3847 * PARAMETERS :
3848 * @request : request from framework to process
3849 *
3850 * RETURN : Error status codes
3851 *
3852 *==========================================================================*/
3853int32_t QCamera3HardwareInterface::orchestrateRequest(
3854 camera3_capture_request_t *request)
3855{
3856
3857 uint32_t originalFrameNumber = request->frame_number;
3858 uint32_t originalOutputCount = request->num_output_buffers;
3859 const camera_metadata_t *original_settings = request->settings;
3860 List<InternalRequest> internallyRequestedStreams;
3861 List<InternalRequest> emptyInternalList;
3862
3863 if (isHdrSnapshotRequest(request) && request->input_buffer == NULL) {
3864 LOGD("Framework requested:%d buffers in HDR snapshot", request->num_output_buffers);
3865 uint32_t internalFrameNumber;
3866 CameraMetadata modified_meta;
3867
3868
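        /* Overview of the orchestration below: the single framework HDR request is
         * expanded into a sequence of internal requests. For each exposure compensation
         * step (GB_HDR_HALF_STEP_EV, 0, GB_HDR_2X_STEP_EV) a metering-only request lets
         * AE settle before the actual capture; only one capture carries the framework's
         * original output buffers and frame number, while the rest use internally
         * generated frame numbers whose results are dropped before reaching the
         * framework. */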
3869 /* Add Blob channel to list of internally requested streams */
3870 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
3871 if (request->output_buffers[i].stream->format
3872 == HAL_PIXEL_FORMAT_BLOB) {
3873 InternalRequest streamRequested;
3874 streamRequested.meteringOnly = 1;
3875 streamRequested.need_metadata = 0;
3876 streamRequested.stream = request->output_buffers[i].stream;
3877 internallyRequestedStreams.push_back(streamRequested);
3878 }
3879 }
3880 request->num_output_buffers = 0;
3881 auto itr = internallyRequestedStreams.begin();
3882
3883        /* Modify settings to set exposure compensation */
3884 modified_meta = request->settings;
3885 int32_t expCompensation = GB_HDR_HALF_STEP_EV;
3886 uint8_t aeLock = 1;
3887 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
3888 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
3889 camera_metadata_t *modified_settings = modified_meta.release();
3890 request->settings = modified_settings;
3891
3892 /* Capture Settling & -2x frame */
3893 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
3894 request->frame_number = internalFrameNumber;
3895 processCaptureRequest(request, internallyRequestedStreams);
3896
3897 request->num_output_buffers = originalOutputCount;
3898 _orchestrationDb.allocStoreInternalFrameNumber(originalFrameNumber, internalFrameNumber);
3899 request->frame_number = internalFrameNumber;
3900 processCaptureRequest(request, emptyInternalList);
3901 request->num_output_buffers = 0;
3902
3903 modified_meta = modified_settings;
3904 expCompensation = 0;
3905 aeLock = 1;
3906 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
3907 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
3908 modified_settings = modified_meta.release();
3909 request->settings = modified_settings;
3910
3911 /* Capture Settling & 0X frame */
3912
3913 itr = internallyRequestedStreams.begin();
3914 if (itr == internallyRequestedStreams.end()) {
3915 LOGE("Error Internally Requested Stream list is empty");
3916 assert(0);
3917 } else {
3918 itr->need_metadata = 0;
3919 itr->meteringOnly = 1;
3920 }
3921
3922 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
3923 request->frame_number = internalFrameNumber;
3924 processCaptureRequest(request, internallyRequestedStreams);
3925
3926 itr = internallyRequestedStreams.begin();
3927 if (itr == internallyRequestedStreams.end()) {
3928 ALOGE("Error Internally Requested Stream list is empty");
3929 assert(0);
3930 } else {
3931 itr->need_metadata = 1;
3932 itr->meteringOnly = 0;
3933 }
3934
3935 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
3936 request->frame_number = internalFrameNumber;
3937 processCaptureRequest(request, internallyRequestedStreams);
3938
3939 /* Capture 2X frame*/
3940 modified_meta = modified_settings;
3941 expCompensation = GB_HDR_2X_STEP_EV;
3942 aeLock = 1;
3943 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
3944 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
3945 modified_settings = modified_meta.release();
3946 request->settings = modified_settings;
3947
3948 itr = internallyRequestedStreams.begin();
3949 if (itr == internallyRequestedStreams.end()) {
3950 ALOGE("Error Internally Requested Stream list is empty");
3951 assert(0);
3952 } else {
3953 itr->need_metadata = 0;
3954 itr->meteringOnly = 1;
3955 }
3956 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
3957 request->frame_number = internalFrameNumber;
3958 processCaptureRequest(request, internallyRequestedStreams);
3959
3960 itr = internallyRequestedStreams.begin();
3961 if (itr == internallyRequestedStreams.end()) {
3962 ALOGE("Error Internally Requested Stream list is empty");
3963 assert(0);
3964 } else {
3965 itr->need_metadata = 1;
3966 itr->meteringOnly = 0;
3967 }
3968
3969 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
3970 request->frame_number = internalFrameNumber;
3971 processCaptureRequest(request, internallyRequestedStreams);
3972
3973
3974 /* Capture 2X on original streaming config*/
3975 internallyRequestedStreams.clear();
3976
3977 /* Restore original settings pointer */
3978 request->settings = original_settings;
3979 } else {
3980 uint32_t internalFrameNumber;
3981 _orchestrationDb.allocStoreInternalFrameNumber(request->frame_number, internalFrameNumber);
3982 request->frame_number = internalFrameNumber;
3983 return processCaptureRequest(request, internallyRequestedStreams);
3984 }
3985
3986 return NO_ERROR;
3987}
3988
3989/*===========================================================================
3990 * FUNCTION : orchestrateResult
3991 *
3992 * DESCRIPTION: Orchestrates a capture result to camera service
3993 *
3994 * PARAMETERS :
3995 *   @result : capture result to send to the framework
3996 *
3997 * RETURN :
3998 *
3999 *==========================================================================*/
4000void QCamera3HardwareInterface::orchestrateResult(
4001 camera3_capture_result_t *result)
4002{
4003 uint32_t frameworkFrameNumber;
4004 int32_t rc = _orchestrationDb.getFrameworkFrameNumber(result->frame_number,
4005 frameworkFrameNumber);
4006 if (rc != NO_ERROR) {
4007 LOGE("Cannot find translated frameworkFrameNumber");
4008 assert(0);
4009 } else {
4010 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004011 LOGD("Internal Request drop the result");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004012 } else {
4013 result->frame_number = frameworkFrameNumber;
4014 mCallbackOps->process_capture_result(mCallbackOps, result);
4015 }
4016 }
4017}
4018
4019/*===========================================================================
4020 * FUNCTION : orchestrateNotify
4021 *
4022 * DESCRIPTION: Orchestrates a notify to camera service
4023 *
4024 * PARAMETERS :
4025 *   @notify_msg : notify message to send to the framework
4026 *
4027 * RETURN :
4028 *
4029 *==========================================================================*/
4030void QCamera3HardwareInterface::orchestrateNotify(camera3_notify_msg_t *notify_msg)
4031{
4032 uint32_t frameworkFrameNumber;
4033 uint32_t internalFrameNumber = notify_msg->message.shutter.frame_number;
4034 int32_t rc = _orchestrationDb.getFrameworkFrameNumber(internalFrameNumber,
4035 frameworkFrameNumber);
4036 if (rc != NO_ERROR) {
4037 LOGE("Cannot find translated frameworkFrameNumber");
4038 assert(0);
4039 } else {
4040 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004041 LOGD("Internal Request drop the notifyCb");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004042 } else {
4043 notify_msg->message.shutter.frame_number = frameworkFrameNumber;
4044 mCallbackOps->notify(mCallbackOps, notify_msg);
4045 }
4046 }
4047}
4048
4049/*===========================================================================
4050 * FUNCTION : FrameNumberRegistry
4051 *
4052 * DESCRIPTION: Constructor
4053 *
4054 * PARAMETERS :
4055 *
4056 * RETURN :
4057 *
4058 *==========================================================================*/
4059FrameNumberRegistry::FrameNumberRegistry()
4060{
4061 _nextFreeInternalNumber = INTERNAL_FRAME_STARTING_NUMBER;
4062}
4063
4064/*===========================================================================
4065 * FUNCTION : ~FrameNumberRegistry
4066 *
4067 * DESCRIPTION: Destructor
4068 *
4069 * PARAMETERS :
4070 *
4071 * RETURN :
4072 *
4073 *==========================================================================*/
4074FrameNumberRegistry::~FrameNumberRegistry()
4075{
4076}
4077
4078/*===========================================================================
4079 * FUNCTION : PurgeOldEntriesLocked
4080 *
4081 * DESCRIPTION: Maintenance function to trigger LRU cleanup mechanism
4082 *
4083 * PARAMETERS :
4084 *
4085 * RETURN : NONE
4086 *
4087 *==========================================================================*/
4088void FrameNumberRegistry::purgeOldEntriesLocked()
4089{
4090 while (_register.begin() != _register.end()) {
4091 auto itr = _register.begin();
4092 if (itr->first < (_nextFreeInternalNumber - FRAME_REGISTER_LRU_SIZE)) {
4093 _register.erase(itr);
4094 } else {
4095 return;
4096 }
4097 }
4098}
4099
4100/*===========================================================================
4101 * FUNCTION : allocStoreInternalFrameNumber
4102 *
4103 * DESCRIPTION: Method to note down a framework request and associate a new
4104 * internal request number against it
4105 *
4106 * PARAMETERS :
4107 * @fFrameNumber: Identifier given by framework
4108 * @internalFN : Output parameter which will have the newly generated internal
4109 * entry
4110 *
4111 * RETURN : Error code
4112 *
4113 *==========================================================================*/
4114int32_t FrameNumberRegistry::allocStoreInternalFrameNumber(uint32_t frameworkFrameNumber,
4115 uint32_t &internalFrameNumber)
4116{
4117 Mutex::Autolock lock(mRegistryLock);
4118 internalFrameNumber = _nextFreeInternalNumber++;
4119 LOGD("Storing ff#:%d, with internal:%d", frameworkFrameNumber, internalFrameNumber);
4120 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, frameworkFrameNumber));
4121 purgeOldEntriesLocked();
4122 return NO_ERROR;
4123}
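/* Usage sketch (hypothetical values): a framework request with frame number 100 might be
 * stored against internal frame number 1000005; results and notifies produced under the
 * internal number are translated back to 100 via getFrameworkFrameNumber() before being
 * delivered to the framework. */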
4124
4125/*===========================================================================
4126 * FUNCTION : generateStoreInternalFrameNumber
4127 *
4128 * DESCRIPTION: Method to associate a new internal request number independent
4129 *              of any association with framework requests
4130 *
4131 * PARAMETERS :
4132 *   @internalFrame#: Output parameter which will have the newly generated internal frame number
4133 *
4134 *
4135 * RETURN : Error code
4136 *
4137 *==========================================================================*/
4138int32_t FrameNumberRegistry::generateStoreInternalFrameNumber(uint32_t &internalFrameNumber)
4139{
4140 Mutex::Autolock lock(mRegistryLock);
4141 internalFrameNumber = _nextFreeInternalNumber++;
4142 LOGD("Generated internal framenumber:%d", internalFrameNumber);
4143 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, EMPTY_FRAMEWORK_FRAME_NUMBER));
4144 purgeOldEntriesLocked();
4145 return NO_ERROR;
4146}
4147
4148/*===========================================================================
4149 * FUNCTION : getFrameworkFrameNumber
4150 *
4151 * DESCRIPTION: Method to query the framework framenumber given an internal #
4152 *
4153 * PARAMETERS :
4154 * @internalFrame#: Internal reference
4155 * @frameworkframenumber: Output parameter holding framework frame entry
4156 *
4157 * RETURN : Error code
4158 *
4159 *==========================================================================*/
4160int32_t FrameNumberRegistry::getFrameworkFrameNumber(uint32_t internalFrameNumber,
4161 uint32_t &frameworkFrameNumber)
4162{
4163 Mutex::Autolock lock(mRegistryLock);
4164 auto itr = _register.find(internalFrameNumber);
4165 if (itr == _register.end()) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004166 LOGE("Cannot find internal#: %d", internalFrameNumber);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004167 return -ENOENT;
4168 }
4169
4170 frameworkFrameNumber = itr->second;
4171 purgeOldEntriesLocked();
4172 return NO_ERROR;
4173}
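/* Illustrative usage sketch (not part of the HAL flow): the registry gives
 * every request a monotonically increasing internal frame number so that
 * framework requests and internally generated requests share one numbering
 * space. "registry" below is a hypothetical FrameNumberRegistry instance:
 *
 *   uint32_t internalFN, frameworkFN;
 *   registry.allocStoreInternalFrameNumber(request->frame_number, internalFN);
 *   ...
 *   registry.getFrameworkFrameNumber(internalFN, frameworkFN);
 *
 * Entries older than FRAME_REGISTER_LRU_SIZE internal numbers are purged on
 * every insert and lookup.
 */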
Thierry Strudel3d639192016-09-09 11:52:26 -07004174
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004175status_t QCamera3HardwareInterface::fillPbStreamConfig(
4176 pbcamera::StreamConfiguration *config, uint32_t pbStreamId, int pbStreamFormat,
4177 QCamera3Channel *channel, uint32_t streamIndex) {
4178 if (config == nullptr) {
4179 LOGE("%s: config is null", __FUNCTION__);
4180 return BAD_VALUE;
4181 }
4182
4183 if (channel == nullptr) {
4184 LOGE("%s: channel is null", __FUNCTION__);
4185 return BAD_VALUE;
4186 }
4187
4188 QCamera3Stream *stream = channel->getStreamByIndex(streamIndex);
4189 if (stream == nullptr) {
4190 LOGE("%s: Failed to get stream %d in channel.", __FUNCTION__, streamIndex);
4191 return NAME_NOT_FOUND;
4192 }
4193
4194 const cam_stream_info_t* streamInfo = stream->getStreamInfo();
4195 if (streamInfo == nullptr) {
4196 LOGE("%s: Failed to get stream info for stream %d in channel.", __FUNCTION__, streamIndex);
4197 return NAME_NOT_FOUND;
4198 }
4199
4200 config->id = pbStreamId;
4201 config->image.width = streamInfo->dim.width;
4202 config->image.height = streamInfo->dim.height;
4203 config->image.padding = 0;
4204 config->image.format = pbStreamFormat;
4205
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004206 uint32_t totalPlaneSize = 0;
4207
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004208 // Fill plane information.
4209 for (uint32_t i = 0; i < streamInfo->buf_planes.plane_info.num_planes; i++) {
4210 pbcamera::PlaneConfiguration plane;
4211 plane.stride = streamInfo->buf_planes.plane_info.mp[i].stride_in_bytes;
4212 plane.scanline = streamInfo->buf_planes.plane_info.mp[i].scanline;
4213 config->image.planes.push_back(plane);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004214
4215 totalPlaneSize += (plane.stride * plane.scanline);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004216 }
4217
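    // Padding is whatever remains of the total frame length after accounting
    // for every plane's stride * scanline.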
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004218 config->image.padding = streamInfo->buf_planes.plane_info.frame_len - totalPlaneSize;
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004219 return OK;
4220}
4221
Thierry Strudel3d639192016-09-09 11:52:26 -07004222/*===========================================================================
4223 * FUNCTION : processCaptureRequest
4224 *
4225 * DESCRIPTION: process a capture request from camera service
4226 *
4227 * PARAMETERS :
4228 * @request : request from framework to process
4229 *
4230 * RETURN :
4231 *
4232 *==========================================================================*/
4233int QCamera3HardwareInterface::processCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004234 camera3_capture_request_t *request,
4235 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07004236{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004237 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PROC_CAP_REQ);
Thierry Strudel3d639192016-09-09 11:52:26 -07004238 int rc = NO_ERROR;
4239 int32_t request_id;
4240 CameraMetadata meta;
Thierry Strudel3d639192016-09-09 11:52:26 -07004241 bool isVidBufRequested = false;
4242 camera3_stream_buffer_t *pInputBuffer = NULL;
Thierry Strudel54dc9782017-02-15 12:12:10 -08004243 char prop[PROPERTY_VALUE_MAX];
Thierry Strudel3d639192016-09-09 11:52:26 -07004244
4245 pthread_mutex_lock(&mMutex);
4246
4247 // Validate current state
4248 switch (mState) {
4249 case CONFIGURED:
4250 case STARTED:
4251 /* valid state */
4252 break;
4253
4254 case ERROR:
4255 pthread_mutex_unlock(&mMutex);
4256 handleCameraDeviceError();
4257 return -ENODEV;
4258
4259 default:
4260 LOGE("Invalid state %d", mState);
4261 pthread_mutex_unlock(&mMutex);
4262 return -ENODEV;
4263 }
4264
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004265 rc = validateCaptureRequest(request, internallyRequestedStreams);
Thierry Strudel3d639192016-09-09 11:52:26 -07004266 if (rc != NO_ERROR) {
4267 LOGE("incoming request is not valid");
4268 pthread_mutex_unlock(&mMutex);
4269 return rc;
4270 }
4271
4272 meta = request->settings;
4273
4274 // For first capture request, send capture intent, and
4275 // stream on all streams
4276 if (mState == CONFIGURED) {
4277 // send an unconfigure to the backend so that the isp
4278 // resources are deallocated
4279 if (!mFirstConfiguration) {
4280 cam_stream_size_info_t stream_config_info;
4281 int32_t hal_version = CAM_HAL_V3;
4282 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
4283 stream_config_info.buffer_info.min_buffers =
4284 MIN_INFLIGHT_REQUESTS;
4285 stream_config_info.buffer_info.max_buffers =
4286 m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
4287 clear_metadata_buffer(mParameters);
4288 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4289 CAM_INTF_PARM_HAL_VERSION, hal_version);
4290 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4291 CAM_INTF_META_STREAM_INFO, stream_config_info);
4292 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4293 mParameters);
4294 if (rc < 0) {
4295 LOGE("set_parms for unconfigure failed");
4296 pthread_mutex_unlock(&mMutex);
4297 return rc;
4298 }
4299 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004300 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07004301 /* get eis information for stream configuration */
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004302 cam_is_type_t isTypeVideo, isTypePreview, is_type=IS_TYPE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07004303 char is_type_value[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004304 property_get("persist.camera.is_type", is_type_value, "4");
4305 isTypeVideo = static_cast<cam_is_type_t>(atoi(is_type_value));
4306 // Default preview IS_TYPE is taken from the persist.camera.is_type_preview property
4307 property_get("persist.camera.is_type_preview", is_type_value, "4");
4308 isTypePreview = static_cast<cam_is_type_t>(atoi(is_type_value));
4309 LOGD("isTypeVideo: %d isTypePreview: %d", isTypeVideo, isTypePreview);
Thierry Strudel3d639192016-09-09 11:52:26 -07004310
4311 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4312 int32_t hal_version = CAM_HAL_V3;
4313 uint8_t captureIntent =
4314 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4315 mCaptureIntent = captureIntent;
4316 clear_metadata_buffer(mParameters);
4317 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
4318 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
4319 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004320 if (mFirstConfiguration) {
4321 // configure instant AEC
4322 // Instant AEC is a session based parameter and it is needed only
4323 // once per complete session after open camera.
4324 // i.e. This is set only once for the first capture request, after open camera.
4325 setInstantAEC(meta);
4326 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004327 uint8_t fwkVideoStabMode=0;
4328 if (meta.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
4329 fwkVideoStabMode = meta.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
4330 }
4331
4332 // If EIS setprop is enabled & if first capture setting has EIS enabled then only
4333 // turn it on for video/preview
4334 bool setEis = m_bEisEnable && fwkVideoStabMode && m_bEisSupportedSize &&
4335 (isTypeVideo >= IS_TYPE_EIS_2_0);
Thierry Strudel3d639192016-09-09 11:52:26 -07004336 int32_t vsMode;
4337 vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
4338 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
4339 rc = BAD_VALUE;
4340 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004341 LOGD("setEis %d", setEis);
4342 bool eis3Supported = false;
4343 size_t count = IS_TYPE_MAX;
4344 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
4345 for (size_t i = 0; i < count; i++) {
4346 if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0) {
4347 eis3Supported = true;
4348 break;
4349 }
4350 }
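        // eis3Supported records whether the sensor capabilities advertise
        // IS_TYPE_EIS_3_0; if not, the video stream falls back to EIS 2.0 below.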
Thierry Strudel3d639192016-09-09 11:52:26 -07004351
4352 //IS type will be 0 unless EIS is supported. If EIS is supported
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004353 //it could either be 4 or 5 depending on the stream and video size
Thierry Strudel3d639192016-09-09 11:52:26 -07004354 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4355 if (setEis) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004356 if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_PREVIEW) {
4357 is_type = isTypePreview;
4358 } else if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_VIDEO ) {
4359 if ( (isTypeVideo == IS_TYPE_EIS_3_0) && (eis3Supported == FALSE) ) {
4360 LOGW(" EIS_3.0 is not supported and so setting EIS_2.0");
Thierry Strudel3d639192016-09-09 11:52:26 -07004361 is_type = IS_TYPE_EIS_2_0;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004362 } else {
4363 is_type = isTypeVideo;
Thierry Strudel3d639192016-09-09 11:52:26 -07004364 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004365 } else {
4366 is_type = IS_TYPE_NONE;
4367 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004368 mStreamConfigInfo.is_type[i] = is_type;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004369 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004370 mStreamConfigInfo.is_type[i] = IS_TYPE_NONE;
4371 }
4372 }
4373
4374 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4375 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
4376
Thierry Strudel54dc9782017-02-15 12:12:10 -08004377 //Disable tintless only if the property is set to 0
4378 memset(prop, 0, sizeof(prop));
4379 property_get("persist.camera.tintless.enable", prop, "1");
4380 int32_t tintless_value = atoi(prop);
4381
Thierry Strudel3d639192016-09-09 11:52:26 -07004382 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4383 CAM_INTF_PARM_TINTLESS, tintless_value);
Thierry Strudel54dc9782017-02-15 12:12:10 -08004384
Thierry Strudel3d639192016-09-09 11:52:26 -07004385 //Disable CDS for HFR mode or if DIS/EIS is on.
4386 //CDS is a session parameter in the backend/ISP, so need to be set/reset
4387 //after every configure_stream
4388 if ((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
4389 (m_bIsVideo)) {
4390 int32_t cds = CAM_CDS_MODE_OFF;
4391 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4392 CAM_INTF_PARM_CDS_MODE, cds))
4393 LOGE("Failed to disable CDS for HFR mode");
4394
4395 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004396
4397 if (m_debug_avtimer || meta.exists(QCAMERA3_USE_AV_TIMER)) {
4398 uint8_t* use_av_timer = NULL;
4399
4400 if (m_debug_avtimer){
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004401 LOGI(" Enabling AV timer through setprop");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004402 use_av_timer = &m_debug_avtimer;
4403 }
4404 else{
4405 use_av_timer =
4406 meta.find(QCAMERA3_USE_AV_TIMER).data.u8;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004407 if (use_av_timer) {
4408 LOGI("Enabling AV timer through Metadata: use_av_timer: %d", *use_av_timer);
4409 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004410 }
4411
4412 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
4413 rc = BAD_VALUE;
4414 }
4415 }
4416
Thierry Strudel3d639192016-09-09 11:52:26 -07004417 setMobicat();
4418
4419 /* Set FPS and HFR mode while sending meta stream info so that the sensor
4420 * can configure the appropriate streaming mode */
4421 mHFRVideoFps = DEFAULT_VIDEO_FPS;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004422 mMinInFlightRequests = MIN_INFLIGHT_REQUESTS;
4423 mMaxInFlightRequests = MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004424 if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
4425 rc = setHalFpsRange(meta, mParameters);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004426 if (rc == NO_ERROR) {
4427 int32_t max_fps =
4428 (int32_t) meta.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
Zhijun He21b864a2016-06-24 13:41:19 -07004429 if (max_fps == 60 || mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004430 mMinInFlightRequests = MIN_INFLIGHT_60FPS_REQUESTS;
4431 }
4432 /* For HFR, more buffers are dequeued upfront to improve the performance */
4433 if (mBatchSize) {
4434 mMinInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
4435 mMaxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
4436 }
4437 }
4438 else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004439 LOGE("setHalFpsRange failed");
4440 }
4441 }
4442 if (meta.exists(ANDROID_CONTROL_MODE)) {
4443 uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
4444 rc = extractSceneMode(meta, metaMode, mParameters);
4445 if (rc != NO_ERROR) {
4446 LOGE("extractSceneMode failed");
4447 }
4448 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004449 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07004450
Thierry Strudel04e026f2016-10-10 11:27:36 -07004451 if (meta.exists(QCAMERA3_VIDEO_HDR_MODE)) {
4452 cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
4453 meta.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
4454 rc = setVideoHdrMode(mParameters, vhdr);
4455 if (rc != NO_ERROR) {
4456 LOGE("setVideoHDR is failed");
4457 }
4458 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004459
Thierry Strudel3d639192016-09-09 11:52:26 -07004460 //TODO: validate the arguments, HSV scenemode should have only the
4461 //advertised fps ranges
4462
4463 /* Set the capture intent, HAL version, tintless, stream info,
4464 * and DIS enable parameters to the backend */
4465 LOGD("set_parms META_STREAM_INFO " );
4466 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4467 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x "
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004468 "Format:%d is_type: %d",
Thierry Strudel3d639192016-09-09 11:52:26 -07004469 mStreamConfigInfo.type[i],
4470 mStreamConfigInfo.stream_sizes[i].width,
4471 mStreamConfigInfo.stream_sizes[i].height,
4472 mStreamConfigInfo.postprocess_mask[i],
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004473 mStreamConfigInfo.format[i],
4474 mStreamConfigInfo.is_type[i]);
Thierry Strudel3d639192016-09-09 11:52:26 -07004475 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004476
Thierry Strudel3d639192016-09-09 11:52:26 -07004477 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4478 mParameters);
4479 if (rc < 0) {
4480 LOGE("set_parms failed for hal version, stream info");
4481 }
4482
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004483 cam_sensor_mode_info_t sensor_mode_info;
4484 memset(&sensor_mode_info, 0, sizeof(sensor_mode_info));
4485 rc = getSensorModeInfo(sensor_mode_info);
Thierry Strudel3d639192016-09-09 11:52:26 -07004486 if (rc != NO_ERROR) {
4487 LOGE("Failed to get sensor output size");
4488 pthread_mutex_unlock(&mMutex);
4489 goto error_exit;
4490 }
4491
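        // Update the crop region mapper with the full active array size and the
        // active array size of the selected sensor mode, so framework coordinates
        // can be translated into the current sensor mode's coordinate space.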
4492 mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
4493 gCamCapability[mCameraId]->active_array_size.height,
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004494 sensor_mode_info.active_array_size.width,
4495 sensor_mode_info.active_array_size.height);
Thierry Strudel3d639192016-09-09 11:52:26 -07004496
4497 /* Set batch mode before initializing channels. Since registerBuffer
4498 * internally initializes some of the channels, batch mode should be set
4499 * even before the first register buffer call */
4500 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4501 it != mStreamInfo.end(); it++) {
4502 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
4503 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
4504 && mBatchSize) {
4505 rc = channel->setBatchSize(mBatchSize);
4506 //Disable per frame map unmap for HFR/batchmode case
4507 rc |= channel->setPerFrameMapUnmap(false);
4508 if (NO_ERROR != rc) {
4509 LOGE("Channel init failed %d", rc);
4510 pthread_mutex_unlock(&mMutex);
4511 goto error_exit;
4512 }
4513 }
4514 }
4515
4516 //First initialize all streams
4517 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4518 it != mStreamInfo.end(); it++) {
4519 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
4520 if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
4521 ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004522 setEis) {
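                // Look up the IS type assigned to this stream when the stream
                // config info was populated above.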
4523 for (size_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4524 if ( (1U << mStreamConfigInfo.type[i]) == channel->getStreamTypeMask() ) {
4525 is_type = mStreamConfigInfo.is_type[i];
4526 break;
4527 }
4528 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004529 rc = channel->initialize(is_type);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004530 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004531 rc = channel->initialize(IS_TYPE_NONE);
4532 }
4533 if (NO_ERROR != rc) {
4534 LOGE("Channel initialization failed %d", rc);
4535 pthread_mutex_unlock(&mMutex);
4536 goto error_exit;
4537 }
4538 }
4539
4540 if (mRawDumpChannel) {
4541 rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
4542 if (rc != NO_ERROR) {
4543 LOGE("Error: Raw Dump Channel init failed");
4544 pthread_mutex_unlock(&mMutex);
4545 goto error_exit;
4546 }
4547 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004548 if (mHdrPlusRawSrcChannel) {
4549 rc = mHdrPlusRawSrcChannel->initialize(IS_TYPE_NONE);
4550 if (rc != NO_ERROR) {
4551 LOGE("Error: HDR+ RAW Source Channel init failed");
4552 pthread_mutex_unlock(&mMutex);
4553 goto error_exit;
4554 }
4555 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004556 if (mSupportChannel) {
4557 rc = mSupportChannel->initialize(IS_TYPE_NONE);
4558 if (rc < 0) {
4559 LOGE("Support channel initialization failed");
4560 pthread_mutex_unlock(&mMutex);
4561 goto error_exit;
4562 }
4563 }
4564 if (mAnalysisChannel) {
4565 rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
4566 if (rc < 0) {
4567 LOGE("Analysis channel initialization failed");
4568 pthread_mutex_unlock(&mMutex);
4569 goto error_exit;
4570 }
4571 }
4572 if (mDummyBatchChannel) {
4573 rc = mDummyBatchChannel->setBatchSize(mBatchSize);
4574 if (rc < 0) {
4575 LOGE("mDummyBatchChannel setBatchSize failed");
4576 pthread_mutex_unlock(&mMutex);
4577 goto error_exit;
4578 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004579 rc = mDummyBatchChannel->initialize(IS_TYPE_NONE);
Thierry Strudel3d639192016-09-09 11:52:26 -07004580 if (rc < 0) {
4581 LOGE("mDummyBatchChannel initialization failed");
4582 pthread_mutex_unlock(&mMutex);
4583 goto error_exit;
4584 }
4585 }
4586
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -08004587 // Configure stream for HDR+.
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004588 if (mHdrPlusClient != nullptr) {
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -08004589 rc = configureHdrPlusStreamsLocked(sensor_mode_info);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004590 if (rc != OK) {
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -08004591 LOGE("%s: Failed to configure HDR+ streams.", __FUNCTION__);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004592 pthread_mutex_unlock(&mMutex);
4593 goto error_exit;
4594 }
4595 }
4596
Thierry Strudel3d639192016-09-09 11:52:26 -07004597 // Set bundle info
4598 rc = setBundleInfo();
4599 if (rc < 0) {
4600 LOGE("setBundleInfo failed %d", rc);
4601 pthread_mutex_unlock(&mMutex);
4602 goto error_exit;
4603 }
4604
4605 //update settings from app here
4606 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
4607 mIsDeviceLinked = meta.find(QCAMERA3_DUALCAM_LINK_ENABLE).data.u8[0];
4608 LOGH("Dualcam: setting On=%d id =%d", mIsDeviceLinked, mCameraId);
4609 }
4610 if (meta.exists(QCAMERA3_DUALCAM_LINK_IS_MAIN)) {
4611 mIsMainCamera = meta.find(QCAMERA3_DUALCAM_LINK_IS_MAIN).data.u8[0];
4612 LOGH("Dualcam: Is this main camera = %d id =%d", mIsMainCamera, mCameraId);
4613 }
4614 if (meta.exists(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID)) {
4615 mLinkedCameraId = meta.find(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID).data.u8[0];
4616 LOGH("Dualcam: Linked camera Id %d id =%d", mLinkedCameraId, mCameraId);
4617
4618 if ( (mLinkedCameraId >= MM_CAMERA_MAX_NUM_SENSORS) &&
4619 (mLinkedCameraId != mCameraId) ) {
4620 LOGE("Dualcam: mLinkedCameraId %d is invalid, current cam id = %d",
4621 mLinkedCameraId, mCameraId);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004622 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07004623 goto error_exit;
4624 }
4625 }
4626
4627 // add bundle related cameras
4628 LOGH("%s: Dualcam: id =%d, mIsDeviceLinked=%d", __func__,mCameraId, mIsDeviceLinked);
4629 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004630 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
4631 &m_pDualCamCmdPtr->bundle_info;
4632 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07004633 if (mIsDeviceLinked)
4634 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_ON;
4635 else
4636 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
4637
4638 pthread_mutex_lock(&gCamLock);
4639
4640 if (sessionId[mLinkedCameraId] == 0xDEADBEEF) {
4641 LOGE("Dualcam: Invalid Session Id ");
4642 pthread_mutex_unlock(&gCamLock);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004643 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07004644 goto error_exit;
4645 }
4646
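            // Fill the bundle info according to this camera's role in the linked
            // pair: the main (bayer) sensor acts as primary, the aux (mono)
            // sensor as secondary, and both reference the peer's session id.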
4647 if (mIsMainCamera == 1) {
4648 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
4649 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07004650 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004651 m_pRelCamSyncBuf->cam_role = CAM_ROLE_BAYER;
Thierry Strudel3d639192016-09-09 11:52:26 -07004652 // related session id should be session id of linked session
4653 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
4654 } else {
4655 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
4656 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07004657 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004658 m_pRelCamSyncBuf->cam_role = CAM_ROLE_MONO;
Thierry Strudel3d639192016-09-09 11:52:26 -07004659 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
4660 }
4661 pthread_mutex_unlock(&gCamLock);
4662
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004663 rc = mCameraHandle->ops->set_dual_cam_cmd(
4664 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07004665 if (rc < 0) {
4666 LOGE("Dualcam: link failed");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004667 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07004668 goto error_exit;
4669 }
4670 }
4671
4672 //Then start them.
4673 LOGH("Start META Channel");
4674 rc = mMetadataChannel->start();
4675 if (rc < 0) {
4676 LOGE("META channel start failed");
4677 pthread_mutex_unlock(&mMutex);
4678 goto error_exit;
4679 }
4680
4681 if (mAnalysisChannel) {
4682 rc = mAnalysisChannel->start();
4683 if (rc < 0) {
4684 LOGE("Analysis channel start failed");
4685 mMetadataChannel->stop();
4686 pthread_mutex_unlock(&mMutex);
4687 goto error_exit;
4688 }
4689 }
4690
4691 if (mSupportChannel) {
4692 rc = mSupportChannel->start();
4693 if (rc < 0) {
4694 LOGE("Support channel start failed");
4695 mMetadataChannel->stop();
4696 /* Although support and analysis are mutually exclusive today
4697 adding it in any case for future proofing */
4698 if (mAnalysisChannel) {
4699 mAnalysisChannel->stop();
4700 }
4701 pthread_mutex_unlock(&mMutex);
4702 goto error_exit;
4703 }
4704 }
4705 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4706 it != mStreamInfo.end(); it++) {
4707 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
4708 LOGH("Start Processing Channel mask=%d",
4709 channel->getStreamTypeMask());
4710 rc = channel->start();
4711 if (rc < 0) {
4712 LOGE("channel start failed");
4713 pthread_mutex_unlock(&mMutex);
4714 goto error_exit;
4715 }
4716 }
4717
4718 if (mRawDumpChannel) {
4719 LOGD("Starting raw dump stream");
4720 rc = mRawDumpChannel->start();
4721 if (rc != NO_ERROR) {
4722 LOGE("Error Starting Raw Dump Channel");
4723 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4724 it != mStreamInfo.end(); it++) {
4725 QCamera3Channel *channel =
4726 (QCamera3Channel *)(*it)->stream->priv;
4727 LOGH("Stopping Processing Channel mask=%d",
4728 channel->getStreamTypeMask());
4729 channel->stop();
4730 }
4731 if (mSupportChannel)
4732 mSupportChannel->stop();
4733 if (mAnalysisChannel) {
4734 mAnalysisChannel->stop();
4735 }
4736 mMetadataChannel->stop();
4737 pthread_mutex_unlock(&mMutex);
4738 goto error_exit;
4739 }
4740 }
4741
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004742 if (mHdrPlusRawSrcChannel) {
4743 LOGD("Starting HDR+ RAW stream");
4744 rc = mHdrPlusRawSrcChannel->start();
4745 if (rc != NO_ERROR) {
4746 LOGE("Error Starting HDR+ RAW Channel");
4747 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4748 it != mStreamInfo.end(); it++) {
4749 QCamera3Channel *channel =
4750 (QCamera3Channel *)(*it)->stream->priv;
4751 LOGH("Stopping Processing Channel mask=%d",
4752 channel->getStreamTypeMask());
4753 channel->stop();
4754 }
4755 if (mSupportChannel)
4756 mSupportChannel->stop();
4757 if (mAnalysisChannel) {
4758 mAnalysisChannel->stop();
4759 }
4760 if (mRawDumpChannel) {
4761 mRawDumpChannel->stop();
4762 }
4763 mMetadataChannel->stop();
4764 pthread_mutex_unlock(&mMutex);
4765 goto error_exit;
4766 }
4767 }
4768
Thierry Strudel3d639192016-09-09 11:52:26 -07004769 if (mChannelHandle) {
4770
4771 rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
4772 mChannelHandle);
4773 if (rc != NO_ERROR) {
4774 LOGE("start_channel failed %d", rc);
4775 pthread_mutex_unlock(&mMutex);
4776 goto error_exit;
4777 }
4778 }
4779
4780 goto no_error;
4781error_exit:
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004782 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07004783 return rc;
4784no_error:
Thierry Strudel3d639192016-09-09 11:52:26 -07004785 mWokenUpByDaemon = false;
4786 mPendingLiveRequest = 0;
4787 mFirstConfiguration = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07004788 }
4789
4790 uint32_t frameNumber = request->frame_number;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004791 cam_stream_ID_t streamsArray;
Thierry Strudel3d639192016-09-09 11:52:26 -07004792
4793 if (mFlushPerf) {
4794 //we cannot accept any requests during flush
4795 LOGE("process_capture_request cannot proceed during flush");
4796 pthread_mutex_unlock(&mMutex);
4797 return NO_ERROR; //should return an error
4798 }
4799
4800 if (meta.exists(ANDROID_REQUEST_ID)) {
4801 request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
4802 mCurrentRequestId = request_id;
4803 LOGD("Received request with id: %d", request_id);
4804 } else if (mState == CONFIGURED || mCurrentRequestId == -1){
4805 LOGE("Unable to find request id field, \
4806 & no previous id available");
4807 pthread_mutex_unlock(&mMutex);
4808 return NAME_NOT_FOUND;
4809 } else {
4810 LOGD("Re-using old request id");
4811 request_id = mCurrentRequestId;
4812 }
4813
4814 LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
4815 request->num_output_buffers,
4816 request->input_buffer,
4817 frameNumber);
4818 // Acquire all request buffers first
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004819 streamsArray.num_streams = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07004820 int blob_request = 0;
4821 uint32_t snapshotStreamId = 0;
4822 for (size_t i = 0; i < request->num_output_buffers; i++) {
4823 const camera3_stream_buffer_t& output = request->output_buffers[i];
4824 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
4825
4826 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004827 //FIXME??: Call function to store a local copy of JPEG data for encode params.
Thierry Strudel3d639192016-09-09 11:52:26 -07004828 blob_request = 1;
4829 snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
4830 }
4831
4832 if (output.acquire_fence != -1) {
4833 rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
4834 close(output.acquire_fence);
4835 if (rc != OK) {
4836 LOGE("sync wait failed %d", rc);
4837 pthread_mutex_unlock(&mMutex);
4838 return rc;
4839 }
4840 }
4841
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004842 streamsArray.stream_request[streamsArray.num_streams++].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07004843 channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel3d639192016-09-09 11:52:26 -07004844
4845 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
4846 isVidBufRequested = true;
4847 }
4848 }
4849
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004850 //FIXME: Add checks to ensure no dups in validateCaptureRequest
4851 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
4852 itr++) {
4853 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
4854 streamsArray.stream_request[streamsArray.num_streams++].streamID =
4855 channel->getStreamID(channel->getStreamTypeMask());
4856
4857 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
4858 isVidBufRequested = true;
4859 }
4860 }
4861
Thierry Strudel3d639192016-09-09 11:52:26 -07004862 if (blob_request) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004863 KPI_ATRACE_CAMSCOPE_INT("SNAPSHOT", CAMSCOPE_HAL3_SNAPSHOT, 1);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004864 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
Thierry Strudel3d639192016-09-09 11:52:26 -07004865 }
4866 if (blob_request && mRawDumpChannel) {
4867 LOGD("Trigger Raw based on blob request if Raw dump is enabled");
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004868 streamsArray.stream_request[streamsArray.num_streams].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07004869 mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004870 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
Thierry Strudel3d639192016-09-09 11:52:26 -07004871 }
4872
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004873 {
4874 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
4875 // Request a RAW buffer if
4876 // 1. mHdrPlusRawSrcChannel is valid.
4877 // 2. frameNumber is multiples of kHdrPlusRawPeriod (in order to limit RAW capture rate.)
4878 // 3. There is no pending HDR+ request.
4879 if (mHdrPlusRawSrcChannel && frameNumber % kHdrPlusRawPeriod == 0 &&
4880 mHdrPlusPendingRequests.size() == 0) {
4881 streamsArray.stream_request[streamsArray.num_streams].streamID =
4882 mHdrPlusRawSrcChannel->getStreamID(mHdrPlusRawSrcChannel->getStreamTypeMask());
4883 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
4884 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004885 }
4886
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004887 //extract capture intent
4888 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4889 mCaptureIntent =
4890 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4891 }
4892
4893 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
4894 mCacMode =
4895 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
4896 }
4897
4898 bool hdrPlusRequest = false;
Chien-Yu Chen92724a82017-01-06 11:50:30 -08004899 HdrPlusPendingRequest pendingHdrPlusRequest = {};
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004900
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -08004901 // If this request has a still capture intent, try to submit an HDR+ request.
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004902 if (mHdrPlusClient != nullptr &&
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -08004903 mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE) {
4904 hdrPlusRequest = trySubmittingHdrPlusRequest(&pendingHdrPlusRequest, *request, meta);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004905 }
4906
Chien-Yu Chen92724a82017-01-06 11:50:30 -08004907 if (hdrPlusRequest) {
4908 // For a HDR+ request, just set the frame parameters.
4909 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
4910 if (rc < 0) {
4911 LOGE("fail to set frame parameters");
4912 pthread_mutex_unlock(&mMutex);
4913 return rc;
4914 }
4915 } else if(request->input_buffer == NULL) {
Thierry Strudel3d639192016-09-09 11:52:26 -07004916 /* Parse the settings:
4917 * - For every request in NORMAL MODE
4918 * - For every request in HFR mode during preview only case
4919 * - For first request of every batch in HFR mode during video
4920 * recording. In batchmode the same settings except frame number is
4921 * repeated in each request of the batch.
4922 */
4923 if (!mBatchSize ||
4924 (mBatchSize && !isVidBufRequested) ||
4925 (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004926 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
Thierry Strudel3d639192016-09-09 11:52:26 -07004927 if (rc < 0) {
4928 LOGE("fail to set frame parameters");
4929 pthread_mutex_unlock(&mMutex);
4930 return rc;
4931 }
4932 }
4933 /* For batch mode HFR, setFrameParameters is not called for every
4934 * request; only the frame number of the latest request is parsed.
4935 * Keep track of the first and last frame numbers in a batch so that
4936 * metadata for all frame numbers of the batch can be duplicated in
4937 * handleBatchMetadata */
4938 if (mBatchSize) {
4939 if (!mToBeQueuedVidBufs) {
4940 //start of the batch
4941 mFirstFrameNumberInBatch = request->frame_number;
4942 }
4943 if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4944 CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
4945 LOGE("Failed to set the frame number in the parameters");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004946 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07004947 return BAD_VALUE;
4948 }
4949 }
4950 if (mNeedSensorRestart) {
4951 /* Unlock the mutex as restartSensor waits on the channels to be
4952 * stopped, which in turn calls stream callback functions -
4953 * handleBufferWithLock and handleMetadataWithLock */
4954 pthread_mutex_unlock(&mMutex);
4955 rc = dynamicUpdateMetaStreamInfo();
4956 if (rc != NO_ERROR) {
4957 LOGE("Restarting the sensor failed");
4958 return BAD_VALUE;
4959 }
4960 mNeedSensorRestart = false;
4961 pthread_mutex_lock(&mMutex);
4962 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004963 if(mResetInstantAEC) {
4964 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4965 CAM_INTF_PARM_INSTANT_AEC, (uint8_t)CAM_AEC_NORMAL_CONVERGENCE);
4966 mResetInstantAEC = false;
4967 }
Chien-Yu Chen92724a82017-01-06 11:50:30 -08004968 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004969 if (request->input_buffer->acquire_fence != -1) {
4970 rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
4971 close(request->input_buffer->acquire_fence);
4972 if (rc != OK) {
4973 LOGE("input buffer sync wait failed %d", rc);
4974 pthread_mutex_unlock(&mMutex);
4975 return rc;
4976 }
4977 }
4978 }
4979
4980 if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
4981 mLastCustIntentFrmNum = frameNumber;
4982 }
4983 /* Update pending request list and pending buffers map */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004984 PendingRequestInfo pendingRequest = {};
Thierry Strudel3d639192016-09-09 11:52:26 -07004985 pendingRequestIterator latestRequest;
4986 pendingRequest.frame_number = frameNumber;
4987 pendingRequest.num_buffers = request->num_output_buffers;
4988 pendingRequest.request_id = request_id;
4989 pendingRequest.blob_request = blob_request;
4990 pendingRequest.timestamp = 0;
4991 pendingRequest.bUrgentReceived = 0;
4992 if (request->input_buffer) {
4993 pendingRequest.input_buffer =
4994 (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
4995 *(pendingRequest.input_buffer) = *(request->input_buffer);
4996 pInputBuffer = pendingRequest.input_buffer;
4997 } else {
4998 pendingRequest.input_buffer = NULL;
4999 pInputBuffer = NULL;
5000 }
5001
5002 pendingRequest.pipeline_depth = 0;
5003 pendingRequest.partial_result_cnt = 0;
5004 extractJpegMetadata(mCurJpegMeta, request);
5005 pendingRequest.jpegMetadata = mCurJpegMeta;
5006 pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
5007 pendingRequest.shutter_notified = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005008 pendingRequest.capture_intent = mCaptureIntent;
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07005009 if (meta.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
5010 mHybridAeEnable =
5011 meta.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8[0];
5012 }
5013 pendingRequest.hybrid_ae_enable = mHybridAeEnable;
Samuel Ha68ba5172016-12-15 18:41:12 -08005014 /* DevCamDebug metadata processCaptureRequest */
5015 if (meta.exists(DEVCAMDEBUG_META_ENABLE)) {
5016 mDevCamDebugMetaEnable =
5017 meta.find(DEVCAMDEBUG_META_ENABLE).data.u8[0];
5018 }
5019 pendingRequest.DevCamDebug_meta_enable = mDevCamDebugMetaEnable;
5020 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07005021
5022 //extract CAC info
5023 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5024 mCacMode =
5025 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5026 }
5027 pendingRequest.fwkCacMode = mCacMode;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005028 pendingRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005029
5030 PendingBuffersInRequest bufsForCurRequest;
5031 bufsForCurRequest.frame_number = frameNumber;
5032 // Mark current timestamp for the new request
5033 bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005034 bufsForCurRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005035
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005036 if (hdrPlusRequest) {
5037 // Save settings for this request.
5038 pendingHdrPlusRequest.settings = std::make_shared<metadata_buffer_t>();
5039 memcpy(pendingHdrPlusRequest.settings.get(), mParameters, sizeof(metadata_buffer_t));
5040
5041 // Add to pending HDR+ request queue.
5042 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5043 mHdrPlusPendingRequests.emplace(frameNumber, pendingHdrPlusRequest);
5044
5045 ALOGD("%s: frame number %u is an HDR+ request.", __FUNCTION__, frameNumber);
5046 }
5047
Thierry Strudel3d639192016-09-09 11:52:26 -07005048 for (size_t i = 0; i < request->num_output_buffers; i++) {
5049 RequestedBufferInfo requestedBuf;
5050 memset(&requestedBuf, 0, sizeof(requestedBuf));
5051 requestedBuf.stream = request->output_buffers[i].stream;
5052 requestedBuf.buffer = NULL;
5053 pendingRequest.buffers.push_back(requestedBuf);
5054
5055 // Add to buffer handle the pending buffers list
5056 PendingBufferInfo bufferInfo;
5057 bufferInfo.buffer = request->output_buffers[i].buffer;
5058 bufferInfo.stream = request->output_buffers[i].stream;
5059 bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
5060 QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
5061 LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
5062 frameNumber, bufferInfo.buffer,
5063 channel->getStreamTypeMask(), bufferInfo.stream->format);
5064 }
5065 // Add this request packet into mPendingBuffersMap
5066 mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
5067 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
5068 mPendingBuffersMap.get_num_overall_buffers());
5069
5070 latestRequest = mPendingRequestsList.insert(
5071 mPendingRequestsList.end(), pendingRequest);
5072 if(mFlush) {
5073 LOGI("mFlush is true");
5074 pthread_mutex_unlock(&mMutex);
5075 return NO_ERROR;
5076 }
5077
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005078 // If this is not an HDR+ request, send the request to metadata and each output buffer's
5079 // channel.
5080 if (!hdrPlusRequest) {
5081 int indexUsed;
5082 // Notify metadata channel we receive a request
5083 mMetadataChannel->request(NULL, frameNumber, indexUsed);
Thierry Strudel3d639192016-09-09 11:52:26 -07005084
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005085 if(request->input_buffer != NULL){
5086 LOGD("Input request, frame_number %d", frameNumber);
5087 rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
5088 if (NO_ERROR != rc) {
5089 LOGE("fail to set reproc parameters");
5090 pthread_mutex_unlock(&mMutex);
5091 return rc;
5092 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005093 }
5094
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005095 // Call request on other streams
5096 uint32_t streams_need_metadata = 0;
5097 pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
5098 for (size_t i = 0; i < request->num_output_buffers; i++) {
5099 const camera3_stream_buffer_t& output = request->output_buffers[i];
5100 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5101
5102 if (channel == NULL) {
5103 LOGW("invalid channel pointer for stream");
5104 continue;
5105 }
5106
5107 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
5108 LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
5109 output.buffer, request->input_buffer, frameNumber);
5110 if(request->input_buffer != NULL){
Thierry Strudel3d639192016-09-09 11:52:26 -07005111 rc = channel->request(output.buffer, frameNumber,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005112 pInputBuffer, &mReprocMeta, indexUsed, false, false);
5113 if (rc < 0) {
5114 LOGE("Fail to request on picture channel");
5115 pthread_mutex_unlock(&mMutex);
5116 return rc;
5117 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005118 } else {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005119 LOGD("snapshot request with buffer %p, frame_number %d",
5120 output.buffer, frameNumber);
5121 if (!request->settings) {
5122 rc = channel->request(output.buffer, frameNumber,
5123 NULL, mPrevParameters, indexUsed);
5124 } else {
5125 rc = channel->request(output.buffer, frameNumber,
5126 NULL, mParameters, indexUsed);
5127 }
5128 if (rc < 0) {
5129 LOGE("Fail to request on picture channel");
5130 pthread_mutex_unlock(&mMutex);
5131 return rc;
5132 }
5133
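                    // Record the buffer index this stream will use for the current
                    // frame; in constrained high speed mode the backend picks
                    // buffers on its own, so CAM_FREERUN_IDX is used instead.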
5134 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5135 uint32_t j = 0;
5136 for (j = 0; j < streamsArray.num_streams; j++) {
5137 if (streamsArray.stream_request[j].streamID == streamId) {
5138 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5139 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5140 else
5141 streamsArray.stream_request[j].buf_index = indexUsed;
5142 break;
5143 }
5144 }
5145 if (j == streamsArray.num_streams) {
5146 LOGE("Did not find matching stream to update index");
5147 assert(0);
5148 }
5149
5150 pendingBufferIter->need_metadata = true;
5151 streams_need_metadata++;
Thierry Strudel3d639192016-09-09 11:52:26 -07005152 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005153 } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
5154 bool needMetadata = false;
5155 QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
5156 rc = yuvChannel->request(output.buffer, frameNumber,
5157 pInputBuffer, (pInputBuffer ? &mReprocMeta : mParameters),
5158 needMetadata, indexUsed, false, false);
Thierry Strudel3d639192016-09-09 11:52:26 -07005159 if (rc < 0) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005160 LOGE("Fail to request on YUV channel");
Thierry Strudel3d639192016-09-09 11:52:26 -07005161 pthread_mutex_unlock(&mMutex);
5162 return rc;
5163 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005164
5165 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5166 uint32_t j = 0;
5167 for (j = 0; j < streamsArray.num_streams; j++) {
5168 if (streamsArray.stream_request[j].streamID == streamId) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005169 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5170 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5171 else
5172 streamsArray.stream_request[j].buf_index = indexUsed;
5173 break;
5174 }
5175 }
5176 if (j == streamsArray.num_streams) {
5177 LOGE("Did not find matching stream to update index");
5178 assert(0);
5179 }
5180
5181 pendingBufferIter->need_metadata = needMetadata;
5182 if (needMetadata)
5183 streams_need_metadata += 1;
5184 LOGD("calling YUV channel request, need_metadata is %d",
5185 needMetadata);
5186 } else {
5187 LOGD("request with buffer %p, frame_number %d",
5188 output.buffer, frameNumber);
5189
5190 rc = channel->request(output.buffer, frameNumber, indexUsed);
5191
5192 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5193 uint32_t j = 0;
5194 for (j = 0; j < streamsArray.num_streams; j++) {
5195 if (streamsArray.stream_request[j].streamID == streamId) {
5196 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5197 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5198 else
5199 streamsArray.stream_request[j].buf_index = indexUsed;
5200 break;
5201 }
5202 }
5203 if (j == streamsArray.num_streams) {
5204 LOGE("Did not find matching stream to update index");
5205 assert(0);
5206 }
5207
5208 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5209 && mBatchSize) {
5210 mToBeQueuedVidBufs++;
5211 if (mToBeQueuedVidBufs == mBatchSize) {
5212 channel->queueBatchBuf();
5213 }
5214 }
5215 if (rc < 0) {
5216 LOGE("request failed");
5217 pthread_mutex_unlock(&mMutex);
5218 return rc;
5219 }
5220 }
5221 pendingBufferIter++;
5222 }
5223
5224 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5225 itr++) {
5226 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5227
5228 if (channel == NULL) {
5229 LOGE("invalid channel pointer for stream");
5230 assert(0);
5231 return BAD_VALUE;
5232 }
5233
5234 InternalRequest requestedStream;
5235 requestedStream = (*itr);
5236
5237
5238 if ((*itr).stream->format == HAL_PIXEL_FORMAT_BLOB) {
5239 LOGD("snapshot request internally input buffer %p, frame_number %d",
5240 request->input_buffer, frameNumber);
5241 if(request->input_buffer != NULL){
5242 rc = channel->request(NULL, frameNumber,
5243 pInputBuffer, &mReprocMeta, indexUsed, true,
5244 requestedStream.meteringOnly);
5245 if (rc < 0) {
5246 LOGE("Fail to request on picture channel");
5247 pthread_mutex_unlock(&mMutex);
5248 return rc;
5249 }
5250 } else {
5251 LOGD("snapshot request with frame_number %d", frameNumber);
5252 if (!request->settings) {
5253 rc = channel->request(NULL, frameNumber,
5254 NULL, mPrevParameters, indexUsed, true,
5255 requestedStream.meteringOnly);
5256 } else {
5257 rc = channel->request(NULL, frameNumber,
5258 NULL, mParameters, indexUsed, true, requestedStream.meteringOnly);
5259 }
5260 if (rc < 0) {
5261 LOGE("Fail to request on picture channel");
5262 pthread_mutex_unlock(&mMutex);
5263 return rc;
5264 }
5265
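                    // Metering-only internal requests do not need the HAL metadata
                    // used for reprocessing; only full internal captures do.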
5266 if ((*itr).meteringOnly != 1) {
5267 requestedStream.need_metadata = 1;
5268 streams_need_metadata++;
5269 }
5270 }
5271
5272 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5273 uint32_t j = 0;
5274 for (j = 0; j < streamsArray.num_streams; j++) {
5275 if (streamsArray.stream_request[j].streamID == streamId) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005276 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5277 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5278 else
5279 streamsArray.stream_request[j].buf_index = indexUsed;
5280 break;
5281 }
5282 }
5283 if (j == streamsArray.num_streams) {
5284 LOGE("Did not find matching stream to update index");
5285 assert(0);
5286 }
5287
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005288 } else {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005289 LOGE("Internal requests not supported on this stream type");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005290 assert(0);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005291 return INVALID_OPERATION;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005292 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005293 latestRequest->internalRequestList.push_back(requestedStream);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005294 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005295
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005296 //If 2 streams have need_metadata set to true, fail the request, unless
5297 //we copy/reference count the metadata buffer
5298 if (streams_need_metadata > 1) {
5299 LOGE("not supporting request in which two streams requires"
5300 " 2 HAL metadata for reprocessing");
5301 pthread_mutex_unlock(&mMutex);
5302 return -EINVAL;
5303 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005304
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005305 if (request->input_buffer == NULL) {
5306 /* Set the parameters to backend:
5307 * - For every request in NORMAL MODE
5308 * - For every request in HFR mode during preview only case
5309 * - Once every batch in HFR mode during video recording
5310 */
5311 if (!mBatchSize ||
5312 (mBatchSize && !isVidBufRequested) ||
5313 (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
5314 LOGD("set_parms batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
5315 mBatchSize, isVidBufRequested,
5316 mToBeQueuedVidBufs);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005317
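            // When a video batch is about to be queued, merge the stream IDs
            // collected for this request into mBatchedStreamsArray (skipping
            // duplicates) so the backend receives the union of streams requested
            // across the whole batch.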
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005318 if(mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize)) {
5319 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5320 uint32_t m = 0;
5321 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5322 if (streamsArray.stream_request[k].streamID ==
5323 mBatchedStreamsArray.stream_request[m].streamID)
5324 break;
5325 }
5326 if (m == mBatchedStreamsArray.num_streams) {
5327 mBatchedStreamsArray.stream_request\
5328 [mBatchedStreamsArray.num_streams].streamID =
5329 streamsArray.stream_request[k].streamID;
5330 mBatchedStreamsArray.stream_request\
5331 [mBatchedStreamsArray.num_streams].buf_index =
5332 streamsArray.stream_request[k].buf_index;
5333 mBatchedStreamsArray.num_streams =
5334 mBatchedStreamsArray.num_streams + 1;
5335 }
5336 }
5337 streamsArray = mBatchedStreamsArray;
5338 }
5339 /* Update stream id of all the requested buffers */
5340 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID,
5341 streamsArray)) {
5342 LOGE("Failed to set stream type mask in the parameters");
5343 return BAD_VALUE;
5344 }
5345
5346 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5347 mParameters);
5348 if (rc < 0) {
5349 LOGE("set_parms failed");
5350 }
5351 /* reset to zero because the batch is queued */
5352 mToBeQueuedVidBufs = 0;
5353 mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
5354 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
5355 } else if (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs != mBatchSize)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005356 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5357 uint32_t m = 0;
5358 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5359 if (streamsArray.stream_request[k].streamID ==
5360 mBatchedStreamsArray.stream_request[m].streamID)
5361 break;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005362 }
5363 if (m == mBatchedStreamsArray.num_streams) {
5364 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5365 streamID = streamsArray.stream_request[k].streamID;
5366 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5367 buf_index = streamsArray.stream_request[k].buf_index;
5368 mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
5369 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005370 }
5371 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005372 mPendingLiveRequest++;
Thierry Strudel3d639192016-09-09 11:52:26 -07005373 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005374 }
5375
5376 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
5377
5378 mState = STARTED;
5379 // Added a timed condition wait
5380 struct timespec ts;
5381 uint8_t isValidTimeout = 1;
Shuzhen Wangfb961e52016-11-28 11:48:02 -08005382 rc = clock_gettime(CLOCK_MONOTONIC, &ts);
Thierry Strudel3d639192016-09-09 11:52:26 -07005383 if (rc < 0) {
5384 isValidTimeout = 0;
5385 LOGE("Error reading the real time clock!!");
5386 }
5387 else {
5388 // Use a 5 second timeout for the request to be honored
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005389 int64_t timeout = 5;
5390 {
5391 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5392 // If there is a pending HDR+ request, the following requests may be blocked until the
5393 // HDR+ request is done. So allow a longer timeout.
5394 if (mHdrPlusPendingRequests.size() > 0) {
5395 timeout = MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT;
5396 }
5397 }
5398 ts.tv_sec += timeout;
Thierry Strudel3d639192016-09-09 11:52:26 -07005399 }
5400 //Block on conditional variable
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005401 while ((mPendingLiveRequest >= mMinInFlightRequests) && !pInputBuffer &&
Thierry Strudel3d639192016-09-09 11:52:26 -07005402 (mState != ERROR) && (mState != DEINIT)) {
5403 if (!isValidTimeout) {
5404 LOGD("Blocking on conditional wait");
5405 pthread_cond_wait(&mRequestCond, &mMutex);
5406 }
5407 else {
5408 LOGD("Blocking on timed conditional wait");
5409 rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
5410 if (rc == ETIMEDOUT) {
5411 rc = -ENODEV;
5412 LOGE("Unblocked on timeout!!!!");
5413 break;
5414 }
5415 }
5416 LOGD("Unblocked");
5417 if (mWokenUpByDaemon) {
5418 mWokenUpByDaemon = false;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005419 if (mPendingLiveRequest < mMaxInFlightRequests)
Thierry Strudel3d639192016-09-09 11:52:26 -07005420 break;
5421 }
5422 }
5423 pthread_mutex_unlock(&mMutex);
5424
5425 return rc;
5426}
5427
5428/*===========================================================================
5429 * FUNCTION : dump
5430 *
5431 * DESCRIPTION:
5432 *
5433 * PARAMETERS :
5434 *
5435 *
5436 * RETURN :
5437 *==========================================================================*/
5438void QCamera3HardwareInterface::dump(int fd)
5439{
5440 pthread_mutex_lock(&mMutex);
5441 dprintf(fd, "\n Camera HAL3 information Begin \n");
5442
5443 dprintf(fd, "\nNumber of pending requests: %zu \n",
5444 mPendingRequestsList.size());
5445 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
5446 dprintf(fd, " Frame | Number of Buffers | Req Id: | Blob Req | Input buffer present\n");
5447 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
5448 for(pendingRequestIterator i = mPendingRequestsList.begin();
5449 i != mPendingRequestsList.end(); i++) {
5450 dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
5451 i->frame_number, i->num_buffers, i->request_id, i->blob_request,
5452 i->input_buffer);
5453 }
5454 dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
5455 mPendingBuffersMap.get_num_overall_buffers());
5456 dprintf(fd, "-------+------------------\n");
5457 dprintf(fd, " Frame | Stream type mask \n");
5458 dprintf(fd, "-------+------------------\n");
5459 for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
5460 for(auto &j : req.mPendingBufferList) {
5461 QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
5462 dprintf(fd, " %5d | %11d \n",
5463 req.frame_number, channel->getStreamTypeMask());
5464 }
5465 }
5466 dprintf(fd, "-------+------------------\n");
5467
5468 dprintf(fd, "\nPending frame drop list: %zu\n",
5469 mPendingFrameDropList.size());
5470 dprintf(fd, "-------+-----------\n");
5471 dprintf(fd, " Frame | Stream ID \n");
5472 dprintf(fd, "-------+-----------\n");
5473 for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
5474 i != mPendingFrameDropList.end(); i++) {
5475 dprintf(fd, " %5d | %9d \n",
5476 i->frame_number, i->stream_ID);
5477 }
5478 dprintf(fd, "-------+-----------\n");
5479
5480 dprintf(fd, "\n Camera HAL3 information End \n");
5481
5482 /* use dumpsys media.camera as trigger to send update debug level event */
5483 mUpdateDebugLevel = true;
5484 pthread_mutex_unlock(&mMutex);
5485 return;
5486}
5487
5488/*===========================================================================
5489 * FUNCTION : flush
5490 *
5491 * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
5492 * conditionally restarts channels
5493 *
5494 * PARAMETERS :
5495 * @ restartChannels: re-start all channels
5496 *
5497 *
5498 * RETURN :
5499 * 0 on success
5500 * Error code on failure
5501 *==========================================================================*/
5502int QCamera3HardwareInterface::flush(bool restartChannels)
5503{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08005504 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07005505 int32_t rc = NO_ERROR;
5506
5507 LOGD("Unblocking Process Capture Request");
5508 pthread_mutex_lock(&mMutex);
5509 mFlush = true;
5510 pthread_mutex_unlock(&mMutex);
5511
5512 rc = stopAllChannels();
5513 // unlink of dualcam
5514 if (mIsDeviceLinked) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005515 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
5516 &m_pDualCamCmdPtr->bundle_info;
5517 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005518 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
5519 pthread_mutex_lock(&gCamLock);
5520
5521 if (mIsMainCamera == 1) {
5522 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
5523 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005524 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07005525 // related session id should be session id of linked session
5526 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5527 } else {
5528 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
5529 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005530 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07005531 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5532 }
5533 pthread_mutex_unlock(&gCamLock);
5534
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005535 rc = mCameraHandle->ops->set_dual_cam_cmd(
5536 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07005537 if (rc < 0) {
5538 LOGE("Dualcam: Unlink failed, but still proceed to close");
5539 }
5540 }
5541
5542 if (rc < 0) {
5543 LOGE("stopAllChannels failed");
5544 return rc;
5545 }
5546 if (mChannelHandle) {
5547 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
5548 mChannelHandle);
5549 }
5550
5551 // Reset bundle info
5552 rc = setBundleInfo();
5553 if (rc < 0) {
5554 LOGE("setBundleInfo failed %d", rc);
5555 return rc;
5556 }
5557
5558 // Mutex Lock
5559 pthread_mutex_lock(&mMutex);
5560
5561 // Unblock process_capture_request
5562 mPendingLiveRequest = 0;
5563 pthread_cond_signal(&mRequestCond);
5564
5565 rc = notifyErrorForPendingRequests();
5566 if (rc < 0) {
5567 LOGE("notifyErrorForPendingRequests failed");
5568 pthread_mutex_unlock(&mMutex);
5569 return rc;
5570 }
5571
5572 mFlush = false;
5573
5574 // Start the Streams/Channels
5575 if (restartChannels) {
5576 rc = startAllChannels();
5577 if (rc < 0) {
5578 LOGE("startAllChannels failed");
5579 pthread_mutex_unlock(&mMutex);
5580 return rc;
5581 }
5582 }
5583
5584 if (mChannelHandle) {
5585 mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
5586 mChannelHandle);
5587 if (rc < 0) {
5588 LOGE("start_channel failed");
5589 pthread_mutex_unlock(&mMutex);
5590 return rc;
5591 }
5592 }
5593
5594 pthread_mutex_unlock(&mMutex);
5595
5596 return 0;
5597}
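
// Summary of the flush sequence implemented above: stop all channels (and unlink
// the dual-camera bundle if one is active), stop the channel handle, reset bundle
// info, then, with mMutex held, zero mPendingLiveRequest and signal mRequestCond so
// process_capture_request is unblocked, report errors for all pending requests,
// and finally restart the channels when restartChannels is set.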
5598
5599/*===========================================================================
5600 * FUNCTION : flushPerf
5601 *
5602 * DESCRIPTION: Performance-optimized version of flush. Instead of stopping
5603 * the streams, it sends a flush command to the backend and waits
5604 * for all pending buffers to be returned to the HAL.
5605 *
5606 * PARAMETERS : None
5607 *
5608 * RETURN : 0 : success
5609 * -EINVAL: input is malformed (device is not valid)
5610 * -ENODEV: if the device has encountered a serious error
5611 *==========================================================================*/
5612int QCamera3HardwareInterface::flushPerf()
5613{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08005614 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07005615 int32_t rc = 0;
5616 struct timespec timeout;
5617 bool timed_wait = false;
5618
5619 pthread_mutex_lock(&mMutex);
5620 mFlushPerf = true;
5621 mPendingBuffersMap.numPendingBufsAtFlush =
5622 mPendingBuffersMap.get_num_overall_buffers();
5623 LOGD("Calling flush. Wait for %d buffers to return",
5624 mPendingBuffersMap.numPendingBufsAtFlush);
5625
5626 /* send the flush event to the backend */
5627 rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
5628 if (rc < 0) {
5629 LOGE("Error in flush: IOCTL failure");
5630 mFlushPerf = false;
5631 pthread_mutex_unlock(&mMutex);
5632 return -ENODEV;
5633 }
5634
5635 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
5636 LOGD("No pending buffers in HAL, return flush");
5637 mFlushPerf = false;
5638 pthread_mutex_unlock(&mMutex);
5639 return rc;
5640 }
5641
5642 /* wait on a signal that buffers were received */
Shuzhen Wangfb961e52016-11-28 11:48:02 -08005643 rc = clock_gettime(CLOCK_MONOTONIC, &timeout);
Thierry Strudel3d639192016-09-09 11:52:26 -07005644 if (rc < 0) {
5645 LOGE("Error reading the real time clock, cannot use timed wait");
5646 } else {
5647 timeout.tv_sec += FLUSH_TIMEOUT;
5648 timed_wait = true;
5649 }
5650
5651 //Block on conditional variable
5652 while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
5653 LOGD("Waiting on mBuffersCond");
5654 if (!timed_wait) {
5655 rc = pthread_cond_wait(&mBuffersCond, &mMutex);
5656 if (rc != 0) {
5657 LOGE("pthread_cond_wait failed due to rc = %s",
5658 strerror(rc));
5659 break;
5660 }
5661 } else {
5662 rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
5663 if (rc != 0) {
5664 LOGE("pthread_cond_timedwait failed due to rc = %s",
5665 strerror(rc));
5666 break;
5667 }
5668 }
5669 }
5670 if (rc != 0) {
5671 mFlushPerf = false;
5672 pthread_mutex_unlock(&mMutex);
5673 return -ENODEV;
5674 }
5675
5676 LOGD("Received buffers, now safe to return them");
5677
5678 //make sure the channels handle flush
5679 //currently only required for the picture channel to release snapshot resources
5680 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5681 it != mStreamInfo.end(); it++) {
5682 QCamera3Channel *channel = (*it)->channel;
5683 if (channel) {
5684 rc = channel->flush();
5685 if (rc) {
5686 LOGE("Flushing the channels failed with error %d", rc);
5687 // even though the channel flush failed we need to continue and
5688 // return the buffers we have to the framework, however the return
5689 // value will be an error
5690 rc = -ENODEV;
5691 }
5692 }
5693 }
5694
5695 /* notify the frameworks and send errored results */
5696 rc = notifyErrorForPendingRequests();
5697 if (rc < 0) {
5698 LOGE("notifyErrorForPendingRequests failed");
5699 pthread_mutex_unlock(&mMutex);
5700 return rc;
5701 }
5702
5703 //unblock process_capture_request
5704 mPendingLiveRequest = 0;
5705 unblockRequestIfNecessary();
5706
5707 mFlushPerf = false;
5708 pthread_mutex_unlock(&mMutex);
5709 LOGD ("Flush Operation complete. rc = %d", rc);
5710 return rc;
5711}
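
// Illustrative sketch (an assumption about initialization, not the HAL's actual
// helper): for the CLOCK_MONOTONIC deadline used above to be interpreted correctly,
// pthread_cond_timedwait() requires the condition variable to have been created
// with a matching clock attribute, e.g.:
#if 0
pthread_condattr_t attr;
pthread_condattr_init(&attr);
pthread_condattr_setclock(&attr, CLOCK_MONOTONIC);  // bind the condvar to CLOCK_MONOTONIC
pthread_cond_init(&mBuffersCond, &attr);
pthread_condattr_destroy(&attr);
#endif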
5712
5713/*===========================================================================
5714 * FUNCTION : handleCameraDeviceError
5715 *
5716 * DESCRIPTION: Performs an internal flush, notifies the framework of the
5717 * device error, and moves the HAL state to DEINIT.
5718 *
5719 * PARAMETERS : None
5720 *
5721 * RETURN : NO_ERROR on Success
5722 * Error code on failure
5723 *==========================================================================*/
5724int32_t QCamera3HardwareInterface::handleCameraDeviceError()
5725{
5726 int32_t rc = NO_ERROR;
5727
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005728 {
5729 Mutex::Autolock lock(mFlushLock);
5730 pthread_mutex_lock(&mMutex);
5731 if (mState != ERROR) {
5732 //if mState != ERROR, nothing to be done
5733 pthread_mutex_unlock(&mMutex);
5734 return NO_ERROR;
5735 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005736 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005737
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005738 rc = flush(false /* restart channels */);
5739 if (NO_ERROR != rc) {
5740 LOGE("internal flush to handle mState = ERROR failed");
5741 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005742
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005743 pthread_mutex_lock(&mMutex);
5744 mState = DEINIT;
5745 pthread_mutex_unlock(&mMutex);
5746 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005747
5748 camera3_notify_msg_t notify_msg;
5749 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
5750 notify_msg.type = CAMERA3_MSG_ERROR;
5751 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
5752 notify_msg.message.error.error_stream = NULL;
5753 notify_msg.message.error.frame_number = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005754 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07005755
5756 return rc;
5757}
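
// Note: CAMERA3_MSG_ERROR_DEVICE is a fatal, device-wide error in the camera3 HAL
// contract; after this notification the framework is expected to stop submitting
// requests and close the device, which is why the state is moved to DEINIT above.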
5758
5759/*===========================================================================
5760 * FUNCTION : captureResultCb
5761 *
5762 * DESCRIPTION: Callback handler for all capture result
5763 * (streams, as well as metadata)
5764 *
5765 * PARAMETERS :
5766 * @metadata_buf : metadata information (NULL for buffer-only results)
5767 * @buffer : gralloc buffer to return to the framework (NULL for metadata)
5768 * @frame_number : frame number of the request
5769 * @isInputBuffer : true if the result is for the input (reprocess) buffer
5770 * RETURN : NONE
5771 *==========================================================================*/
5772void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
5773 camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
5774{
5775 if (metadata_buf) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005776 pthread_mutex_lock(&mMutex);
5777 uint8_t batchSize = mBatchSize;
5778 pthread_mutex_unlock(&mMutex);
5779 if (batchSize) {
Thierry Strudel3d639192016-09-09 11:52:26 -07005780 handleBatchMetadata(metadata_buf,
5781 true /* free_and_bufdone_meta_buf */);
5782 } else { /* mBatchSize = 0 */
5783 hdrPlusPerfLock(metadata_buf);
5784 pthread_mutex_lock(&mMutex);
5785 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005786 true /* free_and_bufdone_meta_buf */,
Thierry Strudel54dc9782017-02-15 12:12:10 -08005787 false /* first frame of batch metadata */ ,
5788 NULL);
Thierry Strudel3d639192016-09-09 11:52:26 -07005789 pthread_mutex_unlock(&mMutex);
5790 }
5791 } else if (isInputBuffer) {
5792 pthread_mutex_lock(&mMutex);
5793 handleInputBufferWithLock(frame_number);
5794 pthread_mutex_unlock(&mMutex);
5795 } else {
5796 pthread_mutex_lock(&mMutex);
5797 handleBufferWithLock(buffer, frame_number);
5798 pthread_mutex_unlock(&mMutex);
5799 }
5800 return;
5801}
5802
5803/*===========================================================================
5804 * FUNCTION : getReprocessibleOutputStreamId
5805 *
5806 * DESCRIPTION: Get the id of the output stream that serves as the source
5807 * for the input reprocess stream, i.e. the output or bidirectional
5808 * stream whose size and format match the configured input stream.
5809 *
5810 * PARAMETERS :
5811 * @id : return the stream id if found
5812 *
5813 * RETURN : int32_t type of status
5814 * NO_ERROR -- success
5815 * none-zero failure code
5816 *==========================================================================*/
5817int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
5818{
5819 /* check if any output or bidirectional stream with the same size and format
5820 and return that stream */
5821 if ((mInputStreamInfo.dim.width > 0) &&
5822 (mInputStreamInfo.dim.height > 0)) {
5823 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5824 it != mStreamInfo.end(); it++) {
5825
5826 camera3_stream_t *stream = (*it)->stream;
5827 if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
5828 (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
5829 (stream->format == mInputStreamInfo.format)) {
5830 // Usage flag for an input stream and the source output stream
5831 // may be different.
5832 LOGD("Found reprocessible output stream! %p", *it);
5833 LOGD("input stream usage 0x%x, current stream usage 0x%x",
5834 stream->usage, mInputStreamInfo.usage);
5835
5836 QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
5837 if (channel != NULL && channel->mStreams[0]) {
5838 id = channel->mStreams[0]->getMyServerID();
5839 return NO_ERROR;
5840 }
5841 }
5842 }
5843 } else {
5844 LOGD("No input stream, so no reprocessible output stream");
5845 }
5846 return NAME_NOT_FOUND;
5847}
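
// Note: NAME_NOT_FOUND here simply means no reprocess source could be derived,
// either because no input stream was configured or because no output stream
// matches the input stream's dimensions and format.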
5848
5849/*===========================================================================
5850 * FUNCTION : lookupFwkName
5851 *
5852 * DESCRIPTION: Map a HAL (backend) enum value to the corresponding framework
5853 * value when the two enumerations are not identical.
5854 *
5855 * PARAMETERS :
5856 * @arr : map between the two enums
5857 * @len : len of the map
5858 * @hal_name : name of the hal_parm to map
5859 *
5860 * RETURN : int
5861 * fwk_name -- success
5862 * NAME_NOT_FOUND -- no matching framework value
5863 *==========================================================================*/
5864template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
5865 size_t len, halType hal_name)
5866{
5867
5868 for (size_t i = 0; i < len; i++) {
5869 if (arr[i].hal_name == hal_name) {
5870 return arr[i].fwk_name;
5871 }
5872 }
5873
5874 /* Failing to find a matching framework type is not necessarily
5875 * an error. It happens when mm-camera supports more attributes
5876 * than the framework does */
5877 LOGH("Cannot find matching framework type");
5878 return NAME_NOT_FOUND;
5879}
5880
5881/*===========================================================================
5882 * FUNCTION : lookupHalName
5883 *
5884 * DESCRIPTION: Map a framework enum value to the corresponding HAL (backend)
5885 * value when the two enumerations are not identical.
5886 *
5887 * PARAMETERS :
5888 * @arr : map between the two enums
5889 * @len : len of the map
5890 * @fwk_name : framework value to map to a HAL value
5891 *
5892 * RETURN : int
5893 * hal_name -- success
5894 * NAME_NOT_FOUND -- no matching HAL value
5895 *==========================================================================*/
5896template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
5897 size_t len, fwkType fwk_name)
5898{
5899 for (size_t i = 0; i < len; i++) {
5900 if (arr[i].fwk_name == fwk_name) {
5901 return arr[i].hal_name;
5902 }
5903 }
5904
5905 LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
5906 return NAME_NOT_FOUND;
5907}
5908
5909/*===========================================================================
5910 * FUNCTION : lookupProp
5911 *
5912 * DESCRIPTION: lookup a value by its name
5913 *
5914 * PARAMETERS :
5915 * @arr : map between the two enums
5916 * @len : size of the map
5917 * @name : name to be looked up
5918 *
5919 * RETURN : Value if found
5920 * CAM_CDS_MODE_MAX if not found
5921 *==========================================================================*/
5922template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
5923 size_t len, const char *name)
5924{
5925 if (name) {
5926 for (size_t i = 0; i < len; i++) {
5927 if (!strcmp(arr[i].desc, name)) {
5928 return arr[i].val;
5929 }
5930 }
5931 }
5932 return CAM_CDS_MODE_MAX;
5933}
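
// Illustrative sketch (hypothetical table, not one of the real mapping tables in
// this file): the lookup templates above expect an array of {fwk_name, hal_name}
// pairs and do a linear search in either direction, e.g.:
#if 0
static const struct { int fwk_name; int hal_name; } EXAMPLE_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,  0 /* hypothetical HAL value */ },
    { ANDROID_CONTROL_EFFECT_MODE_MONO, 1 /* hypothetical HAL value */ },
};
int hal = lookupHalName(EXAMPLE_MAP, sizeof(EXAMPLE_MAP) / sizeof(EXAMPLE_MAP[0]),
        ANDROID_CONTROL_EFFECT_MODE_MONO);  // returns 1
int fwk = lookupFwkName(EXAMPLE_MAP, sizeof(EXAMPLE_MAP) / sizeof(EXAMPLE_MAP[0]),
        1);                                 // returns ANDROID_CONTROL_EFFECT_MODE_MONO
#endif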
5934
5935/*===========================================================================
5936 * FUNCTION : translateFromHalMetadata
5937 *
5938 * DESCRIPTION: Translate the HAL metadata buffer into a framework-visible
5939 * camera_metadata_t result.
5940 *
5941 * PARAMETERS :
5942 * @metadata : metadata information from callback
5943 * @timestamp: metadata buffer timestamp
5944 * @request_id: request id
5945 * @jpegMetadata: additional jpeg metadata
5946 * @pipeline_depth: pipeline depth for this request
5947 * @capture_intent: capture intent for this request
5948 * @hybrid_ae_enable: whether hybrid ae is enabled
5949 * @DevCamDebug_meta_enable: whether DevCamDebug metadata is enabled
5950 * @pprocDone: whether internal offline postprocessing is done
5951 * @fwk_cacMode: color aberration correction mode requested by the framework
5952 * @firstMetadataInBatch: whether this is the first metadata in an HFR batch
5953 *
5954 * RETURN : camera_metadata_t*
5955 * metadata in a format specified by fwk
5951 *==========================================================================*/
5952camera_metadata_t*
5953QCamera3HardwareInterface::translateFromHalMetadata(
5954 metadata_buffer_t *metadata,
5955 nsecs_t timestamp,
5956 int32_t request_id,
5957 const CameraMetadata& jpegMetadata,
5958 uint8_t pipeline_depth,
5959 uint8_t capture_intent,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07005960 uint8_t hybrid_ae_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08005961 /* DevCamDebug metadata translateFromHalMetadata argument */
5962 uint8_t DevCamDebug_meta_enable,
5963 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07005964 bool pprocDone,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005965 uint8_t fwk_cacMode,
5966 bool firstMetadataInBatch)
Thierry Strudel3d639192016-09-09 11:52:26 -07005967{
5968 CameraMetadata camMetadata;
5969 camera_metadata_t *resultMetadata;
5970
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005971 if (mBatchSize && !firstMetadataInBatch) {
5972 /* In batch mode, use cached metadata from the first metadata
5973 in the batch */
5974 camMetadata.clear();
5975 camMetadata = mCachedMetadata;
5976 }
5977
Thierry Strudel3d639192016-09-09 11:52:26 -07005978 if (jpegMetadata.entryCount())
5979 camMetadata.append(jpegMetadata);
5980
5981 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
5982 camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
5983 camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pipeline_depth, 1);
5984 camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &capture_intent, 1);
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07005985 camMetadata.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae_enable, 1);
Samuel Ha68ba5172016-12-15 18:41:12 -08005986 if (mBatchSize == 0) {
5987 // DevCamDebug metadata translateFromHalMetadata. Only update this one for non-HFR mode
5988 camMetadata.update(DEVCAMDEBUG_META_ENABLE, &DevCamDebug_meta_enable, 1);
5989 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005990
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005991 if (mBatchSize && !firstMetadataInBatch) {
5992 /* In batch mode, use cached metadata instead of parsing metadata buffer again */
5993 resultMetadata = camMetadata.release();
5994 return resultMetadata;
5995 }
5996
Samuel Ha68ba5172016-12-15 18:41:12 -08005997 // atrace_begin(ATRACE_TAG_ALWAYS, "DevCamDebugInfo");
5998 // Only update DevCamDebug metadata conditionally: non-HFR mode and only when it is enabled.
5999 if (mBatchSize == 0 && DevCamDebug_meta_enable != 0) {
6000 // DevCamDebug metadata translateFromHalMetadata AF
6001 IF_META_AVAILABLE(int32_t, DevCamDebug_af_lens_position,
6002 CAM_INTF_META_DEV_CAM_AF_LENS_POSITION, metadata) {
6003 int32_t fwk_DevCamDebug_af_lens_position = *DevCamDebug_af_lens_position;
6004 camMetadata.update(DEVCAMDEBUG_AF_LENS_POSITION, &fwk_DevCamDebug_af_lens_position, 1);
6005 }
6006 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_confidence,
6007 CAM_INTF_META_DEV_CAM_AF_TOF_CONFIDENCE, metadata) {
6008 int32_t fwk_DevCamDebug_af_tof_confidence = *DevCamDebug_af_tof_confidence;
6009 camMetadata.update(DEVCAMDEBUG_AF_TOF_CONFIDENCE, &fwk_DevCamDebug_af_tof_confidence, 1);
6010 }
6011 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_distance,
6012 CAM_INTF_META_DEV_CAM_AF_TOF_DISTANCE, metadata) {
6013 int32_t fwk_DevCamDebug_af_tof_distance = *DevCamDebug_af_tof_distance;
6014 camMetadata.update(DEVCAMDEBUG_AF_TOF_DISTANCE, &fwk_DevCamDebug_af_tof_distance, 1);
6015 }
6016 IF_META_AVAILABLE(int32_t, DevCamDebug_af_luma,
6017 CAM_INTF_META_DEV_CAM_AF_LUMA, metadata) {
6018 int32_t fwk_DevCamDebug_af_luma = *DevCamDebug_af_luma;
6019 camMetadata.update(DEVCAMDEBUG_AF_LUMA, &fwk_DevCamDebug_af_luma, 1);
6020 }
6021 IF_META_AVAILABLE(int32_t, DevCamDebug_af_haf_state,
6022 CAM_INTF_META_DEV_CAM_AF_HAF_STATE, metadata) {
6023 int32_t fwk_DevCamDebug_af_haf_state = *DevCamDebug_af_haf_state;
6024 camMetadata.update(DEVCAMDEBUG_AF_HAF_STATE, &fwk_DevCamDebug_af_haf_state, 1);
6025 }
6026 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_target_pos,
6027 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_TARGET_POS, metadata) {
6028 int32_t fwk_DevCamDebug_af_monitor_pdaf_target_pos =
6029 *DevCamDebug_af_monitor_pdaf_target_pos;
6030 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
6031 &fwk_DevCamDebug_af_monitor_pdaf_target_pos, 1);
6032 }
6033 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_confidence,
6034 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_CONFIDENCE, metadata) {
6035 int32_t fwk_DevCamDebug_af_monitor_pdaf_confidence =
6036 *DevCamDebug_af_monitor_pdaf_confidence;
6037 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
6038 &fwk_DevCamDebug_af_monitor_pdaf_confidence, 1);
6039 }
6040 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_refocus,
6041 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_REFOCUS, metadata) {
6042 int32_t fwk_DevCamDebug_af_monitor_pdaf_refocus = *DevCamDebug_af_monitor_pdaf_refocus;
6043 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
6044 &fwk_DevCamDebug_af_monitor_pdaf_refocus, 1);
6045 }
6046 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_target_pos,
6047 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_TARGET_POS, metadata) {
6048 int32_t fwk_DevCamDebug_af_monitor_tof_target_pos =
6049 *DevCamDebug_af_monitor_tof_target_pos;
6050 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
6051 &fwk_DevCamDebug_af_monitor_tof_target_pos, 1);
6052 }
6053 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_confidence,
6054 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_CONFIDENCE, metadata) {
6055 int32_t fwk_DevCamDebug_af_monitor_tof_confidence =
6056 *DevCamDebug_af_monitor_tof_confidence;
6057 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
6058 &fwk_DevCamDebug_af_monitor_tof_confidence, 1);
6059 }
6060 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_refocus,
6061 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_REFOCUS, metadata) {
6062 int32_t fwk_DevCamDebug_af_monitor_tof_refocus = *DevCamDebug_af_monitor_tof_refocus;
6063 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
6064 &fwk_DevCamDebug_af_monitor_tof_refocus, 1);
6065 }
6066 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_type_select,
6067 CAM_INTF_META_DEV_CAM_AF_MONITOR_TYPE_SELECT, metadata) {
6068 int32_t fwk_DevCamDebug_af_monitor_type_select = *DevCamDebug_af_monitor_type_select;
6069 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
6070 &fwk_DevCamDebug_af_monitor_type_select, 1);
6071 }
6072 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_refocus,
6073 CAM_INTF_META_DEV_CAM_AF_MONITOR_REFOCUS, metadata) {
6074 int32_t fwk_DevCamDebug_af_monitor_refocus = *DevCamDebug_af_monitor_refocus;
6075 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_REFOCUS,
6076 &fwk_DevCamDebug_af_monitor_refocus, 1);
6077 }
6078 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_target_pos,
6079 CAM_INTF_META_DEV_CAM_AF_MONITOR_TARGET_POS, metadata) {
6080 int32_t fwk_DevCamDebug_af_monitor_target_pos = *DevCamDebug_af_monitor_target_pos;
6081 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
6082 &fwk_DevCamDebug_af_monitor_target_pos, 1);
6083 }
6084 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_target_pos,
6085 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_TARGET_POS, metadata) {
6086 int32_t fwk_DevCamDebug_af_search_pdaf_target_pos =
6087 *DevCamDebug_af_search_pdaf_target_pos;
6088 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
6089 &fwk_DevCamDebug_af_search_pdaf_target_pos, 1);
6090 }
6091 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_next_pos,
6092 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEXT_POS, metadata) {
6093 int32_t fwk_DevCamDebug_af_search_pdaf_next_pos = *DevCamDebug_af_search_pdaf_next_pos;
6094 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
6095 &fwk_DevCamDebug_af_search_pdaf_next_pos, 1);
6096 }
6097 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_near_pos,
6098 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEAR_POS, metadata) {
6099 int32_t fwk_DevCamDebug_af_search_pdaf_near_pos = *DevCamDebug_af_search_pdaf_near_pos;
6100 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
6101 &fwk_DevCamDebug_af_search_pdaf_near_pos, 1);
6102 }
6103 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_far_pos,
6104 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_FAR_POS, metadata) {
6105 int32_t fwk_DevCamDebug_af_search_pdaf_far_pos = *DevCamDebug_af_search_pdaf_far_pos;
6106 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
6107 &fwk_DevCamDebug_af_search_pdaf_far_pos, 1);
6108 }
6109 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_confidence,
6110 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_CONFIDENCE, metadata) {
6111 int32_t fwk_DevCamDebug_af_search_pdaf_confidence = *DevCamDebug_af_search_pdaf_confidence;
6112 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
6113 &fwk_DevCamDebug_af_search_pdaf_confidence, 1);
6114 }
6115 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_target_pos,
6116 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_TARGET_POS, metadata) {
6117 int32_t fwk_DevCamDebug_af_search_tof_target_pos =
6118 *DevCamDebug_af_search_tof_target_pos;
6119 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
6120 &fwk_DevCamDebug_af_search_tof_target_pos, 1);
6121 }
6122 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_next_pos,
6123 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEXT_POS, metadata) {
6124 int32_t fwk_DevCamDebug_af_search_tof_next_pos = *DevCamDebug_af_search_tof_next_pos;
6125 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
6126 &fwk_DevCamDebug_af_search_tof_next_pos, 1);
6127 }
6128 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_near_pos,
6129 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEAR_POS, metadata) {
6130 int32_t fwk_DevCamDebug_af_search_tof_near_pos = *DevCamDebug_af_search_tof_near_pos;
6131 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
6132 &fwk_DevCamDebug_af_search_tof_near_pos, 1);
6133 }
6134 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_far_pos,
6135 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_FAR_POS, metadata) {
6136 int32_t fwk_DevCamDebug_af_search_tof_far_pos = *DevCamDebug_af_search_tof_far_pos;
6137 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
6138 &fwk_DevCamDebug_af_search_tof_far_pos, 1);
6139 }
6140 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_confidence,
6141 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_CONFIDENCE, metadata) {
6142 int32_t fwk_DevCamDebug_af_search_tof_confidence = *DevCamDebug_af_search_tof_confidence;
6143 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
6144 &fwk_DevCamDebug_af_search_tof_confidence, 1);
6145 }
6146 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_type_select,
6147 CAM_INTF_META_DEV_CAM_AF_SEARCH_TYPE_SELECT, metadata) {
6148 int32_t fwk_DevCamDebug_af_search_type_select = *DevCamDebug_af_search_type_select;
6149 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
6150 &fwk_DevCamDebug_af_search_type_select, 1);
6151 }
6152 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_next_pos,
6153 CAM_INTF_META_DEV_CAM_AF_SEARCH_NEXT_POS, metadata) {
6154 int32_t fwk_DevCamDebug_af_search_next_pos = *DevCamDebug_af_search_next_pos;
6155 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
6156 &fwk_DevCamDebug_af_search_next_pos, 1);
6157 }
6158 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_target_pos,
6159 CAM_INTF_META_DEV_CAM_AF_SEARCH_TARGET_POS, metadata) {
6160 int32_t fwk_DevCamDebug_af_search_target_pos = *DevCamDebug_af_search_target_pos;
6161 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
6162 &fwk_DevCamDebug_af_search_target_pos, 1);
6163 }
6164 // DevCamDebug metadata translateFromHalMetadata AEC
6165 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_target_luma,
6166 CAM_INTF_META_DEV_CAM_AEC_TARGET_LUMA, metadata) {
6167 int32_t fwk_DevCamDebug_aec_target_luma = *DevCamDebug_aec_target_luma;
6168 camMetadata.update(DEVCAMDEBUG_AEC_TARGET_LUMA, &fwk_DevCamDebug_aec_target_luma, 1);
6169 }
6170 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_comp_luma,
6171 CAM_INTF_META_DEV_CAM_AEC_COMP_LUMA, metadata) {
6172 int32_t fwk_DevCamDebug_aec_comp_luma = *DevCamDebug_aec_comp_luma;
6173 camMetadata.update(DEVCAMDEBUG_AEC_COMP_LUMA, &fwk_DevCamDebug_aec_comp_luma, 1);
6174 }
6175 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_avg_luma,
6176 CAM_INTF_META_DEV_CAM_AEC_AVG_LUMA, metadata) {
6177 int32_t fwk_DevCamDebug_aec_avg_luma = *DevCamDebug_aec_avg_luma;
6178 camMetadata.update(DEVCAMDEBUG_AEC_AVG_LUMA, &fwk_DevCamDebug_aec_avg_luma, 1);
6179 }
6180 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_cur_luma,
6181 CAM_INTF_META_DEV_CAM_AEC_CUR_LUMA, metadata) {
6182 int32_t fwk_DevCamDebug_aec_cur_luma = *DevCamDebug_aec_cur_luma;
6183 camMetadata.update(DEVCAMDEBUG_AEC_CUR_LUMA, &fwk_DevCamDebug_aec_cur_luma, 1);
6184 }
6185 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_linecount,
6186 CAM_INTF_META_DEV_CAM_AEC_LINECOUNT, metadata) {
6187 int32_t fwk_DevCamDebug_aec_linecount = *DevCamDebug_aec_linecount;
6188 camMetadata.update(DEVCAMDEBUG_AEC_LINECOUNT, &fwk_DevCamDebug_aec_linecount, 1);
6189 }
6190 IF_META_AVAILABLE(float, DevCamDebug_aec_real_gain,
6191 CAM_INTF_META_DEV_CAM_AEC_REAL_GAIN, metadata) {
6192 float fwk_DevCamDebug_aec_real_gain = *DevCamDebug_aec_real_gain;
6193 camMetadata.update(DEVCAMDEBUG_AEC_REAL_GAIN, &fwk_DevCamDebug_aec_real_gain, 1);
6194 }
6195 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_exp_index,
6196 CAM_INTF_META_DEV_CAM_AEC_EXP_INDEX, metadata) {
6197 int32_t fwk_DevCamDebug_aec_exp_index = *DevCamDebug_aec_exp_index;
6198 camMetadata.update(DEVCAMDEBUG_AEC_EXP_INDEX, &fwk_DevCamDebug_aec_exp_index, 1);
6199 }
6200 IF_META_AVAILABLE(float, DevCamDebug_aec_lux_idx,
6201 CAM_INTF_META_DEV_CAM_AEC_LUX_IDX, metadata) {
6202 float fwk_DevCamDebug_aec_lux_idx = *DevCamDebug_aec_lux_idx;
6203 camMetadata.update(DEVCAMDEBUG_AEC_LUX_IDX, &fwk_DevCamDebug_aec_lux_idx, 1);
6204 }
6205 // DevCamDebug metadata translateFromHalMetadata AWB
6206 IF_META_AVAILABLE(float, DevCamDebug_awb_r_gain,
6207 CAM_INTF_META_DEV_CAM_AWB_R_GAIN, metadata) {
6208 float fwk_DevCamDebug_awb_r_gain = *DevCamDebug_awb_r_gain;
6209 camMetadata.update(DEVCAMDEBUG_AWB_R_GAIN, &fwk_DevCamDebug_awb_r_gain, 1);
6210 }
6211 IF_META_AVAILABLE(float, DevCamDebug_awb_g_gain,
6212 CAM_INTF_META_DEV_CAM_AWB_G_GAIN, metadata) {
6213 float fwk_DevCamDebug_awb_g_gain = *DevCamDebug_awb_g_gain;
6214 camMetadata.update(DEVCAMDEBUG_AWB_G_GAIN, &fwk_DevCamDebug_awb_g_gain, 1);
6215 }
6216 IF_META_AVAILABLE(float, DevCamDebug_awb_b_gain,
6217 CAM_INTF_META_DEV_CAM_AWB_B_GAIN, metadata) {
6218 float fwk_DevCamDebug_awb_b_gain = *DevCamDebug_awb_b_gain;
6219 camMetadata.update(DEVCAMDEBUG_AWB_B_GAIN, &fwk_DevCamDebug_awb_b_gain, 1);
6220 }
6221 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_cct,
6222 CAM_INTF_META_DEV_CAM_AWB_CCT, metadata) {
6223 int32_t fwk_DevCamDebug_awb_cct = *DevCamDebug_awb_cct;
6224 camMetadata.update(DEVCAMDEBUG_AWB_CCT, &fwk_DevCamDebug_awb_cct, 1);
6225 }
6226 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_decision,
6227 CAM_INTF_META_DEV_CAM_AWB_DECISION, metadata) {
6228 int32_t fwk_DevCamDebug_awb_decision = *DevCamDebug_awb_decision;
6229 camMetadata.update(DEVCAMDEBUG_AWB_DECISION, &fwk_DevCamDebug_awb_decision, 1);
6230 }
6231 }
6232 // atrace_end(ATRACE_TAG_ALWAYS);
6233
Thierry Strudel3d639192016-09-09 11:52:26 -07006234 IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
6235 int64_t fwk_frame_number = *frame_number;
6236 camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
6237 }
6238
6239 IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
6240 int32_t fps_range[2];
6241 fps_range[0] = (int32_t)float_range->min_fps;
6242 fps_range[1] = (int32_t)float_range->max_fps;
6243 camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
6244 fps_range, 2);
6245 LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
6246 fps_range[0], fps_range[1]);
6247 }
6248
6249 IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
6250 camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
6251 }
6252
6253 IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
6254 int val = (uint8_t)lookupFwkName(SCENE_MODES_MAP,
6255 METADATA_MAP_SIZE(SCENE_MODES_MAP),
6256 *sceneMode);
6257 if (NAME_NOT_FOUND != val) {
6258 uint8_t fwkSceneMode = (uint8_t)val;
6259 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
6260 LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
6261 fwkSceneMode);
6262 }
6263 }
6264
6265 IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
6266 uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
6267 camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
6268 }
6269
6270 IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
6271 uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
6272 camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
6273 }
6274
6275 IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
6276 uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
6277 camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
6278 }
6279
6280 IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
6281 CAM_INTF_META_EDGE_MODE, metadata) {
6282 camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
6283 }
6284
6285 IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
6286 uint8_t fwk_flashPower = (uint8_t) *flashPower;
6287 camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
6288 }
6289
6290 IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
6291 camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
6292 }
6293
6294 IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
6295 if (0 <= *flashState) {
6296 uint8_t fwk_flashState = (uint8_t) *flashState;
6297 if (!gCamCapability[mCameraId]->flash_available) {
6298 fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
6299 }
6300 camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
6301 }
6302 }
6303
6304 IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
6305 int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
6306 if (NAME_NOT_FOUND != val) {
6307 uint8_t fwk_flashMode = (uint8_t)val;
6308 camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
6309 }
6310 }
6311
6312 IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
6313 uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
6314 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
6315 }
6316
6317 IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
6318 camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
6319 }
6320
6321 IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
6322 camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
6323 }
6324
6325 IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
6326 camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
6327 }
6328
6329 IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
6330 uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
6331 camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
6332 }
6333
6334 IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
6335 uint8_t fwk_videoStab = (uint8_t) *videoStab;
6336 LOGD("fwk_videoStab = %d", fwk_videoStab);
6337 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
6338 } else {
6339 // Regardless of whether video stabilization is supported, CTS expects the EIS result
6340 // to be non-NULL, so default the video stabilization result to OFF mode.
6341 uint8_t fwkVideoStabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
6342 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006343 LOGD("EIS result default to OFF mode");
Thierry Strudel3d639192016-09-09 11:52:26 -07006344 }
6345
6346 IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
6347 uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
6348 camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
6349 }
6350
6351 IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
6352 camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
6353 }
6354
Thierry Strudel3d639192016-09-09 11:52:26 -07006355 IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
6356 CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006357 float fwk_blackLevelInd[BLACK_LEVEL_PATTERN_CNT];
Thierry Strudel3d639192016-09-09 11:52:26 -07006358
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006359 adjustBlackLevelForCFA(blackLevelAppliedPattern->cam_black_level, fwk_blackLevelInd,
6360 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07006361
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006362 LOGD("applied dynamicblackLevel in RGGB order = %f %f %f %f",
Thierry Strudel3d639192016-09-09 11:52:26 -07006363 blackLevelAppliedPattern->cam_black_level[0],
6364 blackLevelAppliedPattern->cam_black_level[1],
6365 blackLevelAppliedPattern->cam_black_level[2],
6366 blackLevelAppliedPattern->cam_black_level[3]);
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006367 camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd,
6368 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006369
6370#ifndef USE_HAL_3_3
6371 // Update the ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
Zhijun Heb753c672016-06-15 14:50:48 -07006372 // Convert the internal 12-bit black level values to the sensor's
6373 // 10-bit raw depth space.
6374 fwk_blackLevelInd[0] /= 4.0;
6375 fwk_blackLevelInd[1] /= 4.0;
6376 fwk_blackLevelInd[2] /= 4.0;
6377 fwk_blackLevelInd[3] /= 4.0;
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006378 camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd,
6379 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006380#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07006381 }
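
    // Note on the scaling above: the vendor tag is published in the internal 12-bit
    // domain, while ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL expects the sensor's 10-bit
    // raw range, hence the divide-by-4 (e.g. a 12-bit black level of 256 maps to 64).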
6382
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006383#ifndef USE_HAL_3_3
6384 // Fixed whitelevel is used by ISP/Sensor
6385 camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
6386 &gCamCapability[mCameraId]->white_level, 1);
6387#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07006388
6389 IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
6390 CAM_INTF_META_SCALER_CROP_REGION, metadata) {
6391 int32_t scalerCropRegion[4];
6392 scalerCropRegion[0] = hScalerCropRegion->left;
6393 scalerCropRegion[1] = hScalerCropRegion->top;
6394 scalerCropRegion[2] = hScalerCropRegion->width;
6395 scalerCropRegion[3] = hScalerCropRegion->height;
6396
6397 // Adjust crop region from sensor output coordinate system to active
6398 // array coordinate system.
6399 mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
6400 scalerCropRegion[2], scalerCropRegion[3]);
6401
6402 camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
6403 }
6404
6405 IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
6406 LOGD("sensorExpTime = %lld", *sensorExpTime);
6407 camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
6408 }
6409
6410    IF_META_AVAILABLE(int64_t, sensorFrameDuration,
6411            CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
6412        LOGD("sensorFrameDuration = %lld", *sensorFrameDuration);
6413        camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFrameDuration, 1);
6414 }
6415
6416 IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
6417 CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
6418 LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
6419 camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
6420 sensorRollingShutterSkew, 1);
6421 }
6422
6423 IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
6424 LOGD("sensorSensitivity = %d", *sensorSensitivity);
6425 camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
6426
6427 //calculate the noise profile based on sensitivity
6428 double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
6429 double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
6430 double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
6431 for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
6432 noise_profile[i] = noise_profile_S;
6433 noise_profile[i+1] = noise_profile_O;
6434 }
6435 LOGD("noise model entry (S, O) is (%f, %f)",
6436 noise_profile_S, noise_profile_O);
6437 camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
6438 (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
6439 }
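
    // Note: per the Android metadata definition of ANDROID_SENSOR_NOISE_PROFILE, the
    // (S, O) pair published above models the noise standard deviation of a pixel with
    // signal level x as N(x) = sqrt(S * x + O); one pair is emitted per color channel,
    // with S and O derived from the current sensor sensitivity.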
6440
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006441#ifndef USE_HAL_3_3
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07006442 int32_t fwk_ispSensitivity = 100;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006443 IF_META_AVAILABLE(int32_t, ispSensitivity, CAM_INTF_META_ISP_SENSITIVITY, metadata) {
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07006444 fwk_ispSensitivity = (int32_t) *ispSensitivity;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006445 }
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07006446 IF_META_AVAILABLE(float, postStatsSensitivity, CAM_INTF_META_ISP_POST_STATS_SENSITIVITY, metadata) {
6447 fwk_ispSensitivity = (int32_t) (*postStatsSensitivity * fwk_ispSensitivity);
6448 }
6449 camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &fwk_ispSensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006450#endif
6451
Thierry Strudel3d639192016-09-09 11:52:26 -07006452 IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
6453 uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
6454 camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
6455 }
6456
6457 IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
6458 int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
6459 *faceDetectMode);
6460 if (NAME_NOT_FOUND != val) {
6461 uint8_t fwk_faceDetectMode = (uint8_t)val;
6462 camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
6463
6464 if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
6465 IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
6466 CAM_INTF_META_FACE_DETECTION, metadata) {
6467 uint8_t numFaces = MIN(
6468 faceDetectionInfo->num_faces_detected, MAX_ROI);
6469 int32_t faceIds[MAX_ROI];
6470 uint8_t faceScores[MAX_ROI];
6471 int32_t faceRectangles[MAX_ROI * 4];
6472 int32_t faceLandmarks[MAX_ROI * 6];
6473 size_t j = 0, k = 0;
6474
6475 for (size_t i = 0; i < numFaces; i++) {
6476 faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
6477 // Adjust the face boundary from the sensor output coordinate system to
6478 // the active array coordinate system.
6479 cam_rect_t& rect = faceDetectionInfo->faces[i].face_boundary;
6480 mCropRegionMapper.toActiveArray(rect.left, rect.top,
6481 rect.width, rect.height);
6482
6483 convertToRegions(faceDetectionInfo->faces[i].face_boundary,
6484 faceRectangles+j, -1);
6485
6486 j+= 4;
6487 }
6488 if (numFaces <= 0) {
6489 memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
6490 memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
6491 memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
6492 memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
6493 }
6494
6495 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
6496 numFaces);
6497 camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
6498 faceRectangles, numFaces * 4U);
6499 if (fwk_faceDetectMode ==
6500 ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
6501 IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
6502 CAM_INTF_META_FACE_LANDMARK, metadata) {
6503
6504 for (size_t i = 0; i < numFaces; i++) {
6505 // Map the coordinates from the sensor output coordinate system to the
6506 // active array coordinate system.
6507 mCropRegionMapper.toActiveArray(
6508 landmarks->face_landmarks[i].left_eye_center.x,
6509 landmarks->face_landmarks[i].left_eye_center.y);
6510 mCropRegionMapper.toActiveArray(
6511 landmarks->face_landmarks[i].right_eye_center.x,
6512 landmarks->face_landmarks[i].right_eye_center.y);
6513 mCropRegionMapper.toActiveArray(
6514 landmarks->face_landmarks[i].mouth_center.x,
6515 landmarks->face_landmarks[i].mouth_center.y);
6516
6517 convertLandmarks(landmarks->face_landmarks[i], faceLandmarks+k);
Thierry Strudel04e026f2016-10-10 11:27:36 -07006518 k+= TOTAL_LANDMARK_INDICES;
6519 }
6520 } else {
6521 for (size_t i = 0; i < numFaces; i++) {
6522 setInvalidLandmarks(faceLandmarks+k);
6523 k+= TOTAL_LANDMARK_INDICES;
Thierry Strudel3d639192016-09-09 11:52:26 -07006524 }
6525 }
6526
6527 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
6528 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
6529 faceLandmarks, numFaces * 6U);
6530 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08006531 IF_META_AVAILABLE(cam_face_blink_data_t, blinks,
6532 CAM_INTF_META_FACE_BLINK, metadata) {
6533 uint8_t detected[MAX_ROI];
6534 uint8_t degree[MAX_ROI * 2];
6535 for (size_t i = 0; i < numFaces; i++) {
6536 detected[i] = blinks->blink[i].blink_detected;
6537 degree[2 * i] = blinks->blink[i].left_blink;
6538 degree[2 * i + 1] = blinks->blink[i].right_blink;
6539 }
6540 camMetadata.update(QCAMERA3_STATS_BLINK_DETECTED,
6541 detected, numFaces);
6542 camMetadata.update(QCAMERA3_STATS_BLINK_DEGREE,
6543 degree, numFaces * 2);
6544 }
6545 IF_META_AVAILABLE(cam_face_smile_data_t, smiles,
6546 CAM_INTF_META_FACE_SMILE, metadata) {
6547 uint8_t degree[MAX_ROI];
6548 uint8_t confidence[MAX_ROI];
6549 for (size_t i = 0; i < numFaces; i++) {
6550 degree[i] = smiles->smile[i].smile_degree;
6551 confidence[i] = smiles->smile[i].smile_confidence;
6552 }
6553 camMetadata.update(QCAMERA3_STATS_SMILE_DEGREE,
6554 degree, numFaces);
6555 camMetadata.update(QCAMERA3_STATS_SMILE_CONFIDENCE,
6556 confidence, numFaces);
6557 }
6558 IF_META_AVAILABLE(cam_face_gaze_data_t, gazes,
6559 CAM_INTF_META_FACE_GAZE, metadata) {
6560 int8_t angle[MAX_ROI];
6561 int32_t direction[MAX_ROI * 3];
6562 int8_t degree[MAX_ROI * 2];
6563 for (size_t i = 0; i < numFaces; i++) {
6564 angle[i] = gazes->gaze[i].gaze_angle;
6565 direction[3 * i] = gazes->gaze[i].updown_dir;
6566 direction[3 * i + 1] = gazes->gaze[i].leftright_dir;
6567 direction[3 * i + 2] = gazes->gaze[i].roll_dir;
6568 degree[2 * i] = gazes->gaze[i].left_right_gaze;
6569 degree[2 * i + 1] = gazes->gaze[i].top_bottom_gaze;
6570 }
6571 camMetadata.update(QCAMERA3_STATS_GAZE_ANGLE,
6572 (uint8_t *)angle, numFaces);
6573 camMetadata.update(QCAMERA3_STATS_GAZE_DIRECTION,
6574 direction, numFaces * 3);
6575 camMetadata.update(QCAMERA3_STATS_GAZE_DEGREE,
6576 (uint8_t *)degree, numFaces * 2);
6577 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006578 }
6579 }
6580 }
6581 }
6582
6583 IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
6584 uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
Thierry Strudel54dc9782017-02-15 12:12:10 -08006585 camMetadata.update(QCAMERA3_HISTOGRAM_MODE, &fwk_histogramMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006586
Thierry Strudel54dc9782017-02-15 12:12:10 -08006587 if (fwk_histogramMode == QCAMERA3_HISTOGRAM_MODE_ON) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006588 IF_META_AVAILABLE(cam_hist_stats_t, stats_data, CAM_INTF_META_HISTOGRAM, metadata) {
6589 // process histogram statistics info
Thierry Strudel54dc9782017-02-15 12:12:10 -08006590 uint32_t hist_buf[4][CAM_HISTOGRAM_STATS_SIZE];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006591 uint32_t hist_size = sizeof(cam_histogram_data_t::hist_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08006592 cam_histogram_data_t rHistData, grHistData, gbHistData, bHistData;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006593 memset(&rHistData, 0, sizeof(rHistData));
Thierry Strudel54dc9782017-02-15 12:12:10 -08006594 memset(&grHistData, 0, sizeof(grHistData));
6595 memset(&gbHistData, 0, sizeof(gbHistData));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006596 memset(&bHistData, 0, sizeof(bHistData));
6597
6598 switch (stats_data->type) {
6599 case CAM_HISTOGRAM_TYPE_BAYER:
6600 switch (stats_data->bayer_stats.data_type) {
6601 case CAM_STATS_CHANNEL_GR:
Thierry Strudel54dc9782017-02-15 12:12:10 -08006602 rHistData = grHistData = gbHistData = bHistData =
6603 stats_data->bayer_stats.gr_stats;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006604 break;
6605 case CAM_STATS_CHANNEL_GB:
Thierry Strudel54dc9782017-02-15 12:12:10 -08006606 rHistData = grHistData = gbHistData = bHistData =
6607 stats_data->bayer_stats.gb_stats;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006608 break;
6609 case CAM_STATS_CHANNEL_B:
Thierry Strudel54dc9782017-02-15 12:12:10 -08006610 rHistData = grHistData = gbHistData = bHistData =
6611 stats_data->bayer_stats.b_stats;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006612 break;
6613 case CAM_STATS_CHANNEL_ALL:
6614 rHistData = stats_data->bayer_stats.r_stats;
Thierry Strudel54dc9782017-02-15 12:12:10 -08006615 gbHistData = stats_data->bayer_stats.gb_stats;
6616 grHistData = stats_data->bayer_stats.gr_stats;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006617 bHistData = stats_data->bayer_stats.b_stats;
6618 break;
6619 case CAM_STATS_CHANNEL_Y:
6620 case CAM_STATS_CHANNEL_R:
6621 default:
Thierry Strudel54dc9782017-02-15 12:12:10 -08006622 rHistData = grHistData = gbHistData = bHistData =
6623 stats_data->bayer_stats.r_stats;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006624 break;
6625 }
6626 break;
6627 case CAM_HISTOGRAM_TYPE_YUV:
Thierry Strudel54dc9782017-02-15 12:12:10 -08006628 rHistData = grHistData = gbHistData = bHistData =
6629 stats_data->yuv_stats;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006630 break;
6631 }
6632
6633 memcpy(hist_buf, rHistData.hist_buf, hist_size);
Thierry Strudel54dc9782017-02-15 12:12:10 -08006634 memcpy(hist_buf[1], gbHistData.hist_buf, hist_size);
6635 memcpy(hist_buf[2], grHistData.hist_buf, hist_size);
6636 memcpy(hist_buf[3], bHistData.hist_buf, hist_size);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006637
Thierry Strudel54dc9782017-02-15 12:12:10 -08006638 camMetadata.update(QCAMERA3_HISTOGRAM_STATS, (int32_t*)hist_buf, hist_size*4);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006639 }
6640 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006641 }
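
    // Note on the layout above: hist_buf packs four histograms of
    // CAM_HISTOGRAM_STATS_SIZE bins each, in R, Gb, Gr, B order, and the whole
    // 4-plane block is published under QCAMERA3_HISTOGRAM_STATS as a flat array.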
6642
6643 IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
6644 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
6645 uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
6646 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
6647 }
6648
6649 IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
6650 CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
6651 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
6652 CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
6653 }
6654
6655 IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
6656 CAM_INTF_META_LENS_SHADING_MAP, metadata) {
6657 size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
6658 CAM_MAX_SHADING_MAP_HEIGHT);
6659 size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
6660 CAM_MAX_SHADING_MAP_WIDTH);
6661 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
6662 lensShadingMap->lens_shading, 4U * map_width * map_height);
6663 }
6664
6665 IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
6666 uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
6667 camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
6668 }
6669
6670 IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
6671 //Populate CAM_INTF_META_TONEMAP_CURVES
6672 /* ch0 = G, ch 1 = B, ch 2 = R*/
6673 if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
6674 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
6675 tonemap->tonemap_points_cnt,
6676 CAM_MAX_TONEMAP_CURVE_SIZE);
6677 tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
6678 }
6679
6680 camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
6681 &tonemap->curves[0].tonemap_points[0][0],
6682 tonemap->tonemap_points_cnt * 2);
6683
6684 camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
6685 &tonemap->curves[1].tonemap_points[0][0],
6686 tonemap->tonemap_points_cnt * 2);
6687
6688 camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
6689 &tonemap->curves[2].tonemap_points[0][0],
6690 tonemap->tonemap_points_cnt * 2);
6691 }
6692
6693 IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
6694 CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
6695 camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
6696 CC_GAIN_MAX);
6697 }
6698
6699 IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
6700 CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
6701 camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
6702 (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
6703 CC_MATRIX_COLS * CC_MATRIX_ROWS);
6704 }
6705
6706 IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
6707 CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
6708 if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
6709 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
6710 toneCurve->tonemap_points_cnt,
6711 CAM_MAX_TONEMAP_CURVE_SIZE);
6712 toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
6713 }
6714 camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
6715 (float*)toneCurve->curve.tonemap_points,
6716 toneCurve->tonemap_points_cnt * 2);
6717 }
6718
6719 IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
6720 CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
6721 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
6722 predColorCorrectionGains->gains, 4);
6723 }
6724
6725 IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
6726 CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
6727 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
6728 (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
6729 CC_MATRIX_ROWS * CC_MATRIX_COLS);
6730 }
6731
6732 IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
6733 camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
6734 }
6735
6736 IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
6737 uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
6738 camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
6739 }
6740
6741 IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
6742 uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
6743 camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
6744 }
6745
6746 IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
6747 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
6748 *effectMode);
6749 if (NAME_NOT_FOUND != val) {
6750 uint8_t fwk_effectMode = (uint8_t)val;
6751 camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
6752 }
6753 }
6754
6755 IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
6756 CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
6757 int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
6758 METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
6759 if (NAME_NOT_FOUND != fwk_testPatternMode) {
6760 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
6761 }
6762 int32_t fwk_testPatternData[4];
6763 fwk_testPatternData[0] = testPatternData->r;
6764 fwk_testPatternData[3] = testPatternData->b;
6765 switch (gCamCapability[mCameraId]->color_arrangement) {
6766 case CAM_FILTER_ARRANGEMENT_RGGB:
6767 case CAM_FILTER_ARRANGEMENT_GRBG:
6768 fwk_testPatternData[1] = testPatternData->gr;
6769 fwk_testPatternData[2] = testPatternData->gb;
6770 break;
6771 case CAM_FILTER_ARRANGEMENT_GBRG:
6772 case CAM_FILTER_ARRANGEMENT_BGGR:
6773 fwk_testPatternData[2] = testPatternData->gr;
6774 fwk_testPatternData[1] = testPatternData->gb;
6775 break;
6776 default:
6777 LOGE("color arrangement %d is not supported",
6778 gCamCapability[mCameraId]->color_arrangement);
6779 break;
6780 }
6781 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
6782 }
6783
6784 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
6785 camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
6786 }
6787
6788 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
6789 String8 str((const char *)gps_methods);
6790 camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
6791 }
6792
6793 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
6794 camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
6795 }
6796
6797 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
6798 camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
6799 }
6800
6801 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
6802 uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
6803 camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
6804 }
6805
6806 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
6807 uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
6808 camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
6809 }
6810
6811 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
6812 int32_t fwk_thumb_size[2];
6813 fwk_thumb_size[0] = thumb_size->width;
6814 fwk_thumb_size[1] = thumb_size->height;
6815 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
6816 }
6817
6818 IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
6819 camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
6820 privateData,
6821 MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
6822 }
6823
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006824 IF_META_AVAILABLE(int32_t, meteringMode, CAM_INTF_PARM_AEC_ALGO_TYPE, metadata) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08006825 camMetadata.update(QCAMERA3_EXPOSURE_METER,
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006826 meteringMode, 1);
6827 }
6828
Thierry Strudel54dc9782017-02-15 12:12:10 -08006829 IF_META_AVAILABLE(cam_asd_hdr_scene_data_t, hdr_scene_data,
6830 CAM_INTF_META_ASD_HDR_SCENE_DATA, metadata) {
6831 LOGD("hdr_scene_data: %d %f\n",
6832 hdr_scene_data->is_hdr_scene, hdr_scene_data->hdr_confidence);
6833 uint8_t isHdr = hdr_scene_data->is_hdr_scene;
6834 float isHdrConfidence = hdr_scene_data->hdr_confidence;
6835 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE,
6836 &isHdr, 1);
6837 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE,
6838 &isHdrConfidence, 1);
6839 }
6840
6841
6842
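    // Pack the tuning metadata into a flat blob for the QCAMERA3_TUNING_META_DATA_BLOB
    // vendor tag: a version word, five size words (sensor, VFE, CPP, CAC, mod3),
    // followed by the sensor, VFE, CPP and CAC data segments.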
Thierry Strudel3d639192016-09-09 11:52:26 -07006843 if (metadata->is_tuning_params_valid) {
6844 uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
6845 uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
6846 metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
6847
6848
6849 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
6850 sizeof(uint32_t));
6851 data += sizeof(uint32_t);
6852
6853 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
6854 sizeof(uint32_t));
6855 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
6856 data += sizeof(uint32_t);
6857
6858 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
6859 sizeof(uint32_t));
6860 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
6861 data += sizeof(uint32_t);
6862
6863 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
6864 sizeof(uint32_t));
6865 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
6866 data += sizeof(uint32_t);
6867
6868 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
6869 sizeof(uint32_t));
6870 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
6871 data += sizeof(uint32_t);
6872
6873 metadata->tuning_params.tuning_mod3_data_size = 0;
6874 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
6875 sizeof(uint32_t));
6876 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
6877 data += sizeof(uint32_t);
6878
6879 size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
6880 TUNING_SENSOR_DATA_MAX);
6881 memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
6882 count);
6883 data += count;
6884
6885 count = MIN(metadata->tuning_params.tuning_vfe_data_size,
6886 TUNING_VFE_DATA_MAX);
6887 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
6888 count);
6889 data += count;
6890
6891 count = MIN(metadata->tuning_params.tuning_cpp_data_size,
6892 TUNING_CPP_DATA_MAX);
6893 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
6894 count);
6895 data += count;
6896
6897 count = MIN(metadata->tuning_params.tuning_cac_data_size,
6898 TUNING_CAC_DATA_MAX);
6899 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
6900 count);
6901 data += count;
6902
6903 camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
6904 (int32_t *)(void *)tuning_meta_data_blob,
6905 (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
6906 }
6907
6908 IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
6909 CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
6910 camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
6911 (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
6912 NEUTRAL_COL_POINTS);
6913 }
6914
6915 IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
6916 uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
6917 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
6918 }
6919
6920 IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
6921 int32_t aeRegions[REGIONS_TUPLE_COUNT];
6922        // Adjust the AE region from the sensor output coordinate system to the
6923        // active array coordinate system.
6924 mCropRegionMapper.toActiveArray(hAeRegions->rect.left, hAeRegions->rect.top,
6925 hAeRegions->rect.width, hAeRegions->rect.height);
6926
6927 convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
6928 camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
6929 REGIONS_TUPLE_COUNT);
6930 LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
6931 aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
6932 hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width,
6933 hAeRegions->rect.height);
6934 }
6935
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07006936 IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
6937 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
6938 if (NAME_NOT_FOUND != val) {
6939 uint8_t fwkAfMode = (uint8_t)val;
6940 camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
6941 LOGD("Metadata : ANDROID_CONTROL_AF_MODE %d", val);
6942 } else {
6943 LOGH("Metadata not found : ANDROID_CONTROL_AF_MODE %d",
6944 val);
6945 }
6946 }
6947
Thierry Strudel3d639192016-09-09 11:52:26 -07006948 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
6949 uint8_t fwk_afState = (uint8_t) *afState;
6950 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07006951 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
Thierry Strudel3d639192016-09-09 11:52:26 -07006952 }
6953
6954 IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
6955 camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
6956 }
6957
6958 IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
6959 camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
6960 }
6961
6962 IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
6963 uint8_t fwk_lensState = *lensState;
6964 camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
6965 }
6966
6967 IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
6968 /*af regions*/
6969 int32_t afRegions[REGIONS_TUPLE_COUNT];
6970        // Adjust the AF region from the sensor output coordinate system to the
6971        // active array coordinate system.
6972 mCropRegionMapper.toActiveArray(hAfRegions->rect.left, hAfRegions->rect.top,
6973 hAfRegions->rect.width, hAfRegions->rect.height);
6974
6975 convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
6976 camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
6977 REGIONS_TUPLE_COUNT);
6978 LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
6979 afRegions[0], afRegions[1], afRegions[2], afRegions[3],
6980 hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width,
6981 hAfRegions->rect.height);
6982 }
6983
6984 IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
Shuzhen Wangf6890e02016-08-12 14:28:54 -07006985 uint32_t ab_mode = *hal_ab_mode;
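        // The framework antibanding enum has no 50Hz/60Hz auto variants, so the
        // HAL's AUTO_50HZ/AUTO_60HZ modes are reported as plain AUTO.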
6986 if (ab_mode == CAM_ANTIBANDING_MODE_AUTO_60HZ ||
6987 ab_mode == CAM_ANTIBANDING_MODE_AUTO_50HZ) {
6988 ab_mode = CAM_ANTIBANDING_MODE_AUTO;
6989 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006990 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
Shuzhen Wangf6890e02016-08-12 14:28:54 -07006991 ab_mode);
Thierry Strudel3d639192016-09-09 11:52:26 -07006992 if (NAME_NOT_FOUND != val) {
6993 uint8_t fwk_ab_mode = (uint8_t)val;
6994 camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
6995 }
6996 }
6997
6998 IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
6999 int val = lookupFwkName(SCENE_MODES_MAP,
7000 METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
7001 if (NAME_NOT_FOUND != val) {
7002 uint8_t fwkBestshotMode = (uint8_t)val;
7003 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
7004 LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
7005 } else {
7006 LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
7007 }
7008 }
7009
7010 IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
7011 uint8_t fwk_mode = (uint8_t) *mode;
7012 camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
7013 }
7014
7015    /* Constant metadata values to be updated */
7016 uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
7017 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
7018
7019 uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
7020 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
7021
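    // Hot pixel map mode is reported as OFF, so publish an empty hot pixel map (zero entries).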
7022 int32_t hotPixelMap[2];
7023 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
7024
7025 // CDS
7026 IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
7027 camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
7028 }
7029
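    // Sensor (staggered) video HDR: report the current mode and track on/off
    // transitions in mCurrFeatureState so toggles show up in the PROFILE_META logs.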
Thierry Strudel04e026f2016-10-10 11:27:36 -07007030 IF_META_AVAILABLE(cam_sensor_hdr_type_t, vhdr, CAM_INTF_PARM_SENSOR_HDR, metadata) {
7031 int32_t fwk_hdr;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007032 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007033 if(*vhdr == CAM_SENSOR_HDR_OFF) {
7034 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_OFF;
7035 } else {
7036 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_ON;
7037 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007038
7039 if(fwk_hdr != curr_hdr_state) {
7040 LOGH("PROFILE_META_HDR_TOGGLED value=%d", fwk_hdr);
7041 if(fwk_hdr)
7042 mCurrFeatureState |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7043 else
7044 mCurrFeatureState &= ~CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7045 }
Thierry Strudel04e026f2016-10-10 11:27:36 -07007046 camMetadata.update(QCAMERA3_VIDEO_HDR_MODE, &fwk_hdr, 1);
7047 }
7048
Thierry Strudel54dc9782017-02-15 12:12:10 -08007049 //binning correction
7050 IF_META_AVAILABLE(cam_binning_correction_mode_t, bin_correction,
7051 CAM_INTF_META_BINNING_CORRECTION_MODE, metadata) {
7052 int32_t fwk_bin_mode = (int32_t) *bin_correction;
7053 camMetadata.update(QCAMERA3_BINNING_CORRECTION_MODE, &fwk_bin_mode, 1);
7054 }
7055
Thierry Strudel04e026f2016-10-10 11:27:36 -07007056 IF_META_AVAILABLE(cam_ir_mode_type_t, ir, CAM_INTF_META_IR_MODE, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007057 int32_t fwk_ir = (int32_t) *ir;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007058 int8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR ) != 0);
7059 int8_t is_ir_on = 0;
7060
7061        is_ir_on = (fwk_ir > 0) ? 1 : 0;
7062 if(is_ir_on != curr_ir_state) {
7063 LOGH("PROFILE_META_IR_TOGGLED value=%d", fwk_ir);
7064 if(is_ir_on)
7065 mCurrFeatureState |= CAM_QCOM_FEATURE_IR;
7066 else
7067 mCurrFeatureState &= ~CAM_QCOM_FEATURE_IR;
7068 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007069 camMetadata.update(QCAMERA3_IR_MODE, &fwk_ir, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007070 }
7071
Thierry Strudel269c81a2016-10-12 12:13:59 -07007072 // AEC SPEED
7073 IF_META_AVAILABLE(float, aec, CAM_INTF_META_AEC_CONVERGENCE_SPEED, metadata) {
7074 camMetadata.update(QCAMERA3_AEC_CONVERGENCE_SPEED, aec, 1);
7075 }
7076
7077 // AWB SPEED
7078 IF_META_AVAILABLE(float, awb, CAM_INTF_META_AWB_CONVERGENCE_SPEED, metadata) {
7079 camMetadata.update(QCAMERA3_AWB_CONVERGENCE_SPEED, awb, 1);
7080 }
7081
Thierry Strudel3d639192016-09-09 11:52:26 -07007082 // TNR
7083 IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
7084 uint8_t tnr_enable = tnr->denoise_enable;
7085 int32_t tnr_process_type = (int32_t)tnr->process_plates;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007086 int8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0) ;
7087 int8_t is_tnr_on = 0;
7088
7089        is_tnr_on = (tnr_enable > 0) ? 1 : 0;
7090 if(is_tnr_on != curr_tnr_state) {
7091 LOGH("PROFILE_META_TNR_TOGGLED value=%d", tnr_enable);
7092 if(is_tnr_on)
7093 mCurrFeatureState |= CAM_QTI_FEATURE_SW_TNR;
7094 else
7095 mCurrFeatureState &= ~CAM_QTI_FEATURE_SW_TNR;
7096 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007097
7098 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
7099 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
7100 }
7101
7102 // Reprocess crop data
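    // The crop and ROI map applied to the reprocessible output stream are posted
    // through the QCAMERA3_CROP_*_REPROCESS vendor tags for use by the reprocess path.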
7103 IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
7104 uint8_t cnt = crop_data->num_of_streams;
7105 if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
7106                // mm-qcamera-daemon only posts crop_data for streams
7107                // not linked to pproc, so the absence of valid crop metadata
7108                // is not necessarily an error.
7109 LOGD("No valid crop metadata entries");
7110 } else {
7111 uint32_t reproc_stream_id;
7112 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7113 LOGD("No reprocessible stream found, ignore crop data");
7114 } else {
7115 int rc = NO_ERROR;
7116 Vector<int32_t> roi_map;
7117 int32_t *crop = new int32_t[cnt*4];
7118 if (NULL == crop) {
7119 rc = NO_MEMORY;
7120 }
7121 if (NO_ERROR == rc) {
7122 int32_t streams_found = 0;
7123 for (size_t i = 0; i < cnt; i++) {
7124 if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
7125 if (pprocDone) {
7126 // HAL already does internal reprocessing,
7127 // either via reprocessing before JPEG encoding,
7128 // or offline postprocessing for pproc bypass case.
7129 crop[0] = 0;
7130 crop[1] = 0;
7131 crop[2] = mInputStreamInfo.dim.width;
7132 crop[3] = mInputStreamInfo.dim.height;
7133 } else {
7134 crop[0] = crop_data->crop_info[i].crop.left;
7135 crop[1] = crop_data->crop_info[i].crop.top;
7136 crop[2] = crop_data->crop_info[i].crop.width;
7137 crop[3] = crop_data->crop_info[i].crop.height;
7138 }
7139 roi_map.add(crop_data->crop_info[i].roi_map.left);
7140 roi_map.add(crop_data->crop_info[i].roi_map.top);
7141 roi_map.add(crop_data->crop_info[i].roi_map.width);
7142 roi_map.add(crop_data->crop_info[i].roi_map.height);
7143 streams_found++;
7144 LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
7145 crop[0], crop[1], crop[2], crop[3]);
7146 LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
7147 crop_data->crop_info[i].roi_map.left,
7148 crop_data->crop_info[i].roi_map.top,
7149 crop_data->crop_info[i].roi_map.width,
7150 crop_data->crop_info[i].roi_map.height);
7151 break;
7152
7153 }
7154 }
7155 camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
7156 &streams_found, 1);
7157 camMetadata.update(QCAMERA3_CROP_REPROCESS,
7158 crop, (size_t)(streams_found * 4));
7159 if (roi_map.array()) {
7160 camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
7161 roi_map.array(), roi_map.size());
7162 }
7163 }
7164 if (crop) {
7165 delete [] crop;
7166 }
7167 }
7168 }
7169 }
7170
7171 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
7172        // Regardless of whether CAC is supported, CTS expects the CAC result to be
7173        // non-NULL, so hardcode the CAC result to OFF mode.
7174 uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7175 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
7176 } else {
7177 IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
7178 int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
7179 *cacMode);
7180 if (NAME_NOT_FOUND != val) {
7181 uint8_t resultCacMode = (uint8_t)val;
7182 // check whether CAC result from CB is equal to Framework set CAC mode
7183 // If not equal then set the CAC mode came in corresponding request
7184 if (fwk_cacMode != resultCacMode) {
7185 resultCacMode = fwk_cacMode;
7186 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007187 //Check if CAC is disabled by property
7188 if (m_cacModeDisabled) {
7189 resultCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7190 }
7191
Thierry Strudel3d639192016-09-09 11:52:26 -07007192 LOGD("fwk_cacMode=%d resultCacMode=%d", fwk_cacMode, resultCacMode);
7193 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
7194 } else {
7195 LOGE("Invalid CAC camera parameter: %d", *cacMode);
7196 }
7197 }
7198 }
7199
7200 // Post blob of cam_cds_data through vendor tag.
7201 IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
7202 uint8_t cnt = cdsInfo->num_of_streams;
7203 cam_cds_data_t cdsDataOverride;
7204 memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
7205 cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
7206 cdsDataOverride.num_of_streams = 1;
7207 if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
7208 uint32_t reproc_stream_id;
7209 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7210 LOGD("No reprocessible stream found, ignore cds data");
7211 } else {
7212 for (size_t i = 0; i < cnt; i++) {
7213 if (cdsInfo->cds_info[i].stream_id ==
7214 reproc_stream_id) {
7215 cdsDataOverride.cds_info[0].cds_enable =
7216 cdsInfo->cds_info[i].cds_enable;
7217 break;
7218 }
7219 }
7220 }
7221 } else {
7222 LOGD("Invalid stream count %d in CDS_DATA", cnt);
7223 }
7224 camMetadata.update(QCAMERA3_CDS_INFO,
7225 (uint8_t *)&cdsDataOverride,
7226 sizeof(cam_cds_data_t));
7227 }
7228
7229 // Ldaf calibration data
7230 if (!mLdafCalibExist) {
7231 IF_META_AVAILABLE(uint32_t, ldafCalib,
7232 CAM_INTF_META_LDAF_EXIF, metadata) {
7233 mLdafCalibExist = true;
7234 mLdafCalib[0] = ldafCalib[0];
7235 mLdafCalib[1] = ldafCalib[1];
7236 LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
7237 ldafCalib[0], ldafCalib[1]);
7238 }
7239 }
7240
Thierry Strudel54dc9782017-02-15 12:12:10 -08007241 // EXIF debug data through vendor tag
7242 /*
7243 * Mobicat Mask can assume 3 values:
7244 * 1 refers to Mobicat data,
7245 * 2 refers to Stats Debug and Exif Debug Data
7246 * 3 refers to Mobicat and Stats Debug Data
7247 * We want to make sure that we are sending Exif debug data
7248 * only when Mobicat Mask is 2.
7249 */
7250 if ((mExifParams.debug_params != NULL) && (getMobicatMask() == 2)) {
7251 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB,
7252 (uint8_t *)(void *)mExifParams.debug_params,
7253 sizeof(mm_jpeg_debug_exif_params_t));
7254 }
7255
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007256 // Reprocess and DDM debug data through vendor tag
7257 cam_reprocess_info_t repro_info;
7258 memset(&repro_info, 0, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007259 IF_META_AVAILABLE(cam_stream_crop_info_t, sensorCropInfo,
7260 CAM_INTF_META_SNAP_CROP_INFO_SENSOR, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007261 memcpy(&(repro_info.sensor_crop_info), sensorCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007262 }
7263 IF_META_AVAILABLE(cam_stream_crop_info_t, camifCropInfo,
7264 CAM_INTF_META_SNAP_CROP_INFO_CAMIF, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007265 memcpy(&(repro_info.camif_crop_info), camifCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007266 }
7267 IF_META_AVAILABLE(cam_stream_crop_info_t, ispCropInfo,
7268 CAM_INTF_META_SNAP_CROP_INFO_ISP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007269 memcpy(&(repro_info.isp_crop_info), ispCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007270 }
7271 IF_META_AVAILABLE(cam_stream_crop_info_t, cppCropInfo,
7272 CAM_INTF_META_SNAP_CROP_INFO_CPP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007273 memcpy(&(repro_info.cpp_crop_info), cppCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007274 }
7275 IF_META_AVAILABLE(cam_focal_length_ratio_t, ratio,
7276 CAM_INTF_META_AF_FOCAL_LENGTH_RATIO, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007277 memcpy(&(repro_info.af_focal_length_ratio), ratio, sizeof(cam_focal_length_ratio_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007278 }
7279 IF_META_AVAILABLE(int32_t, flip, CAM_INTF_PARM_FLIP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007280 memcpy(&(repro_info.pipeline_flip), flip, sizeof(int32_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007281 }
7282 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
7283 CAM_INTF_PARM_ROTATION, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007284 memcpy(&(repro_info.rotation_info), rotationInfo, sizeof(cam_rotation_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007285 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007286 IF_META_AVAILABLE(cam_area_t, afRoi, CAM_INTF_META_AF_ROI, metadata) {
7287 memcpy(&(repro_info.af_roi), afRoi, sizeof(cam_area_t));
7288 }
7289 IF_META_AVAILABLE(cam_dyn_img_data_t, dynMask, CAM_INTF_META_IMG_DYN_FEAT, metadata) {
7290 memcpy(&(repro_info.dyn_mask), dynMask, sizeof(cam_dyn_img_data_t));
7291 }
7292 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB,
7293 (uint8_t *)&repro_info, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007294
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007295 // INSTANT AEC MODE
7296 IF_META_AVAILABLE(uint8_t, instant_aec_mode,
7297 CAM_INTF_PARM_INSTANT_AEC, metadata) {
7298 camMetadata.update(QCAMERA3_INSTANT_AEC_MODE, instant_aec_mode, 1);
7299 }
7300
Shuzhen Wange763e802016-03-31 10:24:29 -07007301 // AF scene change
7302 IF_META_AVAILABLE(uint8_t, afSceneChange, CAM_INTF_META_AF_SCENE_CHANGE, metadata) {
7303 camMetadata.update(NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE, afSceneChange, 1);
7304 }
7305
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007306 /* In batch mode, cache the first metadata in the batch */
7307 if (mBatchSize && firstMetadataInBatch) {
7308 mCachedMetadata.clear();
7309 mCachedMetadata = camMetadata;
7310 }
7311
Thierry Strudel3d639192016-09-09 11:52:26 -07007312 resultMetadata = camMetadata.release();
7313 return resultMetadata;
7314}
7315
7316/*===========================================================================
7317 * FUNCTION : saveExifParams
7318 *
7319 * DESCRIPTION: Save the 3A/stats Exif debug parameters from the metadata callback into mExifParams
7320 *
7321 * PARAMETERS :
7322 * @metadata : metadata information from callback
7323 *
7324 * RETURN : none
7325 *
7326 *==========================================================================*/
7327void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
7328{
7329 IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
7330 CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
7331 if (mExifParams.debug_params) {
7332 mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
7333 mExifParams.debug_params->ae_debug_params_valid = TRUE;
7334 }
7335 }
7336 IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
7337 CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
7338 if (mExifParams.debug_params) {
7339 mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
7340 mExifParams.debug_params->awb_debug_params_valid = TRUE;
7341 }
7342 }
7343 IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
7344 CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
7345 if (mExifParams.debug_params) {
7346 mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
7347 mExifParams.debug_params->af_debug_params_valid = TRUE;
7348 }
7349 }
7350 IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
7351 CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
7352 if (mExifParams.debug_params) {
7353 mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
7354 mExifParams.debug_params->asd_debug_params_valid = TRUE;
7355 }
7356 }
7357 IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
7358 CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
7359 if (mExifParams.debug_params) {
7360 mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
7361 mExifParams.debug_params->stats_debug_params_valid = TRUE;
7362 }
7363 }
7364 IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t,bestats_exif_debug_params,
7365 CAM_INTF_META_EXIF_DEBUG_BESTATS, metadata) {
7366 if (mExifParams.debug_params) {
7367 mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
7368 mExifParams.debug_params->bestats_debug_params_valid = TRUE;
7369 }
7370 }
7371 IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
7372 CAM_INTF_META_EXIF_DEBUG_BHIST, metadata) {
7373 if (mExifParams.debug_params) {
7374 mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
7375 mExifParams.debug_params->bhist_debug_params_valid = TRUE;
7376 }
7377 }
7378 IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
7379 CAM_INTF_META_EXIF_DEBUG_3A_TUNING, metadata) {
7380 if (mExifParams.debug_params) {
7381 mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
7382 mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
7383 }
7384 }
7385}
7386
7387/*===========================================================================
7388 * FUNCTION : get3AExifParams
7389 *
7390 * DESCRIPTION: Return the cached Exif parameters, including 3A debug data
7391 *
7392 * PARAMETERS : none
7393 *
7394 *
7395 * RETURN : mm_jpeg_exif_params_t
7396 *
7397 *==========================================================================*/
7398mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
7399{
7400 return mExifParams;
7401}
7402
7403/*===========================================================================
7404 * FUNCTION : translateCbUrgentMetadataToResultMetadata
7405 *
7406 * DESCRIPTION: Translate urgent (partial result) metadata from the backend into framework result metadata
7407 *
7408 * PARAMETERS :
7409 * @metadata : metadata information from callback
7410 *
7411 * RETURN : camera_metadata_t*
7412 * metadata in a format specified by fwk
7413 *==========================================================================*/
7414camera_metadata_t*
7415QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
7416 (metadata_buffer_t *metadata)
7417{
7418 CameraMetadata camMetadata;
7419 camera_metadata_t *resultMetadata;
7420
7421
7422 IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
7423 uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
7424 camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
7425 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
7426 }
7427
7428 IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
7429 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
7430 &aecTrigger->trigger, 1);
7431 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
7432 &aecTrigger->trigger_id, 1);
7433 LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
7434 aecTrigger->trigger);
7435 LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
7436 aecTrigger->trigger_id);
7437 }
7438
7439 IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
7440 uint8_t fwk_ae_state = (uint8_t) *ae_state;
7441 camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
7442 LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
7443 }
7444
Thierry Strudel3d639192016-09-09 11:52:26 -07007445 IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
7446 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
7447 &af_trigger->trigger, 1);
7448 LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
7449 af_trigger->trigger);
7450 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &af_trigger->trigger_id, 1);
7451 LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
7452 af_trigger->trigger_id);
7453 }
7454
7455 IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
7456 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
7457 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
7458 if (NAME_NOT_FOUND != val) {
7459 uint8_t fwkWhiteBalanceMode = (uint8_t)val;
7460 camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
7461 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
7462 } else {
7463 LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
7464 }
7465 }
7466
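    // Deduce ANDROID_CONTROL_AE_MODE from the backend's redeye, LED flash mode and
    // AEC mode, which are reported as separate parameters.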
7467 uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
7468 uint32_t aeMode = CAM_AE_MODE_MAX;
7469 int32_t flashMode = CAM_FLASH_MODE_MAX;
7470 int32_t redeye = -1;
7471 IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
7472 aeMode = *pAeMode;
7473 }
7474 IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
7475 flashMode = *pFlashMode;
7476 }
7477 IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
7478 redeye = *pRedeye;
7479 }
7480
7481 if (1 == redeye) {
7482 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
7483 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
7484 } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
7485 int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
7486 flashMode);
7487 if (NAME_NOT_FOUND != val) {
7488 fwk_aeMode = (uint8_t)val;
7489 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
7490 } else {
7491 LOGE("Unsupported flash mode %d", flashMode);
7492 }
7493 } else if (aeMode == CAM_AE_MODE_ON) {
7494 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
7495 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
7496 } else if (aeMode == CAM_AE_MODE_OFF) {
7497 fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
7498 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
7499 } else {
7500 LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
7501 "flashMode:%d, aeMode:%u!!!",
7502 redeye, flashMode, aeMode);
7503 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007504 if (mInstantAEC) {
7505        // Increment the frame index count until the instant AEC bound is reached.
7506 mInstantAecFrameIdxCount++;
7507 IF_META_AVAILABLE(cam_3a_params_t, ae_params,
7508 CAM_INTF_META_AEC_INFO, metadata) {
7509 LOGH("ae_params->settled = %d",ae_params->settled);
7510 // If AEC settled, or if number of frames reached bound value,
7511 // should reset instant AEC.
7512 if (ae_params->settled ||
7513 (mInstantAecFrameIdxCount > mAecSkipDisplayFrameBound)) {
7514 LOGH("AEC settled or Frames reached instantAEC bound, resetting instantAEC");
7515 mInstantAEC = false;
7516 mResetInstantAEC = true;
7517 mInstantAecFrameIdxCount = 0;
7518 }
7519 }
7520 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007521 resultMetadata = camMetadata.release();
7522 return resultMetadata;
7523}
7524
7525/*===========================================================================
7526 * FUNCTION : dumpMetadataToFile
7527 *
7528 * DESCRIPTION: Dumps tuning metadata to file system
7529 *
7530 * PARAMETERS :
7531 * @meta : tuning metadata
7532 * @dumpFrameCount : current dump frame count
7533 * @enabled : Enable mask
7534 *
7535 *==========================================================================*/
7536void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
7537 uint32_t &dumpFrameCount,
7538 bool enabled,
7539 const char *type,
7540 uint32_t frameNumber)
7541{
7542 //Some sanity checks
7543 if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
7544 LOGE("Tuning sensor data size bigger than expected %d: %d",
7545 meta.tuning_sensor_data_size,
7546 TUNING_SENSOR_DATA_MAX);
7547 return;
7548 }
7549
7550 if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
7551 LOGE("Tuning VFE data size bigger than expected %d: %d",
7552 meta.tuning_vfe_data_size,
7553 TUNING_VFE_DATA_MAX);
7554 return;
7555 }
7556
7557 if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
7558 LOGE("Tuning CPP data size bigger than expected %d: %d",
7559 meta.tuning_cpp_data_size,
7560 TUNING_CPP_DATA_MAX);
7561 return;
7562 }
7563
7564 if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
7565 LOGE("Tuning CAC data size bigger than expected %d: %d",
7566 meta.tuning_cac_data_size,
7567 TUNING_CAC_DATA_MAX);
7568 return;
7569 }
7570 //
7571
7572 if(enabled){
7573 char timeBuf[FILENAME_MAX];
7574 char buf[FILENAME_MAX];
7575 memset(buf, 0, sizeof(buf));
7576 memset(timeBuf, 0, sizeof(timeBuf));
7577 time_t current_time;
7578 struct tm * timeinfo;
7579 time (&current_time);
7580 timeinfo = localtime (&current_time);
7581 if (timeinfo != NULL) {
7582 strftime (timeBuf, sizeof(timeBuf),
7583 QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
7584 }
7585 String8 filePath(timeBuf);
7586 snprintf(buf,
7587 sizeof(buf),
7588 "%dm_%s_%d.bin",
7589 dumpFrameCount,
7590 type,
7591 frameNumber);
7592 filePath.append(buf);
7593 int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
7594 if (file_fd >= 0) {
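            // The dump file mirrors the tuning blob layout: version, five size
            // words (sensor, VFE, CPP, CAC, mod3), then the data segments.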
7595 ssize_t written_len = 0;
7596 meta.tuning_data_version = TUNING_DATA_VERSION;
7597 void *data = (void *)((uint8_t *)&meta.tuning_data_version);
7598 written_len += write(file_fd, data, sizeof(uint32_t));
7599 data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
7600 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
7601 written_len += write(file_fd, data, sizeof(uint32_t));
7602 data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
7603 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
7604 written_len += write(file_fd, data, sizeof(uint32_t));
7605 data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
7606 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
7607 written_len += write(file_fd, data, sizeof(uint32_t));
7608 data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
7609 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
7610 written_len += write(file_fd, data, sizeof(uint32_t));
7611 meta.tuning_mod3_data_size = 0;
7612 data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
7613 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
7614 written_len += write(file_fd, data, sizeof(uint32_t));
7615 size_t total_size = meta.tuning_sensor_data_size;
7616 data = (void *)((uint8_t *)&meta.data);
7617 written_len += write(file_fd, data, total_size);
7618 total_size = meta.tuning_vfe_data_size;
7619 data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
7620 written_len += write(file_fd, data, total_size);
7621 total_size = meta.tuning_cpp_data_size;
7622 data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
7623 written_len += write(file_fd, data, total_size);
7624 total_size = meta.tuning_cac_data_size;
7625 data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
7626 written_len += write(file_fd, data, total_size);
7627 close(file_fd);
7628 }else {
7629 LOGE("fail to open file for metadata dumping");
7630 }
7631 }
7632}
7633
7634/*===========================================================================
7635 * FUNCTION : cleanAndSortStreamInfo
7636 *
7637 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
7638 *              and sort them such that raw streams are at the end of the list.
7639 *              This is a workaround for a camera daemon constraint.
7640 *
7641 * PARAMETERS : None
7642 *
7643 *==========================================================================*/
7644void QCamera3HardwareInterface::cleanAndSortStreamInfo()
7645{
7646 List<stream_info_t *> newStreamInfo;
7647
7648 /*clean up invalid streams*/
7649 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
7650 it != mStreamInfo.end();) {
7651 if(((*it)->status) == INVALID){
7652 QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
7653 delete channel;
7654 free(*it);
7655 it = mStreamInfo.erase(it);
7656 } else {
7657 it++;
7658 }
7659 }
7660
7661 // Move preview/video/callback/snapshot streams into newList
7662 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
7663 it != mStreamInfo.end();) {
7664 if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
7665 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
7666 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
7667 newStreamInfo.push_back(*it);
7668 it = mStreamInfo.erase(it);
7669 } else
7670 it++;
7671 }
7672 // Move raw streams into newList
7673 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
7674 it != mStreamInfo.end();) {
7675 newStreamInfo.push_back(*it);
7676 it = mStreamInfo.erase(it);
7677 }
7678
7679 mStreamInfo = newStreamInfo;
7680}
7681
7682/*===========================================================================
7683 * FUNCTION : extractJpegMetadata
7684 *
7685 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
7686 *              JPEG metadata is cached in the HAL and returned as part of the
7687 *              capture result when metadata is received from the camera daemon.
7688 *
7689 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
7690 * @request: capture request
7691 *
7692 *==========================================================================*/
7693void QCamera3HardwareInterface::extractJpegMetadata(
7694 CameraMetadata& jpegMetadata,
7695 const camera3_capture_request_t *request)
7696{
7697 CameraMetadata frame_settings;
7698 frame_settings = request->settings;
7699
7700 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
7701 jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
7702 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
7703 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
7704
7705 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
7706 jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
7707 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
7708 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
7709
7710 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
7711 jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
7712 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
7713 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
7714
7715 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
7716 jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
7717 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
7718 frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
7719
7720 if (frame_settings.exists(ANDROID_JPEG_QUALITY))
7721 jpegMetadata.update(ANDROID_JPEG_QUALITY,
7722 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
7723 frame_settings.find(ANDROID_JPEG_QUALITY).count);
7724
7725 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
7726 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
7727 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
7728 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
7729
7730 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
7731 int32_t thumbnail_size[2];
7732 thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
7733 thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
7734 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
7735 int32_t orientation =
7736 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007737 if ((!needJpegExifRotation()) && ((orientation == 90) || (orientation == 270))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07007738 //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
7739 int32_t temp;
7740 temp = thumbnail_size[0];
7741 thumbnail_size[0] = thumbnail_size[1];
7742 thumbnail_size[1] = temp;
7743 }
7744 }
7745 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
7746 thumbnail_size,
7747 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
7748 }
7749
7750}
7751
7752/*===========================================================================
7753 * FUNCTION : convertToRegions
7754 *
7755 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
7756 *
7757 * PARAMETERS :
7758 * @rect : cam_rect_t struct to convert
7759 * @region : int32_t destination array
7760 * @weight : if we are converting from cam_area_t, weight is valid
7761 * else weight = -1
7762 *
7763 *==========================================================================*/
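// For example, rect {left=100, top=200, width=50, height=60} with weight 1
// becomes region[] = {100, 200, 150, 260, 1}, i.e. (xmin, ymin, xmax, ymax, weight).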
7764void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
7765 int32_t *region, int weight)
7766{
7767 region[0] = rect.left;
7768 region[1] = rect.top;
7769 region[2] = rect.left + rect.width;
7770 region[3] = rect.top + rect.height;
7771 if (weight > -1) {
7772 region[4] = weight;
7773 }
7774}
7775
7776/*===========================================================================
7777 * FUNCTION : convertFromRegions
7778 *
7779 * DESCRIPTION: helper method to convert a metadata region array into cam_area_t
7780 *
7781 * PARAMETERS :
7782 *   @roi            : cam_area_t struct to be filled
7783 *   @frame_settings : capture request settings to read the region from
7784 *   @tag            : metadata tag whose data is the region as
7785 *                     [xmin, ymin, xmax, ymax, weight]
7786 *
7787 *==========================================================================*/
7788void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
Chien-Yu Chen92724a82017-01-06 11:50:30 -08007789 const CameraMetadata &frame_settings, uint32_t tag)
Thierry Strudel3d639192016-09-09 11:52:26 -07007790{
Thierry Strudel3d639192016-09-09 11:52:26 -07007791 int32_t x_min = frame_settings.find(tag).data.i32[0];
7792 int32_t y_min = frame_settings.find(tag).data.i32[1];
7793 int32_t x_max = frame_settings.find(tag).data.i32[2];
7794 int32_t y_max = frame_settings.find(tag).data.i32[3];
7795 roi.weight = frame_settings.find(tag).data.i32[4];
7796 roi.rect.left = x_min;
7797 roi.rect.top = y_min;
7798 roi.rect.width = x_max - x_min;
7799 roi.rect.height = y_max - y_min;
7800}
7801
7802/*===========================================================================
7803 * FUNCTION : resetIfNeededROI
7804 *
7805 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
7806 * crop region
7807 *
7808 * PARAMETERS :
7809 * @roi : cam_area_t struct to resize
7810 * @scalerCropRegion : cam_crop_region_t region to compare against
7811 *
7812 *
7813 *==========================================================================*/
7814bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
7815 const cam_crop_region_t* scalerCropRegion)
7816{
7817 int32_t roi_x_max = roi->rect.width + roi->rect.left;
7818 int32_t roi_y_max = roi->rect.height + roi->rect.top;
7819 int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
7820 int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
7821
7822    /* According to the spec, weight = 0 indicates the roi needs to be disabled.
7823     * Without this check, the calculations below that validate whether the roi
7824     * lies inside the scaler crop region would fail, leaving the roi unreset and
7825     * causing the algorithm to keep using a stale roi window.
7826     */
7827 if (roi->weight == 0) {
7828 return true;
7829 }
7830
7831 if ((roi_x_max < scalerCropRegion->left) ||
7832 // right edge of roi window is left of scalar crop's left edge
7833 (roi_y_max < scalerCropRegion->top) ||
7834 // bottom edge of roi window is above scalar crop's top edge
7835 (roi->rect.left > crop_x_max) ||
7836            // left edge of roi window is beyond (to the right of) scalar crop's right edge
7837 (roi->rect.top > crop_y_max)){
7838            // top edge of roi window is below scalar crop's bottom edge
7839 return false;
7840 }
7841 if (roi->rect.left < scalerCropRegion->left) {
7842 roi->rect.left = scalerCropRegion->left;
7843 }
7844 if (roi->rect.top < scalerCropRegion->top) {
7845 roi->rect.top = scalerCropRegion->top;
7846 }
7847 if (roi_x_max > crop_x_max) {
7848 roi_x_max = crop_x_max;
7849 }
7850 if (roi_y_max > crop_y_max) {
7851 roi_y_max = crop_y_max;
7852 }
7853 roi->rect.width = roi_x_max - roi->rect.left;
7854 roi->rect.height = roi_y_max - roi->rect.top;
7855 return true;
7856}
7857
7858/*===========================================================================
7859 * FUNCTION : convertLandmarks
7860 *
7861 * DESCRIPTION: helper method to extract the landmarks from face detection info
7862 *
7863 * PARAMETERS :
7864 * @landmark_data : input landmark data to be converted
7865 * @landmarks : int32_t destination array
7866 *
7867 *
7868 *==========================================================================*/
7869void QCamera3HardwareInterface::convertLandmarks(
7870 cam_face_landmarks_info_t landmark_data,
7871 int32_t *landmarks)
7872{
Thierry Strudel04e026f2016-10-10 11:27:36 -07007873 if (landmark_data.is_left_eye_valid) {
7874 landmarks[LEFT_EYE_X] = (int32_t)landmark_data.left_eye_center.x;
7875 landmarks[LEFT_EYE_Y] = (int32_t)landmark_data.left_eye_center.y;
7876 } else {
7877 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
7878 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
7879 }
7880
7881 if (landmark_data.is_right_eye_valid) {
7882 landmarks[RIGHT_EYE_X] = (int32_t)landmark_data.right_eye_center.x;
7883 landmarks[RIGHT_EYE_Y] = (int32_t)landmark_data.right_eye_center.y;
7884 } else {
7885 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
7886 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
7887 }
7888
7889 if (landmark_data.is_mouth_valid) {
7890 landmarks[MOUTH_X] = (int32_t)landmark_data.mouth_center.x;
7891 landmarks[MOUTH_Y] = (int32_t)landmark_data.mouth_center.y;
7892 } else {
7893 landmarks[MOUTH_X] = FACE_INVALID_POINT;
7894 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
7895 }
7896}
7897
7898/*===========================================================================
7899 * FUNCTION : setInvalidLandmarks
7900 *
7901 * DESCRIPTION: helper method to set invalid landmarks
7902 *
7903 * PARAMETERS :
7904 * @landmarks : int32_t destination array
7905 *
7906 *
7907 *==========================================================================*/
7908void QCamera3HardwareInterface::setInvalidLandmarks(
7909 int32_t *landmarks)
7910{
7911 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
7912 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
7913 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
7914 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
7915 landmarks[MOUTH_X] = FACE_INVALID_POINT;
7916 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
Thierry Strudel3d639192016-09-09 11:52:26 -07007917}
7918
7919#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007920
7921/*===========================================================================
7922 * FUNCTION : getCapabilities
7923 *
7924 * DESCRIPTION: query camera capability from back-end
7925 *
7926 * PARAMETERS :
7927 * @ops : mm-interface ops structure
7928 * @cam_handle : camera handle for which we need capability
7929 *
7930 * RETURN : ptr type of capability structure
7931 * capability for success
7932 * NULL for failure
7933 *==========================================================================*/
7934cam_capability_t *QCamera3HardwareInterface::getCapabilities(mm_camera_ops_t *ops,
7935 uint32_t cam_handle)
7936{
7937 int rc = NO_ERROR;
7938 QCamera3HeapMemory *capabilityHeap = NULL;
7939 cam_capability_t *cap_ptr = NULL;
7940
7941 if (ops == NULL) {
7942 LOGE("Invalid arguments");
7943 return NULL;
7944 }
7945
7946 capabilityHeap = new QCamera3HeapMemory(1);
7947 if (capabilityHeap == NULL) {
7948 LOGE("creation of capabilityHeap failed");
7949 return NULL;
7950 }
7951
7952 /* Allocate memory for capability buffer */
7953 rc = capabilityHeap->allocate(sizeof(cam_capability_t));
7954 if(rc != OK) {
7955        LOGE("No memory for capability");
7956 goto allocate_failed;
7957 }
7958
7959 /* Map memory for capability buffer */
7960 memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
7961
7962 rc = ops->map_buf(cam_handle,
7963 CAM_MAPPING_BUF_TYPE_CAPABILITY, capabilityHeap->getFd(0),
7964 sizeof(cam_capability_t), capabilityHeap->getPtr(0));
7965 if(rc < 0) {
7966 LOGE("failed to map capability buffer");
7967 rc = FAILED_TRANSACTION;
7968 goto map_failed;
7969 }
7970
7971 /* Query Capability */
7972 rc = ops->query_capability(cam_handle);
7973 if(rc < 0) {
7974 LOGE("failed to query capability");
7975 rc = FAILED_TRANSACTION;
7976 goto query_failed;
7977 }
7978
7979 cap_ptr = (cam_capability_t *)malloc(sizeof(cam_capability_t));
7980 if (cap_ptr == NULL) {
7981 LOGE("out of memory");
7982 rc = NO_MEMORY;
7983 goto query_failed;
7984 }
7985
7986 memset(cap_ptr, 0, sizeof(cam_capability_t));
7987 memcpy(cap_ptr, DATA_PTR(capabilityHeap, 0), sizeof(cam_capability_t));
7988
7989 int index;
7990 for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
7991 cam_analysis_info_t *p_analysis_info = &cap_ptr->analysis_info[index];
7992 p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
7993 p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
7994 }
7995
7996query_failed:
7997 ops->unmap_buf(cam_handle, CAM_MAPPING_BUF_TYPE_CAPABILITY);
7998map_failed:
7999 capabilityHeap->deallocate();
8000allocate_failed:
8001 delete capabilityHeap;
8002
8003 if (rc != NO_ERROR) {
8004 return NULL;
8005 } else {
8006 return cap_ptr;
8007 }
8008}
8009
Thierry Strudel3d639192016-09-09 11:52:26 -07008010/*===========================================================================
8011 * FUNCTION : initCapabilities
8012 *
8013 * DESCRIPTION: initialize camera capabilities in static data struct
8014 *
8015 * PARAMETERS :
8016 * @cameraId : camera Id
8017 *
8018 * RETURN : int32_t type of status
8019 * NO_ERROR -- success
8020 * none-zero failure code
8021 *==========================================================================*/
8022int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
8023{
8024 int rc = 0;
8025 mm_camera_vtbl_t *cameraHandle = NULL;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008026 uint32_t handle = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07008027
8028 rc = camera_open((uint8_t)cameraId, &cameraHandle);
8029 if (rc) {
8030 LOGE("camera_open failed. rc = %d", rc);
8031 goto open_failed;
8032 }
8033 if (!cameraHandle) {
8034 LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
8035 goto open_failed;
8036 }
8037
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008038 handle = get_main_camera_handle(cameraHandle->camera_handle);
8039 gCamCapability[cameraId] = getCapabilities(cameraHandle->ops, handle);
8040 if (gCamCapability[cameraId] == NULL) {
8041 rc = FAILED_TRANSACTION;
8042 goto failed_op;
Thierry Strudel3d639192016-09-09 11:52:26 -07008043 }
8044
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008045 gCamCapability[cameraId]->camera_index = cameraId;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008046 if (is_dual_camera_by_idx(cameraId)) {
8047 handle = get_aux_camera_handle(cameraHandle->camera_handle);
8048 gCamCapability[cameraId]->aux_cam_cap =
8049 getCapabilities(cameraHandle->ops, handle);
8050 if (gCamCapability[cameraId]->aux_cam_cap == NULL) {
8051 rc = FAILED_TRANSACTION;
8052 free(gCamCapability[cameraId]);
8053 goto failed_op;
8054 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08008055
8056 // Copy the main camera capability to main_cam_cap struct
8057 gCamCapability[cameraId]->main_cam_cap =
8058 (cam_capability_t *)malloc(sizeof(cam_capability_t));
8059 if (gCamCapability[cameraId]->main_cam_cap == NULL) {
8060 LOGE("out of memory");
8061 rc = NO_MEMORY;
8062 goto failed_op;
8063 }
8064 memcpy(gCamCapability[cameraId]->main_cam_cap, gCamCapability[cameraId],
8065 sizeof(cam_capability_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008066 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008067failed_op:
Thierry Strudel3d639192016-09-09 11:52:26 -07008068 cameraHandle->ops->close_camera(cameraHandle->camera_handle);
8069 cameraHandle = NULL;
8070open_failed:
8071 return rc;
8072}
8073
8074/*==========================================================================
8075 * FUNCTION : get3Aversion
8076 *
8077 * DESCRIPTION: get the Q3A S/W version
8078 *
8079 * PARAMETERS :
8080 * @sw_version: Reference of Q3A structure which will hold version info upon
8081 * return
8082 *
8083 * RETURN : None
8084 *
8085 *==========================================================================*/
8086void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
8087{
8088 if(gCamCapability[mCameraId])
8089 sw_version = gCamCapability[mCameraId]->q3a_version;
8090 else
8091 LOGE("Capability structure NULL!");
8092}
8093
8094
8095/*===========================================================================
8096 * FUNCTION : initParameters
8097 *
8098 * DESCRIPTION: initialize camera parameters
8099 *
8100 * PARAMETERS :
8101 *
8102 * RETURN : int32_t type of status
8103 * NO_ERROR -- success
8104 * none-zero failure code
8105 *==========================================================================*/
8106int QCamera3HardwareInterface::initParameters()
8107{
8108 int rc = 0;
8109
8110 //Allocate Set Param Buffer
8111 mParamHeap = new QCamera3HeapMemory(1);
8112 rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
8113 if(rc != OK) {
8114 rc = NO_MEMORY;
8115 LOGE("Failed to allocate SETPARM Heap memory");
8116 delete mParamHeap;
8117 mParamHeap = NULL;
8118 return rc;
8119 }
8120
8121 //Map memory for parameters buffer
8122 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
8123 CAM_MAPPING_BUF_TYPE_PARM_BUF,
8124 mParamHeap->getFd(0),
8125 sizeof(metadata_buffer_t),
8126 (metadata_buffer_t *) DATA_PTR(mParamHeap,0));
8127 if(rc < 0) {
8128 LOGE("failed to map SETPARM buffer");
8129 rc = FAILED_TRANSACTION;
8130 mParamHeap->deallocate();
8131 delete mParamHeap;
8132 mParamHeap = NULL;
8133 return rc;
8134 }
8135
8136 mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
8137
8138 mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
8139 return rc;
8140}
8141
8142/*===========================================================================
8143 * FUNCTION : deinitParameters
8144 *
8145 * DESCRIPTION: de-initialize camera parameters
8146 *
8147 * PARAMETERS :
8148 *
8149 * RETURN : NONE
8150 *==========================================================================*/
8151void QCamera3HardwareInterface::deinitParameters()
8152{
8153 mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
8154 CAM_MAPPING_BUF_TYPE_PARM_BUF);
8155
8156 mParamHeap->deallocate();
8157 delete mParamHeap;
8158 mParamHeap = NULL;
8159
8160 mParameters = NULL;
8161
8162 free(mPrevParameters);
8163 mPrevParameters = NULL;
8164}
8165
8166/*===========================================================================
8167 * FUNCTION : calcMaxJpegSize
8168 *
8169 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
8170 *
8171 * PARAMETERS :
8172 *
8173 * RETURN : max_jpeg_size
8174 *==========================================================================*/
8175size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
8176{
8177 size_t max_jpeg_size = 0;
8178 size_t temp_width, temp_height;
8179 size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
8180 MAX_SIZES_CNT);
8181 for (size_t i = 0; i < count; i++) {
8182 temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
8183 temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
8184 if (temp_width * temp_height > max_jpeg_size ) {
8185 max_jpeg_size = temp_width * temp_height;
8186 }
8187 }
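    // Size the worst-case JPEG buffer at 1.5 bytes per pixel of the largest
    // picture size, plus room for the trailing camera3_jpeg_blob_t header.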
8188 max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
8189 return max_jpeg_size;
8190}
8191
8192/*===========================================================================
8193 * FUNCTION : getMaxRawSize
8194 *
8195 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
8196 *
8197 * PARAMETERS :
8198 *
8199 * RETURN : Largest supported Raw Dimension
8200 *==========================================================================*/
8201cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
8202{
8203 int max_width = 0;
8204 cam_dimension_t maxRawSize;
8205
8206 memset(&maxRawSize, 0, sizeof(cam_dimension_t));
8207 for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
8208 if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
8209 max_width = gCamCapability[camera_id]->raw_dim[i].width;
8210 maxRawSize = gCamCapability[camera_id]->raw_dim[i];
8211 }
8212 }
8213 return maxRawSize;
8214}
8215
8216
8217/*===========================================================================
8218 * FUNCTION : calcMaxJpegDim
8219 *
8220 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
8221 *
8222 * PARAMETERS :
8223 *
8224 * RETURN : max_jpeg_dim
8225 *==========================================================================*/
8226cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
8227{
8228 cam_dimension_t max_jpeg_dim;
8229 cam_dimension_t curr_jpeg_dim;
8230 max_jpeg_dim.width = 0;
8231 max_jpeg_dim.height = 0;
8232 curr_jpeg_dim.width = 0;
8233 curr_jpeg_dim.height = 0;
8234 for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
8235 curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
8236 curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
8237 if (curr_jpeg_dim.width * curr_jpeg_dim.height >
8238 max_jpeg_dim.width * max_jpeg_dim.height ) {
8239 max_jpeg_dim.width = curr_jpeg_dim.width;
8240 max_jpeg_dim.height = curr_jpeg_dim.height;
8241 }
8242 }
8243 return max_jpeg_dim;
8244}
8245
8246/*===========================================================================
8247 * FUNCTION : addStreamConfig
8248 *
8249 * DESCRIPTION: adds the stream configuration to the array
8250 *
8251 * PARAMETERS :
8252 * @available_stream_configs : pointer to stream configuration array
8253 * @scalar_format : scalar format
8254 * @dim : configuration dimension
8255 * @config_type : input or output configuration type
8256 *
8257 * RETURN : NONE
8258 *==========================================================================*/
8259void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
8260 int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
8261{
8262 available_stream_configs.add(scalar_format);
8263 available_stream_configs.add(dim.width);
8264 available_stream_configs.add(dim.height);
8265 available_stream_configs.add(config_type);
8266}
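/* Each call above appends one flat (format, width, height, direction) tuple to the
 * android.scaler.availableStreamConfigurations payload. For example, a hypothetical
 * 1920x1080 YUV output stream would be encoded as
 * { HAL_PIXEL_FORMAT_YCbCr_420_888, 1920, 1080,
 *   ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT }. */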
8267
8268/*===========================================================================
 8269 * FUNCTION   : supportBurstCapture
8270 *
8271 * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
8272 *
8273 * PARAMETERS :
8274 * @cameraId : camera Id
8275 *
8276 * RETURN : true if camera supports BURST_CAPTURE
8277 * false otherwise
8278 *==========================================================================*/
8279bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
8280{
8281 const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
8282 const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
8283 const int32_t highResWidth = 3264;
8284 const int32_t highResHeight = 2448;
8285
8286 if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
8287 // Maximum resolution images cannot be captured at >= 10fps
8288 // -> not supporting BURST_CAPTURE
8289 return false;
8290 }
8291
8292 if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
8293 // Maximum resolution images can be captured at >= 20fps
8294 // --> supporting BURST_CAPTURE
8295 return true;
8296 }
8297
8298 // Find the smallest highRes resolution, or largest resolution if there is none
8299 size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
8300 MAX_SIZES_CNT);
8301 size_t highRes = 0;
8302 while ((highRes + 1 < totalCnt) &&
8303 (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
8304 gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
8305 highResWidth * highResHeight)) {
8306 highRes++;
8307 }
8308 if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
8309 return true;
8310 } else {
8311 return false;
8312 }
8313}
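/* The bounds above follow the framework's BURST_CAPTURE expectations: at least 10 fps at the
 * sensor's full resolution, and at least 20 fps at "high resolution" (roughly 8 MP, i.e.
 * 3264x2448). The loop assumes picture_sizes_tbl is sorted largest-first and locates the
 * smallest entry still counted as high resolution before checking its minimum duration. */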
8314
8315/*===========================================================================
8316 * FUNCTION : initStaticMetadata
8317 *
8318 * DESCRIPTION: initialize the static metadata
8319 *
8320 * PARAMETERS :
8321 * @cameraId : camera Id
8322 *
8323 * RETURN : int32_t type of status
8324 * 0 -- success
8325 * non-zero failure code
8326 *==========================================================================*/
8327int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
8328{
8329 int rc = 0;
8330 CameraMetadata staticInfo;
8331 size_t count = 0;
8332 bool limitedDevice = false;
8333 char prop[PROPERTY_VALUE_MAX];
8334 bool supportBurst = false;
8335
8336 supportBurst = supportBurstCapture(cameraId);
8337
 8338    /* If the sensor is a YUV sensor (no raw support), or if per-frame control is not
 8339     * guaranteed, or if the min fps at max resolution is less than 20 fps, it is
 8340     * advertised as a limited device */
8341 limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
8342 (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
8343 (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
8344 !supportBurst;
8345
8346 uint8_t supportedHwLvl = limitedDevice ?
8347 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
8348#ifndef USE_HAL_3_3
8349 // LEVEL_3 - This device will support level 3.
8350 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
8351#else
8352            ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
8353#endif
8354
8355 staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
8356 &supportedHwLvl, 1);
8357
8358 bool facingBack = false;
8359 if ((gCamCapability[cameraId]->position == CAM_POSITION_BACK) ||
8360 (gCamCapability[cameraId]->position == CAM_POSITION_BACK_AUX)) {
8361 facingBack = true;
8362 }
8363 /*HAL 3 only*/
8364 staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
8365 &gCamCapability[cameraId]->min_focus_distance, 1);
8366
8367 staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
8368 &gCamCapability[cameraId]->hyper_focal_distance, 1);
8369
8370 /*should be using focal lengths but sensor doesn't provide that info now*/
8371 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
8372 &gCamCapability[cameraId]->focal_length,
8373 1);
8374
8375 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
8376 gCamCapability[cameraId]->apertures,
8377 MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
8378
8379 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
8380 gCamCapability[cameraId]->filter_densities,
8381 MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
8382
8383
8384 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
8385 (uint8_t *)gCamCapability[cameraId]->optical_stab_modes,
8386 MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count));
8387
8388 int32_t lens_shading_map_size[] = {
8389 MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
8390 MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
8391 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
8392 lens_shading_map_size,
8393 sizeof(lens_shading_map_size)/sizeof(int32_t));
8394
8395 staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
8396 gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
8397
8398 staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
8399 gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
8400
8401 staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
8402 &gCamCapability[cameraId]->max_frame_duration, 1);
8403
8404 camera_metadata_rational baseGainFactor = {
8405 gCamCapability[cameraId]->base_gain_factor.numerator,
8406 gCamCapability[cameraId]->base_gain_factor.denominator};
8407 staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
8408 &baseGainFactor, 1);
8409
8410 staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
8411 (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
8412
8413 int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
8414 gCamCapability[cameraId]->pixel_array_size.height};
8415 staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
8416 pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
8417
8418 int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
8419 gCamCapability[cameraId]->active_array_size.top,
8420 gCamCapability[cameraId]->active_array_size.width,
8421 gCamCapability[cameraId]->active_array_size.height};
8422 staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
8423 active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
8424
8425 staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
8426 &gCamCapability[cameraId]->white_level, 1);
8427
8428    int32_t adjusted_bl_per_cfa[BLACK_LEVEL_PATTERN_CNT];
8429 adjustBlackLevelForCFA(gCamCapability[cameraId]->black_level_pattern, adjusted_bl_per_cfa,
8430 gCamCapability[cameraId]->color_arrangement);
8431    staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
8432            adjusted_bl_per_cfa, BLACK_LEVEL_PATTERN_CNT);
8433
8434#ifndef USE_HAL_3_3
8435 bool hasBlackRegions = false;
8436 if (gCamCapability[cameraId]->optical_black_region_count > MAX_OPTICAL_BLACK_REGIONS) {
8437 LOGW("black_region_count: %d is bounded to %d",
8438 gCamCapability[cameraId]->optical_black_region_count, MAX_OPTICAL_BLACK_REGIONS);
8439 gCamCapability[cameraId]->optical_black_region_count = MAX_OPTICAL_BLACK_REGIONS;
8440 }
8441 if (gCamCapability[cameraId]->optical_black_region_count != 0) {
8442 int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
8443 for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i++) {
8444 opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
8445 }
8446 staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
8447 opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
8448 hasBlackRegions = true;
8449 }
8450#endif
8451    staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
8452 &gCamCapability[cameraId]->flash_charge_duration, 1);
8453
8454 staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
8455 &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
8456
8457    uint8_t timestampSource = (gCamCapability[cameraId]->timestamp_calibrated ?
8458 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME :
8459 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN);
8460    staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
8461 &timestampSource, 1);
8462
8463    //update histogram vendor data
8464 staticInfo.update(QCAMERA3_HISTOGRAM_BUCKETS,
8465            &gCamCapability[cameraId]->histogram_size, 1);
8466
8467    staticInfo.update(QCAMERA3_HISTOGRAM_MAX_COUNT,
8468            &gCamCapability[cameraId]->max_histogram_count, 1);
8469
8470 int32_t sharpness_map_size[] = {
8471 gCamCapability[cameraId]->sharpness_map_size.width,
8472 gCamCapability[cameraId]->sharpness_map_size.height};
8473
8474 staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
8475 sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
8476
8477 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
8478 &gCamCapability[cameraId]->max_sharpness_map_value, 1);
8479
8480 int32_t scalar_formats[] = {
8481 ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
8482 ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
8483 ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
8484 ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
8485 HAL_PIXEL_FORMAT_RAW10,
8486 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
8487 size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(int32_t);
8488 staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
8489 scalar_formats,
8490 scalar_formats_count);
8491
8492 int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
8493 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
8494 makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
8495 count, MAX_SIZES_CNT, available_processed_sizes);
8496 staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
8497 available_processed_sizes, count * 2);
8498
8499 int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
8500 count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
8501 makeTable(gCamCapability[cameraId]->raw_dim,
8502 count, MAX_SIZES_CNT, available_raw_sizes);
8503 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
8504 available_raw_sizes, count * 2);
8505
8506 int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
8507 count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
8508 makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
8509 count, MAX_SIZES_CNT, available_fps_ranges);
8510 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
8511 available_fps_ranges, count * 2);
8512
8513 camera_metadata_rational exposureCompensationStep = {
8514 gCamCapability[cameraId]->exp_compensation_step.numerator,
8515 gCamCapability[cameraId]->exp_compensation_step.denominator};
8516 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
8517 &exposureCompensationStep, 1);
8518
8519 Vector<uint8_t> availableVstabModes;
8520 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
8521 char eis_prop[PROPERTY_VALUE_MAX];
8522    bool eisSupported = false;
8523    memset(eis_prop, 0, sizeof(eis_prop));
8524    property_get("persist.camera.eis.enable", eis_prop, "1");
8525    uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
8526    count = IS_TYPE_MAX;
8527 count = MIN(gCamCapability[cameraId]->supported_is_types_cnt, count);
8528 for (size_t i = 0; i < count; i++) {
8529 if ((gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
8530 (gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
8531 eisSupported = true;
8532 break;
8533 }
8534 }
8535 if (facingBack && eis_prop_set && eisSupported) {
8536        availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
8537 }
8538 staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
8539 availableVstabModes.array(), availableVstabModes.size());
8540
8541 /*HAL 1 and HAL 3 common*/
8542 uint32_t zoomSteps = gCamCapability[cameraId]->zoom_ratio_tbl_cnt;
8543 uint32_t maxZoomStep = gCamCapability[cameraId]->zoom_ratio_tbl[zoomSteps - 1];
8544 uint32_t minZoomStep = 100; //as per HAL1/API1 spec
 8545    float maxZoom = (float)maxZoomStep / (float)minZoomStep;
8546 staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
8547 &maxZoom, 1);
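    // Illustration: zoom_ratio_tbl entries are zoom factors scaled by 100 (the HAL1/API1
    // convention noted above), so a hypothetical table ending in 799 advertises a maximum
    // digital zoom of 7.99x.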
8548
8549 uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
8550 staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
8551
8552 int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
8553 if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
8554 max3aRegions[2] = 0; /* AF not supported */
8555 staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
8556 max3aRegions, 3);
8557
8558 /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
8559 memset(prop, 0, sizeof(prop));
8560 property_get("persist.camera.facedetect", prop, "1");
8561 uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
8562 LOGD("Support face detection mode: %d",
8563 supportedFaceDetectMode);
8564
8565 int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
8566    /* supported mode should be OFF if the max number of faces is 0 */
8567 if (maxFaces <= 0) {
8568 supportedFaceDetectMode = 0;
8569 }
8570    Vector<uint8_t> availableFaceDetectModes;
8571 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
8572 if (supportedFaceDetectMode == 1) {
8573 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
8574 } else if (supportedFaceDetectMode == 2) {
8575 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
8576 } else if (supportedFaceDetectMode == 3) {
8577 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
8578 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
8579 } else {
8580 maxFaces = 0;
8581 }
8582 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
8583 availableFaceDetectModes.array(),
8584 availableFaceDetectModes.size());
8585 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
8586 (int32_t *)&maxFaces, 1);
8587    uint8_t face_bsgc = gCamCapability[cameraId]->face_bsgc;
8588 staticInfo.update(QCAMERA3_STATS_BSGC_AVAILABLE,
8589 &face_bsgc, 1);
8590
8591 int32_t exposureCompensationRange[] = {
8592 gCamCapability[cameraId]->exposure_compensation_min,
8593 gCamCapability[cameraId]->exposure_compensation_max};
8594 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
8595 exposureCompensationRange,
8596 sizeof(exposureCompensationRange)/sizeof(int32_t));
8597
8598 uint8_t lensFacing = (facingBack) ?
8599 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
8600 staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
8601
8602 staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
8603 available_thumbnail_sizes,
8604 sizeof(available_thumbnail_sizes)/sizeof(int32_t));
8605
8606 /*all sizes will be clubbed into this tag*/
8607 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
8608 /*android.scaler.availableStreamConfigurations*/
8609 Vector<int32_t> available_stream_configs;
8610 cam_dimension_t active_array_dim;
8611 active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
8612 active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
8613 /* Add input/output stream configurations for each scalar formats*/
8614 for (size_t j = 0; j < scalar_formats_count; j++) {
8615 switch (scalar_formats[j]) {
8616 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
8617 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
8618 case HAL_PIXEL_FORMAT_RAW10:
8619 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
8620 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
8621 addStreamConfig(available_stream_configs, scalar_formats[j],
8622 gCamCapability[cameraId]->raw_dim[i],
8623 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
8624 }
8625 break;
8626 case HAL_PIXEL_FORMAT_BLOB:
8627 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
8628 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
8629 addStreamConfig(available_stream_configs, scalar_formats[j],
8630 gCamCapability[cameraId]->picture_sizes_tbl[i],
8631 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
8632 }
8633 break;
8634 case HAL_PIXEL_FORMAT_YCbCr_420_888:
8635 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
8636 default:
8637 cam_dimension_t largest_picture_size;
8638 memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
8639 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
8640 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
8641 addStreamConfig(available_stream_configs, scalar_formats[j],
8642 gCamCapability[cameraId]->picture_sizes_tbl[i],
8643 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
8644 /* Book keep largest */
8645 if (gCamCapability[cameraId]->picture_sizes_tbl[i].width
8646 >= largest_picture_size.width &&
8647 gCamCapability[cameraId]->picture_sizes_tbl[i].height
8648 >= largest_picture_size.height)
8649 largest_picture_size = gCamCapability[cameraId]->picture_sizes_tbl[i];
8650 }
 8651            /* For these two formats (IMPLEMENTATION_DEFINED and YCbCr_420_888) we also
                  * support input streams for reprocessing; advertise those */
8652 if (scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
8653 scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) {
8654 addStreamConfig(available_stream_configs, scalar_formats[j],
8655 largest_picture_size,
8656 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
8657 }
8658 break;
8659 }
8660 }
8661
8662 staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
8663 available_stream_configs.array(), available_stream_configs.size());
8664 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
8665 staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
8666
8667 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
8668 staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
8669
8670 /* android.scaler.availableMinFrameDurations */
8671 Vector<int64_t> available_min_durations;
8672 for (size_t j = 0; j < scalar_formats_count; j++) {
8673 switch (scalar_formats[j]) {
8674 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
8675 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
8676 case HAL_PIXEL_FORMAT_RAW10:
8677 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
8678 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
8679 available_min_durations.add(scalar_formats[j]);
8680 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
8681 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
8682 available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
8683 }
8684 break;
8685 default:
8686 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
8687 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
8688 available_min_durations.add(scalar_formats[j]);
8689 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
8690 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
8691 available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
8692 }
8693 break;
8694 }
8695 }
8696 staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
8697 available_min_durations.array(), available_min_durations.size());
8698
8699 Vector<int32_t> available_hfr_configs;
8700 for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
8701 int32_t fps = 0;
8702 switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
8703 case CAM_HFR_MODE_60FPS:
8704 fps = 60;
8705 break;
8706 case CAM_HFR_MODE_90FPS:
8707 fps = 90;
8708 break;
8709 case CAM_HFR_MODE_120FPS:
8710 fps = 120;
8711 break;
8712 case CAM_HFR_MODE_150FPS:
8713 fps = 150;
8714 break;
8715 case CAM_HFR_MODE_180FPS:
8716 fps = 180;
8717 break;
8718 case CAM_HFR_MODE_210FPS:
8719 fps = 210;
8720 break;
8721 case CAM_HFR_MODE_240FPS:
8722 fps = 240;
8723 break;
8724 case CAM_HFR_MODE_480FPS:
8725 fps = 480;
8726 break;
8727 case CAM_HFR_MODE_OFF:
8728 case CAM_HFR_MODE_MAX:
8729 default:
8730 break;
8731 }
8732
8733 /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
8734 if (fps >= MIN_FPS_FOR_BATCH_MODE) {
8735 /* For each HFR frame rate, need to advertise one variable fps range
8736 * and one fixed fps range per dimension. Eg: for 120 FPS, advertise [30, 120]
8737 * and [120, 120]. While camcorder preview alone is running [30, 120] is
8738 * set by the app. When video recording is started, [120, 120] is
8739 * set. This way sensor configuration does not change when recording
8740 * is started */
8741
8742 /* (width, height, fps_min, fps_max, batch_size_max) */
8743 for (size_t j = 0; j < gCamCapability[cameraId]->hfr_tbl[i].dim_cnt &&
8744 j < MAX_SIZES_CNT; j++) {
8745 available_hfr_configs.add(
8746 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
8747 available_hfr_configs.add(
8748 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
8749 available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
8750 available_hfr_configs.add(fps);
8751 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
8752
8753 /* (width, height, fps_min, fps_max, batch_size_max) */
8754 available_hfr_configs.add(
8755 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
8756 available_hfr_configs.add(
8757 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
8758 available_hfr_configs.add(fps);
8759 available_hfr_configs.add(fps);
8760 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
8761 }
8762 }
8763 }
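    /* Illustration of the entries produced above for a hypothetical 1920x1080 @ 240 fps HFR
     * table entry (with PREVIEW_FPS_FOR_HFR being 30):
     *   (1920, 1080,  30, 240, 8)   -- variable range used while only preview is running
     *   (1920, 1080, 240, 240, 8)   -- fixed range applied once recording starts
     * where the last element is the batch size, fps / PREVIEW_FPS_FOR_HFR. */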
8764 //Advertise HFR capability only if the property is set
8765 memset(prop, 0, sizeof(prop));
8766 property_get("persist.camera.hal3hfr.enable", prop, "1");
8767 uint8_t hfrEnable = (uint8_t)atoi(prop);
8768
8769 if(hfrEnable && available_hfr_configs.array()) {
8770 staticInfo.update(
8771 ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
8772 available_hfr_configs.array(), available_hfr_configs.size());
8773 }
8774
8775 int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
8776 staticInfo.update(ANDROID_JPEG_MAX_SIZE,
8777 &max_jpeg_size, 1);
8778
8779 uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
8780 size_t size = 0;
8781 count = CAM_EFFECT_MODE_MAX;
8782 count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
8783 for (size_t i = 0; i < count; i++) {
8784 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
8785 gCamCapability[cameraId]->supported_effects[i]);
8786 if (NAME_NOT_FOUND != val) {
8787 avail_effects[size] = (uint8_t)val;
8788 size++;
8789 }
8790 }
8791 staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
8792 avail_effects,
8793 size);
8794
8795 uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
8796 uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
8797 size_t supported_scene_modes_cnt = 0;
8798 count = CAM_SCENE_MODE_MAX;
8799 count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
8800 for (size_t i = 0; i < count; i++) {
8801 if (gCamCapability[cameraId]->supported_scene_modes[i] !=
8802 CAM_SCENE_MODE_OFF) {
8803 int val = lookupFwkName(SCENE_MODES_MAP,
8804 METADATA_MAP_SIZE(SCENE_MODES_MAP),
8805 gCamCapability[cameraId]->supported_scene_modes[i]);
8806
8807            if (NAME_NOT_FOUND != val) {
8808 avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
8809 supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
8810 supported_scene_modes_cnt++;
8811 }
8812 }
8813 }
8814 staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
8815 avail_scene_modes,
8816 supported_scene_modes_cnt);
8817
8818 uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
8819 makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
8820 supported_scene_modes_cnt,
8821 CAM_SCENE_MODE_MAX,
8822 scene_mode_overrides,
8823 supported_indexes,
8824 cameraId);
8825
8826 if (supported_scene_modes_cnt == 0) {
8827 supported_scene_modes_cnt = 1;
8828 avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
8829 }
8830
8831 staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
8832 scene_mode_overrides, supported_scene_modes_cnt * 3);
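    // ANDROID_CONTROL_SCENE_MODE_OVERRIDES carries one (aeMode, awbMode, afMode) triple per
    // advertised scene mode, hence the count of supported_scene_modes_cnt * 3 above.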
8833
8834 uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
8835 ANDROID_CONTROL_MODE_AUTO,
8836 ANDROID_CONTROL_MODE_USE_SCENE_MODE};
8837 staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
8838 available_control_modes,
8839 3);
8840
8841 uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
8842 size = 0;
8843 count = CAM_ANTIBANDING_MODE_MAX;
8844 count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
8845 for (size_t i = 0; i < count; i++) {
8846 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
8847 gCamCapability[cameraId]->supported_antibandings[i]);
8848 if (NAME_NOT_FOUND != val) {
8849 avail_antibanding_modes[size] = (uint8_t)val;
8850 size++;
8851 }
8852
8853 }
8854 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
8855 avail_antibanding_modes,
8856 size);
8857
8858 uint8_t avail_abberation_modes[] = {
8859 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
8860 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
8861 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
8862 count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
8863 count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
8864 if (0 == count) {
 8865        // If no aberration correction modes are available for a device, advertise only the OFF mode
8866 size = 1;
8867 } else {
 8868        // If count is not zero then at least one of FAST or HIGH_QUALITY is supported.
 8869        // So, advertise all 3 modes if at least one mode is supported, as per the
 8870        // new Android M requirement
8871 size = 3;
8872 }
8873 staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
8874 avail_abberation_modes,
8875 size);
8876
8877 uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
8878 size = 0;
8879 count = CAM_FOCUS_MODE_MAX;
8880 count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
8881 for (size_t i = 0; i < count; i++) {
8882 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
8883 gCamCapability[cameraId]->supported_focus_modes[i]);
8884 if (NAME_NOT_FOUND != val) {
8885 avail_af_modes[size] = (uint8_t)val;
8886 size++;
8887 }
8888 }
8889 staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
8890 avail_af_modes,
8891 size);
8892
8893 uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
8894 size = 0;
8895 count = CAM_WB_MODE_MAX;
8896 count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
8897 for (size_t i = 0; i < count; i++) {
8898 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
8899 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
8900 gCamCapability[cameraId]->supported_white_balances[i]);
8901 if (NAME_NOT_FOUND != val) {
8902 avail_awb_modes[size] = (uint8_t)val;
8903 size++;
8904 }
8905 }
8906 staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
8907 avail_awb_modes,
8908 size);
8909
8910 uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
8911 count = CAM_FLASH_FIRING_LEVEL_MAX;
8912 count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
8913 count);
8914 for (size_t i = 0; i < count; i++) {
8915 available_flash_levels[i] =
8916 gCamCapability[cameraId]->supported_firing_levels[i];
8917 }
8918 staticInfo.update(ANDROID_FLASH_FIRING_POWER,
8919 available_flash_levels, count);
8920
8921 uint8_t flashAvailable;
8922 if (gCamCapability[cameraId]->flash_available)
8923 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
8924 else
8925 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
8926 staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
8927 &flashAvailable, 1);
8928
8929 Vector<uint8_t> avail_ae_modes;
8930 count = CAM_AE_MODE_MAX;
8931 count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
8932 for (size_t i = 0; i < count; i++) {
8933 avail_ae_modes.add(gCamCapability[cameraId]->supported_ae_modes[i]);
8934 }
8935 if (flashAvailable) {
8936 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
8937 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
8938        avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE);
8939    }
8940 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
8941 avail_ae_modes.array(),
8942 avail_ae_modes.size());
8943
8944 int32_t sensitivity_range[2];
8945 sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
8946 sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
8947 staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
8948 sensitivity_range,
8949 sizeof(sensitivity_range) / sizeof(int32_t));
8950
8951 staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
8952 &gCamCapability[cameraId]->max_analog_sensitivity,
8953 1);
8954
8955 int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
8956 staticInfo.update(ANDROID_SENSOR_ORIENTATION,
8957 &sensor_orientation,
8958 1);
8959
8960 int32_t max_output_streams[] = {
8961 MAX_STALLING_STREAMS,
8962 MAX_PROCESSED_STREAMS,
8963 MAX_RAW_STREAMS};
8964 staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
8965 max_output_streams,
8966 sizeof(max_output_streams)/sizeof(max_output_streams[0]));
8967
8968 uint8_t avail_leds = 0;
8969 staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
8970 &avail_leds, 0);
8971
8972 uint8_t focus_dist_calibrated;
8973 int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
8974 gCamCapability[cameraId]->focus_dist_calibrated);
8975 if (NAME_NOT_FOUND != val) {
8976 focus_dist_calibrated = (uint8_t)val;
8977 staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
8978 &focus_dist_calibrated, 1);
8979 }
8980
8981 int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
8982 size = 0;
8983 count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
8984 MAX_TEST_PATTERN_CNT);
8985 for (size_t i = 0; i < count; i++) {
8986 int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
8987 gCamCapability[cameraId]->supported_test_pattern_modes[i]);
8988 if (NAME_NOT_FOUND != testpatternMode) {
8989 avail_testpattern_modes[size] = testpatternMode;
8990 size++;
8991 }
8992 }
8993 staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
8994 avail_testpattern_modes,
8995 size);
8996
8997 uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
8998 staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
8999 &max_pipeline_depth,
9000 1);
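    // For example, with hypothetical values MAX_INFLIGHT_REQUESTS = 6, EMPTY_PIPELINE_DELAY = 2
    // and FRAME_SKIP_DELAY = 0, the advertised pipeline depth would be 6 + 2 + 0 = 8 requests.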
9001
9002 int32_t partial_result_count = PARTIAL_RESULT_COUNT;
9003 staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
9004 &partial_result_count,
9005 1);
9006
9007 int32_t max_stall_duration = MAX_REPROCESS_STALL;
9008 staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
9009
9010 Vector<uint8_t> available_capabilities;
9011 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
9012 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
9013 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
9014 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
9015 if (supportBurst) {
9016 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
9017 }
9018 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
9019 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
9020 if (hfrEnable && available_hfr_configs.array()) {
9021 available_capabilities.add(
9022 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
9023 }
9024
9025 if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
9026 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
9027 }
9028 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
9029 available_capabilities.array(),
9030 available_capabilities.size());
9031
 9032    // aeLockAvailable is to be set to true if the capabilities include MANUAL_SENSOR or BURST_CAPTURE.
 9033    // The assumption is that all bayer cameras support MANUAL_SENSOR.
9034 uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
9035 ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
9036
9037 staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
9038 &aeLockAvailable, 1);
9039
 9040    // awbLockAvailable is to be set to true if the capabilities include MANUAL_POST_PROCESSING or
 9041    // BURST_CAPTURE. The assumption is that all bayer cameras support MANUAL_POST_PROCESSING.
9042 uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
9043 ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
9044
9045 staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
9046 &awbLockAvailable, 1);
9047
9048 int32_t max_input_streams = 1;
9049 staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
9050 &max_input_streams,
9051 1);
9052
9053 /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
9054 int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
9055 HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
9056 HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
9057 HAL_PIXEL_FORMAT_YCbCr_420_888};
9058 staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
9059 io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));
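    /* Decoded, the map above reads: IMPLEMENTATION_DEFINED -> { BLOB, YCbCr_420_888 } and
     * YCbCr_420_888 -> { BLOB, YCbCr_420_888 }, i.e. either reprocess input format can be
     * turned into a JPEG or a YUV output. */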
9060
9061 int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
9062 staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
9063 &max_latency,
9064 1);
9065
9066#ifndef USE_HAL_3_3
9067 int32_t isp_sensitivity_range[2];
9068 isp_sensitivity_range[0] =
9069 gCamCapability[cameraId]->isp_sensitivity_range.min_sensitivity;
9070 isp_sensitivity_range[1] =
9071 gCamCapability[cameraId]->isp_sensitivity_range.max_sensitivity;
9072 staticInfo.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
9073 isp_sensitivity_range,
9074 sizeof(isp_sensitivity_range) / sizeof(isp_sensitivity_range[0]));
9075#endif
9076
9077    uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
9078 ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
9079 staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
9080 available_hot_pixel_modes,
9081 sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));
9082
9083 uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
9084 ANDROID_SHADING_MODE_FAST,
9085 ANDROID_SHADING_MODE_HIGH_QUALITY};
9086 staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
9087 available_shading_modes,
9088 3);
9089
9090 uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
9091 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
9092 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
9093 available_lens_shading_map_modes,
9094 2);
9095
9096 uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
9097 ANDROID_EDGE_MODE_FAST,
9098 ANDROID_EDGE_MODE_HIGH_QUALITY,
9099 ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
9100 staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
9101 available_edge_modes,
9102 sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));
9103
9104 uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
9105 ANDROID_NOISE_REDUCTION_MODE_FAST,
9106 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
9107 ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
9108 ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
9109 staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
9110 available_noise_red_modes,
9111 sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));
9112
9113 uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
9114 ANDROID_TONEMAP_MODE_FAST,
9115 ANDROID_TONEMAP_MODE_HIGH_QUALITY};
9116 staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
9117 available_tonemap_modes,
9118 sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));
9119
9120 uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
9121 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
9122 available_hot_pixel_map_modes,
9123 sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
9124
9125 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
9126 gCamCapability[cameraId]->reference_illuminant1);
9127 if (NAME_NOT_FOUND != val) {
9128 uint8_t fwkReferenceIlluminant = (uint8_t)val;
9129 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
9130 }
9131
9132 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
9133 gCamCapability[cameraId]->reference_illuminant2);
9134 if (NAME_NOT_FOUND != val) {
9135 uint8_t fwkReferenceIlluminant = (uint8_t)val;
9136 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
9137 }
9138
9139 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
9140 (void *)gCamCapability[cameraId]->forward_matrix1,
9141 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
9142
9143 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
9144 (void *)gCamCapability[cameraId]->forward_matrix2,
9145 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
9146
9147 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
9148 (void *)gCamCapability[cameraId]->color_transform1,
9149 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
9150
9151 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
9152 (void *)gCamCapability[cameraId]->color_transform2,
9153 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
9154
9155 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
9156 (void *)gCamCapability[cameraId]->calibration_transform1,
9157 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
9158
9159 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
9160 (void *)gCamCapability[cameraId]->calibration_transform2,
9161 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
9162
9163 int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
9164 ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
9165 ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
9166 ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
9167 ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
9168 ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
9169 ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
9170 ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
9171 ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
9172 ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
9173 ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
9174 ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
9175 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
9176 ANDROID_JPEG_GPS_COORDINATES,
9177 ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
9178 ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
9179 ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
9180 ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
9181 ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
9182 ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
9183 ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
9184 ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
9185 ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
9186 ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
9187#ifndef USE_HAL_3_3
9188 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
9189#endif
9190        ANDROID_STATISTICS_FACE_DETECT_MODE,
9191        ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
9192        ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
9193 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
9194        ANDROID_BLACK_LEVEL_LOCK, NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
9195        /* DevCamDebug metadata request_keys_basic */
9196 DEVCAMDEBUG_META_ENABLE,
9197 /* DevCamDebug metadata end */
9198 };
9199
9200 size_t request_keys_cnt =
9201 sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
9202 Vector<int32_t> available_request_keys;
9203 available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
9204 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
9205 available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
9206 }
9207
9208 staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
9209 available_request_keys.array(), available_request_keys.size());
9210
9211 int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
9212 ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
9213 ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
9214 ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
9215 ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
9216 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
9217 ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
9218 ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
9219 ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
9220 ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
9221 ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
9222 ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
9223 ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
9224 ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
9225 ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
9226 ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
9227 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
9228        ANDROID_STATISTICS_FACE_DETECT_MODE,
9229        ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
9230 ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
9231 ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
9232        ANDROID_STATISTICS_FACE_SCORES,
9233#ifndef USE_HAL_3_3
9234 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
9235#endif
9236        NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
9237        NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE,
9238        // DevCamDebug metadata result_keys_basic
9239 DEVCAMDEBUG_META_ENABLE,
9240 // DevCamDebug metadata result_keys AF
9241 DEVCAMDEBUG_AF_LENS_POSITION,
9242 DEVCAMDEBUG_AF_TOF_CONFIDENCE,
9243 DEVCAMDEBUG_AF_TOF_DISTANCE,
9244 DEVCAMDEBUG_AF_LUMA,
9245 DEVCAMDEBUG_AF_HAF_STATE,
9246 DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
9247 DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
9248 DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
9249 DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
9250 DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
9251 DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
9252 DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
9253 DEVCAMDEBUG_AF_MONITOR_REFOCUS,
9254 DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
9255 DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
9256 DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
9257 DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
9258 DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
9259 DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
9260 DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
9261 DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
9262 DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
9263 DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
9264 DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
9265 DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
9266 DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
9267 DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
9268 // DevCamDebug metadata result_keys AEC
9269 DEVCAMDEBUG_AEC_TARGET_LUMA,
9270 DEVCAMDEBUG_AEC_COMP_LUMA,
9271 DEVCAMDEBUG_AEC_AVG_LUMA,
9272 DEVCAMDEBUG_AEC_CUR_LUMA,
9273 DEVCAMDEBUG_AEC_LINECOUNT,
9274 DEVCAMDEBUG_AEC_REAL_GAIN,
9275 DEVCAMDEBUG_AEC_EXP_INDEX,
9276 DEVCAMDEBUG_AEC_LUX_IDX,
9277 // DevCamDebug metadata result_keys AWB
9278 DEVCAMDEBUG_AWB_R_GAIN,
9279 DEVCAMDEBUG_AWB_G_GAIN,
9280 DEVCAMDEBUG_AWB_B_GAIN,
9281 DEVCAMDEBUG_AWB_CCT,
9282 DEVCAMDEBUG_AWB_DECISION,
9283 /* DevCamDebug metadata end */
9284    };
9285
9286    size_t result_keys_cnt =
9287 sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
9288
9289 Vector<int32_t> available_result_keys;
9290 available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
9291 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
9292 available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
9293 }
9294 if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
9295 available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
9296 available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
9297 }
9298 if (supportedFaceDetectMode == 1) {
9299 available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
9300 available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
9301 } else if ((supportedFaceDetectMode == 2) ||
9302 (supportedFaceDetectMode == 3)) {
9303 available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
9304 available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
9305 }
9306#ifndef USE_HAL_3_3
9307 if (hasBlackRegions) {
9308 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
9309 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL);
9310 }
9311#endif
9312    staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
9313 available_result_keys.array(), available_result_keys.size());
9314
9315    int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
9316        ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
9317 ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
9318 ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
9319 ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
9320 ANDROID_SCALER_CROPPING_TYPE,
9321 ANDROID_SYNC_MAX_LATENCY,
9322 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
9323 ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
9324 ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
9325 ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
9326 ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
9327 ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
9328 ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
9329 ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
9330 ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
9331 ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
9332 ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
9333 ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
9334 ANDROID_LENS_FACING,
9335 ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
9336 ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
9337 ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
9338 ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
9339 ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
9340 ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
9341 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
9342 /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
9343 ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
9344 ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
9345 ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
9346 ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
9347 ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
9348 ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
9349 ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
9350 ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
9351 ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
9352 ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
9353 ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
9354 ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
9355        ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
9356        ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
9357 ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
9358 ANDROID_EDGE_AVAILABLE_EDGE_MODES,
9359 ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
9360 ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
9361 ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
9362 ANDROID_TONEMAP_MAX_CURVE_POINTS,
9363 ANDROID_CONTROL_AVAILABLE_MODES,
9364 ANDROID_CONTROL_AE_LOCK_AVAILABLE,
9365 ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
9366 ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
9367 ANDROID_SHADING_AVAILABLE_MODES,
9368        ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
9369#ifndef USE_HAL_3_3
9370 ANDROID_SENSOR_OPAQUE_RAW_SIZE,
9371 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
9372#endif
9373 };
9374
9375 Vector<int32_t> available_characteristics_keys;
9376 available_characteristics_keys.appendArray(characteristics_keys_basic,
9377 sizeof(characteristics_keys_basic)/sizeof(int32_t));
9378#ifndef USE_HAL_3_3
9379 if (hasBlackRegions) {
9380 available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
9381 }
9382#endif
9383    staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
9384            available_characteristics_keys.array(),
9385 available_characteristics_keys.size());
9386
9387 /*available stall durations depend on the hw + sw and will be different for different devices */
9388 /*have to add for raw after implementation*/
9389 int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
9390 size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
9391
9392 Vector<int64_t> available_stall_durations;
9393 for (uint32_t j = 0; j < stall_formats_count; j++) {
9394 if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
9395 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
9396 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9397 available_stall_durations.add(stall_formats[j]);
9398 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
9399 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
9400 available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
9401 }
9402 } else {
9403 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
9404 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9405 available_stall_durations.add(stall_formats[j]);
9406 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
9407 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
9408 available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
9409 }
9410 }
9411 }
9412 staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
9413 available_stall_durations.array(),
9414 available_stall_durations.size());
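    /* Each stall entry above is a (format, width, height, stall_duration_ns) tuple; e.g. a
     * hypothetical 4000x3000 BLOB size with a 300 ms JPEG stall would be appended as
     * (HAL_PIXEL_FORMAT_BLOB, 4000, 3000, 300000000). */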
9415
9416 //QCAMERA3_OPAQUE_RAW
9417 uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
9418 cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
9419 switch (gCamCapability[cameraId]->opaque_raw_fmt) {
9420 case LEGACY_RAW:
9421 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
9422 fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
9423 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
9424 fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
9425 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
9426 fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
9427 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
9428 break;
9429 case MIPI_RAW:
9430 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
9431 fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
9432 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
9433 fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
9434 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
9435 fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
9436 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
9437 break;
9438 default:
9439 LOGE("unknown opaque_raw_format %d",
9440 gCamCapability[cameraId]->opaque_raw_fmt);
9441 break;
9442 }
9443 staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
9444
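    // QCAMERA3_OPAQUE_RAW_STRIDES is published as (width, height, stride)
    // triples, one per supported raw dimension.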
9445 Vector<int32_t> strides;
9446 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9447 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9448 cam_stream_buf_plane_info_t buf_planes;
9449 strides.add(gCamCapability[cameraId]->raw_dim[i].width);
9450 strides.add(gCamCapability[cameraId]->raw_dim[i].height);
9451 mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
9452 &gCamCapability[cameraId]->padding_info, &buf_planes);
9453 strides.add(buf_planes.plane_info.mp[0].stride);
9454 }
9455 staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
9456 strides.size());
9457
Mansoor Aftab58465fa2017-01-26 15:02:44 -08009458 //TBD: remove the following line once backend advertises zzHDR in feature mask
9459 gCamCapability[cameraId]->qcom_supported_feature_mask |= CAM_QCOM_FEATURE_ZIGZAG_HDR;
Thierry Strudel04e026f2016-10-10 11:27:36 -07009460 //Video HDR default
9461 if ((gCamCapability[cameraId]->qcom_supported_feature_mask) &
9462 (CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR |
Mansoor Aftab58465fa2017-01-26 15:02:44 -08009463 CAM_QCOM_FEATURE_ZIGZAG_HDR | CAM_QCOM_FEATURE_SENSOR_HDR)) {
Thierry Strudel04e026f2016-10-10 11:27:36 -07009464 int32_t vhdr_mode[] = {
9465 QCAMERA3_VIDEO_HDR_MODE_OFF,
9466 QCAMERA3_VIDEO_HDR_MODE_ON};
9467
9468 size_t vhdr_mode_count = sizeof(vhdr_mode) / sizeof(int32_t);
9469 staticInfo.update(QCAMERA3_AVAILABLE_VIDEO_HDR_MODES,
9470 vhdr_mode, vhdr_mode_count);
9471 }
9472
Thierry Strudel3d639192016-09-09 11:52:26 -07009473 staticInfo.update(QCAMERA3_DUALCAM_CALIB_META_DATA_BLOB,
9474 (const uint8_t*)&gCamCapability[cameraId]->related_cam_calibration,
9475 sizeof(gCamCapability[cameraId]->related_cam_calibration));
9476
9477 uint8_t isMonoOnly =
9478 (gCamCapability[cameraId]->color_arrangement == CAM_FILTER_ARRANGEMENT_Y);
9479 staticInfo.update(QCAMERA3_SENSOR_IS_MONO_ONLY,
9480 &isMonoOnly, 1);
9481
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009482#ifndef USE_HAL_3_3
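    // ANDROID_SENSOR_OPAQUE_RAW_SIZE is published as (width, height,
    // frame length in bytes) triples derived from the raw plane layout.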
9483 Vector<int32_t> opaque_size;
9484 for (size_t j = 0; j < scalar_formats_count; j++) {
9485 if (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE) {
9486 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9487 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9488 cam_stream_buf_plane_info_t buf_planes;
9489
9490 rc = mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
9491 &gCamCapability[cameraId]->padding_info, &buf_planes);
9492
9493 if (rc == 0) {
9494 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].width);
9495 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].height);
9496 opaque_size.add(buf_planes.plane_info.frame_len);
9497                } else {
9498 LOGE("raw frame calculation failed!");
9499 }
9500 }
9501 }
9502 }
9503
9504 if ((opaque_size.size() > 0) &&
9505 (opaque_size.size() % PER_CONFIGURATION_SIZE_3 == 0))
9506 staticInfo.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, opaque_size.array(), opaque_size.size());
9507 else
9508 LOGW("Warning: ANDROID_SENSOR_OPAQUE_RAW_SIZE is using rough estimation(2 bytes/pixel)");
9509#endif
9510
Thierry Strudel04e026f2016-10-10 11:27:36 -07009511 if (gCamCapability[cameraId]->supported_ir_mode_cnt > 0) {
9512 int32_t avail_ir_modes[CAM_IR_MODE_MAX];
9513 size = 0;
9514 count = CAM_IR_MODE_MAX;
9515 count = MIN(gCamCapability[cameraId]->supported_ir_mode_cnt, count);
9516 for (size_t i = 0; i < count; i++) {
9517 int val = lookupFwkName(IR_MODES_MAP, METADATA_MAP_SIZE(IR_MODES_MAP),
9518 gCamCapability[cameraId]->supported_ir_modes[i]);
9519 if (NAME_NOT_FOUND != val) {
9520 avail_ir_modes[size] = (int32_t)val;
9521 size++;
9522 }
9523 }
9524 staticInfo.update(QCAMERA3_IR_AVAILABLE_MODES,
9525 avail_ir_modes, size);
9526 }
9527
Thierry Strudel295a0ca2016-11-03 18:38:47 -07009528 if (gCamCapability[cameraId]->supported_instant_aec_modes_cnt > 0) {
9529 int32_t available_instant_aec_modes[CAM_AEC_CONVERGENCE_MAX];
9530 size = 0;
9531 count = CAM_AEC_CONVERGENCE_MAX;
9532 count = MIN(gCamCapability[cameraId]->supported_instant_aec_modes_cnt, count);
9533 for (size_t i = 0; i < count; i++) {
9534 int val = lookupFwkName(INSTANT_AEC_MODES_MAP, METADATA_MAP_SIZE(INSTANT_AEC_MODES_MAP),
9535 gCamCapability[cameraId]->supported_instant_aec_modes[i]);
9536 if (NAME_NOT_FOUND != val) {
9537 available_instant_aec_modes[size] = (int32_t)val;
9538 size++;
9539 }
9540 }
9541 staticInfo.update(QCAMERA3_INSTANT_AEC_AVAILABLE_MODES,
9542 available_instant_aec_modes, size);
9543 }
9544
Thierry Strudel54dc9782017-02-15 12:12:10 -08009545 int32_t sharpness_range[] = {
9546 gCamCapability[cameraId]->sharpness_ctrl.min_value,
9547 gCamCapability[cameraId]->sharpness_ctrl.max_value};
9548 staticInfo.update(QCAMERA3_SHARPNESS_RANGE, sharpness_range, 2);
9549
9550 if (gCamCapability[cameraId]->supported_binning_correction_mode_cnt > 0) {
9551 int32_t avail_binning_modes[CAM_BINNING_CORRECTION_MODE_MAX];
9552 size = 0;
9553 count = CAM_BINNING_CORRECTION_MODE_MAX;
9554 count = MIN(gCamCapability[cameraId]->supported_binning_correction_mode_cnt, count);
9555 for (size_t i = 0; i < count; i++) {
9556 int val = lookupFwkName(BINNING_CORRECTION_MODES_MAP,
9557 METADATA_MAP_SIZE(BINNING_CORRECTION_MODES_MAP),
9558 gCamCapability[cameraId]->supported_binning_modes[i]);
9559 if (NAME_NOT_FOUND != val) {
9560 avail_binning_modes[size] = (int32_t)val;
9561 size++;
9562 }
9563 }
9564 staticInfo.update(QCAMERA3_AVAILABLE_BINNING_CORRECTION_MODES,
9565 avail_binning_modes, size);
9566 }
9567
9568 if (gCamCapability[cameraId]->supported_aec_modes_cnt > 0) {
9569 int32_t available_aec_modes[CAM_AEC_MODE_MAX];
9570 size = 0;
9571 count = MIN(gCamCapability[cameraId]->supported_aec_modes_cnt, CAM_AEC_MODE_MAX);
9572 for (size_t i = 0; i < count; i++) {
9573 int32_t val = lookupFwkName(AEC_MODES_MAP, METADATA_MAP_SIZE(AEC_MODES_MAP),
9574 gCamCapability[cameraId]->supported_aec_modes[i]);
9575 if (NAME_NOT_FOUND != val)
9576 available_aec_modes[size++] = val;
9577 }
9578 staticInfo.update(QCAMERA3_EXPOSURE_METER_AVAILABLE_MODES,
9579 available_aec_modes, size);
9580 }
9581
9582 if (gCamCapability[cameraId]->supported_iso_modes_cnt > 0) {
9583 int32_t available_iso_modes[CAM_ISO_MODE_MAX];
9584 size = 0;
9585 count = MIN(gCamCapability[cameraId]->supported_iso_modes_cnt, CAM_ISO_MODE_MAX);
9586 for (size_t i = 0; i < count; i++) {
9587 int32_t val = lookupFwkName(ISO_MODES_MAP, METADATA_MAP_SIZE(ISO_MODES_MAP),
9588 gCamCapability[cameraId]->supported_iso_modes[i]);
9589 if (NAME_NOT_FOUND != val)
9590 available_iso_modes[size++] = val;
9591 }
9592 staticInfo.update(QCAMERA3_ISO_AVAILABLE_MODES,
9593 available_iso_modes, size);
9594 }
9595
    // Bound the copy by EXPOSURE_TIME_RANGE_CNT; the "count" computed for the
    // iso-mode list above may exceed the size of this array.
9596    int64_t available_exp_time_range[EXPOSURE_TIME_RANGE_CNT];
9597    for (size_t i = 0; i < EXPOSURE_TIME_RANGE_CNT; i++)
9598        available_exp_time_range[i] = gCamCapability[cameraId]->exposure_time_range[i];
9599 staticInfo.update(QCAMERA3_EXP_TIME_RANGE,
9600 available_exp_time_range, EXPOSURE_TIME_RANGE_CNT);
9601
9602 int32_t available_saturation_range[4];
9603 available_saturation_range[0] = gCamCapability[cameraId]->saturation_ctrl.min_value;
9604 available_saturation_range[1] = gCamCapability[cameraId]->saturation_ctrl.max_value;
9605 available_saturation_range[2] = gCamCapability[cameraId]->saturation_ctrl.def_value;
9606 available_saturation_range[3] = gCamCapability[cameraId]->saturation_ctrl.step;
9607 staticInfo.update(QCAMERA3_SATURATION_RANGE,
9608 available_saturation_range, 4);
9609
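    // Advertise the discrete values (0/1) and the confidence range [0.0, 1.0]
    // reported by the is-HDR-scene statistic.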
9610 uint8_t is_hdr_values[2];
9611 is_hdr_values[0] = 0;
9612 is_hdr_values[1] = 1;
9613 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_VALUES,
9614 is_hdr_values, 2);
9615
9616 float is_hdr_confidence_range[2];
9617 is_hdr_confidence_range[0] = 0.0;
9618 is_hdr_confidence_range[1] = 1.0;
9619 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE_RANGE,
9620 is_hdr_confidence_range, 2);
9621
Thierry Strudel3d639192016-09-09 11:52:26 -07009622 gStaticMetadata[cameraId] = staticInfo.release();
9623 return rc;
9624}
9625
9626/*===========================================================================
9627 * FUNCTION : makeTable
9628 *
9629 * DESCRIPTION: make a table of sizes
9630 *
9631 * PARAMETERS :
9632 *   @dimTable  : dimension table to copy from
9633 *   @size      : number of valid entries in dimTable
 *   @max_size  : maximum number of (width, height) pairs that fit in sizeTable
 *   @sizeTable : output array filled with flattened (width, height) pairs
9634 *==========================================================================*/
9635void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
9636 size_t max_size, int32_t *sizeTable)
9637{
9638 size_t j = 0;
9639 if (size > max_size) {
9640 size = max_size;
9641 }
9642 for (size_t i = 0; i < size; i++) {
9643 sizeTable[j] = dimTable[i].width;
9644 sizeTable[j+1] = dimTable[i].height;
9645 j+=2;
9646 }
9647}
9648
9649/*===========================================================================
9650 * FUNCTION : makeFPSTable
9651 *
9652 * DESCRIPTION: make a table of fps ranges
9653 *
9654 * PARAMETERS :
9655 *   @fpsTable       : fps range table to copy from
 *   @size           : number of valid entries in fpsTable
 *   @max_size       : maximum number of ranges that fit in fpsRangesTable
 *   @fpsRangesTable : output array of flattened (min_fps, max_fps) pairs
9656 *==========================================================================*/
9657void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
9658 size_t max_size, int32_t *fpsRangesTable)
9659{
9660 size_t j = 0;
9661 if (size > max_size) {
9662 size = max_size;
9663 }
9664 for (size_t i = 0; i < size; i++) {
9665 fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
9666 fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
9667 j+=2;
9668 }
9669}
9670
9671/*===========================================================================
9672 * FUNCTION : makeOverridesList
9673 *
9674 * DESCRIPTION: make a list of scene mode overrides
9675 *
9676 * PARAMETERS :
9677 *   @overridesTable    : scene mode override table provided by the daemon
9678 *   @size              : number of valid entries in overridesTable
 *   @max_size          : maximum number of overrides to emit
 *   @overridesList     : output list of (ae, awb, af) override triples
 *   @supported_indexes : indexes of the scene modes supported by the framework
 *   @camera_id         : camera Id
9679 *==========================================================================*/
9680void QCamera3HardwareInterface::makeOverridesList(
9681 cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
9682 uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
9683{
9684 /*daemon will give a list of overrides for all scene modes.
9685 However we should send the fwk only the overrides for the scene modes
9686 supported by the framework*/
9687 size_t j = 0;
9688 if (size > max_size) {
9689 size = max_size;
9690 }
9691 size_t focus_count = CAM_FOCUS_MODE_MAX;
9692 focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
9693 focus_count);
9694 for (size_t i = 0; i < size; i++) {
9695 bool supt = false;
9696 size_t index = supported_indexes[i];
9697 overridesList[j] = gCamCapability[camera_id]->flash_available ?
9698 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
9699 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
9700 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
9701 overridesTable[index].awb_mode);
9702 if (NAME_NOT_FOUND != val) {
9703 overridesList[j+1] = (uint8_t)val;
9704 }
9705 uint8_t focus_override = overridesTable[index].af_mode;
9706 for (size_t k = 0; k < focus_count; k++) {
9707 if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
9708 supt = true;
9709 break;
9710 }
9711 }
9712 if (supt) {
9713 val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
9714 focus_override);
9715 if (NAME_NOT_FOUND != val) {
9716 overridesList[j+2] = (uint8_t)val;
9717 }
9718 } else {
9719 overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
9720 }
9721 j+=3;
9722 }
9723}
9724
9725/*===========================================================================
9726 * FUNCTION : filterJpegSizes
9727 *
9728 * DESCRIPTION: Returns the supported jpeg sizes based on the max dimension that
9729 * could be downscaled to
9730 *
9731 * PARAMETERS :
9732 *   @jpegSizes         : output array of flattened (width, height) pairs
 *   @processedSizes    : input array of flattened processed (width, height) pairs
 *   @processedSizesCnt : number of entries in processedSizes
 *   @maxCount          : maximum number of entries that fit in jpegSizes
 *   @active_array_size : sensor active array dimensions
 *   @downscale_factor  : downscale factor applied to the active array size
9733 * RETURN : length of jpegSizes array
9734 *==========================================================================*/
9735
9736size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
9737 size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
9738 uint8_t downscale_factor)
9739{
9740 if (0 == downscale_factor) {
9741 downscale_factor = 1;
9742 }
9743
9744 int32_t min_width = active_array_size.width / downscale_factor;
9745 int32_t min_height = active_array_size.height / downscale_factor;
9746 size_t jpegSizesCnt = 0;
9747 if (processedSizesCnt > maxCount) {
9748 processedSizesCnt = maxCount;
9749 }
9750 for (size_t i = 0; i < processedSizesCnt; i+=2) {
9751 if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
9752 jpegSizes[jpegSizesCnt] = processedSizes[i];
9753 jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
9754 jpegSizesCnt += 2;
9755 }
9756 }
9757 return jpegSizesCnt;
9758}
9759
9760/*===========================================================================
9761 * FUNCTION : computeNoiseModelEntryS
9762 *
9763 * DESCRIPTION: function to map a given sensitivity to the S noise
9764 * model parameters in the DNG noise model.
9765 *
9766 * PARAMETERS : sens : the sensor sensitivity
9767 *
9768 * RETURN : S (sensor amplification) noise
9769 *
9770 *==========================================================================*/
9771double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
9772 double s = gCamCapability[mCameraId]->gradient_S * sens +
9773 gCamCapability[mCameraId]->offset_S;
9774 return ((s < 0.0) ? 0.0 : s);
9775}
9776
9777/*===========================================================================
9778 * FUNCTION : computeNoiseModelEntryO
9779 *
9780 * DESCRIPTION: function to map a given sensitivity to the O noise
9781 * model parameters in the DNG noise model.
9782 *
9783 * PARAMETERS : sens : the sensor sensitivity
9784 *
9785 * RETURN : O (sensor readout) noise
9786 *
9787 *==========================================================================*/
9788double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
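    // Gain beyond the maximum analog sensitivity is applied digitally;
    // the readout noise O scales with the square of that digital gain.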
9789 int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
9790 double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
9791 1.0 : (1.0 * sens / max_analog_sens);
9792 double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
9793 gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
9794 return ((o < 0.0) ? 0.0 : o);
9795}
9796
9797/*===========================================================================
9798 * FUNCTION : getSensorSensitivity
9799 *
9800 * DESCRIPTION: convert iso_mode to an integer value
9801 *
9802 * PARAMETERS : iso_mode : the iso_mode supported by sensor
9803 *
9804 * RETURN : sensitivity supported by sensor
9805 *
9806 *==========================================================================*/
9807int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
9808{
9809 int32_t sensitivity;
9810
9811 switch (iso_mode) {
9812 case CAM_ISO_MODE_100:
9813 sensitivity = 100;
9814 break;
9815 case CAM_ISO_MODE_200:
9816 sensitivity = 200;
9817 break;
9818 case CAM_ISO_MODE_400:
9819 sensitivity = 400;
9820 break;
9821 case CAM_ISO_MODE_800:
9822 sensitivity = 800;
9823 break;
9824 case CAM_ISO_MODE_1600:
9825 sensitivity = 1600;
9826 break;
9827 default:
9828 sensitivity = -1;
9829 break;
9830 }
9831 return sensitivity;
9832}
9833
9834/*===========================================================================
9835 * FUNCTION : getCamInfo
9836 *
9837 * DESCRIPTION: query camera capabilities
9838 *
9839 * PARAMETERS :
9840 * @cameraId : camera Id
9841 * @info : camera info struct to be filled in with camera capabilities
9842 *
9843 * RETURN : int type of status
9844 * NO_ERROR -- success
9845 * none-zero failure code
9846 *==========================================================================*/
9847int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
9848 struct camera_info *info)
9849{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08009850 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_GET_CAM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -07009851 int rc = 0;
9852
9853 pthread_mutex_lock(&gCamLock);
9854 if (NULL == gCamCapability[cameraId]) {
9855 rc = initCapabilities(cameraId);
9856 if (rc < 0) {
9857 pthread_mutex_unlock(&gCamLock);
9858 return rc;
9859 }
9860 }
9861
9862 if (NULL == gStaticMetadata[cameraId]) {
9863 rc = initStaticMetadata(cameraId);
9864 if (rc < 0) {
9865 pthread_mutex_unlock(&gCamLock);
9866 return rc;
9867 }
9868 }
9869
9870 switch(gCamCapability[cameraId]->position) {
9871 case CAM_POSITION_BACK:
9872 case CAM_POSITION_BACK_AUX:
9873 info->facing = CAMERA_FACING_BACK;
9874 break;
9875
9876 case CAM_POSITION_FRONT:
9877 case CAM_POSITION_FRONT_AUX:
9878 info->facing = CAMERA_FACING_FRONT;
9879 break;
9880
9881 default:
9882 LOGE("Unknown position type %d for camera id:%d",
9883 gCamCapability[cameraId]->position, cameraId);
9884 rc = -1;
9885 break;
9886 }
9887
9888
9889 info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009890#ifndef USE_HAL_3_3
9891 info->device_version = CAMERA_DEVICE_API_VERSION_3_4;
9892#else
Thierry Strudel3d639192016-09-09 11:52:26 -07009893 info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009894#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009895 info->static_camera_characteristics = gStaticMetadata[cameraId];
9896
9897 //For now assume both cameras can operate independently.
9898 info->conflicting_devices = NULL;
9899 info->conflicting_devices_length = 0;
9900
9901 //resource cost is 100 * MIN(1.0, m/M),
9902 //where m is throughput requirement with maximum stream configuration
9903 //and M is CPP maximum throughput.
9904 float max_fps = 0.0;
9905 for (uint32_t i = 0;
9906 i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
9907 if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
9908 max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
9909 }
9910 float ratio = 1.0 * MAX_PROCESSED_STREAMS *
9911 gCamCapability[cameraId]->active_array_size.width *
9912 gCamCapability[cameraId]->active_array_size.height * max_fps /
9913 gCamCapability[cameraId]->max_pixel_bandwidth;
9914 info->resource_cost = 100 * MIN(1.0, ratio);
9915 LOGI("camera %d resource cost is %d", cameraId,
9916 info->resource_cost);
9917
9918 pthread_mutex_unlock(&gCamLock);
9919 return rc;
9920}
9921
9922/*===========================================================================
9923 * FUNCTION : translateCapabilityToMetadata
9924 *
9925 * DESCRIPTION: translate the capability into camera_metadata_t
9926 *
9927 * PARAMETERS : type of the request
9928 *
9929 *
9930 * RETURN : success: camera_metadata_t*
9931 * failure: NULL
9932 *
9933 *==========================================================================*/
9934camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
9935{
9936 if (mDefaultMetadata[type] != NULL) {
9937 return mDefaultMetadata[type];
9938 }
9939 //first time we are handling this request
9940 //fill up the metadata structure using the wrapper class
9941 CameraMetadata settings;
9942 //translate from cam_capability_t to camera_metadata_tag_t
9943 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
9944 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
9945 int32_t defaultRequestID = 0;
9946 settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
9947
9948 /* OIS disable */
9949 char ois_prop[PROPERTY_VALUE_MAX];
9950 memset(ois_prop, 0, sizeof(ois_prop));
9951 property_get("persist.camera.ois.disable", ois_prop, "0");
9952 uint8_t ois_disable = (uint8_t)atoi(ois_prop);
9953
9954 /* Force video to use OIS */
9955 char videoOisProp[PROPERTY_VALUE_MAX];
9956 memset(videoOisProp, 0, sizeof(videoOisProp));
9957 property_get("persist.camera.ois.video", videoOisProp, "1");
9958 uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
Shuzhen Wang19463d72016-03-08 11:09:52 -08009959
9960 // Hybrid AE enable/disable
9961 char hybrid_ae_prop[PROPERTY_VALUE_MAX];
9962 memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
9963 property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
9964 const uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);
9965
Thierry Strudel3d639192016-09-09 11:52:26 -07009966 uint8_t controlIntent = 0;
9967 uint8_t focusMode;
9968 uint8_t vsMode;
9969 uint8_t optStabMode;
9970 uint8_t cacMode;
9971 uint8_t edge_mode;
9972 uint8_t noise_red_mode;
9973 uint8_t tonemap_mode;
9974 bool highQualityModeEntryAvailable = FALSE;
9975 bool fastModeEntryAvailable = FALSE;
9976 vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
9977 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Mansoor Aftabea39eba2017-01-26 14:58:25 -08009978
Thierry Strudel3d639192016-09-09 11:52:26 -07009979 switch (type) {
9980 case CAMERA3_TEMPLATE_PREVIEW:
9981 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
9982 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
9983 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
9984 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
9985 edge_mode = ANDROID_EDGE_MODE_FAST;
9986 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
9987 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
9988 break;
9989 case CAMERA3_TEMPLATE_STILL_CAPTURE:
9990 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
9991 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
9992 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
9993 edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
9994 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
9995 tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
9996 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
9997 // Order of priority for default CAC is HIGH Quality -> FAST -> OFF
9998 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
9999 if (gCamCapability[mCameraId]->aberration_modes[i] ==
10000 CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
10001 highQualityModeEntryAvailable = TRUE;
10002 } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
10003 CAM_COLOR_CORRECTION_ABERRATION_FAST) {
10004 fastModeEntryAvailable = TRUE;
10005 }
10006 }
10007 if (highQualityModeEntryAvailable) {
10008 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
10009 } else if (fastModeEntryAvailable) {
10010 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10011 }
10012 break;
10013 case CAMERA3_TEMPLATE_VIDEO_RECORD:
10014 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
10015 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
10016 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070010017 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10018 edge_mode = ANDROID_EDGE_MODE_FAST;
10019 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10020 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10021 if (forceVideoOis)
10022 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10023 break;
10024 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
10025 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
10026 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
10027 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070010028 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10029 edge_mode = ANDROID_EDGE_MODE_FAST;
10030 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10031 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10032 if (forceVideoOis)
10033 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10034 break;
10035 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
10036 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
10037 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10038 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10039 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10040 edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
10041 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
10042 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10043 break;
10044 case CAMERA3_TEMPLATE_MANUAL:
10045 edge_mode = ANDROID_EDGE_MODE_FAST;
10046 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10047 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10048 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10049 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
10050 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
10051 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
10052 break;
10053 default:
10054 edge_mode = ANDROID_EDGE_MODE_FAST;
10055 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10056 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10057 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10058 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
10059 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10060 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
10061 break;
10062 }
Thierry Strudel04e026f2016-10-10 11:27:36 -070010063    // Set CAC to OFF if the underlying device doesn't support it
10064 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
10065 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
10066 }
Thierry Strudel3d639192016-09-09 11:52:26 -070010067 settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
10068 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
10069 settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
10070 if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
10071 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
10072 }
10073 settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
10074
10075 if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
10076 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
10077 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10078 else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
10079 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
10080 || ois_disable)
10081 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
10082 settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
10083
10084 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
10085 &gCamCapability[mCameraId]->exposure_compensation_default, 1);
10086
10087 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
10088 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
10089
10090 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
10091 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
10092
10093 static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
10094 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
10095
10096 static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
10097 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
10098
10099 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
10100 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
10101
10102 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
10103 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
10104
10105 static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
10106 settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
10107
10108 /*flash*/
10109 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
10110 settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
10111
10112 static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
10113 settings.update(ANDROID_FLASH_FIRING_POWER,
10114 &flashFiringLevel, 1);
10115
10116 /* lens */
10117 float default_aperture = gCamCapability[mCameraId]->apertures[0];
10118 settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
10119
10120 if (gCamCapability[mCameraId]->filter_densities_count) {
10121 float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
10122 settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
10123 gCamCapability[mCameraId]->filter_densities_count);
10124 }
10125
10126 float default_focal_length = gCamCapability[mCameraId]->focal_length;
10127 settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
10128
10129 if (focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
10130 float default_focus_distance = 0;
10131 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &default_focus_distance, 1);
10132 }
10133
10134 static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
10135 settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
10136
10137 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
10138 settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
10139
10140 static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
10141 settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
10142
10143 /* face detection (default to OFF) */
10144 static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
10145 settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
10146
Thierry Strudel54dc9782017-02-15 12:12:10 -080010147 static const uint8_t histogramMode = QCAMERA3_HISTOGRAM_MODE_OFF;
10148 settings.update(QCAMERA3_HISTOGRAM_MODE, &histogramMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070010149
10150 static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
10151 settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
10152
10153 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
10154 settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
10155
10156 static const uint8_t lensShadingMode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
10157 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &lensShadingMode, 1);
10158
10159 static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
10160 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
10161
10162    /* Exposure time (default to the minimum supported exposure time) */
10163 int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
10164 settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
10165
10166 /* frame duration */
10167 static const int64_t default_frame_duration = NSEC_PER_33MSEC;
10168 settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
10169
10170 /* sensitivity */
10171 static const int32_t default_sensitivity = 100;
10172 settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010173#ifndef USE_HAL_3_3
10174 static const int32_t default_isp_sensitivity =
10175 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
10176 settings.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &default_isp_sensitivity, 1);
10177#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070010178
10179 /*edge mode*/
10180 settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
10181
10182 /*noise reduction mode*/
10183 settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
10184
10185 /*color correction mode*/
10186 static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
10187 settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
10188
10189    /*tonemap mode*/
10190 settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
10191
10192 int32_t scaler_crop_region[4];
10193 scaler_crop_region[0] = 0;
10194 scaler_crop_region[1] = 0;
10195 scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
10196 scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
10197 settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
10198
10199 static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
10200 settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
10201
10202 /*focus distance*/
10203 float focus_distance = 0.0;
10204 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
10205
10206 /*target fps range: use maximum range for picture, and maximum fixed range for video*/
Thierry Strudele80ad7c2016-12-06 10:16:27 -080010207 /* Restrict template max_fps to 30 */
Thierry Strudel3d639192016-09-09 11:52:26 -070010208 float max_range = 0.0;
10209 float max_fixed_fps = 0.0;
10210 int32_t fps_range[2] = {0, 0};
10211 for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
10212 i++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080010213 if (gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps >
10214 TEMPLATE_MAX_PREVIEW_FPS) {
10215 continue;
10216 }
Thierry Strudel3d639192016-09-09 11:52:26 -070010217 float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
10218 gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
10219 if (type == CAMERA3_TEMPLATE_PREVIEW ||
10220 type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
10221 type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
10222 if (range > max_range) {
10223 fps_range[0] =
10224 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
10225 fps_range[1] =
10226 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
10227 max_range = range;
10228 }
10229 } else {
10230 if (range < 0.01 && max_fixed_fps <
10231 gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
10232 fps_range[0] =
10233 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
10234 fps_range[1] =
10235 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
10236 max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
10237 }
10238 }
10239 }
10240 settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
10241
10242 /*precapture trigger*/
10243 uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
10244 settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
10245
10246 /*af trigger*/
10247 uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
10248 settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
10249
10250 /* ae & af regions */
10251 int32_t active_region[] = {
10252 gCamCapability[mCameraId]->active_array_size.left,
10253 gCamCapability[mCameraId]->active_array_size.top,
10254 gCamCapability[mCameraId]->active_array_size.left +
10255 gCamCapability[mCameraId]->active_array_size.width,
10256 gCamCapability[mCameraId]->active_array_size.top +
10257 gCamCapability[mCameraId]->active_array_size.height,
10258 0};
10259 settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
10260 sizeof(active_region) / sizeof(active_region[0]));
10261 settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
10262 sizeof(active_region) / sizeof(active_region[0]));
10263
10264 /* black level lock */
10265 uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
10266 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
10267
10268 /* lens shading map mode */
10269 uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
10270 if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
10271 shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
10272 }
10273 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
10274
10275 //special defaults for manual template
10276 if (type == CAMERA3_TEMPLATE_MANUAL) {
10277 static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
10278 settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
10279
10280 static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
10281 settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
10282
10283 static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
10284 settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
10285
10286 static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
10287 settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
10288
10289 static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
10290 settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
10291
10292 static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
10293 settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
10294 }
10295
10296
10297 /* TNR
10298     * This is where we decide for which templates TNR is enabled by default.
10299     * TNR is enabled if either the preview or the video stream requires it.
10300     * This is not to be confused with per-stream linking; that decision is
10301     * still made per session and is handled as part of stream configuration.
10302 */
10303 uint8_t tnr_enable = 0;
10304
10305 if (m_bTnrPreview || m_bTnrVideo) {
10306
10307 switch (type) {
10308 case CAMERA3_TEMPLATE_VIDEO_RECORD:
10309 tnr_enable = 1;
10310 break;
10311
10312 default:
10313 tnr_enable = 0;
10314 break;
10315 }
10316
10317 int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
10318 settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
10319 settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
10320
10321 LOGD("TNR:%d with process plate %d for template:%d",
10322 tnr_enable, tnr_process_type, type);
10323 }
10324
10325 //Update Link tags to default
10326 int32_t sync_type = CAM_TYPE_STANDALONE;
10327 settings.update(QCAMERA3_DUALCAM_LINK_ENABLE, &sync_type, 1);
10328
10329    int32_t is_main = 0; // value doesn't matter; the app is expected to overwrite it
10330 settings.update(QCAMERA3_DUALCAM_LINK_IS_MAIN, &is_main, 1);
10331
10332 settings.update(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID, &is_main, 1);
10333
10334 /* CDS default */
10335 char prop[PROPERTY_VALUE_MAX];
10336 memset(prop, 0, sizeof(prop));
10337 property_get("persist.camera.CDS", prop, "Auto");
10338 cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
10339 cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
10340 if (CAM_CDS_MODE_MAX == cds_mode) {
10341 cds_mode = CAM_CDS_MODE_AUTO;
10342 }
10343
10344 /* Disabling CDS in templates which have TNR enabled*/
10345 if (tnr_enable)
10346 cds_mode = CAM_CDS_MODE_OFF;
10347
10348 int32_t mode = cds_mode;
10349 settings.update(QCAMERA3_CDS_MODE, &mode, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -070010350
Thierry Strudel269c81a2016-10-12 12:13:59 -070010351 /* Manual Convergence AEC Speed is disabled by default*/
10352 float default_aec_speed = 0;
10353 settings.update(QCAMERA3_AEC_CONVERGENCE_SPEED, &default_aec_speed, 1);
10354
10355 /* Manual Convergence AWB Speed is disabled by default*/
10356 float default_awb_speed = 0;
10357 settings.update(QCAMERA3_AWB_CONVERGENCE_SPEED, &default_awb_speed, 1);
10358
Thierry Strudel295a0ca2016-11-03 18:38:47 -070010359 // Set instant AEC to normal convergence by default
10360 int32_t instant_aec_mode = (int32_t)QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE;
10361 settings.update(QCAMERA3_INSTANT_AEC_MODE, &instant_aec_mode, 1);
10362
Shuzhen Wang19463d72016-03-08 11:09:52 -080010363 /* hybrid ae */
10364 settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);
10365
Thierry Strudel3d639192016-09-09 11:52:26 -070010366 mDefaultMetadata[type] = settings.release();
10367
10368 return mDefaultMetadata[type];
10369}
10370
10371/*===========================================================================
10372 * FUNCTION : setFrameParameters
10373 *
10374 * DESCRIPTION: set parameters per frame as requested in the metadata from
10375 * framework
10376 *
10377 * PARAMETERS :
10378 * @request : request that needs to be serviced
Thierry Strudelc2ee3302016-11-17 12:33:12 -080010379 * @streamsArray : Stream ID of all the requested streams
Thierry Strudel3d639192016-09-09 11:52:26 -070010380 * @blob_request: Whether this request is a blob request or not
10381 *
10382 * RETURN : success: NO_ERROR
10383 * failure:
10384 *==========================================================================*/
10385int QCamera3HardwareInterface::setFrameParameters(
10386 camera3_capture_request_t *request,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080010387 cam_stream_ID_t streamsArray,
Thierry Strudel3d639192016-09-09 11:52:26 -070010388 int blob_request,
10389 uint32_t snapshotStreamId)
10390{
10391 /*translate from camera_metadata_t type to parm_type_t*/
10392 int rc = 0;
10393 int32_t hal_version = CAM_HAL_V3;
10394
10395 clear_metadata_buffer(mParameters);
10396 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
10397 LOGE("Failed to set hal version in the parameters");
10398 return BAD_VALUE;
10399 }
10400
10401 /*we need to update the frame number in the parameters*/
10402 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
10403 request->frame_number)) {
10404 LOGE("Failed to set the frame number in the parameters");
10405 return BAD_VALUE;
10406 }
10407
10408 /* Update stream id of all the requested buffers */
Thierry Strudelc2ee3302016-11-17 12:33:12 -080010409 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070010410 LOGE("Failed to set stream type mask in the parameters");
10411 return BAD_VALUE;
10412 }
10413
10414 if (mUpdateDebugLevel) {
10415 uint32_t dummyDebugLevel = 0;
10416        /* The value of dummyDebugLevel is irrelevant; on
10417         * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, the debug property is read */
10418 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
10419 dummyDebugLevel)) {
10420 LOGE("Failed to set UPDATE_DEBUG_LEVEL");
10421 return BAD_VALUE;
10422 }
10423 mUpdateDebugLevel = false;
10424 }
10425
10426 if(request->settings != NULL){
10427 rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
10428 if (blob_request)
10429 memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
10430 }
10431
10432 return rc;
10433}
10434
10435/*===========================================================================
10436 * FUNCTION : setReprocParameters
10437 *
10438 * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
10439 * return it.
10440 *
10441 * PARAMETERS :
10442 * @request : request that needs to be serviced
10443 *
10444 * RETURN : success: NO_ERROR
10445 * failure:
10446 *==========================================================================*/
10447int32_t QCamera3HardwareInterface::setReprocParameters(
10448 camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
10449 uint32_t snapshotStreamId)
10450{
10451 /*translate from camera_metadata_t type to parm_type_t*/
10452 int rc = 0;
10453
10454 if (NULL == request->settings){
10455 LOGE("Reprocess settings cannot be NULL");
10456 return BAD_VALUE;
10457 }
10458
10459 if (NULL == reprocParam) {
10460 LOGE("Invalid reprocessing metadata buffer");
10461 return BAD_VALUE;
10462 }
10463 clear_metadata_buffer(reprocParam);
10464
10465 /*we need to update the frame number in the parameters*/
10466 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
10467 request->frame_number)) {
10468 LOGE("Failed to set the frame number in the parameters");
10469 return BAD_VALUE;
10470 }
10471
10472 rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
10473 if (rc < 0) {
10474 LOGE("Failed to translate reproc request");
10475 return rc;
10476 }
10477
10478 CameraMetadata frame_settings;
10479 frame_settings = request->settings;
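    // Translate the reprocess crop and ROI map that the app supplies via
    // vendor tags into the HAL crop metadata.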
10480 if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
10481 frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
10482 int32_t *crop_count =
10483 frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
10484 int32_t *crop_data =
10485 frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
10486 int32_t *roi_map =
10487 frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
10488 if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
10489 cam_crop_data_t crop_meta;
10490 memset(&crop_meta, 0, sizeof(cam_crop_data_t));
10491 crop_meta.num_of_streams = 1;
10492 crop_meta.crop_info[0].crop.left = crop_data[0];
10493 crop_meta.crop_info[0].crop.top = crop_data[1];
10494 crop_meta.crop_info[0].crop.width = crop_data[2];
10495 crop_meta.crop_info[0].crop.height = crop_data[3];
10496
10497 crop_meta.crop_info[0].roi_map.left =
10498 roi_map[0];
10499 crop_meta.crop_info[0].roi_map.top =
10500 roi_map[1];
10501 crop_meta.crop_info[0].roi_map.width =
10502 roi_map[2];
10503 crop_meta.crop_info[0].roi_map.height =
10504 roi_map[3];
10505
10506 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
10507 rc = BAD_VALUE;
10508 }
10509 LOGD("Found reprocess crop data for stream %p %dx%d, %dx%d",
10510 request->input_buffer->stream,
10511 crop_meta.crop_info[0].crop.left,
10512 crop_meta.crop_info[0].crop.top,
10513 crop_meta.crop_info[0].crop.width,
10514 crop_meta.crop_info[0].crop.height);
10515 LOGD("Found reprocess roi map data for stream %p %dx%d, %dx%d",
10516 request->input_buffer->stream,
10517 crop_meta.crop_info[0].roi_map.left,
10518 crop_meta.crop_info[0].roi_map.top,
10519 crop_meta.crop_info[0].roi_map.width,
10520 crop_meta.crop_info[0].roi_map.height);
10521 } else {
10522 LOGE("Invalid reprocess crop count %d!", *crop_count);
10523 }
10524 } else {
10525 LOGE("No crop data from matching output stream");
10526 }
10527
10528 /* These settings are not needed for regular requests so handle them specially for
10529 reprocess requests; information needed for EXIF tags */
10530 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
10531 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
10532 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
10533 if (NAME_NOT_FOUND != val) {
10534 uint32_t flashMode = (uint32_t)val;
10535 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
10536 rc = BAD_VALUE;
10537 }
10538 } else {
10539 LOGE("Could not map fwk flash mode %d to correct hal flash mode",
10540 frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
10541 }
10542 } else {
10543 LOGH("No flash mode in reprocess settings");
10544 }
10545
10546 if (frame_settings.exists(ANDROID_FLASH_STATE)) {
10547 int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
10548 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
10549 rc = BAD_VALUE;
10550 }
10551 } else {
10552 LOGH("No flash state in reprocess settings");
10553 }
10554
10555 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS)) {
10556 uint8_t *reprocessFlags =
10557 frame_settings.find(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS).data.u8;
10558 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_REPROCESS_FLAGS,
10559 *reprocessFlags)) {
10560 rc = BAD_VALUE;
10561 }
10562 }
10563
Thierry Strudel54dc9782017-02-15 12:12:10 -080010564 // Add exif debug data to internal metadata
10565 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB)) {
10566 mm_jpeg_debug_exif_params_t *debug_params =
10567 (mm_jpeg_debug_exif_params_t *)frame_settings.find
10568 (QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB).data.u8;
10569 // AE
10570 if (debug_params->ae_debug_params_valid == TRUE) {
10571 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AE,
10572 debug_params->ae_debug_params);
10573 }
10574 // AWB
10575 if (debug_params->awb_debug_params_valid == TRUE) {
10576 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AWB,
10577 debug_params->awb_debug_params);
10578 }
10579 // AF
10580 if (debug_params->af_debug_params_valid == TRUE) {
10581 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AF,
10582 debug_params->af_debug_params);
10583 }
10584 // ASD
10585 if (debug_params->asd_debug_params_valid == TRUE) {
10586 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_ASD,
10587 debug_params->asd_debug_params);
10588 }
10589 // Stats
10590 if (debug_params->stats_debug_params_valid == TRUE) {
10591 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_STATS,
10592 debug_params->stats_debug_params);
10593 }
10594 // BE Stats
10595 if (debug_params->bestats_debug_params_valid == TRUE) {
10596 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BESTATS,
10597 debug_params->bestats_debug_params);
10598 }
10599 // BHIST
10600 if (debug_params->bhist_debug_params_valid == TRUE) {
10601 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BHIST,
10602 debug_params->bhist_debug_params);
10603 }
10604 // 3A Tuning
10605 if (debug_params->q3a_tuning_debug_params_valid == TRUE) {
10606 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_3A_TUNING,
10607 debug_params->q3a_tuning_debug_params);
10608 }
10609 }
10610
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070010611 // Add metadata which reprocess needs
10612 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB)) {
10613 cam_reprocess_info_t *repro_info =
10614 (cam_reprocess_info_t *)frame_settings.find
10615 (QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB).data.u8;
Thierry Strudel3d639192016-09-09 11:52:26 -070010616 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_SENSOR,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070010617 repro_info->sensor_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070010618 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CAMIF,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070010619 repro_info->camif_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070010620 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_ISP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070010621 repro_info->isp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070010622 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CPP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070010623 repro_info->cpp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070010624 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_FOCAL_LENGTH_RATIO,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070010625 repro_info->af_focal_length_ratio);
Thierry Strudel3d639192016-09-09 11:52:26 -070010626 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_FLIP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070010627 repro_info->pipeline_flip);
10628 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_ROI,
10629 repro_info->af_roi);
10630 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_IMG_DYN_FEAT,
10631 repro_info->dyn_mask);
Thierry Strudel3d639192016-09-09 11:52:26 -070010632        /* If ANDROID_JPEG_ORIENTATION is present in the frame settings, then
10633           CAM_INTF_PARM_ROTATION has already been added in translateToHalMetadata
10634           and the HAL keeps that new rotation metadata. Otherwise, the old
10635           rotation info saved in the vendor tag is used */
10637 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
10638 CAM_INTF_PARM_ROTATION, reprocParam) {
10639 LOGD("CAM_INTF_PARM_ROTATION metadata is added in translateToHalMetadata");
10640 } else {
10641 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_ROTATION,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070010642 repro_info->rotation_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070010643 }
Thierry Strudel3d639192016-09-09 11:52:26 -070010644 }
10645
10646    /* Add additional JPEG cropping information. The app adds QCAMERA3_JPEG_ENCODE_CROP_RECT
10647       to ask for cropping, and the ROI is used for downscale/upscale during HW JPEG encoding;
10648       roi.width and roi.height are the final JPEG size.
10649       For now, the HAL only checks this for reprocess requests */
10650 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ENABLE) &&
10651 frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_RECT)) {
10652 uint8_t *enable =
10653 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ENABLE).data.u8;
10654 if (*enable == TRUE) {
10655 int32_t *crop_data =
10656 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_RECT).data.i32;
10657 cam_stream_crop_info_t crop_meta;
10658 memset(&crop_meta, 0, sizeof(cam_stream_crop_info_t));
10659 crop_meta.stream_id = 0;
10660 crop_meta.crop.left = crop_data[0];
10661 crop_meta.crop.top = crop_data[1];
10662 crop_meta.crop.width = crop_data[2];
10663 crop_meta.crop.height = crop_data[3];
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010664 // The JPEG crop roi should match cpp output size
10665 IF_META_AVAILABLE(cam_stream_crop_info_t, cpp_crop,
10666 CAM_INTF_META_SNAP_CROP_INFO_CPP, reprocParam) {
10667 crop_meta.roi_map.left = 0;
10668 crop_meta.roi_map.top = 0;
10669 crop_meta.roi_map.width = cpp_crop->crop.width;
10670 crop_meta.roi_map.height = cpp_crop->crop.height;
Thierry Strudel3d639192016-09-09 11:52:26 -070010671 }
10672 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_ENCODE_CROP,
10673 crop_meta);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010674 LOGH("Add JPEG encode crop left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070010675 crop_meta.crop.left, crop_meta.crop.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010676 crop_meta.crop.width, crop_meta.crop.height, mCameraId);
10677 LOGH("Add JPEG encode crop ROI left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070010678 crop_meta.roi_map.left, crop_meta.roi_map.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010679 crop_meta.roi_map.width, crop_meta.roi_map.height, mCameraId);
10680
10681 // Add JPEG scale information
10682 cam_dimension_t scale_dim;
10683 memset(&scale_dim, 0, sizeof(cam_dimension_t));
10684 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ROI)) {
10685 int32_t *roi =
10686 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ROI).data.i32;
10687 scale_dim.width = roi[2];
10688 scale_dim.height = roi[3];
10689 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_SCALE_DIMENSION,
10690 scale_dim);
10691 LOGH("Add JPEG encode scale width %d, height %d, mCameraId %d",
10692 scale_dim.width, scale_dim.height, mCameraId);
10693 }
Thierry Strudel3d639192016-09-09 11:52:26 -070010694 }
10695 }
10696
10697 return rc;
10698}
10699
10700/*===========================================================================
10701 * FUNCTION : saveRequestSettings
10702 *
10703 * DESCRIPTION: Add any settings that might have changed to the request settings
10704 * and save the settings to be applied on the frame
10705 *
10706 * PARAMETERS :
10707 * @jpegMetadata : the extracted and/or modified jpeg metadata
10708 * @request : request with initial settings
10709 *
10710 * RETURN :
10711 * camera_metadata_t* : pointer to the saved request settings
10712 *==========================================================================*/
10713camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
10714 const CameraMetadata &jpegMetadata,
10715 camera3_capture_request_t *request)
10716{
10717 camera_metadata_t *resultMetadata;
10718 CameraMetadata camMetadata;
10719 camMetadata = request->settings;
10720
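    // Carry over the (possibly modified) thumbnail size from the extracted
    // JPEG metadata into the saved request settings.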
10721 if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
10722 int32_t thumbnail_size[2];
10723 thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
10724 thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
10725 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
10726 jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
10727 }
10728
10729 if (request->input_buffer != NULL) {
10730 uint8_t reprocessFlags = 1;
10731 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
10732 (uint8_t*)&reprocessFlags,
10733 sizeof(reprocessFlags));
10734 }
10735
10736 resultMetadata = camMetadata.release();
10737 return resultMetadata;
10738}
10739
10740/*===========================================================================
10741 * FUNCTION : setHalFpsRange
10742 *
10743 * DESCRIPTION: set FPS range parameter
10744 *
10745 *
10746 * PARAMETERS :
10747 * @settings : Metadata from framework
10748 * @hal_metadata: Metadata buffer
10749 *
10750 *
10751 * RETURN : success: NO_ERROR
10752 * failure:
10753 *==========================================================================*/
10754int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
10755 metadata_buffer_t *hal_metadata)
10756{
10757 int32_t rc = NO_ERROR;
10758 cam_fps_range_t fps_range;
10759 fps_range.min_fps = (float)
10760 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
10761 fps_range.max_fps = (float)
10762 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
10763 fps_range.video_min_fps = fps_range.min_fps;
10764 fps_range.video_max_fps = fps_range.max_fps;
10765
10766 LOGD("aeTargetFpsRange fps: [%f %f]",
10767 fps_range.min_fps, fps_range.max_fps);
10768 /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
10769 * follows:
10770 * ---------------------------------------------------------------|
10771 * Video stream is absent in configure_streams |
10772 * (Camcorder preview before the first video record |
10773 * ---------------------------------------------------------------|
10774 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
10775 * | | | vid_min/max_fps|
10776 * ---------------------------------------------------------------|
10777 * NO | [ 30, 240] | 240 | [240, 240] |
10778 * |-------------|-------------|----------------|
10779 * | [240, 240] | 240 | [240, 240] |
10780 * ---------------------------------------------------------------|
10781 * Video stream is present in configure_streams |
10782 * ---------------------------------------------------------------|
10783 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
10784 * | | | vid_min/max_fps|
10785 * ---------------------------------------------------------------|
10786 * NO | [ 30, 240] | 240 | [240, 240] |
10787 * (camcorder prev |-------------|-------------|----------------|
10788 * after video rec | [240, 240] | 240 | [240, 240] |
10789 * is stopped) | | | |
10790 * ---------------------------------------------------------------|
10791 * YES | [ 30, 240] | 240 | [240, 240] |
10792 * |-------------|-------------|----------------|
10793 * | [240, 240] | 240 | [240, 240] |
10794 * ---------------------------------------------------------------|
10795 * When Video stream is absent in configure_streams,
10796 * preview fps = sensor_fps / batchsize
10797 * Eg: for 240fps at batchSize 4, preview = 60fps
10798 * for 120fps at batchSize 4, preview = 30fps
10799 *
10800 * When video stream is present in configure_streams, preview fps is as per
10801 * the ratio of preview buffers to video buffers requested in process
10802 * capture request
10803 */
10804 mBatchSize = 0;
10805 if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
10806 fps_range.min_fps = fps_range.video_max_fps;
10807 fps_range.video_min_fps = fps_range.video_max_fps;
10808 int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
10809 fps_range.max_fps);
10810 if (NAME_NOT_FOUND != val) {
10811 cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
10812 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
10813 return BAD_VALUE;
10814 }
10815
10816 if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
10817 /* If batchmode is currently in progress and the fps changes,
10818 * set the flag to restart the sensor */
10819 if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
10820 (mHFRVideoFps != fps_range.max_fps)) {
10821 mNeedSensorRestart = true;
10822 }
10823 mHFRVideoFps = fps_range.max_fps;
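                // Derive how many frames are batched per capture request from the HFR
                // rate relative to the 30fps preview rate (e.g. 120fps -> batch of 4),
                // capped below at MAX_HFR_BATCH_SIZE.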
10824 mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
10825 if (mBatchSize > MAX_HFR_BATCH_SIZE) {
10826 mBatchSize = MAX_HFR_BATCH_SIZE;
10827 }
10828 }
10829 LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);
10830
10831 }
10832 } else {
10833 /* HFR mode is session param in backend/ISP. This should be reset when
10834 * in non-HFR mode */
10835 cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
10836 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
10837 return BAD_VALUE;
10838 }
10839 }
10840 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
10841 return BAD_VALUE;
10842 }
10843 LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
10844 fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
10845 return rc;
10846}
10847
10848/*===========================================================================
10849 * FUNCTION : translateToHalMetadata
10850 *
10851 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
10852 *
10853 *
10854 * PARAMETERS :
10855 * @request : request sent from framework
10856 *
10857 *
10858 * RETURN : success: NO_ERROR
10859 * failure:
10860 *==========================================================================*/
10861int QCamera3HardwareInterface::translateToHalMetadata
10862 (const camera3_capture_request_t *request,
10863 metadata_buffer_t *hal_metadata,
Chien-Yu Chen92724a82017-01-06 11:50:30 -080010864 uint32_t snapshotStreamId) {
10865 if (request == nullptr || hal_metadata == nullptr) {
10866 return BAD_VALUE;
10867 }
10868
10869 int64_t minFrameDuration = getMinFrameDuration(request);
10870
10871 return translateFwkMetadataToHalMetadata(request->settings, hal_metadata, snapshotStreamId,
10872 minFrameDuration);
10873}
10874
10875int QCamera3HardwareInterface::translateFwkMetadataToHalMetadata(
10876 const camera_metadata_t *frameworkMetadata, metadata_buffer_t *hal_metadata,
10877 uint32_t snapshotStreamId, int64_t minFrameDuration) {
10878
Thierry Strudel3d639192016-09-09 11:52:26 -070010879 int rc = 0;
10880 CameraMetadata frame_settings;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080010881 frame_settings = frameworkMetadata;
Thierry Strudel3d639192016-09-09 11:52:26 -070010882
10883 /* Do not change the order of the following list unless you know what you are
10884 * doing.
10885 * The order is laid out in such a way that parameters in the front of the table
10886 * may be used to override the parameters later in the table. Examples are:
10887 * 1. META_MODE should precede AEC/AWB/AF MODE
10888     * 2. AEC MODE should precede EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
10889     * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
10890     * 4. Any mode should precede its corresponding settings
10891 */
10892 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
10893 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
10894 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
10895 rc = BAD_VALUE;
10896 }
10897 rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
10898 if (rc != NO_ERROR) {
10899 LOGE("extractSceneMode failed");
10900 }
10901 }
10902
10903 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
10904 uint8_t fwk_aeMode =
10905 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
10906 uint8_t aeMode;
10907 int32_t redeye;
10908
10909 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
10910 aeMode = CAM_AE_MODE_OFF;
10911 } else {
10912 aeMode = CAM_AE_MODE_ON;
10913 }
10914 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
10915 redeye = 1;
10916 } else {
10917 redeye = 0;
10918 }
10919
10920 int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
10921 fwk_aeMode);
10922 if (NAME_NOT_FOUND != val) {
10923 int32_t flashMode = (int32_t)val;
10924 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
10925 }
10926
10927 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
10928 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
10929 rc = BAD_VALUE;
10930 }
10931 }
10932
10933 if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
10934 uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
10935 int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
10936 fwk_whiteLevel);
10937 if (NAME_NOT_FOUND != val) {
10938 uint8_t whiteLevel = (uint8_t)val;
10939 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
10940 rc = BAD_VALUE;
10941 }
10942 }
10943 }
10944
10945 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
10946 uint8_t fwk_cacMode =
10947 frame_settings.find(
10948 ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
10949 int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
10950 fwk_cacMode);
10951 if (NAME_NOT_FOUND != val) {
10952 cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
10953 bool entryAvailable = FALSE;
10954 // Check whether Frameworks set CAC mode is supported in device or not
10955 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
10956 if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
10957 entryAvailable = TRUE;
10958 break;
10959 }
10960 }
10961 LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
10962            // If the entry is not found, set a device-supported mode instead of the frameworks mode, i.e.,
10963            // Only HW ISP CAC + no SW CAC : advertise all 3, with High doing the same as Fast in the ISP
10964            // No HW ISP CAC + only SW CAC : advertise all 3, with Fast doing the same as OFF
10965 if (entryAvailable == FALSE) {
10966 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
10967 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
10968 } else {
10969 if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
10970                        // High is not supported, so set FAST since the spec says the
10971                        // underlying device implementation can be the same for both modes.
10972 cacMode = CAM_COLOR_CORRECTION_ABERRATION_FAST;
10973 } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
10974                        // Fast is not supported, so neither HIGH nor FAST can be set;
10975                        // choose OFF to avoid the fps drop a high quality mode would cause
10976 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
10977 } else {
10978 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
10979 }
10980 }
10981 }
10982 LOGD("Final cacMode is %d", cacMode);
10983 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
10984 rc = BAD_VALUE;
10985 }
10986 } else {
10987 LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
10988 }
10989 }
10990
10991 if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
10992 uint8_t fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
10993 int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
10994 fwk_focusMode);
10995 if (NAME_NOT_FOUND != val) {
10996 uint8_t focusMode = (uint8_t)val;
10997 LOGD("set focus mode %d", focusMode);
10998 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
10999 rc = BAD_VALUE;
11000 }
11001 }
11002 }
11003
11004 if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) {
11005 float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
11006 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
11007 focalDistance)) {
11008 rc = BAD_VALUE;
11009 }
11010 }
11011
11012 if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
11013 uint8_t fwk_antibandingMode =
11014 frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
11015 int val = lookupHalName(ANTIBANDING_MODES_MAP,
11016 METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
11017 if (NAME_NOT_FOUND != val) {
11018 uint32_t hal_antibandingMode = (uint32_t)val;
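            // For AUTO antibanding, pick the 50Hz or 60Hz variant based on the
            // detected mains-frequency zone.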
Shuzhen Wangf6890e02016-08-12 14:28:54 -070011019 if (hal_antibandingMode == CAM_ANTIBANDING_MODE_AUTO) {
11020 if (m60HzZone) {
11021 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_60HZ;
11022 } else {
11023 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_50HZ;
11024 }
11025 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011026 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
11027 hal_antibandingMode)) {
11028 rc = BAD_VALUE;
11029 }
11030 }
11031 }
11032
11033 if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
11034 int32_t expCompensation = frame_settings.find(
11035 ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
11036 if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
11037 expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
11038 if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
11039 expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
Thierry Strudel54dc9782017-02-15 12:12:10 -080011040 LOGD("Setting compensation:%d", expCompensation);
Thierry Strudel3d639192016-09-09 11:52:26 -070011041 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
11042 expCompensation)) {
11043 rc = BAD_VALUE;
11044 }
11045 }
11046
11047 if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
11048 uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
11049 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
11050 rc = BAD_VALUE;
11051 }
11052 }
11053 if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
11054 rc = setHalFpsRange(frame_settings, hal_metadata);
11055 if (rc != NO_ERROR) {
11056 LOGE("setHalFpsRange failed");
11057 }
11058 }
11059
11060 if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
11061 uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
11062 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
11063 rc = BAD_VALUE;
11064 }
11065 }
11066
11067 if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
11068 uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
11069 int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
11070 fwk_effectMode);
11071 if (NAME_NOT_FOUND != val) {
11072 uint8_t effectMode = (uint8_t)val;
11073 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
11074 rc = BAD_VALUE;
11075 }
11076 }
11077 }
11078
11079 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
11080 uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
11081 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
11082 colorCorrectMode)) {
11083 rc = BAD_VALUE;
11084 }
11085 }
11086
11087 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
11088 cam_color_correct_gains_t colorCorrectGains;
11089 for (size_t i = 0; i < CC_GAIN_MAX; i++) {
11090 colorCorrectGains.gains[i] =
11091 frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
11092 }
11093 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
11094 colorCorrectGains)) {
11095 rc = BAD_VALUE;
11096 }
11097 }
11098
11099 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
11100 cam_color_correct_matrix_t colorCorrectTransform;
11101 cam_rational_type_t transform_elem;
11102 size_t num = 0;
11103 for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
11104 for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
11105 transform_elem.numerator =
11106 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
11107 transform_elem.denominator =
11108 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
11109 colorCorrectTransform.transform_matrix[i][j] = transform_elem;
11110 num++;
11111 }
11112 }
11113 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
11114 colorCorrectTransform)) {
11115 rc = BAD_VALUE;
11116 }
11117 }
11118
11119 cam_trigger_t aecTrigger;
11120 aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
11121 aecTrigger.trigger_id = -1;
11122 if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
11123 frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
11124 aecTrigger.trigger =
11125 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
11126 aecTrigger.trigger_id =
11127 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
11128 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
11129 aecTrigger)) {
11130 rc = BAD_VALUE;
11131 }
11132 LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
11133 aecTrigger.trigger, aecTrigger.trigger_id);
11134 }
11135
11136 /*af_trigger must come with a trigger id*/
11137 if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
11138 frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
11139 cam_trigger_t af_trigger;
11140 af_trigger.trigger =
11141 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
11142 af_trigger.trigger_id =
11143 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
11144 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
11145 rc = BAD_VALUE;
11146 }
11147 LOGD("AfTrigger: %d AfTriggerID: %d",
11148 af_trigger.trigger, af_trigger.trigger_id);
11149 }
11150
11151 if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
11152 int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
11153 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
11154 rc = BAD_VALUE;
11155 }
11156 }
11157 if (frame_settings.exists(ANDROID_EDGE_MODE)) {
11158 cam_edge_application_t edge_application;
11159 edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080011160
Thierry Strudel3d639192016-09-09 11:52:26 -070011161 if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
11162 edge_application.sharpness = 0;
11163 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080011164 edge_application.sharpness =
11165 gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
11166 if (frame_settings.exists(QCAMERA3_SHARPNESS_STRENGTH)) {
11167 int32_t sharpness =
11168 frame_settings.find(QCAMERA3_SHARPNESS_STRENGTH).data.i32[0];
11169 if (sharpness >= gCamCapability[mCameraId]->sharpness_ctrl.min_value &&
11170 sharpness <= gCamCapability[mCameraId]->sharpness_ctrl.max_value) {
11171 LOGD("Setting edge mode sharpness %d", sharpness);
11172 edge_application.sharpness = sharpness;
11173 }
11174 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011175 }
11176 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
11177 rc = BAD_VALUE;
11178 }
11179 }
11180
11181 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
11182 int32_t respectFlashMode = 1;
11183 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
11184 uint8_t fwk_aeMode =
11185 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
11186 if (fwk_aeMode > ANDROID_CONTROL_AE_MODE_ON) {
11187 respectFlashMode = 0;
11188 LOGH("AE Mode controls flash, ignore android.flash.mode");
11189 }
11190 }
11191 if (respectFlashMode) {
11192 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
11193 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11194 LOGH("flash mode after mapping %d", val);
11195 // To check: CAM_INTF_META_FLASH_MODE usage
11196 if (NAME_NOT_FOUND != val) {
11197 uint8_t flashMode = (uint8_t)val;
11198 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
11199 rc = BAD_VALUE;
11200 }
11201 }
11202 }
11203 }
11204
11205 if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
11206 uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
11207 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
11208 rc = BAD_VALUE;
11209 }
11210 }
11211
11212 if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
11213 int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
11214 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
11215 flashFiringTime)) {
11216 rc = BAD_VALUE;
11217 }
11218 }
11219
11220 if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
11221 uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
11222 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
11223 hotPixelMode)) {
11224 rc = BAD_VALUE;
11225 }
11226 }
11227
11228 if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
11229 float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
11230 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
11231 lensAperture)) {
11232 rc = BAD_VALUE;
11233 }
11234 }
11235
11236 if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
11237 float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
11238 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
11239 filterDensity)) {
11240 rc = BAD_VALUE;
11241 }
11242 }
11243
11244 if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
11245 float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
11246 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
11247 focalLength)) {
11248 rc = BAD_VALUE;
11249 }
11250 }
11251
11252 if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
11253 uint8_t optStabMode =
11254 frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
11255 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
11256 optStabMode)) {
11257 rc = BAD_VALUE;
11258 }
11259 }
11260
11261 if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
11262 uint8_t videoStabMode =
11263 frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
11264 LOGD("videoStabMode from APP = %d", videoStabMode);
11265 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_VIDEO_STAB_MODE,
11266 videoStabMode)) {
11267 rc = BAD_VALUE;
11268 }
11269 }
11270
11271
11272 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
11273 uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
11274 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
11275 noiseRedMode)) {
11276 rc = BAD_VALUE;
11277 }
11278 }
11279
11280 if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
11281 float reprocessEffectiveExposureFactor =
11282 frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
11283 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
11284 reprocessEffectiveExposureFactor)) {
11285 rc = BAD_VALUE;
11286 }
11287 }
11288
11289 cam_crop_region_t scalerCropRegion;
11290 bool scalerCropSet = false;
11291 if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
11292 scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
11293 scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
11294 scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
11295 scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
11296
11297 // Map coordinate system from active array to sensor output.
11298 mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
11299 scalerCropRegion.width, scalerCropRegion.height);
11300
11301 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
11302 scalerCropRegion)) {
11303 rc = BAD_VALUE;
11304 }
11305 scalerCropSet = true;
11306 }
11307
11308 if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
11309 int64_t sensorExpTime =
11310 frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
11311 LOGD("setting sensorExpTime %lld", sensorExpTime);
11312 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
11313 sensorExpTime)) {
11314 rc = BAD_VALUE;
11315 }
11316 }
11317
11318 if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
11319 int64_t sensorFrameDuration =
11320 frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070011321 sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
11322 if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
11323 sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
11324 LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
11325 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
11326 sensorFrameDuration)) {
11327 rc = BAD_VALUE;
11328 }
11329 }
11330
11331 if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
11332 int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
11333 if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
11334 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
11335 if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
11336 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
11337 LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
11338 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
11339 sensorSensitivity)) {
11340 rc = BAD_VALUE;
11341 }
11342 }
11343
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011344#ifndef USE_HAL_3_3
11345 if (frame_settings.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
11346 int32_t ispSensitivity =
11347 frame_settings.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST).data.i32[0];
11348 if (ispSensitivity <
11349 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity) {
11350 ispSensitivity =
11351 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
11352 LOGD("clamp ispSensitivity to %d", ispSensitivity);
11353 }
11354 if (ispSensitivity >
11355 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity) {
11356 ispSensitivity =
11357 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity;
11358 LOGD("clamp ispSensitivity to %d", ispSensitivity);
11359 }
11360 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_ISP_SENSITIVITY,
11361 ispSensitivity)) {
11362 rc = BAD_VALUE;
11363 }
11364 }
11365#endif
11366
Thierry Strudel3d639192016-09-09 11:52:26 -070011367 if (frame_settings.exists(ANDROID_SHADING_MODE)) {
11368 uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
11369 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
11370 rc = BAD_VALUE;
11371 }
11372 }
11373
11374 if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
11375 uint8_t fwk_facedetectMode =
11376 frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
11377
11378 int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
11379 fwk_facedetectMode);
11380
11381 if (NAME_NOT_FOUND != val) {
11382 uint8_t facedetectMode = (uint8_t)val;
11383 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
11384 facedetectMode)) {
11385 rc = BAD_VALUE;
11386 }
11387 }
11388 }
11389
Thierry Strudel54dc9782017-02-15 12:12:10 -080011390 if (frame_settings.exists(QCAMERA3_HISTOGRAM_MODE)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011391 uint8_t histogramMode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080011392 frame_settings.find(QCAMERA3_HISTOGRAM_MODE).data.u8[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070011393 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
11394 histogramMode)) {
11395 rc = BAD_VALUE;
11396 }
11397 }
11398
11399 if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
11400 uint8_t sharpnessMapMode =
11401 frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
11402 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
11403 sharpnessMapMode)) {
11404 rc = BAD_VALUE;
11405 }
11406 }
11407
11408 if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
11409 uint8_t tonemapMode =
11410 frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
11411 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
11412 rc = BAD_VALUE;
11413 }
11414 }
11415 /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
11416 /*All tonemap channels will have the same number of points*/
11417 if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
11418 frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
11419 frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
11420 cam_rgb_tonemap_curves tonemapCurves;
11421 tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
11422 if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
11423 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
11424 tonemapCurves.tonemap_points_cnt,
11425 CAM_MAX_TONEMAP_CURVE_SIZE);
11426 tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
11427 }
11428
11429 /* ch0 = G*/
11430 size_t point = 0;
11431 cam_tonemap_curve_t tonemapCurveGreen;
11432 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
11433 for (size_t j = 0; j < 2; j++) {
11434 tonemapCurveGreen.tonemap_points[i][j] =
11435 frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
11436 point++;
11437 }
11438 }
11439 tonemapCurves.curves[0] = tonemapCurveGreen;
11440
11441 /* ch 1 = B */
11442 point = 0;
11443 cam_tonemap_curve_t tonemapCurveBlue;
11444 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
11445 for (size_t j = 0; j < 2; j++) {
11446 tonemapCurveBlue.tonemap_points[i][j] =
11447 frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
11448 point++;
11449 }
11450 }
11451 tonemapCurves.curves[1] = tonemapCurveBlue;
11452
11453 /* ch 2 = R */
11454 point = 0;
11455 cam_tonemap_curve_t tonemapCurveRed;
11456 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
11457 for (size_t j = 0; j < 2; j++) {
11458 tonemapCurveRed.tonemap_points[i][j] =
11459 frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
11460 point++;
11461 }
11462 }
11463 tonemapCurves.curves[2] = tonemapCurveRed;
11464
11465 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
11466 tonemapCurves)) {
11467 rc = BAD_VALUE;
11468 }
11469 }
11470
11471 if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
11472 uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
11473 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
11474 captureIntent)) {
11475 rc = BAD_VALUE;
11476 }
11477 }
11478
11479 if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
11480 uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
11481 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
11482 blackLevelLock)) {
11483 rc = BAD_VALUE;
11484 }
11485 }
11486
11487 if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
11488 uint8_t lensShadingMapMode =
11489 frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
11490 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
11491 lensShadingMapMode)) {
11492 rc = BAD_VALUE;
11493 }
11494 }
11495
11496 if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
11497 cam_area_t roi;
11498 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011499 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AE_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070011500
11501 // Map coordinate system from active array to sensor output.
11502 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
11503 roi.rect.height);
11504
11505 if (scalerCropSet) {
11506 reset = resetIfNeededROI(&roi, &scalerCropRegion);
11507 }
11508 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
11509 rc = BAD_VALUE;
11510 }
11511 }
11512
11513 if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
11514 cam_area_t roi;
11515 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011516 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AF_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070011517
11518 // Map coordinate system from active array to sensor output.
11519 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
11520 roi.rect.height);
11521
11522 if (scalerCropSet) {
11523 reset = resetIfNeededROI(&roi, &scalerCropRegion);
11524 }
11525 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
11526 rc = BAD_VALUE;
11527 }
11528 }
11529
11530 // CDS for non-HFR non-video mode
11531 if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
11532 !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
11533 int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
11534 if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
11535 LOGE("Invalid CDS mode %d!", *fwk_cds);
11536 } else {
11537 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11538 CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
11539 rc = BAD_VALUE;
11540 }
11541 }
11542 }
11543
Thierry Strudel04e026f2016-10-10 11:27:36 -070011544 // Video HDR
Mansoor Aftab93a66e52017-01-26 14:58:25 -080011545 cam_video_hdr_mode_t vhdr = CAM_VIDEO_HDR_MODE_OFF;
Thierry Strudel04e026f2016-10-10 11:27:36 -070011546 if (frame_settings.exists(QCAMERA3_VIDEO_HDR_MODE)) {
Mansoor Aftab93a66e52017-01-26 14:58:25 -080011547 vhdr = (cam_video_hdr_mode_t) frame_settings.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
11548 }
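    // m_bVideoHdrEnabled forces video HDR on regardless of the per-request setting.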
11549 if (m_bVideoHdrEnabled)
11550 vhdr = CAM_VIDEO_HDR_MODE_ON;
11551
Thierry Strudel54dc9782017-02-15 12:12:10 -080011552 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
11553
11554 if(vhdr != curr_hdr_state)
11555 LOGH("PROFILE_SET_HDR_MODE %d" ,vhdr);
11556
Mansoor Aftab93a66e52017-01-26 14:58:25 -080011557 rc = setVideoHdrMode(mParameters, vhdr);
11558 if (rc != NO_ERROR) {
11559 LOGE("setVideoHDR is failed");
Thierry Strudel04e026f2016-10-10 11:27:36 -070011560 }
11561
11562 //IR
11563 if(frame_settings.exists(QCAMERA3_IR_MODE)) {
11564 cam_ir_mode_type_t fwk_ir = (cam_ir_mode_type_t)
11565 frame_settings.find(QCAMERA3_IR_MODE).data.i32[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080011566 uint8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR) != 0);
11567 uint8_t isIRon = 0;
11568
11569 (fwk_ir >0) ? (isIRon = 1) : (isIRon = 0) ;
Thierry Strudel04e026f2016-10-10 11:27:36 -070011570 if ((CAM_IR_MODE_MAX <= fwk_ir) || (0 > fwk_ir)) {
11571 LOGE("Invalid IR mode %d!", fwk_ir);
11572 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080011573 if(isIRon != curr_ir_state )
11574 LOGH("PROFILE_SET_IR_MODE %d" ,isIRon);
11575
Thierry Strudel04e026f2016-10-10 11:27:36 -070011576 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11577 CAM_INTF_META_IR_MODE, fwk_ir)) {
11578 rc = BAD_VALUE;
11579 }
11580 }
11581 }
11582
Thierry Strudel54dc9782017-02-15 12:12:10 -080011583 //Binning Correction Mode
11584 if(frame_settings.exists(QCAMERA3_BINNING_CORRECTION_MODE)) {
11585 cam_binning_correction_mode_t fwk_binning_correction = (cam_binning_correction_mode_t)
11586 frame_settings.find(QCAMERA3_BINNING_CORRECTION_MODE).data.i32[0];
11587 if ((CAM_BINNING_CORRECTION_MODE_MAX <= fwk_binning_correction)
11588 || (0 > fwk_binning_correction)) {
11589 LOGE("Invalid binning correction mode %d!", fwk_binning_correction);
11590 } else {
11591 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11592 CAM_INTF_META_BINNING_CORRECTION_MODE, fwk_binning_correction)) {
11593 rc = BAD_VALUE;
11594 }
11595 }
11596 }
11597
Thierry Strudel269c81a2016-10-12 12:13:59 -070011598 if (frame_settings.exists(QCAMERA3_AEC_CONVERGENCE_SPEED)) {
11599 float aec_speed;
11600 aec_speed = frame_settings.find(QCAMERA3_AEC_CONVERGENCE_SPEED).data.f[0];
11601 LOGD("AEC Speed :%f", aec_speed);
11602 if ( aec_speed < 0 ) {
11603 LOGE("Invalid AEC mode %f!", aec_speed);
11604 } else {
11605 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_CONVERGENCE_SPEED,
11606 aec_speed)) {
11607 rc = BAD_VALUE;
11608 }
11609 }
11610 }
11611
11612 if (frame_settings.exists(QCAMERA3_AWB_CONVERGENCE_SPEED)) {
11613 float awb_speed;
11614 awb_speed = frame_settings.find(QCAMERA3_AWB_CONVERGENCE_SPEED).data.f[0];
11615 LOGD("AWB Speed :%f", awb_speed);
11616 if ( awb_speed < 0 ) {
11617 LOGE("Invalid AWB mode %f!", awb_speed);
11618 } else {
11619 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AWB_CONVERGENCE_SPEED,
11620 awb_speed)) {
11621 rc = BAD_VALUE;
11622 }
11623 }
11624 }
11625
Thierry Strudel3d639192016-09-09 11:52:26 -070011626 // TNR
11627 if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
11628 frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
11629 uint8_t b_TnrRequested = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -080011630 uint8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0);
Thierry Strudel3d639192016-09-09 11:52:26 -070011631 cam_denoise_param_t tnr;
11632 tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
11633 tnr.process_plates =
11634 (cam_denoise_process_type_t)frame_settings.find(
11635 QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
11636 b_TnrRequested = tnr.denoise_enable;
Thierry Strudel54dc9782017-02-15 12:12:10 -080011637
11638 if(b_TnrRequested != curr_tnr_state)
11639 LOGH("PROFILE_SET_TNR_MODE %d" ,b_TnrRequested);
11640
Thierry Strudel3d639192016-09-09 11:52:26 -070011641 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
11642 rc = BAD_VALUE;
11643 }
11644 }
11645
Thierry Strudel54dc9782017-02-15 12:12:10 -080011646 if (frame_settings.exists(QCAMERA3_EXPOSURE_METER)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -070011647 int32_t* exposure_metering_mode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080011648 frame_settings.find(QCAMERA3_EXPOSURE_METER).data.i32;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070011649 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_ALGO_TYPE,
11650 *exposure_metering_mode)) {
11651 rc = BAD_VALUE;
11652 }
11653 }
11654
Thierry Strudel3d639192016-09-09 11:52:26 -070011655 if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
11656 int32_t fwk_testPatternMode =
11657 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
11658 int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
11659 METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
11660
11661 if (NAME_NOT_FOUND != testPatternMode) {
11662 cam_test_pattern_data_t testPatternData;
11663 memset(&testPatternData, 0, sizeof(testPatternData));
11664 testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
11665 if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
11666 frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
11667 int32_t *fwk_testPatternData =
11668 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
11669 testPatternData.r = fwk_testPatternData[0];
11670 testPatternData.b = fwk_testPatternData[3];
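                // Indices 1 and 2 hold the two green channel values; which one maps to
                // Gr vs Gb depends on the sensor's color filter arrangement.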
11671 switch (gCamCapability[mCameraId]->color_arrangement) {
11672 case CAM_FILTER_ARRANGEMENT_RGGB:
11673 case CAM_FILTER_ARRANGEMENT_GRBG:
11674 testPatternData.gr = fwk_testPatternData[1];
11675 testPatternData.gb = fwk_testPatternData[2];
11676 break;
11677 case CAM_FILTER_ARRANGEMENT_GBRG:
11678 case CAM_FILTER_ARRANGEMENT_BGGR:
11679 testPatternData.gr = fwk_testPatternData[2];
11680 testPatternData.gb = fwk_testPatternData[1];
11681 break;
11682 default:
11683 LOGE("color arrangement %d is not supported",
11684 gCamCapability[mCameraId]->color_arrangement);
11685 break;
11686 }
11687 }
11688 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
11689 testPatternData)) {
11690 rc = BAD_VALUE;
11691 }
11692 } else {
11693 LOGE("Invalid framework sensor test pattern mode %d",
11694 fwk_testPatternMode);
11695 }
11696 }
11697
11698 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
11699 size_t count = 0;
11700 camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
11701 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
11702 gps_coords.data.d, gps_coords.count, count);
11703 if (gps_coords.count != count) {
11704 rc = BAD_VALUE;
11705 }
11706 }
11707
11708 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
11709 char gps_methods[GPS_PROCESSING_METHOD_SIZE];
11710 size_t count = 0;
11711 const char *gps_methods_src = (const char *)
11712 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
11713 memset(gps_methods, '\0', sizeof(gps_methods));
11714 strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
11715 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
11716 gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
11717 if (GPS_PROCESSING_METHOD_SIZE != count) {
11718 rc = BAD_VALUE;
11719 }
11720 }
11721
11722 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
11723 int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
11724 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
11725 gps_timestamp)) {
11726 rc = BAD_VALUE;
11727 }
11728 }
11729
11730 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
11731 int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
11732 cam_rotation_info_t rotation_info;
11733 if (orientation == 0) {
11734 rotation_info.rotation = ROTATE_0;
11735 } else if (orientation == 90) {
11736 rotation_info.rotation = ROTATE_90;
11737 } else if (orientation == 180) {
11738 rotation_info.rotation = ROTATE_180;
11739 } else if (orientation == 270) {
11740 rotation_info.rotation = ROTATE_270;
11741 }
Shuzhen Wang6ec8eac2016-07-28 23:09:23 -070011742 rotation_info.device_rotation = ROTATE_0;
Thierry Strudel3d639192016-09-09 11:52:26 -070011743 rotation_info.streamId = snapshotStreamId;
11744 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
11745 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
11746 rc = BAD_VALUE;
11747 }
11748 }
11749
11750 if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
11751 uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
11752 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
11753 rc = BAD_VALUE;
11754 }
11755 }
11756
11757 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
11758 uint32_t thumb_quality = (uint32_t)
11759 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
11760 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
11761 thumb_quality)) {
11762 rc = BAD_VALUE;
11763 }
11764 }
11765
11766 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
11767 cam_dimension_t dim;
11768 dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
11769 dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
11770 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
11771 rc = BAD_VALUE;
11772 }
11773 }
11774
11775 // Internal metadata
11776 if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
11777 size_t count = 0;
11778 camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
11779 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
11780 privatedata.data.i32, privatedata.count, count);
11781 if (privatedata.count != count) {
11782 rc = BAD_VALUE;
11783 }
11784 }
11785
Thierry Strudel295a0ca2016-11-03 18:38:47 -070011786 // ISO/Exposure Priority
11787 if (frame_settings.exists(QCAMERA3_USE_ISO_EXP_PRIORITY) &&
11788 frame_settings.exists(QCAMERA3_SELECT_PRIORITY)) {
11789 cam_priority_mode_t mode =
11790 (cam_priority_mode_t)frame_settings.find(QCAMERA3_SELECT_PRIORITY).data.i32[0];
11791 if((CAM_ISO_PRIORITY == mode) || (CAM_EXP_PRIORITY == mode)) {
11792 cam_intf_parm_manual_3a_t use_iso_exp_pty;
11793 use_iso_exp_pty.previewOnly = FALSE;
11794 uint64_t* ptr = (uint64_t*)frame_settings.find(QCAMERA3_USE_ISO_EXP_PRIORITY).data.i64;
11795 use_iso_exp_pty.value = *ptr;
11796
11797 if(CAM_ISO_PRIORITY == mode) {
11798 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ISO,
11799 use_iso_exp_pty)) {
11800 rc = BAD_VALUE;
11801 }
11802 }
11803 else {
11804 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_TIME,
11805 use_iso_exp_pty)) {
11806 rc = BAD_VALUE;
11807 }
11808 }
Thierry Strudel54dc9782017-02-15 12:12:10 -080011809
11810 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 1)) {
11811 rc = BAD_VALUE;
11812 }
11813 }
11814 } else {
11815 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 0)) {
11816 rc = BAD_VALUE;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070011817 }
11818 }
11819
11820 // Saturation
11821 if (frame_settings.exists(QCAMERA3_USE_SATURATION)) {
11822 int32_t* use_saturation =
11823 frame_settings.find(QCAMERA3_USE_SATURATION).data.i32;
11824 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_SATURATION, *use_saturation)) {
11825 rc = BAD_VALUE;
11826 }
11827 }
11828
Thierry Strudel3d639192016-09-09 11:52:26 -070011829 // EV step
11830 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
11831 gCamCapability[mCameraId]->exp_compensation_step)) {
11832 rc = BAD_VALUE;
11833 }
11834
11835 // CDS info
11836 if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
11837 cam_cds_data_t *cdsData = (cam_cds_data_t *)
11838 frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
11839
11840 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11841 CAM_INTF_META_CDS_DATA, *cdsData)) {
11842 rc = BAD_VALUE;
11843 }
11844 }
11845
Shuzhen Wang19463d72016-03-08 11:09:52 -080011846 // Hybrid AE
11847 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
11848 uint8_t *hybrid_ae = (uint8_t *)
11849 frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;
11850
11851 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11852 CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
11853 rc = BAD_VALUE;
11854 }
11855 }
11856
Thierry Strudel3d639192016-09-09 11:52:26 -070011857 return rc;
11858}
11859
11860/*===========================================================================
11861 * FUNCTION : captureResultCb
11862 *
11863 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
11864 *
11865 * PARAMETERS :
11866 * @frame : frame information from mm-camera-interface
11867 * @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
11868 * @userdata: userdata
11869 *
11870 * RETURN : NONE
11871 *==========================================================================*/
11872void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
11873 camera3_stream_buffer_t *buffer,
11874 uint32_t frame_number, bool isInputBuffer, void *userdata)
11875{
11876 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
11877 if (hw == NULL) {
11878 LOGE("Invalid hw %p", hw);
11879 return;
11880 }
11881
11882 hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
11883 return;
11884}
11885
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011886/*===========================================================================
11887 * FUNCTION : setBufferErrorStatus
11888 *
11889 * DESCRIPTION: Callback handler for channels to report any buffer errors
11890 *
11891 * PARAMETERS :
11892 * @ch : Channel on which buffer error is reported from
11893 * @frame_number : frame number on which buffer error is reported on
11894 * @buffer_status : buffer error status
11895 * @userdata: userdata
11896 *
11897 * RETURN : NONE
11898 *==========================================================================*/
11899void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
11900 uint32_t frame_number, camera3_buffer_status_t err, void *userdata)
11901{
11902 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
11903 if (hw == NULL) {
11904 LOGE("Invalid hw %p", hw);
11905 return;
11906 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011907
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011908 hw->setBufferErrorStatus(ch, frame_number, err);
11909 return;
11910}
11911
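// Marks every pending buffer for the given frame number that belongs to the
// reporting channel with CAMERA3_BUFFER_STATUS_ERROR.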
11912void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
11913 uint32_t frameNumber, camera3_buffer_status_t err)
11914{
11915 LOGD("channel: %p, frame# %d, buf err: %d", ch, frameNumber, err);
11916 pthread_mutex_lock(&mMutex);
11917
11918 for (auto& req : mPendingBuffersMap.mPendingBuffersInRequest) {
11919 if (req.frame_number != frameNumber)
11920 continue;
11921 for (auto& k : req.mPendingBufferList) {
11922 if(k.stream->priv == ch) {
11923 k.bufStatus = CAMERA3_BUFFER_STATUS_ERROR;
11924 }
11925 }
11926 }
11927
11928 pthread_mutex_unlock(&mMutex);
11929 return;
11930}
Thierry Strudel3d639192016-09-09 11:52:26 -070011931/*===========================================================================
11932 * FUNCTION : initialize
11933 *
11934 * DESCRIPTION: Pass framework callback pointers to HAL
11935 *
11936 * PARAMETERS :
11937 *
11938 *
11939 * RETURN : Success : 0
11940 * Failure: -ENODEV
11941 *==========================================================================*/
11942
11943int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
11944 const camera3_callback_ops_t *callback_ops)
11945{
11946 LOGD("E");
11947 QCamera3HardwareInterface *hw =
11948 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
11949 if (!hw) {
11950 LOGE("NULL camera device");
11951 return -ENODEV;
11952 }
11953
11954 int rc = hw->initialize(callback_ops);
11955 LOGD("X");
11956 return rc;
11957}
11958
11959/*===========================================================================
11960 * FUNCTION : configure_streams
11961 *
11962 * DESCRIPTION:
11963 *
11964 * PARAMETERS :
11965 *
11966 *
11967 * RETURN : Success: 0
11968 * Failure: -EINVAL (if stream configuration is invalid)
11969 * -ENODEV (fatal error)
11970 *==========================================================================*/
11971
11972int QCamera3HardwareInterface::configure_streams(
11973 const struct camera3_device *device,
11974 camera3_stream_configuration_t *stream_list)
11975{
11976 LOGD("E");
11977 QCamera3HardwareInterface *hw =
11978 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
11979 if (!hw) {
11980 LOGE("NULL camera device");
11981 return -ENODEV;
11982 }
11983 int rc = hw->configureStreams(stream_list);
11984 LOGD("X");
11985 return rc;
11986}
11987
11988/*===========================================================================
11989 * FUNCTION : construct_default_request_settings
11990 *
11991 * DESCRIPTION: Configure a settings buffer to meet the required use case
11992 *
11993 * PARAMETERS :
11994 *
11995 *
11996 * RETURN : Success: Return valid metadata
11997 * Failure: Return NULL
11998 *==========================================================================*/
11999const camera_metadata_t* QCamera3HardwareInterface::
12000 construct_default_request_settings(const struct camera3_device *device,
12001 int type)
12002{
12003
12004 LOGD("E");
12005 camera_metadata_t* fwk_metadata = NULL;
12006 QCamera3HardwareInterface *hw =
12007 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12008 if (!hw) {
12009 LOGE("NULL camera device");
12010 return NULL;
12011 }
12012
12013 fwk_metadata = hw->translateCapabilityToMetadata(type);
12014
12015 LOGD("X");
12016 return fwk_metadata;
12017}
12018
12019/*===========================================================================
12020 * FUNCTION : process_capture_request
12021 *
12022 * DESCRIPTION:
12023 *
12024 * PARAMETERS :
12025 *
12026 *
12027 * RETURN :
12028 *==========================================================================*/
12029int QCamera3HardwareInterface::process_capture_request(
12030 const struct camera3_device *device,
12031 camera3_capture_request_t *request)
12032{
12033 LOGD("E");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012034 CAMSCOPE_UPDATE_FLAGS(CAMSCOPE_SECTION_HAL, kpi_camscope_flags);
Thierry Strudel3d639192016-09-09 11:52:26 -070012035 QCamera3HardwareInterface *hw =
12036 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12037 if (!hw) {
12038 LOGE("NULL camera device");
12039 return -EINVAL;
12040 }
12041
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012042 int rc = hw->orchestrateRequest(request);
Thierry Strudel3d639192016-09-09 11:52:26 -070012043 LOGD("X");
12044 return rc;
12045}
12046
12047/*===========================================================================
12048 * FUNCTION : dump
12049 *
12050 * DESCRIPTION:
12051 *
12052 * PARAMETERS :
12053 *
12054 *
12055 * RETURN :
12056 *==========================================================================*/
12057
12058void QCamera3HardwareInterface::dump(
12059 const struct camera3_device *device, int fd)
12060{
12061 /* Log level property is read when "adb shell dumpsys media.camera" is
12062 called so that the log level can be controlled without restarting
12063 the media server */
12064 getLogLevel();
12065
12066 LOGD("E");
12067 QCamera3HardwareInterface *hw =
12068 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12069 if (!hw) {
12070 LOGE("NULL camera device");
12071 return;
12072 }
12073
12074 hw->dump(fd);
12075 LOGD("X");
12076 return;
12077}
12078
12079/*===========================================================================
12080 * FUNCTION : flush
12081 *
12082 * DESCRIPTION:
12083 *
12084 * PARAMETERS :
12085 *
12086 *
12087 * RETURN :
12088 *==========================================================================*/
12089
12090int QCamera3HardwareInterface::flush(
12091 const struct camera3_device *device)
12092{
12093 int rc;
12094 LOGD("E");
12095 QCamera3HardwareInterface *hw =
12096 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12097 if (!hw) {
12098 LOGE("NULL camera device");
12099 return -EINVAL;
12100 }
12101
12102 pthread_mutex_lock(&hw->mMutex);
12103 // Validate current state
12104 switch (hw->mState) {
12105 case STARTED:
12106 /* valid state */
12107 break;
12108
12109 case ERROR:
12110 pthread_mutex_unlock(&hw->mMutex);
12111 hw->handleCameraDeviceError();
12112 return -ENODEV;
12113
12114 default:
12115 LOGI("Flush returned during state %d", hw->mState);
12116 pthread_mutex_unlock(&hw->mMutex);
12117 return 0;
12118 }
12119 pthread_mutex_unlock(&hw->mMutex);
12120
12121 rc = hw->flush(true /* restart channels */ );
12122 LOGD("X");
12123 return rc;
12124}
12125
12126/*===========================================================================
12127 * FUNCTION : close_camera_device
12128 *
12129 * DESCRIPTION:
12130 *
12131 * PARAMETERS :
12132 *
12133 *
12134 * RETURN :
12135 *==========================================================================*/
12136int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
12137{
12138 int ret = NO_ERROR;
12139 QCamera3HardwareInterface *hw =
12140 reinterpret_cast<QCamera3HardwareInterface *>(
12141 reinterpret_cast<camera3_device_t *>(device)->priv);
12142 if (!hw) {
12143 LOGE("NULL camera device");
12144 return BAD_VALUE;
12145 }
12146
12147 LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
12148 delete hw;
12149 LOGI("[KPI Perf]: X");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012150 CAMSCOPE_DESTROY(CAMSCOPE_SECTION_HAL);
Thierry Strudel3d639192016-09-09 11:52:26 -070012151 return ret;
12152}
12153
12154/*===========================================================================
12155 * FUNCTION : getWaveletDenoiseProcessPlate
12156 *
12157 * DESCRIPTION: query wavelet denoise process plate
12158 *
12159 * PARAMETERS : None
12160 *
12161 * RETURN : WNR process plate value
12162 *==========================================================================*/
12163cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
12164{
12165 char prop[PROPERTY_VALUE_MAX];
12166 memset(prop, 0, sizeof(prop));
12167 property_get("persist.denoise.process.plates", prop, "0");
12168 int processPlate = atoi(prop);
12169 switch(processPlate) {
12170 case 0:
12171 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
12172 case 1:
12173 return CAM_WAVELET_DENOISE_CBCR_ONLY;
12174 case 2:
12175 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
12176 case 3:
12177 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
12178 default:
12179 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
12180 }
12181}
12182
12183
12184/*===========================================================================
12185 * FUNCTION : getTemporalDenoiseProcessPlate
12186 *
12187 * DESCRIPTION: query temporal denoise process plate
12188 *
12189 * PARAMETERS : None
12190 *
12191 * RETURN     : TNR process plate value
12192 *==========================================================================*/
12193cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
12194{
12195 char prop[PROPERTY_VALUE_MAX];
12196 memset(prop, 0, sizeof(prop));
12197 property_get("persist.tnr.process.plates", prop, "0");
12198 int processPlate = atoi(prop);
12199 switch(processPlate) {
12200 case 0:
12201 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
12202 case 1:
12203 return CAM_WAVELET_DENOISE_CBCR_ONLY;
12204 case 2:
12205 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
12206 case 3:
12207 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
12208 default:
12209 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
12210 }
12211}
12212
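/*
 * Illustrative note (not part of the original HAL; values taken from the two
 * getters above): both WNR and TNR plate selection read an integer property and
 * cast it to cam_denoise_process_type_t, falling back to STREAMLINE_YCBCR for
 * unknown values. Hypothetical usage:
 *
 *   adb shell setprop persist.denoise.process.plates 1
 *     -> getWaveletDenoiseProcessPlate() == CAM_WAVELET_DENOISE_CBCR_ONLY
 *   adb shell setprop persist.tnr.process.plates 7      // out of range
 *     -> getTemporalDenoiseProcessPlate() == CAM_WAVELET_DENOISE_STREAMLINE_YCBCR
 */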
12213
12214/*===========================================================================
12215 * FUNCTION : extractSceneMode
12216 *
12217 * DESCRIPTION: Extract scene mode from frameworks set metadata
12218 *
12219 * PARAMETERS :
12220 * @frame_settings: CameraMetadata reference
12221 *   @metaMode: ANDROID_CONTROL_MODE value set by the framework
12222 * @hal_metadata: hal metadata structure
12223 *
12224 * RETURN     : int32_t type of status: NO_ERROR on success, BAD_VALUE on failure
12225 *==========================================================================*/
12226int32_t QCamera3HardwareInterface::extractSceneMode(
12227 const CameraMetadata &frame_settings, uint8_t metaMode,
12228 metadata_buffer_t *hal_metadata)
12229{
12230 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080012231 uint8_t sceneMode = CAM_SCENE_MODE_OFF;
12232
12233 if (ANDROID_CONTROL_MODE_OFF_KEEP_STATE == metaMode) {
12234 LOGD("Ignoring control mode OFF_KEEP_STATE");
12235 return NO_ERROR;
12236 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012237
12238 if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
12239 camera_metadata_ro_entry entry =
12240 frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
12241 if (0 == entry.count)
12242 return rc;
12243
12244 uint8_t fwk_sceneMode = entry.data.u8[0];
12245
12246 int val = lookupHalName(SCENE_MODES_MAP,
12247 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
12248 fwk_sceneMode);
12249 if (NAME_NOT_FOUND != val) {
Mansoor Aftab58465fa2017-01-26 15:02:44 -080012250 sceneMode = (uint8_t)val;
Thierry Strudel3d639192016-09-09 11:52:26 -070012251 LOGD("sceneMode: %d", sceneMode);
Thierry Strudel3d639192016-09-09 11:52:26 -070012252 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080012253 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012254
Mansoor Aftab58465fa2017-01-26 15:02:44 -080012255 if ((sceneMode == CAM_SCENE_MODE_HDR) || m_bSensorHDREnabled) {
12256 rc = setSensorHDR(hal_metadata, (sceneMode == CAM_SCENE_MODE_HDR));
12257 }
12258
12259 if ((rc == NO_ERROR) && !m_bSensorHDREnabled) {
12260 if (sceneMode == ANDROID_CONTROL_SCENE_MODE_HDR) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012261 cam_hdr_param_t hdr_params;
12262 hdr_params.hdr_enable = 1;
12263 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
12264 hdr_params.hdr_need_1x = false;
12265 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12266 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
12267 rc = BAD_VALUE;
12268 }
12269 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080012270
Thierry Strudel3d639192016-09-09 11:52:26 -070012271 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12272 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
12273 rc = BAD_VALUE;
12274 }
12275 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012276
12277 if (mForceHdrSnapshot) {
12278 cam_hdr_param_t hdr_params;
12279 hdr_params.hdr_enable = 1;
12280 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
12281 hdr_params.hdr_need_1x = false;
12282 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12283 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
12284 rc = BAD_VALUE;
12285 }
12286 }
12287
Thierry Strudel3d639192016-09-09 11:52:26 -070012288 return rc;
12289}
12290
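/*
 * Illustrative sketch (hypothetical caller, not in the original HAL): how a
 * framework HDR scene-mode request would typically flow through
 * extractSceneMode(). The left-hand names are framework metadata; the rest is
 * what the function programs into hal_metadata.
 *
 *   // frame_settings contains:
 *   //   ANDROID_CONTROL_MODE       = USE_SCENE_MODE
 *   //   ANDROID_CONTROL_SCENE_MODE = HDR
 *   extractSceneMode(frame_settings, ANDROID_CONTROL_MODE_USE_SCENE_MODE, hal_metadata);
 *   // -> CAM_INTF_PARM_BESTSHOT_MODE is set to the mapped HAL scene mode, and
 *   //    either sensor HDR (via setSensorHDR) or multi-frame bracketing
 *   //    (CAM_INTF_PARM_HAL_BRACKETING_HDR) is enabled, depending on support.
 */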
12291/*===========================================================================
Thierry Strudel04e026f2016-10-10 11:27:36 -070012292 * FUNCTION : setVideoHdrMode
12293 *
12294 * DESCRIPTION: Set Video HDR mode from frameworks set metadata
12295 *
12296 * PARAMETERS :
12297 * @hal_metadata: hal metadata structure
12298 * @vhdr        : requested video HDR mode (cam_video_hdr_mode_t)
12299 *
12300 * RETURN : NO_ERROR on success, BAD_VALUE for an invalid mode
12301 *==========================================================================*/
12302int32_t QCamera3HardwareInterface::setVideoHdrMode(
12303 metadata_buffer_t *hal_metadata, cam_video_hdr_mode_t vhdr)
12304{
Mansoor Aftab58465fa2017-01-26 15:02:44 -080012305 if ( (vhdr >= CAM_VIDEO_HDR_MODE_OFF) && (vhdr < CAM_VIDEO_HDR_MODE_MAX)) {
12306 return setSensorHDR(hal_metadata, (vhdr == CAM_VIDEO_HDR_MODE_ON), true);
12307 }
12308
12309 LOGE("Invalid Video HDR mode %d!", vhdr);
12310 return BAD_VALUE;
12311}
12312
12313/*===========================================================================
12314 * FUNCTION : setSensorHDR
12315 *
12316 * DESCRIPTION: Enable/disable sensor HDR.
12317 *
12318 * PARAMETERS :
12319 * @hal_metadata: hal metadata structure
12320 * @enable: boolean whether to enable/disable sensor HDR
12321 * @isVideoHdrEnable: true when called for video HDR (m_bSensorHDREnabled is not latched)
12322 * RETURN : NO_ERROR on success, BAD_VALUE on failure
12323 *==========================================================================*/
12324int32_t QCamera3HardwareInterface::setSensorHDR(
12325 metadata_buffer_t *hal_metadata, bool enable, bool isVideoHdrEnable)
12326{
Thierry Strudel04e026f2016-10-10 11:27:36 -070012327 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080012328 cam_sensor_hdr_type_t sensor_hdr = CAM_SENSOR_HDR_OFF;
12329
12330 if (enable) {
12331 char sensor_hdr_prop[PROPERTY_VALUE_MAX];
12332 memset(sensor_hdr_prop, 0, sizeof(sensor_hdr_prop));
12333 #ifdef _LE_CAMERA_
12334 //Default to staggered HDR for IOT
12335 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "3");
12336 #else
12337 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "0");
12338 #endif
12339 sensor_hdr = (cam_sensor_hdr_type_t) atoi(sensor_hdr_prop);
12340 }
12341
12342 bool isSupported = false;
12343 switch (sensor_hdr) {
12344 case CAM_SENSOR_HDR_IN_SENSOR:
12345 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
12346 CAM_QCOM_FEATURE_SENSOR_HDR) {
12347 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012348 LOGD("Setting HDR mode In Sensor");
Thierry Strudel04e026f2016-10-10 11:27:36 -070012349 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080012350 break;
12351 case CAM_SENSOR_HDR_ZIGZAG:
12352 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
12353 CAM_QCOM_FEATURE_ZIGZAG_HDR) {
12354 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012355 LOGD("Setting HDR mode Zigzag");
Thierry Strudel04e026f2016-10-10 11:27:36 -070012356 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080012357 break;
12358 case CAM_SENSOR_HDR_STAGGERED:
12359 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
12360 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
12361 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012362 LOGD("Setting HDR mode Staggered");
Thierry Strudel04e026f2016-10-10 11:27:36 -070012363 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080012364 break;
12365 case CAM_SENSOR_HDR_OFF:
12366 isSupported = true;
12367 LOGD("Turning off sensor HDR");
12368 break;
12369 default:
12370 LOGE("HDR mode %d not supported", sensor_hdr);
12371 rc = BAD_VALUE;
12372 break;
12373 }
12374
12375 if(isSupported) {
12376 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12377 CAM_INTF_PARM_SENSOR_HDR, sensor_hdr)) {
12378 rc = BAD_VALUE;
12379 } else {
12380 if(!isVideoHdrEnable)
12381 m_bSensorHDREnabled = (sensor_hdr != CAM_SENSOR_HDR_OFF);
Thierry Strudel04e026f2016-10-10 11:27:36 -070012382 }
12383 }
12384 return rc;
12385}
12386
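/*
 * Illustrative note (not part of the original HAL): when enabling, the integer in
 * persist.camera.sensor.hdr is cast directly to cam_sensor_hdr_type_t, so "0"
 * selects CAM_SENSOR_HDR_OFF and "3" selects CAM_SENSOR_HDR_STAGGERED (the IOT
 * default above); the numeric values of the other modes depend on the enum
 * definition in cam_types.h. A mode only takes effect if the corresponding
 * CAM_QCOM_FEATURE_* capability bit is advertised for the sensor.
 */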
12387/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070012388 * FUNCTION : needRotationReprocess
12389 *
12390 * DESCRIPTION: if rotation needs to be done by reprocess in pp
12391 *
12392 * PARAMETERS : none
12393 *
12394 * RETURN : true: needed
12395 * false: no need
12396 *==========================================================================*/
12397bool QCamera3HardwareInterface::needRotationReprocess()
12398{
12399 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
12400         // pp has the capability to process rotation, so any requested rotation is handled via reprocess
12401 LOGH("need do reprocess for rotation");
12402 return true;
12403 }
12404
12405 return false;
12406}
12407
12408/*===========================================================================
12409 * FUNCTION : needReprocess
12410 *
12411 * DESCRIPTION: if reprocess is needed
12412 *
12413 * PARAMETERS : @postprocess_mask : post-processing features already applied to the frame
12414 *
12415 * RETURN : true: needed
12416 * false: no need
12417 *==========================================================================*/
12418bool QCamera3HardwareInterface::needReprocess(cam_feature_mask_t postprocess_mask)
12419{
12420 if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
12421 // TODO: add for ZSL HDR later
12422 // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
12423 if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
12424 LOGH("need do reprocess for ZSL WNR or min PP reprocess");
12425 return true;
12426 } else {
12427 LOGH("already post processed frame");
12428 return false;
12429 }
12430 }
12431 return needRotationReprocess();
12432}
12433
12434/*===========================================================================
12435 * FUNCTION : needJpegExifRotation
12436 *
12437 * DESCRIPTION: if rotation needs to be done via JPEG EXIF (when pp cannot rotate)
12438 *
12439 * PARAMETERS : none
12440 *
12441 * RETURN : true: needed
12442 * false: no need
12443 *==========================================================================*/
12444bool QCamera3HardwareInterface::needJpegExifRotation()
12445{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012446 /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
Thierry Strudel3d639192016-09-09 11:52:26 -070012447 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
12448 LOGD("Need use Jpeg EXIF Rotation");
12449 return true;
12450 }
12451 return false;
12452}
12453
12454/*===========================================================================
12455 * FUNCTION : addOfflineReprocChannel
12456 *
12457 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
12458 * coming from input channel
12459 *
12460 * PARAMETERS :
12461 * @config : reprocess configuration
12462 * @inputChHandle : pointer to the input (source) channel
12463 *
12464 *
12465 * RETURN : Ptr to the newly created channel obj. NULL if failed.
12466 *==========================================================================*/
12467QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
12468 const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
12469{
12470 int32_t rc = NO_ERROR;
12471 QCamera3ReprocessChannel *pChannel = NULL;
12472
12473 pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012474 mChannelHandle, mCameraHandle->ops, captureResultCb, setBufferErrorStatus,
12475 config.padding, CAM_QCOM_FEATURE_NONE, this, inputChHandle);
Thierry Strudel3d639192016-09-09 11:52:26 -070012476 if (NULL == pChannel) {
12477 LOGE("no mem for reprocess channel");
12478 return NULL;
12479 }
12480
12481 rc = pChannel->initialize(IS_TYPE_NONE);
12482 if (rc != NO_ERROR) {
12483 LOGE("init reprocess channel failed, ret = %d", rc);
12484 delete pChannel;
12485 return NULL;
12486 }
12487
12488 // pp feature config
12489 cam_pp_feature_config_t pp_config;
12490 memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
12491
12492 pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
12493 if (gCamCapability[mCameraId]->qcom_supported_feature_mask
12494 & CAM_QCOM_FEATURE_DSDN) {
12495         // Use CPP CDS in case h/w supports it.
12496 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
12497 pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
12498 }
12499 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
12500 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
12501 }
12502
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012503 if (config.hdr_param.hdr_enable) {
12504 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
12505 pp_config.hdr_param = config.hdr_param;
12506 }
12507
12508 if (mForceHdrSnapshot) {
12509 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
12510 pp_config.hdr_param.hdr_enable = 1;
12511 pp_config.hdr_param.hdr_need_1x = 0;
12512 pp_config.hdr_param.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
12513 }
12514
Thierry Strudel3d639192016-09-09 11:52:26 -070012515 rc = pChannel->addReprocStreamsFromSource(pp_config,
12516 config,
12517 IS_TYPE_NONE,
12518 mMetadataChannel);
12519
12520 if (rc != NO_ERROR) {
12521 delete pChannel;
12522 return NULL;
12523 }
12524 return pChannel;
12525}
12526
12527/*===========================================================================
12528 * FUNCTION : getMobicatMask
12529 *
12530 * DESCRIPTION: returns mobicat mask
12531 *
12532 * PARAMETERS : none
12533 *
12534 * RETURN : mobicat mask
12535 *
12536 *==========================================================================*/
12537uint8_t QCamera3HardwareInterface::getMobicatMask()
12538{
12539 return m_MobicatMask;
12540}
12541
12542/*===========================================================================
12543 * FUNCTION : setMobicat
12544 *
12545 * DESCRIPTION: set Mobicat on/off.
12546 *
12547 * PARAMETERS : None
12548 *
12549 *
12550 * RETURN : int32_t type of status
12551 * NO_ERROR -- success
12552 *              non-zero failure code
12553 *==========================================================================*/
12554int32_t QCamera3HardwareInterface::setMobicat()
12555{
12556 char value [PROPERTY_VALUE_MAX];
12557 property_get("persist.camera.mobicat", value, "0");
12558 int32_t ret = NO_ERROR;
12559 uint8_t enableMobi = (uint8_t)atoi(value);
12560
12561 if (enableMobi) {
12562 tune_cmd_t tune_cmd;
12563 tune_cmd.type = SET_RELOAD_CHROMATIX;
12564 tune_cmd.module = MODULE_ALL;
12565 tune_cmd.value = TRUE;
12566 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
12567 CAM_INTF_PARM_SET_VFE_COMMAND,
12568 tune_cmd);
12569
12570 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
12571 CAM_INTF_PARM_SET_PP_COMMAND,
12572 tune_cmd);
12573 }
12574 m_MobicatMask = enableMobi;
12575
12576 return ret;
12577}
12578
12579/*===========================================================================
12580* FUNCTION : getLogLevel
12581*
12582* DESCRIPTION: Reads the log level property into a variable
12583*
12584* PARAMETERS :
12585* None
12586*
12587* RETURN :
12588* None
12589*==========================================================================*/
12590void QCamera3HardwareInterface::getLogLevel()
12591{
12592 char prop[PROPERTY_VALUE_MAX];
12593 uint32_t globalLogLevel = 0;
12594
12595 property_get("persist.camera.hal.debug", prop, "0");
12596 int val = atoi(prop);
12597 if (0 <= val) {
12598 gCamHal3LogLevel = (uint32_t)val;
12599 }
12600
Thierry Strudel9ec39c62016-12-28 11:30:05 -080012601 property_get("persist.camera.kpi.debug", prop, "0");
Thierry Strudel3d639192016-09-09 11:52:26 -070012602 gKpiDebugLevel = atoi(prop);
12603
12604 property_get("persist.camera.global.debug", prop, "0");
12605 val = atoi(prop);
12606 if (0 <= val) {
12607 globalLogLevel = (uint32_t)val;
12608 }
12609
12610 /* Highest log level among hal.logs and global.logs is selected */
12611     /* Highest log level among hal.debug and global.debug is selected */
12612 gCamHal3LogLevel = globalLogLevel;
12613
12614 return;
12615}
12616
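/*
 * Illustrative example (hypothetical property values, not part of the original
 * HAL): gCamHal3LogLevel ends up as the maximum of the two debug properties.
 *
 *   adb shell setprop persist.camera.hal.debug 2
 *   adb shell setprop persist.camera.global.debug 4
 *   -> after getLogLevel(), gCamHal3LogLevel == 4
 */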
12617/*===========================================================================
12618 * FUNCTION : validateStreamRotations
12619 *
12620 * DESCRIPTION: Check if the rotations requested are supported
12621 *
12622 * PARAMETERS :
12623 * @stream_list : streams to be configured
12624 *
12625 * RETURN : NO_ERROR on success
12626 * -EINVAL on failure
12627 *
12628 *==========================================================================*/
12629int QCamera3HardwareInterface::validateStreamRotations(
12630 camera3_stream_configuration_t *streamList)
12631{
12632 int rc = NO_ERROR;
12633
12634 /*
12635 * Loop through all streams requested in configuration
12636 * Check if unsupported rotations have been requested on any of them
12637 */
12638 for (size_t j = 0; j < streamList->num_streams; j++){
12639 camera3_stream_t *newStream = streamList->streams[j];
12640
12641 bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
12642 bool isImplDef = (newStream->format ==
12643 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
12644 bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
12645 isImplDef);
12646
12647 if (isRotated && (!isImplDef || isZsl)) {
12648 LOGE("Error: Unsupported rotation of %d requested for stream"
12649                     " type:%d and stream format:%d",
12650 newStream->rotation, newStream->stream_type,
12651 newStream->format);
12652 rc = -EINVAL;
12653 break;
12654 }
12655 }
12656
12657 return rc;
12658}
12659
12660/*===========================================================================
12661* FUNCTION : getFlashInfo
12662*
12663* DESCRIPTION: Retrieve information about whether the device has a flash.
12664*
12665* PARAMETERS :
12666* @cameraId : Camera id to query
12667* @hasFlash : Boolean indicating whether there is a flash device
12668* associated with given camera
12669* @flashNode : If a flash device exists, this will be its device node.
12670*
12671* RETURN :
12672* None
12673*==========================================================================*/
12674void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
12675 bool& hasFlash,
12676 char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
12677{
12678 cam_capability_t* camCapability = gCamCapability[cameraId];
12679 if (NULL == camCapability) {
12680 hasFlash = false;
12681 flashNode[0] = '\0';
12682 } else {
12683 hasFlash = camCapability->flash_available;
12684 strlcpy(flashNode,
12685 (char*)camCapability->flash_dev_name,
12686 QCAMERA_MAX_FILEPATH_LENGTH);
12687 }
12688}
12689
12690/*===========================================================================
12691* FUNCTION : getEepromVersionInfo
12692*
12693* DESCRIPTION: Retrieve version info of the sensor EEPROM data
12694*
12695* PARAMETERS : None
12696*
12697* RETURN : string describing EEPROM version
12698* "\0" if no such info available
12699*==========================================================================*/
12700const char *QCamera3HardwareInterface::getEepromVersionInfo()
12701{
12702 return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
12703}
12704
12705/*===========================================================================
12706* FUNCTION : getLdafCalib
12707*
12708* DESCRIPTION: Retrieve Laser AF calibration data
12709*
12710* PARAMETERS : None
12711*
12712* RETURN : Two uint32_t describing laser AF calibration data
12713* NULL if none is available.
12714*==========================================================================*/
12715const uint32_t *QCamera3HardwareInterface::getLdafCalib()
12716{
12717 if (mLdafCalibExist) {
12718 return &mLdafCalib[0];
12719 } else {
12720 return NULL;
12721 }
12722}
12723
12724/*===========================================================================
12725 * FUNCTION : dynamicUpdateMetaStreamInfo
12726 *
12727 * DESCRIPTION: This function:
12728 * (1) stops all the channels
12729 * (2) returns error on pending requests and buffers
12730 * (3) sends metastream_info in setparams
12731 * (4) starts all channels
12732 * This is useful when sensor has to be restarted to apply any
12733 * settings such as frame rate from a different sensor mode
12734 *
12735 * PARAMETERS : None
12736 *
12737 * RETURN : NO_ERROR on success
12738 * Error codes on failure
12739 *
12740 *==========================================================================*/
12741int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
12742{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012743 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_DYN_UPDATE_META_STRM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070012744 int rc = NO_ERROR;
12745
12746 LOGD("E");
12747
12748 rc = stopAllChannels();
12749 if (rc < 0) {
12750 LOGE("stopAllChannels failed");
12751 return rc;
12752 }
12753
12754 rc = notifyErrorForPendingRequests();
12755 if (rc < 0) {
12756 LOGE("notifyErrorForPendingRequests failed");
12757 return rc;
12758 }
12759
12760 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
12761 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x"
12762                 ", Format:%d",
12763 mStreamConfigInfo.type[i],
12764 mStreamConfigInfo.stream_sizes[i].width,
12765 mStreamConfigInfo.stream_sizes[i].height,
12766 mStreamConfigInfo.postprocess_mask[i],
12767 mStreamConfigInfo.format[i]);
12768 }
12769
12770 /* Send meta stream info once again so that ISP can start */
12771 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
12772 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
12773 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
12774 mParameters);
12775 if (rc < 0) {
12776 LOGE("set Metastreaminfo failed. Sensor mode does not change");
12777 }
12778
12779 rc = startAllChannels();
12780 if (rc < 0) {
12781 LOGE("startAllChannels failed");
12782 return rc;
12783 }
12784
12785 LOGD("X");
12786 return rc;
12787}
12788
12789/*===========================================================================
12790 * FUNCTION : stopAllChannels
12791 *
12792 * DESCRIPTION: This function stops (equivalent to stream-off) all channels
12793 *
12794 * PARAMETERS : None
12795 *
12796 * RETURN : NO_ERROR on success
12797 * Error codes on failure
12798 *
12799 *==========================================================================*/
12800int32_t QCamera3HardwareInterface::stopAllChannels()
12801{
12802 int32_t rc = NO_ERROR;
12803
12804 LOGD("Stopping all channels");
12805 // Stop the Streams/Channels
12806 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
12807 it != mStreamInfo.end(); it++) {
12808 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
12809 if (channel) {
12810 channel->stop();
12811 }
12812 (*it)->status = INVALID;
12813 }
12814
12815 if (mSupportChannel) {
12816 mSupportChannel->stop();
12817 }
12818 if (mAnalysisChannel) {
12819 mAnalysisChannel->stop();
12820 }
12821 if (mRawDumpChannel) {
12822 mRawDumpChannel->stop();
12823 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070012824 if (mHdrPlusRawSrcChannel) {
12825 mHdrPlusRawSrcChannel->stop();
12826 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012827 if (mMetadataChannel) {
12828 /* If content of mStreamInfo is not 0, there is metadata stream */
12829 mMetadataChannel->stop();
12830 }
12831
12832 LOGD("All channels stopped");
12833 return rc;
12834}
12835
12836/*===========================================================================
12837 * FUNCTION : startAllChannels
12838 *
12839 * DESCRIPTION: This function starts (equivalent to stream-on) all channels
12840 *
12841 * PARAMETERS : None
12842 *
12843 * RETURN : NO_ERROR on success
12844 * Error codes on failure
12845 *
12846 *==========================================================================*/
12847int32_t QCamera3HardwareInterface::startAllChannels()
12848{
12849 int32_t rc = NO_ERROR;
12850
12851 LOGD("Start all channels ");
12852 // Start the Streams/Channels
12853 if (mMetadataChannel) {
12854 /* If content of mStreamInfo is not 0, there is metadata stream */
12855 rc = mMetadataChannel->start();
12856 if (rc < 0) {
12857 LOGE("META channel start failed");
12858 return rc;
12859 }
12860 }
12861 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
12862 it != mStreamInfo.end(); it++) {
12863 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
12864 if (channel) {
12865 rc = channel->start();
12866 if (rc < 0) {
12867 LOGE("channel start failed");
12868 return rc;
12869 }
12870 }
12871 }
12872 if (mAnalysisChannel) {
12873 mAnalysisChannel->start();
12874 }
12875 if (mSupportChannel) {
12876 rc = mSupportChannel->start();
12877 if (rc < 0) {
12878 LOGE("Support channel start failed");
12879 return rc;
12880 }
12881 }
12882 if (mRawDumpChannel) {
12883 rc = mRawDumpChannel->start();
12884 if (rc < 0) {
12885 LOGE("RAW dump channel start failed");
12886 return rc;
12887 }
12888 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070012889 if (mHdrPlusRawSrcChannel) {
12890 rc = mHdrPlusRawSrcChannel->start();
12891 if (rc < 0) {
12892 LOGE("HDR+ RAW channel start failed");
12893 return rc;
12894 }
12895 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012896
12897 LOGD("All channels started");
12898 return rc;
12899}
12900
12901/*===========================================================================
12902 * FUNCTION : notifyErrorForPendingRequests
12903 *
12904 * DESCRIPTION: This function sends error for all the pending requests/buffers
12905 *
12906 * PARAMETERS : None
12907 *
12908 * RETURN : Error codes
12909 * NO_ERROR on success
12910 *
12911 *==========================================================================*/
12912int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
12913{
12914 int32_t rc = NO_ERROR;
12915 unsigned int frameNum = 0;
12916 camera3_capture_result_t result;
12917 camera3_stream_buffer_t *pStream_Buf = NULL;
12918
12919 memset(&result, 0, sizeof(camera3_capture_result_t));
12920
12921 if (mPendingRequestsList.size() > 0) {
12922 pendingRequestIterator i = mPendingRequestsList.begin();
12923 frameNum = i->frame_number;
12924 } else {
12925 /* There might still be pending buffers even though there are
12926 no pending requests. Setting the frameNum to MAX so that
12927 all the buffers with smaller frame numbers are returned */
12928 frameNum = UINT_MAX;
12929 }
12930
12931 LOGH("Oldest frame num on mPendingRequestsList = %u",
12932 frameNum);
12933
12934 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
12935 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); ) {
12936
12937 if (req->frame_number < frameNum) {
12938 // Send Error notify to frameworks for each buffer for which
12939 // metadata buffer is already sent
12940 LOGH("Sending ERROR BUFFER for frame %d for %d buffer(s)",
12941 req->frame_number, req->mPendingBufferList.size());
12942
12943 pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
12944 if (NULL == pStream_Buf) {
12945 LOGE("No memory for pending buffers array");
12946 return NO_MEMORY;
12947 }
12948 memset(pStream_Buf, 0,
12949 sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());
12950 result.result = NULL;
12951 result.frame_number = req->frame_number;
12952 result.num_output_buffers = req->mPendingBufferList.size();
12953 result.output_buffers = pStream_Buf;
12954
12955 size_t index = 0;
12956 for (auto info = req->mPendingBufferList.begin();
12957 info != req->mPendingBufferList.end(); ) {
12958
12959 camera3_notify_msg_t notify_msg;
12960 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
12961 notify_msg.type = CAMERA3_MSG_ERROR;
12962 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
12963 notify_msg.message.error.error_stream = info->stream;
12964 notify_msg.message.error.frame_number = req->frame_number;
12965 pStream_Buf[index].acquire_fence = -1;
12966 pStream_Buf[index].release_fence = -1;
12967 pStream_Buf[index].buffer = info->buffer;
12968 pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
12969 pStream_Buf[index].stream = info->stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012970 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070012971 index++;
12972 // Remove buffer from list
12973 info = req->mPendingBufferList.erase(info);
12974 }
12975
12976 // Remove this request from Map
12977 LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
12978 req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
12979 req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);
12980
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012981 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -070012982
12983 delete [] pStream_Buf;
12984 } else {
12985
12986 // Go through the pending requests info and send error request to framework
12987 pendingRequestIterator i = mPendingRequestsList.begin(); //make sure i is at the beginning
12988
12989 LOGH("Sending ERROR REQUEST for frame %d", req->frame_number);
12990
12991 // Send error notify to frameworks
12992 camera3_notify_msg_t notify_msg;
12993 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
12994 notify_msg.type = CAMERA3_MSG_ERROR;
12995 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
12996 notify_msg.message.error.error_stream = NULL;
12997 notify_msg.message.error.frame_number = req->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012998 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070012999
13000 pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
13001 if (NULL == pStream_Buf) {
13002 LOGE("No memory for pending buffers array");
13003 return NO_MEMORY;
13004 }
13005 memset(pStream_Buf, 0, sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());
13006
13007 result.result = NULL;
13008 result.frame_number = req->frame_number;
13009 result.input_buffer = i->input_buffer;
13010 result.num_output_buffers = req->mPendingBufferList.size();
13011 result.output_buffers = pStream_Buf;
13012
13013 size_t index = 0;
13014 for (auto info = req->mPendingBufferList.begin();
13015 info != req->mPendingBufferList.end(); ) {
13016 pStream_Buf[index].acquire_fence = -1;
13017 pStream_Buf[index].release_fence = -1;
13018 pStream_Buf[index].buffer = info->buffer;
13019 pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
13020 pStream_Buf[index].stream = info->stream;
13021 index++;
13022 // Remove buffer from list
13023 info = req->mPendingBufferList.erase(info);
13024 }
13025
13026 // Remove this request from Map
13027 LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
13028 req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
13029 req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);
13030
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013031 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -070013032 delete [] pStream_Buf;
13033 i = erasePendingRequest(i);
13034 }
13035 }
13036
13037 /* Reset pending frame Drop list and requests list */
13038 mPendingFrameDropList.clear();
13039
13040 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
13041 req.mPendingBufferList.clear();
13042 }
13043 mPendingBuffersMap.mPendingBuffersInRequest.clear();
Thierry Strudel3d639192016-09-09 11:52:26 -070013044 LOGH("Cleared all the pending buffers ");
13045
13046 return rc;
13047}
13048
13049bool QCamera3HardwareInterface::isOnEncoder(
13050 const cam_dimension_t max_viewfinder_size,
13051 uint32_t width, uint32_t height)
13052{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013053 return ((width > (uint32_t)max_viewfinder_size.width) ||
13054 (height > (uint32_t)max_viewfinder_size.height) ||
13055 (width > (uint32_t)VIDEO_4K_WIDTH) ||
13056 (height > (uint32_t)VIDEO_4K_HEIGHT));
Thierry Strudel3d639192016-09-09 11:52:26 -070013057}
13058
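/*
 * Illustrative example (hypothetical sizes, not part of the original HAL):
 * isOnEncoder() reports whether a stream is too large for the viewfinder path,
 * i.e. it exceeds the maximum viewfinder size or 4K in either dimension.
 *
 *   max_viewfinder_size = 1920x1080
 *   isOnEncoder(max_viewfinder_size, 1280,  720) -> false
 *   isOnEncoder(max_viewfinder_size, 3840, 2160) -> true
 */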
13059/*===========================================================================
13060 * FUNCTION : setBundleInfo
13061 *
13062 * DESCRIPTION: Set bundle info for all streams that are bundle.
13063 *
13064 * PARAMETERS : None
13065 *
13066 * RETURN : NO_ERROR on success
13067 * Error codes on failure
13068 *==========================================================================*/
13069int32_t QCamera3HardwareInterface::setBundleInfo()
13070{
13071 int32_t rc = NO_ERROR;
13072
13073 if (mChannelHandle) {
13074 cam_bundle_config_t bundleInfo;
13075 memset(&bundleInfo, 0, sizeof(bundleInfo));
13076 rc = mCameraHandle->ops->get_bundle_info(
13077 mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
13078 if (rc != NO_ERROR) {
13079 LOGE("get_bundle_info failed");
13080 return rc;
13081 }
13082 if (mAnalysisChannel) {
13083 mAnalysisChannel->setBundleInfo(bundleInfo);
13084 }
13085 if (mSupportChannel) {
13086 mSupportChannel->setBundleInfo(bundleInfo);
13087 }
13088 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13089 it != mStreamInfo.end(); it++) {
13090 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13091 channel->setBundleInfo(bundleInfo);
13092 }
13093 if (mRawDumpChannel) {
13094 mRawDumpChannel->setBundleInfo(bundleInfo);
13095 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013096 if (mHdrPlusRawSrcChannel) {
13097 mHdrPlusRawSrcChannel->setBundleInfo(bundleInfo);
13098 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013099 }
13100
13101 return rc;
13102}
13103
13104/*===========================================================================
Thierry Strudel295a0ca2016-11-03 18:38:47 -070013105 * FUNCTION : setInstantAEC
13106 *
13107 * DESCRIPTION: Set Instant AEC related params.
13108 *
13109 * PARAMETERS :
13110 * @meta: CameraMetadata reference
13111 *
13112 * RETURN : NO_ERROR on success
13113 * Error codes on failure
13114 *==========================================================================*/
13115int32_t QCamera3HardwareInterface::setInstantAEC(const CameraMetadata &meta)
13116{
13117 int32_t rc = NO_ERROR;
13118 uint8_t val = 0;
13119 char prop[PROPERTY_VALUE_MAX];
13120
13121 // First try to configure instant AEC from framework metadata
13122 if (meta.exists(QCAMERA3_INSTANT_AEC_MODE)) {
13123 val = (uint8_t)meta.find(QCAMERA3_INSTANT_AEC_MODE).data.i32[0];
13124 }
13125
13126 // If framework did not set this value, try to read from set prop.
13127 if (val == 0) {
13128 memset(prop, 0, sizeof(prop));
13129 property_get("persist.camera.instant.aec", prop, "0");
13130 val = (uint8_t)atoi(prop);
13131 }
13132
13133 if ((val >= (uint8_t)CAM_AEC_NORMAL_CONVERGENCE) &&
13134 ( val < (uint8_t)CAM_AEC_CONVERGENCE_MAX)) {
13135 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_INSTANT_AEC, val);
13136 mInstantAEC = val;
13137 mInstantAECSettledFrameNumber = 0;
13138 mInstantAecFrameIdxCount = 0;
13139 LOGH("instantAEC value set %d",val);
13140 if (mInstantAEC) {
13141 memset(prop, 0, sizeof(prop));
13142 property_get("persist.camera.ae.instant.bound", prop, "10");
13143 int32_t aec_frame_skip_cnt = atoi(prop);
13144 if (aec_frame_skip_cnt >= 0) {
13145 mAecSkipDisplayFrameBound = (uint8_t)aec_frame_skip_cnt;
13146 } else {
13147 LOGE("Invalid prop for aec frame bound %d", aec_frame_skip_cnt);
13148 rc = BAD_VALUE;
13149 }
13150 }
13151 } else {
13152 LOGE("Bad instant aec value set %d", val);
13153 rc = BAD_VALUE;
13154 }
13155 return rc;
13156}
13157
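/*
 * Illustrative note (not part of the original HAL): instant AEC can be requested
 * either through the QCAMERA3_INSTANT_AEC_MODE vendor tag or, when the framework
 * leaves it at 0, through properties:
 *
 *   adb shell setprop persist.camera.instant.aec 1
 *   adb shell setprop persist.camera.ae.instant.bound 10   // display frames to skip
 *
 * Values must lie in [CAM_AEC_NORMAL_CONVERGENCE, CAM_AEC_CONVERGENCE_MAX);
 * anything else is rejected with BAD_VALUE.
 */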
13158/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070013159 * FUNCTION : get_num_overall_buffers
13160 *
13161 * DESCRIPTION: Estimate number of pending buffers across all requests.
13162 *
13163 * PARAMETERS : None
13164 *
13165 * RETURN : Number of overall pending buffers
13166 *
13167 *==========================================================================*/
13168uint32_t PendingBuffersMap::get_num_overall_buffers()
13169{
13170 uint32_t sum_buffers = 0;
13171 for (auto &req : mPendingBuffersInRequest) {
13172 sum_buffers += req.mPendingBufferList.size();
13173 }
13174 return sum_buffers;
13175}
13176
13177/*===========================================================================
13178 * FUNCTION : removeBuf
13179 *
13180 * DESCRIPTION: Remove a matching buffer from tracker.
13181 *
13182 * PARAMETERS : @buffer: image buffer for the callback
13183 *
13184 * RETURN : None
13185 *
13186 *==========================================================================*/
13187void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
13188{
13189 bool buffer_found = false;
13190 for (auto req = mPendingBuffersInRequest.begin();
13191 req != mPendingBuffersInRequest.end(); req++) {
13192 for (auto k = req->mPendingBufferList.begin();
13193 k != req->mPendingBufferList.end(); k++ ) {
13194 if (k->buffer == buffer) {
13195 LOGD("Frame %d: Found Frame buffer %p, take it out from mPendingBufferList",
13196 req->frame_number, buffer);
13197 k = req->mPendingBufferList.erase(k);
13198 if (req->mPendingBufferList.empty()) {
13199 // Remove this request from Map
13200 req = mPendingBuffersInRequest.erase(req);
13201 }
13202 buffer_found = true;
13203 break;
13204 }
13205 }
13206 if (buffer_found) {
13207 break;
13208 }
13209 }
13210 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
13211 get_num_overall_buffers());
13212}
13213
13214/*===========================================================================
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013215 * FUNCTION : getBufErrStatus
13216 *
13217 * DESCRIPTION: get buffer error status
13218 *
13219 * PARAMETERS : @buffer: buffer handle
13220 *
13221 * RETURN : Error status
13222 *
13223 *==========================================================================*/
13224int32_t PendingBuffersMap::getBufErrStatus(buffer_handle_t *buffer)
13225{
13226 for (auto& req : mPendingBuffersInRequest) {
13227 for (auto& k : req.mPendingBufferList) {
13228 if (k.buffer == buffer)
13229 return k.bufStatus;
13230 }
13231 }
13232 return CAMERA3_BUFFER_STATUS_OK;
13233}
13234
13235/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070013236 * FUNCTION : setPAAFSupport
13237 *
13238 * DESCRIPTION: Set the preview-assisted auto focus support bit in
13239 * feature mask according to stream type and filter
13240 * arrangement
13241 *
13242 * PARAMETERS : @feature_mask: current feature mask, which may be modified
13243 * @stream_type: stream type
13244 * @filter_arrangement: filter arrangement
13245 *
13246 * RETURN : None
13247 *==========================================================================*/
13248void QCamera3HardwareInterface::setPAAFSupport(
13249 cam_feature_mask_t& feature_mask,
13250 cam_stream_type_t stream_type,
13251 cam_color_filter_arrangement_t filter_arrangement)
13252{
13253 LOGD("feature_mask=0x%llx; stream_type=%d, filter_arrangement=%d",
13254 feature_mask, stream_type, filter_arrangement);
13255
13256 switch (filter_arrangement) {
13257 case CAM_FILTER_ARRANGEMENT_RGGB:
13258 case CAM_FILTER_ARRANGEMENT_GRBG:
13259 case CAM_FILTER_ARRANGEMENT_GBRG:
13260 case CAM_FILTER_ARRANGEMENT_BGGR:
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013261 if ((stream_type == CAM_STREAM_TYPE_PREVIEW) ||
13262 (stream_type == CAM_STREAM_TYPE_ANALYSIS) ||
Thierry Strudel3d639192016-09-09 11:52:26 -070013263 (stream_type == CAM_STREAM_TYPE_VIDEO)) {
13264 feature_mask |= CAM_QCOM_FEATURE_PAAF;
13265 }
13266 break;
13267 case CAM_FILTER_ARRANGEMENT_Y:
13268 if (stream_type == CAM_STREAM_TYPE_ANALYSIS) {
13269 feature_mask |= CAM_QCOM_FEATURE_PAAF;
13270 }
13271 break;
13272 default:
13273 break;
13274 }
13275}
13276
13277/*===========================================================================
13278* FUNCTION : getSensorMountAngle
13279*
13280* DESCRIPTION: Retrieve sensor mount angle
13281*
13282* PARAMETERS : None
13283*
13284* RETURN : sensor mount angle in uint32_t
13285*==========================================================================*/
13286uint32_t QCamera3HardwareInterface::getSensorMountAngle()
13287{
13288 return gCamCapability[mCameraId]->sensor_mount_angle;
13289}
13290
13291/*===========================================================================
13292* FUNCTION : getRelatedCalibrationData
13293*
13294* DESCRIPTION: Retrieve related system calibration data
13295*
13296* PARAMETERS : None
13297*
13298* RETURN : Pointer of related system calibration data
13299*==========================================================================*/
13300const cam_related_system_calibration_data_t *QCamera3HardwareInterface::getRelatedCalibrationData()
13301{
13302 return (const cam_related_system_calibration_data_t *)
13303 &(gCamCapability[mCameraId]->related_cam_calibration);
13304}
Shuzhen Wangf6890e02016-08-12 14:28:54 -070013305
13306/*===========================================================================
13307 * FUNCTION : is60HzZone
13308 *
13309 * DESCRIPTION: Whether the device is in a region with 60Hz mains electricity frequency
13310 *
13311 * PARAMETERS : None
13312 *
13313 * RETURN : True if in 60Hz zone, False otherwise
13314 *==========================================================================*/
13315bool QCamera3HardwareInterface::is60HzZone()
13316{
13317 time_t t = time(NULL);
13318 struct tm lt;
13319
13320 struct tm* r = localtime_r(&t, &lt);
13321
13322 if (r == NULL || lt.tm_gmtoff <= -2*60*60 || lt.tm_gmtoff >= 8*60*60)
13323 return true;
13324 else
13325 return false;
13326}
Shuzhen Wanga5da1022016-07-13 20:18:42 -070013327
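/*
 * Worked example (illustrative, not part of the original HAL): the heuristic
 * treats UTC offsets at or below -2h (roughly the Americas) and at or above +8h
 * (roughly East Asia / Oceania) as 60Hz regions; everything in between is
 * assumed to be 50Hz.
 *
 *   California (UTC-8): tm_gmtoff = -28800 <= -7200 -> true  (60Hz)
 *   Germany    (UTC+1): tm_gmtoff =   3600           -> false (50Hz)
 *   Japan      (UTC+9): tm_gmtoff =  32400 >= 28800  -> true  (60Hz)
 */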
13328/*===========================================================================
13329 * FUNCTION : adjustBlackLevelForCFA
13330 *
13331 * DESCRIPTION: Adjust the black level pattern in the order of RGGB to the order
13332 * of bayer CFA (Color Filter Array).
13333 *
13334 * PARAMETERS : @input: black level pattern in the order of RGGB
13335 * @output: black level pattern in the order of CFA
13336 * @color_arrangement: CFA color arrangement
13337 *
13338 * RETURN : None
13339 *==========================================================================*/
13340template<typename T>
13341void QCamera3HardwareInterface::adjustBlackLevelForCFA(
13342 T input[BLACK_LEVEL_PATTERN_CNT],
13343 T output[BLACK_LEVEL_PATTERN_CNT],
13344 cam_color_filter_arrangement_t color_arrangement)
13345{
13346 switch (color_arrangement) {
13347 case CAM_FILTER_ARRANGEMENT_GRBG:
13348 output[0] = input[1];
13349 output[1] = input[0];
13350 output[2] = input[3];
13351 output[3] = input[2];
13352 break;
13353 case CAM_FILTER_ARRANGEMENT_GBRG:
13354 output[0] = input[2];
13355 output[1] = input[3];
13356 output[2] = input[0];
13357 output[3] = input[1];
13358 break;
13359 case CAM_FILTER_ARRANGEMENT_BGGR:
13360 output[0] = input[3];
13361 output[1] = input[2];
13362 output[2] = input[1];
13363 output[3] = input[0];
13364 break;
13365 case CAM_FILTER_ARRANGEMENT_RGGB:
13366 output[0] = input[0];
13367 output[1] = input[1];
13368 output[2] = input[2];
13369 output[3] = input[3];
13370 break;
13371 default:
13372 LOGE("Invalid color arrangement to derive dynamic blacklevel");
13373 break;
13374 }
13375}
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013376
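/*
 * Worked example (hypothetical caller inside the class, not part of the original
 * HAL): reorder an RGGB black level pattern {R, Gr, Gb, B} for a GRBG sensor.
 *
 *   float rggb[BLACK_LEVEL_PATTERN_CNT] = {64.f, 65.f, 66.f, 67.f}; // R, Gr, Gb, B
 *   float cfa[BLACK_LEVEL_PATTERN_CNT];
 *   adjustBlackLevelForCFA(rggb, cfa, CAM_FILTER_ARRANGEMENT_GRBG);
 *   // cfa == {65.f, 64.f, 67.f, 66.f}, i.e. {Gr, R, B, Gb}
 */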
Chien-Yu Chen92724a82017-01-06 11:50:30 -080013377void QCamera3HardwareInterface::updateHdrPlusResultMetadata(
13378 CameraMetadata &resultMetadata,
13379 std::shared_ptr<metadata_buffer_t> settings)
13380{
13381 if (settings == nullptr) {
13382 ALOGE("%s: settings is nullptr.", __FUNCTION__);
13383 return;
13384 }
13385
13386 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, settings) {
13387 resultMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
13388 }
13389
13390 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, settings) {
13391 String8 str((const char *)gps_methods);
13392 resultMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
13393 }
13394
13395 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, settings) {
13396 resultMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
13397 }
13398
13399 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, settings) {
13400 resultMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
13401 }
13402
13403 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, settings) {
13404 uint8_t fwk_jpeg_quality = static_cast<uint8_t>(*jpeg_quality);
13405 resultMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
13406 }
13407
13408 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, settings) {
13409 uint8_t fwk_thumb_quality = static_cast<uint8_t>(*thumb_quality);
13410 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
13411 }
13412
13413 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, settings) {
13414 int32_t fwk_thumb_size[2];
13415 fwk_thumb_size[0] = thumb_size->width;
13416 fwk_thumb_size[1] = thumb_size->height;
13417 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
13418 }
13419
13420 IF_META_AVAILABLE(uint32_t, intent, CAM_INTF_META_CAPTURE_INTENT, settings) {
13421 uint8_t fwk_intent = intent[0];
13422 resultMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &fwk_intent, 1);
13423 }
13424}
13425
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080013426bool QCamera3HardwareInterface::trySubmittingHdrPlusRequest(HdrPlusPendingRequest *hdrPlusRequest,
13427 const camera3_capture_request_t &request, const CameraMetadata &metadata)
13428{
13429 if (hdrPlusRequest == nullptr) return false;
13430
13431 // Check noise reduction mode is high quality.
13432 if (!metadata.exists(ANDROID_NOISE_REDUCTION_MODE) ||
13433 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0] !=
13434 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY) {
13435 ALOGD("%s: Not an HDR+ request: ANDROID_NOISE_REDUCTION_MODE is not HQ.", __FUNCTION__);
13436 return false;
13437 }
13438
13439 // Check edge mode is high quality.
13440 if (!metadata.exists(ANDROID_EDGE_MODE) ||
13441 metadata.find(ANDROID_EDGE_MODE).data.u8[0] != ANDROID_EDGE_MODE_HIGH_QUALITY) {
13442 ALOGD("%s: Not an HDR+ request: ANDROID_EDGE_MODE is not HQ.", __FUNCTION__);
13443 return false;
13444 }
13445
13446 if (request.num_output_buffers != 1 ||
13447 request.output_buffers[0].stream->format != HAL_PIXEL_FORMAT_BLOB) {
13448 ALOGD("%s: Not an HDR+ request: Only Jpeg output is supported.", __FUNCTION__);
13449 return false;
13450 }
13451
13452 // Get a YUV buffer from pic channel.
13453 QCamera3PicChannel *picChannel = (QCamera3PicChannel*)request.output_buffers[0].stream->priv;
13454 auto yuvBuffer = std::make_shared<mm_camera_buf_def_t>();
13455 status_t res = picChannel->getYuvBufferForRequest(yuvBuffer.get(), request.frame_number);
13456 if (res != OK) {
13457 ALOGE("%s: Getting an available YUV buffer from pic channel failed: %s (%d)",
13458 __FUNCTION__, strerror(-res), res);
13459 return false;
13460 }
13461
13462 pbcamera::StreamBuffer buffer;
13463 buffer.streamId = kPbYuvOutputStreamId;
13464 buffer.data = yuvBuffer->buffer;
13465 buffer.dataSize = yuvBuffer->frame_len;
13466
13467 pbcamera::CaptureRequest pbRequest;
13468 pbRequest.id = request.frame_number;
13469 pbRequest.outputBuffers.push_back(buffer);
13470
13471 // Submit an HDR+ capture request to HDR+ service.
13472 res = mHdrPlusClient->submitCaptureRequest(&pbRequest);
13473 if (res != OK) {
13474 ALOGE("%s: %d: Submitting a capture request failed: %s (%d)", __FUNCTION__, __LINE__,
13475 strerror(-res), res);
13476 return false;
13477 }
13478
13479 hdrPlusRequest->yuvBuffer = yuvBuffer;
13480 hdrPlusRequest->frameworkOutputBuffers.push_back(request.output_buffers[0]);
13481
13482 return true;
13483}
13484
13485status_t QCamera3HardwareInterface::configureHdrPlusStreamsLocked(
13486 const cam_sensor_mode_info_t &sensor_mode_info)
13487{
13488 pbcamera::InputConfiguration inputConfig;
13489 std::vector<pbcamera::StreamConfiguration> outputStreamConfigs;
13490 status_t res = OK;
13491
13492 // Configure HDR+ client streams.
13493 // Get input config.
13494 if (mHdrPlusRawSrcChannel) {
13495 // HDR+ input buffers will be provided by HAL.
13496 res = fillPbStreamConfig(&inputConfig.streamConfig, kPbRaw10InputStreamId,
13497 HAL_PIXEL_FORMAT_RAW10, mHdrPlusRawSrcChannel, /*stream index*/0);
13498 if (res != OK) {
13499 LOGE("%s: Failed to get fill stream config for HDR+ raw src stream: %s (%d)",
13500 __FUNCTION__, strerror(-res), res);
13501 return res;
13502 }
13503
13504 inputConfig.isSensorInput = false;
13505 } else {
13506 // Sensor MIPI will send data to Easel.
13507 inputConfig.isSensorInput = true;
13508 inputConfig.sensorMode.pixelArrayWidth = sensor_mode_info.pixel_array_size.width;
13509 inputConfig.sensorMode.pixelArrayHeight = sensor_mode_info.pixel_array_size.height;
13510 inputConfig.sensorMode.activeArrayWidth = sensor_mode_info.active_array_size.width;
13511 inputConfig.sensorMode.activeArrayHeight = sensor_mode_info.active_array_size.height;
13512 inputConfig.sensorMode.outputPixelClkHz = sensor_mode_info.op_pixel_clk;
13513 }
13514
13515 // Get output configurations.
13516 // Easel may need to output RAW16 buffers if mRawChannel was created.
13517 if (mRawChannel != nullptr) {
13518 pbcamera::StreamConfiguration outputConfig;
13519 res = fillPbStreamConfig(&outputConfig, kPbRaw16OutputStreamId,
13520 HAL_PIXEL_FORMAT_RAW16, mRawChannel, /*stream index*/0);
13521 if (res != OK) {
13522 LOGE("%s: Failed to get fill stream config for raw stream: %s (%d)",
13523 __FUNCTION__, strerror(-res), res);
13524 return res;
13525 }
13526 outputStreamConfigs.push_back(outputConfig);
13527 }
13528
13529 // Easel may need to output YUV output buffers if mPictureChannel was created.
13530 pbcamera::StreamConfiguration yuvOutputConfig;
13531 if (mPictureChannel != nullptr) {
13532 res = fillPbStreamConfig(&yuvOutputConfig, kPbYuvOutputStreamId,
13533 HAL_PIXEL_FORMAT_YCrCb_420_SP, mPictureChannel, /*stream index*/0);
13534 if (res != OK) {
13535 LOGE("%s: Failed to get fill stream config for YUV stream: %s (%d)",
13536 __FUNCTION__, strerror(-res), res);
13537
13538 return res;
13539 }
13540
13541 outputStreamConfigs.push_back(yuvOutputConfig);
13542 }
13543
13544 // TODO: consider other channels for YUV output buffers.
13545
13546 res = mHdrPlusClient->configureStreams(inputConfig, outputStreamConfigs);
13547 if (res != OK) {
13548         LOGE("%s: Failed to configure streams with HDR+ client: %s (%d)", __FUNCTION__,
13549 strerror(-res), res);
13550 return res;
13551 }
13552
13553 return OK;
13554}
13555
Chien-Yu Chene687bd02016-12-07 18:30:26 -080013556void QCamera3HardwareInterface::onCaptureResult(pbcamera::CaptureResult *result,
13557 const camera_metadata_t &resultMetadata) {
13558 if (result != nullptr) {
13559 if (result->outputBuffers.size() != 1) {
13560 ALOGE("%s: Number of output buffers (%u) is not supported.", __FUNCTION__,
13561 result->outputBuffers.size());
13562 return;
13563 }
13564
13565 if (result->outputBuffers[0].streamId != kPbYuvOutputStreamId) {
13566 ALOGE("%s: Only YUV output stream is supported. (stream id %d).", __FUNCTION__,
13567 result->outputBuffers[0].streamId);
13568 return;
13569 }
13570
Chien-Yu Chen92724a82017-01-06 11:50:30 -080013571 // Find the pending HDR+ request.
Chien-Yu Chene687bd02016-12-07 18:30:26 -080013572 HdrPlusPendingRequest pendingRequest;
13573 {
13574 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
13575 auto req = mHdrPlusPendingRequests.find(result->requestId);
13576 pendingRequest = req->second;
13577 }
13578
Chien-Yu Chen92724a82017-01-06 11:50:30 -080013579 // Update the result metadata with the settings of the HDR+ still capture request because
13580 // the result metadata belongs to a ZSL buffer.
13581 CameraMetadata metadata;
13582 metadata = &resultMetadata;
13583 updateHdrPlusResultMetadata(metadata, pendingRequest.settings);
13584 camera_metadata_t* updatedResultMetadata = metadata.release();
13585
13586 QCamera3PicChannel *picChannel =
13587 (QCamera3PicChannel*)pendingRequest.frameworkOutputBuffers[0].stream->priv;
13588
Chien-Yu Chene687bd02016-12-07 18:30:26 -080013589 // Check if dumping HDR+ YUV output is enabled.
13590 char prop[PROPERTY_VALUE_MAX];
13591 property_get("persist.camera.hdrplus.dump_yuv", prop, "0");
13592 bool dumpYuvOutput = atoi(prop);
13593
13594 if (dumpYuvOutput) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -080013595 // Dump yuv buffer to a ppm file.
13596 pbcamera::StreamConfiguration outputConfig;
13597 status_t rc = fillPbStreamConfig(&outputConfig, kPbYuvOutputStreamId,
13598 HAL_PIXEL_FORMAT_YCrCb_420_SP, picChannel, /*stream index*/0);
13599 if (rc == OK) {
13600 char buf[FILENAME_MAX] = {};
13601 snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"s_%d_%d_%dx%d.ppm",
13602 result->requestId, result->outputBuffers[0].streamId,
13603 outputConfig.image.width, outputConfig.image.height);
13604
13605 hdrplus_client_utils::writePpm(buf, outputConfig, result->outputBuffers[0]);
13606 } else {
13607 LOGW("%s: Couldn't dump YUV buffer because getting stream config failed: %s (%d).",
13608 __FUNCTION__, strerror(-rc), rc);
13609 }
13610 }
13611
Chien-Yu Chen92724a82017-01-06 11:50:30 -080013612 uint32_t halStreamId = picChannel->getStreamID(picChannel->getStreamTypeMask());
13613 auto halMetadata = std::make_shared<metadata_buffer_t>();
13614 clear_metadata_buffer(halMetadata.get());
13615
13616 // Convert updated result metadata to HAL metadata and return the yuv buffer for Jpeg
13617 // encoding.
13618 status_t res = translateFwkMetadataToHalMetadata(updatedResultMetadata, halMetadata.get(),
13619 halStreamId, /*minFrameDuration*/0);
13620 if (res == OK) {
13621 // Return the buffer to pic channel for encoding.
13622 picChannel->returnYuvBufferAndEncode(pendingRequest.yuvBuffer.get(),
13623 pendingRequest.frameworkOutputBuffers[0].buffer, result->requestId,
13624 halMetadata);
13625 } else {
13626 // Return the buffer without encoding.
13627 // TODO: This should not happen but we may want to report an error buffer to camera
13628 // service.
13629 picChannel->returnYuvBuffer(pendingRequest.yuvBuffer.get());
13630 ALOGE("%s: Translate framework metadata to HAL metadata failed: %s (%d).", __FUNCTION__,
13631 strerror(-res), res);
13632 }
13633
13634 // Send HDR+ metadata to framework.
13635 {
13636 pthread_mutex_lock(&mMutex);
13637
13638 // updatedResultMetadata will be freed in handlePendingResultsWithLock.
13639 handlePendingResultsWithLock(result->requestId, updatedResultMetadata);
13640 pthread_mutex_unlock(&mMutex);
13641 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -080013642
13643 // Remove the HDR+ pending request.
13644 {
13645 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
13646 auto req = mHdrPlusPendingRequests.find(result->requestId);
13647 mHdrPlusPendingRequests.erase(req);
13648 }
13649 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013650}
13651
Chien-Yu Chene687bd02016-12-07 18:30:26 -080013652void QCamera3HardwareInterface::onFailedCaptureResult(pbcamera::CaptureResult *failedResult) {
13653 // TODO: Handle HDR+ capture failures and send the failure to framework.
13654 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
13655 auto pendingRequest = mHdrPlusPendingRequests.find(failedResult->requestId);
13656
13657 // Return the buffer to pic channel.
13658 QCamera3PicChannel *picChannel =
13659 (QCamera3PicChannel*)pendingRequest->second.frameworkOutputBuffers[0].stream->priv;
13660 picChannel->returnYuvBuffer(pendingRequest->second.yuvBuffer.get());
13661
13662 mHdrPlusPendingRequests.erase(pendingRequest);
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013663}
13664
Thierry Strudel3d639192016-09-09 11:52:26 -070013665}; //end namespace qcamera