blob: 15135fed761fdd0586bc54288bd38e4b3c151b00 [file] [log] [blame]
Thierry Strudel3d639192016-09-09 11:52:26 -07001/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
2*
3* Redistribution and use in source and binary forms, with or without
4* modification, are permitted provided that the following conditions are
5* met:
6* * Redistributions of source code must retain the above copyright
7* notice, this list of conditions and the following disclaimer.
8* * Redistributions in binary form must reproduce the above
9* copyright notice, this list of conditions and the following
10* disclaimer in the documentation and/or other materials provided
11* with the distribution.
12* * Neither the name of The Linux Foundation nor the names of its
13* contributors may be used to endorse or promote products derived
14* from this software without specific prior written permission.
15*
16* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27*
28*/
29
30#define LOG_TAG "QCamera3HWI"
31//#define LOG_NDEBUG 0
32
33#define __STDC_LIMIT_MACROS
34
35// To remove
36#include <cutils/properties.h>
37
38// System dependencies
39#include <dlfcn.h>
40#include <fcntl.h>
41#include <stdio.h>
42#include <stdlib.h>
43#include "utils/Timers.h"
44#include "sys/ioctl.h"
Shuzhen Wangf6890e02016-08-12 14:28:54 -070045#include <time.h>
Thierry Strudel3d639192016-09-09 11:52:26 -070046#include <sync/sync.h>
47#include "gralloc_priv.h"
Thierry Strudele80ad7c2016-12-06 10:16:27 -080048#include <map>
Thierry Strudel3d639192016-09-09 11:52:26 -070049
50// Display dependencies
51#include "qdMetaData.h"
52
53// Camera dependencies
54#include "android/QCamera3External.h"
55#include "util/QCameraFlash.h"
56#include "QCamera3HWI.h"
57#include "QCamera3VendorTags.h"
58#include "QCameraTrace.h"
59
Chien-Yu Chene687bd02016-12-07 18:30:26 -080060#include "HdrPlusClientUtils.h"
61
Thierry Strudel3d639192016-09-09 11:52:26 -070062extern "C" {
63#include "mm_camera_dbg.h"
64}
Shuzhen Wangfb961e52016-11-28 11:48:02 -080065#include "cam_cond.h"
Thierry Strudel3d639192016-09-09 11:52:26 -070066
67using namespace android;
68
69namespace qcamera {
70
71#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
72
73#define EMPTY_PIPELINE_DELAY 2
74#define PARTIAL_RESULT_COUNT 2
75#define FRAME_SKIP_DELAY 0
76
77#define MAX_VALUE_8BIT ((1<<8)-1)
78#define MAX_VALUE_10BIT ((1<<10)-1)
79#define MAX_VALUE_12BIT ((1<<12)-1)
80
81#define VIDEO_4K_WIDTH 3840
82#define VIDEO_4K_HEIGHT 2160
83
84#define MAX_EIS_WIDTH 1920
85#define MAX_EIS_HEIGHT 1080
86
87#define MAX_RAW_STREAMS 1
88#define MAX_STALLING_STREAMS 1
89#define MAX_PROCESSED_STREAMS 3
90/* Batch mode is enabled only if FPS set is equal to or greater than this */
91#define MIN_FPS_FOR_BATCH_MODE (120)
92#define PREVIEW_FPS_FOR_HFR (30)
93#define DEFAULT_VIDEO_FPS (30.0)
Thierry Strudele80ad7c2016-12-06 10:16:27 -080094#define TEMPLATE_MAX_PREVIEW_FPS (30.0)
Thierry Strudel3d639192016-09-09 11:52:26 -070095#define MAX_HFR_BATCH_SIZE (8)
96#define REGIONS_TUPLE_COUNT 5
97#define HDR_PLUS_PERF_TIME_OUT (7000) // milliseconds
Thierry Strudel3d639192016-09-09 11:52:26 -070098// Set a threshold for detection of missing buffers //seconds
99#define MISSING_REQUEST_BUF_TIMEOUT 3
Chien-Yu Chene687bd02016-12-07 18:30:26 -0800100#define MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT 30
Thierry Strudel3d639192016-09-09 11:52:26 -0700101#define FLUSH_TIMEOUT 3
102#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))
103
104#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3 ( CAM_QCOM_FEATURE_DENOISE2D |\
105 CAM_QCOM_FEATURE_CROP |\
106 CAM_QCOM_FEATURE_ROTATION |\
107 CAM_QCOM_FEATURE_SHARPNESS |\
108 CAM_QCOM_FEATURE_SCALE |\
109 CAM_QCOM_FEATURE_CAC |\
110 CAM_QCOM_FEATURE_CDS )
Thierry Strudel9e74aae2016-09-22 17:10:18 -0700111/* Per configuration size for static metadata length*/
112#define PER_CONFIGURATION_SIZE_3 (3)
Thierry Strudel3d639192016-09-09 11:52:26 -0700113
114#define TIMEOUT_NEVER -1
115
Thierry Strudel04e026f2016-10-10 11:27:36 -0700116/* Face landmarks indices */
117#define LEFT_EYE_X 0
118#define LEFT_EYE_Y 1
119#define RIGHT_EYE_X 2
120#define RIGHT_EYE_Y 3
121#define MOUTH_X 4
122#define MOUTH_Y 5
123#define TOTAL_LANDMARK_INDICES 6
124
// Per-camera capability blobs, filled in when each camera is probed; indexed by camera id.
cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
// Cached Android static metadata per camera, handed back to the framework.
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
extern pthread_mutex_t gCamLock;
// HAL3 log verbosity; volatile because it may be updated at runtime (see getLogLevel()).
volatile uint32_t gCamHal3LogLevel = 1;
extern uint8_t gNumCameraSessions;

// CDS property string ("On"/"Off"/"Auto") -> HAL CDS mode enum.
const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
    {"On", CAM_CDS_MODE_ON},
    {"Off", CAM_CDS_MODE_OFF},
    {"Auto",CAM_CDS_MODE_AUTO}
};
// Vendor-tag video HDR setting -> HAL video HDR mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_video_hdr_mode_t,
        cam_video_hdr_mode_t> QCamera3HardwareInterface::VIDEO_HDR_MODES_MAP[] = {
    { QCAMERA3_VIDEO_HDR_MODE_OFF,  CAM_VIDEO_HDR_MODE_OFF },
    { QCAMERA3_VIDEO_HDR_MODE_ON,   CAM_VIDEO_HDR_MODE_ON }
};

// Vendor-tag binning-correction setting -> HAL binning correction mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_binning_correction_mode_t,
        cam_binning_correction_mode_t> QCamera3HardwareInterface::BINNING_CORRECTION_MODES_MAP[] = {
    { QCAMERA3_BINNING_CORRECTION_MODE_OFF,  CAM_BINNING_CORRECTION_MODE_OFF },
    { QCAMERA3_BINNING_CORRECTION_MODE_ON,   CAM_BINNING_CORRECTION_MODE_ON }
};

// Vendor-tag IR (infrared) setting -> HAL IR mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_ir_mode_t,
        cam_ir_mode_type_t> QCamera3HardwareInterface::IR_MODES_MAP [] = {
    {QCAMERA3_IR_MODE_OFF,  CAM_IR_MODE_OFF},
    {QCAMERA3_IR_MODE_ON, CAM_IR_MODE_ON},
    {QCAMERA3_IR_MODE_AUTO, CAM_IR_MODE_AUTO}
};
Thierry Strudel3d639192016-09-09 11:52:26 -0700157
// ANDROID_CONTROL_EFFECT_MODE_* -> HAL effect mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_effect_mode_t,
        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,       CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};

// ANDROID_CONTROL_AWB_MODE_* -> HAL white balance mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_awb_mode_t,
        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};

// ANDROID_CONTROL_SCENE_MODE_* -> HAL scene mode.
// Note STEADYPHOTO intentionally maps to the HAL's ANTISHAKE mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_scene_mode_t,
        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE},
    { ANDROID_CONTROL_SCENE_MODE_HDR,            CAM_SCENE_MODE_HDR}
};

// ANDROID_CONTROL_AF_MODE_* -> HAL focus mode.
// AF_MODE_OFF appears twice (OFF and FIXED) so that reverse lookup from either
// HAL value resolves to the Android OFF mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_af_mode_t,
        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

// ANDROID_COLOR_CORRECTION_ABERRATION_MODE_* -> HAL CAC (chromatic aberration correction) mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_color_correction_aberration_mode_t,
        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};
230
// ANDROID_CONTROL_AE_ANTIBANDING_MODE_* -> HAL antibanding mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_antibanding_mode_t,
        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

// ANDROID_CONTROL_AE_MODE_* -> HAL flash mode implied by the AE mode.
// Plain AE_MODE_ON (no flash) maps to FLASH_MODE_OFF; REDEYE maps to AUTO flash.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};

// ANDROID_FLASH_MODE_* -> HAL flash mode (manual flash control).
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_flash_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF  },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};

// ANDROID_STATISTICS_FACE_DETECT_MODE_* -> HAL face detection mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_statistics_face_detect_mode_t,
        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF    },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL   }
};

// ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_* -> HAL focus calibration quality.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
            CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
            CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
            CAM_FOCUS_CALIBRATED }
};

// ANDROID_LENS_STATE_* -> HAL AF lens state.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_state_t,
        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
    { ANDROID_LENS_STATE_STATIONARY,    CAM_AF_LENS_STATE_STATIONARY},
    { ANDROID_LENS_STATE_MOVING,        CAM_AF_LENS_STATE_MOVING}
};
283
// JPEG thumbnail sizes advertised in static metadata, as (width, height) pairs.
// (0, 0) means "no thumbnail" and must be first per the Android metadata spec.
// NOTE(review): 256x154 is an unusual aspect ratio — looks like it could be a
// typo for 256x144, but it matches the shipped HAL; confirm before changing.
const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             240, 144,
                                             256, 144,
                                             240, 160,
                                             256, 154,
                                             240, 240,
                                             320, 240};

// ANDROID_SENSOR_TEST_PATTERN_MODE_* -> HAL sensor test pattern.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_test_pattern_mode_t,
        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,         CAM_TEST_PATTERN_OFF },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR, CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,  CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,         CAM_TEST_PATTERN_PN9 },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1,     CAM_TEST_PATTERN_CUSTOM1},
};

/* Since there is no mapping for all the options some Android enum are not listed.
 * Also, the order in this list is important because while mapping from HAL to Android it will
 * traverse from lower to higher index which means that for HAL values that are map to different
 * Android values, the traverse logic will select the first one found.
 */
// ANDROID_SENSOR_REFERENCE_ILLUMINANT1_* -> HAL AWB illuminant. Do NOT reorder
// entries: several HAL values appear more than once and first-match wins.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_reference_illuminant1_t,
        cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT, CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55, CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN, CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO},
};

// Requested HFR frame rate (fps) -> HAL HFR mode enum.
const QCamera3HardwareInterface::QCameraMap<
        int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
    { 60, CAM_HFR_MODE_60FPS},
    { 90, CAM_HFR_MODE_90FPS},
    { 120, CAM_HFR_MODE_120FPS},
    { 150, CAM_HFR_MODE_150FPS},
    { 180, CAM_HFR_MODE_180FPS},
    { 210, CAM_HFR_MODE_210FPS},
    { 240, CAM_HFR_MODE_240FPS},
    { 480, CAM_HFR_MODE_480FPS},
};
341
// Vendor-tag instant-AEC convergence setting -> HAL AEC convergence type.
const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_instant_aec_mode_t,
        cam_aec_convergence_type> QCamera3HardwareInterface::INSTANT_AEC_MODES_MAP[] = {
    { QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE, CAM_AEC_NORMAL_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_AGGRESSIVE_CONVERGENCE, CAM_AEC_AGGRESSIVE_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_FAST_CONVERGENCE, CAM_AEC_FAST_CONVERGENCE},
};

// Vendor-tag exposure metering setting -> HAL auto-exposure metering mode.
const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_exposure_meter_mode_t,
        cam_auto_exposure_mode_type> QCamera3HardwareInterface::AEC_MODES_MAP[] = {
    { QCAMERA3_EXP_METER_MODE_FRAME_AVERAGE,    CAM_AEC_MODE_FRAME_AVERAGE },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED,  CAM_AEC_MODE_CENTER_WEIGHTED },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING,    CAM_AEC_MODE_SPOT_METERING },
    { QCAMERA3_EXP_METER_MODE_SMART_METERING,   CAM_AEC_MODE_SMART_METERING },
    { QCAMERA3_EXP_METER_MODE_USER_METERING,    CAM_AEC_MODE_USER_METERING },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING_ADV,     CAM_AEC_MODE_SPOT_METERING_ADV },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED_ADV,   CAM_AEC_MODE_CENTER_WEIGHTED_ADV },
};

// Vendor-tag ISO setting -> HAL ISO mode.
const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_iso_mode_t,
        cam_iso_mode_type> QCamera3HardwareInterface::ISO_MODES_MAP[] = {
    { QCAMERA3_ISO_MODE_AUTO,   CAM_ISO_MODE_AUTO },
    { QCAMERA3_ISO_MODE_DEBLUR, CAM_ISO_MODE_DEBLUR },
    { QCAMERA3_ISO_MODE_100,    CAM_ISO_MODE_100 },
    { QCAMERA3_ISO_MODE_200,    CAM_ISO_MODE_200 },
    { QCAMERA3_ISO_MODE_400,    CAM_ISO_MODE_400 },
    { QCAMERA3_ISO_MODE_800,    CAM_ISO_MODE_800 },
    { QCAMERA3_ISO_MODE_1600,   CAM_ISO_MODE_1600 },
    { QCAMERA3_ISO_MODE_3200,   CAM_ISO_MODE_3200 },
};
374
// camera3_device_ops vtable handed to the framework; each entry dispatches to a
// static trampoline on QCamera3HardwareInterface. Entries left NULL
// (register_stream_buffers, get_metadata_vendor_tag_ops) are not used by this HAL.
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    .initialize                         = QCamera3HardwareInterface::initialize,
    .configure_streams                  = QCamera3HardwareInterface::configure_streams,
    .register_stream_buffers            = NULL,
    .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
    .process_capture_request            = QCamera3HardwareInterface::process_capture_request,
    .get_metadata_vendor_tag_ops        = NULL,
    .dump                               = QCamera3HardwareInterface::dump,
    .flush                              = QCamera3HardwareInterface::flush,
    .reserved                           = {0},
};

// initialise to some default value
// Per-camera session ids; 0xDEADBEEF is the "no session" sentinel.
uint32_t QCamera3HardwareInterface::sessionId[] = {0xDEADBEEF, 0xDEADBEEF, 0xDEADBEEF};
389
/*===========================================================================
 * FUNCTION   : QCamera3HardwareInterface
 *
 * DESCRIPTION: constructor of QCamera3HardwareInterface. Initializes every
 *              member to a safe default, wires up the camera3_device_t that
 *              the framework will use, reads debug/tuning properties, and
 *              probes the GPU library for surface stride alignment.
 *              NOTE: member initializer order below must match the member
 *              declaration order in QCamera3HWI.h.
 *
 * PARAMETERS :
 *   @cameraId  : camera ID
 *   @callbacks : framework module callbacks (stored, not invoked here)
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
        const camera_module_callbacks_t *callbacks)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mSupportChannel(NULL),
      mAnalysisChannel(NULL),
      mRawDumpChannel(NULL),
      mHdrPlusRawSrcChannel(NULL),
      mDummyBatchChannel(NULL),
      mDepthChannel(NULL),
      mPerfLockMgr(),
      mCommon(),
      mChannelHandle(0),
      mFirstConfiguration(true),
      mFlush(false),
      mFlushPerf(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mPrevParameters(NULL),
      m_bIsVideo(false),
      m_bIs4KVideo(false),
      m_bEisSupportedSize(false),
      m_bEisEnable(false),
      m_MobicatMask(0),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      mMetaFrameCount(0U),
      mUpdateDebugLevel(false),
      mCallbacks(callbacks),
      mCaptureIntent(0),
      mCacMode(0),
      mHybridAeEnable(0),
      /* DevCamDebug metadata internal m control*/
      mDevCamDebugMetaEnable(0),
      /* DevCamDebug metadata end */
      mBatchSize(0),
      mToBeQueuedVidBufs(0),
      mHFRVideoFps(DEFAULT_VIDEO_FPS),
      mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
      mStreamConfig(false),
      mFirstFrameNumberInBatch(0),
      mNeedSensorRestart(false),
      mPreviewStarted(false),
      mMinInFlightRequests(MIN_INFLIGHT_REQUESTS),
      mMaxInFlightRequests(MAX_INFLIGHT_REQUESTS),
      mInstantAEC(false),
      mResetInstantAEC(false),
      mInstantAECSettledFrameNumber(0),
      mAecSkipDisplayFrameBound(0),
      mInstantAecFrameIdxCount(0),
      mCurrFeatureState(0),
      mLdafCalibExist(false),
      mLastCustIntentFrmNum(-1),
      mState(CLOSED),
      mIsDeviceLinked(false),
      mIsMainCamera(true),
      mLinkedCameraId(0),
      m_pDualCamCmdHeap(NULL),
      m_pDualCamCmdPtr(NULL),
      mHdrPlusModeEnabled(false),
      mIsApInputUsedForHdrPlus(false),
      mFirstPreviewIntentSeen(false),
      m_bSensorHDREnabled(false)
{
    getLogLevel();
    mCommon.init(gCamCapability[cameraId]);
    // Populate the camera3_device_t the framework talks to.
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
#ifndef USE_HAL_3_3
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_4;
#else
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
#endif
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    PTHREAD_COND_INIT(&mBuffersCond);

    PTHREAD_COND_INIT(&mRequestCond);
    mPendingLiveRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

    // Getting system props of different kinds
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.raw.dump", prop, "0");
    mEnableRawDump = atoi(prop);
    property_get("persist.camera.hal3.force.hdr", prop, "0");
    mForceHdrSnapshot = atoi(prop);

    if (mEnableRawDump)
        LOGD("Raw dump from Camera HAL enabled");

    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
    memset(mLdafCalib, 0, sizeof(mLdafCalib));

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.preview", prop, "0");
    m_bTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    // Software TNR for preview defaults to enabled ("1").
    property_get("persist.camera.swtnr.preview", prop, "1");
    m_bSwTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.video", prop, "0");
    m_bTnrVideo = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.avtimer.debug", prop, "0");
    m_debug_avtimer = (uint8_t)atoi(prop);
    LOGI("AV timer enabled: %d", m_debug_avtimer);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.cacmode.disable", prop, "0");
    m_cacModeDisabled = (uint8_t)atoi(prop);

    //Load and read GPU library.
    // Best-effort: if libadreno_utils.so (or its symbol) is missing, fall back
    // to the default CAM_PAD_TO_32 stride padding.
    lib_surface_utils = NULL;
    LINK_get_surface_pixel_alignment = NULL;
    mSurfaceStridePadding = CAM_PAD_TO_32;
    lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
    if (lib_surface_utils) {
        *(void **)&LINK_get_surface_pixel_alignment =
                dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
        if (LINK_get_surface_pixel_alignment) {
            mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
        }
        dlclose(lib_surface_utils);
    }

    m60HzZone = is60HzZone();
}
547
/*===========================================================================
 * FUNCTION   : ~QCamera3HardwareInterface
 *
 * DESCRIPTION: destructor of QCamera3HardwareInterface. Teardown is strictly
 *              ordered: unlink dual-camera session, stop ALL streams before
 *              deleting any, send the final unconfigure, delete the channel,
 *              close the camera, then drain pending request/buffer state and
 *              release synchronization primitives.
 *
 * PARAMETERS : none
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    LOGD("E");

    int32_t rc = 0;

    // Disable power hint and enable the perf lock for close camera
    mPerfLockMgr.releasePerfLock(PERF_LOCK_POWERHINT_ENCODE);
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_CLOSE_CAMERA);

    // unlink of dualcam during close camera
    if (mIsDeviceLinked) {
        cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
                &m_pDualCamCmdPtr->bundle_info;
        m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
        m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
        // gCamLock guards the global sessionId[] table shared across cameras.
        pthread_mutex_lock(&gCamLock);

        if (mIsMainCamera == 1) {
            m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            // related session id should be session id of linked session
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        } else {
            m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        }
        pthread_mutex_unlock(&gCamLock);

        rc = mCameraHandle->ops->set_dual_cam_cmd(
                mCameraHandle->camera_handle);
        if (rc < 0) {
            // Deliberately non-fatal: keep tearing down even if unlink fails.
            LOGE("Dualcam: Unlink failed, but still proceed to close");
        }
    }

    /* We need to stop all streams before deleting any stream */
    if (mRawDumpChannel) {
        mRawDumpChannel->stop();
    }

    if (mHdrPlusRawSrcChannel) {
        mHdrPlusRawSrcChannel->stop();
    }

    // NOTE: 'camera3_stream_t *' objects are already freed at
    //       this stage by the framework
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel) {
            channel->stop();
        }
    }
    if (mSupportChannel)
        mSupportChannel->stop();

    if (mAnalysisChannel) {
        mAnalysisChannel->stop();
    }
    // Metadata channel is stopped last among streams, then the bundled channel.
    if (mMetadataChannel) {
        mMetadataChannel->stop();
    }
    if (mChannelHandle) {
        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGD("stopping channel %d", mChannelHandle);
    }

    // Second pass: everything is stopped, now it is safe to delete.
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel)
            delete channel;
        free (*it);
    }
    if (mSupportChannel) {
        delete mSupportChannel;
        mSupportChannel = NULL;
    }

    if (mAnalysisChannel) {
        delete mAnalysisChannel;
        mAnalysisChannel = NULL;
    }
    if (mRawDumpChannel) {
        delete mRawDumpChannel;
        mRawDumpChannel = NULL;
    }
    if (mHdrPlusRawSrcChannel) {
        delete mHdrPlusRawSrcChannel;
        mHdrPlusRawSrcChannel = NULL;
    }
    if (mDummyBatchChannel) {
        delete mDummyBatchChannel;
        mDummyBatchChannel = NULL;
    }

    // These two are owned via mStreamInfo above; just drop the aliases.
    mPictureChannel = NULL;
    mDepthChannel = NULL;

    if (mMetadataChannel) {
        delete mMetadataChannel;
        mMetadataChannel = NULL;
    }

    /* Clean up all channels */
    if (mCameraInitialized) {
        if(!mFirstConfiguration){
            //send the last unconfigure
            cam_stream_size_info_t stream_config_info;
            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
            stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
            stream_config_info.buffer_info.max_buffers =
                    m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
            clear_metadata_buffer(mParameters);
            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
                    stream_config_info);
            int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
            if (rc < 0) {
                LOGE("set_parms failed for unconfigure");
            }
        }
        deinitParameters();
    }

    if (mChannelHandle) {
        mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGH("deleting channel %d", mChannelHandle);
        mChannelHandle = 0;
    }

    if (mState != CLOSED)
        closeCamera();

    // Drain any bookkeeping left for in-flight requests/buffers.
    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        req.mPendingBufferList.clear();
    }
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end();) {
        i = erasePendingRequest(i);
    }
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    mPerfLockMgr.releasePerfLock(PERF_LOCK_CLOSE_CAMERA);

    pthread_cond_destroy(&mRequestCond);

    pthread_cond_destroy(&mBuffersCond);

    pthread_mutex_destroy(&mMutex);
    LOGD("X");
}
717
718/*===========================================================================
719 * FUNCTION : erasePendingRequest
720 *
721 * DESCRIPTION: function to erase a desired pending request after freeing any
722 * allocated memory
723 *
724 * PARAMETERS :
725 * @i : iterator pointing to pending request to be erased
726 *
727 * RETURN : iterator pointing to the next request
728 *==========================================================================*/
729QCamera3HardwareInterface::pendingRequestIterator
730 QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
731{
732 if (i->input_buffer != NULL) {
733 free(i->input_buffer);
734 i->input_buffer = NULL;
735 }
736 if (i->settings != NULL)
737 free_camera_metadata((camera_metadata_t*)i->settings);
738 return mPendingRequestsList.erase(i);
739}
740
741/*===========================================================================
742 * FUNCTION : camEvtHandle
743 *
744 * DESCRIPTION: Function registered to mm-camera-interface to handle events
745 *
746 * PARAMETERS :
747 * @camera_handle : interface layer camera handle
748 * @evt : ptr to event
749 * @user_data : user data ptr
750 *
751 * RETURN : none
752 *==========================================================================*/
753void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
754 mm_camera_event_t *evt,
755 void *user_data)
756{
757 QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
758 if (obj && evt) {
759 switch(evt->server_event_type) {
760 case CAM_EVENT_TYPE_DAEMON_DIED:
761 pthread_mutex_lock(&obj->mMutex);
762 obj->mState = ERROR;
763 pthread_mutex_unlock(&obj->mMutex);
764 LOGE("Fatal, camera daemon died");
765 break;
766
767 case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
768 LOGD("HAL got request pull from Daemon");
769 pthread_mutex_lock(&obj->mMutex);
770 obj->mWokenUpByDaemon = true;
771 obj->unblockRequestIfNecessary();
772 pthread_mutex_unlock(&obj->mMutex);
773 break;
774
775 default:
776 LOGW("Warning: Unhandled event %d",
777 evt->server_event_type);
778 break;
779 }
780 } else {
781 LOGE("NULL user_data/evt");
782 }
783}
784
785/*===========================================================================
786 * FUNCTION : openCamera
787 *
788 * DESCRIPTION: open camera
789 *
790 * PARAMETERS :
791 * @hw_device : double ptr for camera device struct
792 *
793 * RETURN : int32_t type of status
794 * NO_ERROR -- success
795 * none-zero failure code
796 *==========================================================================*/
797int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
798{
799 int rc = 0;
800 if (mState != CLOSED) {
801 *hw_device = NULL;
802 return PERMISSION_DENIED;
803 }
804
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800805 mPerfLockMgr.acquirePerfLock(PERF_LOCK_OPEN_CAMERA);
Thierry Strudel3d639192016-09-09 11:52:26 -0700806 LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
807 mCameraId);
808
809 rc = openCamera();
810 if (rc == 0) {
811 *hw_device = &mCameraDevice.common;
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800812 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -0700813 *hw_device = NULL;
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800814 }
Thierry Strudel3d639192016-09-09 11:52:26 -0700815
Thierry Strudel3d639192016-09-09 11:52:26 -0700816 LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
817 mCameraId, rc);
818
819 if (rc == NO_ERROR) {
820 mState = OPENED;
821 }
822 return rc;
823}
824
825/*===========================================================================
826 * FUNCTION : openCamera
827 *
828 * DESCRIPTION: open camera
829 *
830 * PARAMETERS : none
831 *
832 * RETURN : int32_t type of status
833 * NO_ERROR -- success
834 * none-zero failure code
835 *==========================================================================*/
int QCamera3HardwareInterface::openCamera()
{
    int rc = 0;
    char value[PROPERTY_VALUE_MAX];

    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_OPENCAMERA);
    // Guard against double-open: a non-NULL handle means a session is live.
    if (mCameraHandle) {
        LOGE("Failure: Camera already opened");
        return ALREADY_EXISTS;
    }

    // Reserve the flash unit so the torch HAL cannot grab it mid-session.
    rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
    if (rc < 0) {
        LOGE("Failed to reserve flash for camera id: %d",
                mCameraId);
        return UNKNOWN_ERROR;
    }

    // Open the backend session; on success mCameraHandle is populated.
    rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
    if (rc) {
        LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
        return rc;
    }

    if (!mCameraHandle) {
        LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
        return -ENODEV;
    }

    // Route daemon events (daemon died, request pull) to camEvtHandle.
    rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
            camEvtHandle, (void *)this);

    if (rc < 0) {
        LOGE("Error, failed to register event callback");
        /* Not closing camera here since it is already handled in destructor */
        return FAILED_TRANSACTION;
    }

    // 3A debug exif params; freed in closeCamera().
    mExifParams.debug_params =
            (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
    if (mExifParams.debug_params) {
        memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
    } else {
        LOGE("Out of Memory. Allocation failed for 3A debug exif params");
        return NO_MEMORY;
    }
    mFirstConfiguration = true;

    //Notify display HAL that a camera session is active.
    //But avoid calling the same during bootup because camera service might open/close
    //cameras at boot time during its initialization and display service will also internally
    //wait for camera service to initialize first while calling this display API, resulting in a
    //deadlock situation. Since boot time camera open/close calls are made only to fetch
    //capabilities, no need of this display bw optimization.
    //Use "service.bootanim.exit" property to know boot status.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (gNumCameraSessions++ == 0) {
            setCameraLaunchStatus(true);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    //fill the session id needed while linking dual cam
    pthread_mutex_lock(&gCamLock);
    rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
        &sessionId[mCameraId]);
    pthread_mutex_unlock(&gCamLock);

    if (rc < 0) {
        LOGE("Error, failed to get sessiion id");
        return UNKNOWN_ERROR;
    } else {
        //Allocate related cam sync buffer
        //this is needed for the payload that goes along with bundling cmd for related
        //camera use cases
        m_pDualCamCmdHeap = new QCamera3HeapMemory(1);
        rc = m_pDualCamCmdHeap->allocate(sizeof(cam_dual_camera_cmd_info_t));
        if(rc != OK) {
            rc = NO_MEMORY;
            LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
            return NO_MEMORY;
        }

        //Map memory for related cam sync buffer
        rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
                CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF,
                m_pDualCamCmdHeap->getFd(0),
                sizeof(cam_dual_camera_cmd_info_t),
                m_pDualCamCmdHeap->getPtr(0));
        if(rc < 0) {
            // NOTE(review): rc is set to FAILED_TRANSACTION but NO_MEMORY is
            // returned, and m_pDualCamCmdHeap allocated above is not freed on
            // this path — presumably left for closeCamera()/destructor to
            // clean up; confirm callers don't rely on the error code.
            LOGE("Dualcam: failed to map Related cam sync buffer");
            rc = FAILED_TRANSACTION;
            return NO_MEMORY;
        }
        m_pDualCamCmdPtr =
                (cam_dual_camera_cmd_info_t*) DATA_PTR(m_pDualCamCmdHeap,0);
    }

    LOGH("mCameraId=%d",mCameraId);

    // Create an HDR+ client instance.
    // TODO: detect if Easel exists instead of property.
    bool enableHdrPlus = property_get_bool("persist.camera.hdrplus.enable",
            false);
    ALOGD("%s: HDR+ in Camera HAL %s.", __FUNCTION__, enableHdrPlus ?
            "enabled" : "disabled");
    if (enableHdrPlus) {
        mHdrPlusClient = std::make_shared<HdrPlusClient>();
        mIsApInputUsedForHdrPlus =
                property_get_bool("persist.camera.hdrplus.apinput", false);
        ALOGD("%s: HDR+ input is provided by %s.", __FUNCTION__,
                mIsApInputUsedForHdrPlus ? "AP" : "Easel");
    }

    return NO_ERROR;
}
954
955/*===========================================================================
956 * FUNCTION : closeCamera
957 *
958 * DESCRIPTION: close camera
959 *
960 * PARAMETERS : none
961 *
962 * RETURN : int32_t type of status
963 * NO_ERROR -- success
964 * none-zero failure code
965 *==========================================================================*/
int QCamera3HardwareInterface::closeCamera()
{
    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CLOSECAMERA);
    int rc = NO_ERROR;
    char value[PROPERTY_VALUE_MAX];

    LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
             mCameraId);

    // unmap memory for related cam sync buffer
    // (must happen while the backend handle is still valid, i.e. before
    // close_camera below)
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF);
    if (NULL != m_pDualCamCmdHeap) {
        m_pDualCamCmdHeap->deallocate();
        delete m_pDualCamCmdHeap;
        m_pDualCamCmdHeap = NULL;
        m_pDualCamCmdPtr = NULL;
    }

    // Close the backend session; handle is invalid from here on.
    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
    mCameraHandle = NULL;

    // Drop our reference to the HDR+ client (shared_ptr reset).
    mHdrPlusClient = nullptr;

    //reset session id to some invalid id
    pthread_mutex_lock(&gCamLock);
    sessionId[mCameraId] = 0xDEADBEEF;
    pthread_mutex_unlock(&gCamLock);

    //Notify display HAL that there is no active camera session
    //but avoid calling the same during bootup. Refer to openCamera
    //for more details.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (--gNumCameraSessions == 0) {
            setCameraLaunchStatus(false);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    // Free the 3A debug exif params allocated in openCamera().
    if (mExifParams.debug_params) {
        free(mExifParams.debug_params);
        mExifParams.debug_params = NULL;
    }
    // Return the flash unit to the torch HAL; failure is non-fatal.
    if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
        LOGW("Failed to release flash for camera id: %d",
                mCameraId);
    }
    mState = CLOSED;
    LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
         mCameraId, rc);
    return rc;
}
1020
1021/*===========================================================================
1022 * FUNCTION : initialize
1023 *
1024 * DESCRIPTION: Initialize frameworks callback functions
1025 *
1026 * PARAMETERS :
1027 * @callback_ops : callback function to frameworks
1028 *
1029 * RETURN :
1030 *
1031 *==========================================================================*/
1032int QCamera3HardwareInterface::initialize(
1033 const struct camera3_callback_ops *callback_ops)
1034{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001035 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_INIT);
Thierry Strudel3d639192016-09-09 11:52:26 -07001036 int rc;
1037
1038 LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
1039 pthread_mutex_lock(&mMutex);
1040
1041 // Validate current state
1042 switch (mState) {
1043 case OPENED:
1044 /* valid state */
1045 break;
1046 default:
1047 LOGE("Invalid state %d", mState);
1048 rc = -ENODEV;
1049 goto err1;
1050 }
1051
1052 rc = initParameters();
1053 if (rc < 0) {
1054 LOGE("initParamters failed %d", rc);
1055 goto err1;
1056 }
1057 mCallbackOps = callback_ops;
1058
1059 mChannelHandle = mCameraHandle->ops->add_channel(
1060 mCameraHandle->camera_handle, NULL, NULL, this);
1061 if (mChannelHandle == 0) {
1062 LOGE("add_channel failed");
1063 rc = -ENOMEM;
1064 pthread_mutex_unlock(&mMutex);
1065 return rc;
1066 }
1067
1068 pthread_mutex_unlock(&mMutex);
1069 mCameraInitialized = true;
1070 mState = INITIALIZED;
1071 LOGI("X");
1072 return 0;
1073
1074err1:
1075 pthread_mutex_unlock(&mMutex);
1076 return rc;
1077}
1078
1079/*===========================================================================
1080 * FUNCTION : validateStreamDimensions
1081 *
1082 * DESCRIPTION: Check if the configuration requested are those advertised
1083 *
1084 * PARAMETERS :
1085 * @stream_list : streams to be configured
1086 *
1087 * RETURN :
1088 *
1089 *==========================================================================*/
int QCamera3HardwareInterface::validateStreamDimensions(
        camera3_stream_configuration_t *streamList)
{
    int rc = NO_ERROR;
    size_t count = 0;
    // Depth (point-cloud) BLOB streams are validated against the active
    // array size rather than the picture-size tables.
    uint32_t depthWidth =
            gCamCapability[mCameraId]->active_array_size.width;
    uint32_t depthHeight =
            gCamCapability[mCameraId]->active_array_size.height;

    camera3_stream_t *inputStream = NULL;
    /*
    * Loop through all streams to find input stream if it exists*
    */
    for (size_t i = 0; i< streamList->num_streams; i++) {
        if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
            if (inputStream != NULL) {
                LOGE("Error, Multiple input streams requested");
                return -EINVAL;
            }
            inputStream = streamList->streams[i];
        }
    }
    /*
    * Loop through all streams requested in configuration
    * Check if unsupported sizes have been requested on any of them
    */
    for (size_t j = 0; j < streamList->num_streams; j++) {
        bool sizeFound = false;
        camera3_stream_t *newStream = streamList->streams[j];

        // For 90/270-degree rotations the buffer is validated against the
        // swapped (pre-rotation) dimensions.
        uint32_t rotatedHeight = newStream->height;
        uint32_t rotatedWidth = newStream->width;
        if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
                (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
            rotatedHeight = newStream->width;
            rotatedWidth = newStream->height;
        }

        /*
        * Sizes are different for each type of stream format check against
        * appropriate table.
        */
        switch (newStream->format) {
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
        case HAL_PIXEL_FORMAT_RAW10:
            // RAW streams must match an advertised raw dimension exactly.
            count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
                        (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_BLOB:
            if (newStream->data_space == HAL_DATASPACE_DEPTH) {
                //As per spec. depth cloud should be sample count / 16
                uint32_t depthSamplesCount = depthWidth * depthHeight / 16;
                // Depth cloud is delivered as a 1-high buffer of samples.
                if ((depthSamplesCount == newStream->width) &&
                        (1 == newStream->height)) {
                    sizeFound = true;
                }
                break;
            }
            // Non-depth BLOB (JPEG): validate against the picture sizes.
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
            /* Verify set size against generated sizes table */
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_YCbCr_420_888:
        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
        default:
            if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
                    || newStream->stream_type == CAMERA3_STREAM_INPUT
                    || IS_USAGE_ZSL(newStream->usage)) {
                if (((int32_t)rotatedWidth ==
                                gCamCapability[mCameraId]->active_array_size.width) &&
                                ((int32_t)rotatedHeight ==
                                gCamCapability[mCameraId]->active_array_size.height)) {
                    sizeFound = true;
                    // NB: this break exits the switch, skipping the
                    // picture-size table scan below.
                    break;
                }
                /* We could potentially break here to enforce ZSL stream
                 * set from frameworks always is full active array size
                 * but it is not clear from the spc if framework will always
                 * follow that, also we have logic to override to full array
                 * size, so keeping the logic lenient at the moment
                 */
            }
            // Processed (YUV/preview/video) streams validate against the
            // picture-size table as well.
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
                    MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                            gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                            ((int32_t)rotatedHeight ==
                            gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        } /* End of switch(newStream->format) */

        /* We error out even if a single stream has unsupported size set */
        if (!sizeFound) {
            LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
                    rotatedWidth, rotatedHeight, newStream->format,
                    gCamCapability[mCameraId]->active_array_size.width,
                    gCamCapability[mCameraId]->active_array_size.height);
            rc = -EINVAL;
            break;
        }
    } /* End of for each stream */
    return rc;
}
1214
1215/*==============================================================================
1216 * FUNCTION : isSupportChannelNeeded
1217 *
1218 * DESCRIPTION: Simple heuristic func to determine if support channels is needed
1219 *
1220 * PARAMETERS :
1221 * @stream_list : streams to be configured
1222 * @stream_config_info : the config info for streams to be configured
1223 *
1224 * RETURN : Boolen true/false decision
1225 *
1226 *==========================================================================*/
1227bool QCamera3HardwareInterface::isSupportChannelNeeded(
1228 camera3_stream_configuration_t *streamList,
1229 cam_stream_size_info_t stream_config_info)
1230{
1231 uint32_t i;
1232 bool pprocRequested = false;
1233 /* Check for conditions where PProc pipeline does not have any streams*/
1234 for (i = 0; i < stream_config_info.num_streams; i++) {
1235 if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
1236 stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
1237 pprocRequested = true;
1238 break;
1239 }
1240 }
1241
1242 if (pprocRequested == false )
1243 return true;
1244
1245 /* Dummy stream needed if only raw or jpeg streams present */
1246 for (i = 0; i < streamList->num_streams; i++) {
1247 switch(streamList->streams[i]->format) {
1248 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1249 case HAL_PIXEL_FORMAT_RAW10:
1250 case HAL_PIXEL_FORMAT_RAW16:
1251 case HAL_PIXEL_FORMAT_BLOB:
1252 break;
1253 default:
1254 return false;
1255 }
1256 }
1257 return true;
1258}
1259
1260/*==============================================================================
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001261 * FUNCTION : sensor_mode_info
Thierry Strudel3d639192016-09-09 11:52:26 -07001262 *
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001263 * DESCRIPTION: Get sensor mode information based on current stream configuratoin
Thierry Strudel3d639192016-09-09 11:52:26 -07001264 *
1265 * PARAMETERS :
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001266 * @sensor_mode_info : sensor mode information (output)
Thierry Strudel3d639192016-09-09 11:52:26 -07001267 *
1268 * RETURN : int32_t type of status
1269 * NO_ERROR -- success
1270 * none-zero failure code
1271 *
1272 *==========================================================================*/
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001273int32_t QCamera3HardwareInterface::getSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
Thierry Strudel3d639192016-09-09 11:52:26 -07001274{
1275 int32_t rc = NO_ERROR;
1276
1277 cam_dimension_t max_dim = {0, 0};
1278 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
1279 if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
1280 max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
1281 if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
1282 max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
1283 }
1284
1285 clear_metadata_buffer(mParameters);
1286
1287 rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
1288 max_dim);
1289 if (rc != NO_ERROR) {
1290 LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
1291 return rc;
1292 }
1293
1294 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
1295 if (rc != NO_ERROR) {
1296 LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
1297 return rc;
1298 }
1299
1300 clear_metadata_buffer(mParameters);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001301 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -07001302
1303 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1304 mParameters);
1305 if (rc != NO_ERROR) {
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001306 LOGE("Failed to get CAM_INTF_PARM_SENSOR_MODE_INFO");
Thierry Strudel3d639192016-09-09 11:52:26 -07001307 return rc;
1308 }
1309
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001310 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO, sensorModeInfo);
Chien-Yu Chenee335912017-02-09 17:53:20 -08001311 LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1312 "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1313 sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1314 sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1315 sensorModeInfo.num_raw_bits);
Thierry Strudel3d639192016-09-09 11:52:26 -07001316
1317 return rc;
1318}
1319
1320/*==============================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07001321 * FUNCTION : addToPPFeatureMask
1322 *
1323 * DESCRIPTION: add additional features to pp feature mask based on
1324 * stream type and usecase
1325 *
1326 * PARAMETERS :
1327 * @stream_format : stream type for feature mask
1328 * @stream_idx : stream idx within postprocess_mask list to change
1329 *
1330 * RETURN : NULL
1331 *
1332 *==========================================================================*/
void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
        uint32_t stream_idx)
{
    char feature_mask_value[PROPERTY_VALUE_MAX];
    cam_feature_mask_t feature_mask;
    int args_converted;
    int property_len;

    /* Get feature mask from property */
#ifdef _LE_CAMERA_
    // LE builds default the property to SW TNR enabled.
    char swtnr_feature_mask_value[PROPERTY_VALUE_MAX];
    snprintf(swtnr_feature_mask_value, PROPERTY_VALUE_MAX, "%lld", CAM_QTI_FEATURE_SW_TNR);
    property_len = property_get("persist.camera.hal3.feature",
            feature_mask_value, swtnr_feature_mask_value);
#else
    property_len = property_get("persist.camera.hal3.feature",
            feature_mask_value, "0");
#endif
    // Accept both hex ("0x...") and decimal property values.
    // NOTE(review): if cam_feature_mask_t is unsigned, "%lld" is a
    // signed/unsigned conversion mismatch ("%llu" would match) — confirm
    // against the typedef; values in use appear to stay in range.
    if ((property_len > 2) && (feature_mask_value[0] == '0') &&
            (feature_mask_value[1] == 'x')) {
        args_converted = sscanf(feature_mask_value, "0x%llx", &feature_mask);
    } else {
        args_converted = sscanf(feature_mask_value, "%lld", &feature_mask);
    }
    if (1 != args_converted) {
        feature_mask = 0;
        LOGE("Wrong feature mask %s", feature_mask_value);
        return;
    }

    switch (stream_format) {
    case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
        /* Add LLVD to pp feature mask only if video hint is enabled */
        // SW TNR takes precedence over LLVD when both are requested.
        if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
            mStreamConfigInfo.postprocess_mask[stream_idx]
                    |= CAM_QTI_FEATURE_SW_TNR;
            LOGH("Added SW TNR to pp feature mask");
        } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
            mStreamConfigInfo.postprocess_mask[stream_idx]
                    |= CAM_QCOM_FEATURE_LLVD;
            LOGH("Added LLVD SeeMore to pp feature mask");
        }
        // Staggered video HDR is added whenever the sensor supports it.
        if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
                CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
            mStreamConfigInfo.postprocess_mask[stream_idx] |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
        }
        // Binning correction is video-only and capability-gated.
        if ((m_bIsVideo) && (gCamCapability[mCameraId]->qcom_supported_feature_mask &
                CAM_QTI_FEATURE_BINNING_CORRECTION)) {
            mStreamConfigInfo.postprocess_mask[stream_idx] |=
                    CAM_QTI_FEATURE_BINNING_CORRECTION;
        }
        break;
    }
    default:
        break;
    }
    LOGD("PP feature mask %llx",
            mStreamConfigInfo.postprocess_mask[stream_idx]);
}
1392
1393/*==============================================================================
1394 * FUNCTION : updateFpsInPreviewBuffer
1395 *
1396 * DESCRIPTION: update FPS information in preview buffer.
1397 *
1398 * PARAMETERS :
1399 * @metadata : pointer to metadata buffer
1400 * @frame_number: frame_number to look for in pending buffer list
1401 *
1402 * RETURN : None
1403 *
1404 *==========================================================================*/
1405void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
1406 uint32_t frame_number)
1407{
1408 // Mark all pending buffers for this particular request
1409 // with corresponding framerate information
1410 for (List<PendingBuffersInRequest>::iterator req =
1411 mPendingBuffersMap.mPendingBuffersInRequest.begin();
1412 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1413 for(List<PendingBufferInfo>::iterator j =
1414 req->mPendingBufferList.begin();
1415 j != req->mPendingBufferList.end(); j++) {
1416 QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
1417 if ((req->frame_number == frame_number) &&
1418 (channel->getStreamTypeMask() &
1419 (1U << CAM_STREAM_TYPE_PREVIEW))) {
1420 IF_META_AVAILABLE(cam_fps_range_t, float_range,
1421 CAM_INTF_PARM_FPS_RANGE, metadata) {
1422 typeof (MetaData_t::refreshrate) cameraFps = float_range->max_fps;
1423 struct private_handle_t *priv_handle =
1424 (struct private_handle_t *)(*(j->buffer));
1425 setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
1426 }
1427 }
1428 }
1429 }
1430}
1431
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001432/*==============================================================================
1433 * FUNCTION : updateTimeStampInPendingBuffers
1434 *
1435 * DESCRIPTION: update timestamp in display metadata for all pending buffers
1436 * of a frame number
1437 *
1438 * PARAMETERS :
1439 * @frame_number: frame_number. Timestamp will be set on pending buffers of this frame number
1440 * @timestamp : timestamp to be set
1441 *
1442 * RETURN : None
1443 *
1444 *==========================================================================*/
1445void QCamera3HardwareInterface::updateTimeStampInPendingBuffers(
1446 uint32_t frameNumber, nsecs_t timestamp)
1447{
1448 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
1449 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1450 if (req->frame_number != frameNumber)
1451 continue;
1452
1453 for (auto k = req->mPendingBufferList.begin();
1454 k != req->mPendingBufferList.end(); k++ ) {
1455 struct private_handle_t *priv_handle =
1456 (struct private_handle_t *) (*(k->buffer));
1457 setMetaData(priv_handle, SET_VT_TIMESTAMP, &timestamp);
1458 }
1459 }
1460 return;
1461}
1462
Thierry Strudel3d639192016-09-09 11:52:26 -07001463/*===========================================================================
1464 * FUNCTION : configureStreams
1465 *
1466 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1467 * and output streams.
1468 *
1469 * PARAMETERS :
1470 * @stream_list : streams to be configured
1471 *
1472 * RETURN :
1473 *
1474 *==========================================================================*/
1475int QCamera3HardwareInterface::configureStreams(
1476 camera3_stream_configuration_t *streamList)
1477{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001478 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS);
Thierry Strudel3d639192016-09-09 11:52:26 -07001479 int rc = 0;
1480
1481 // Acquire perfLock before configure streams
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001482 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001483 rc = configureStreamsPerfLocked(streamList);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001484 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001485
1486 return rc;
1487}
1488
1489/*===========================================================================
1490 * FUNCTION : configureStreamsPerfLocked
1491 *
1492 * DESCRIPTION: configureStreams while perfLock is held.
1493 *
1494 * PARAMETERS :
1495 * @stream_list : streams to be configured
1496 *
1497 * RETURN : int32_t type of status
1498 * NO_ERROR -- success
1499 * none-zero failure code
1500 *==========================================================================*/
1501int QCamera3HardwareInterface::configureStreamsPerfLocked(
1502 camera3_stream_configuration_t *streamList)
1503{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001504 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS_PERF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07001505 int rc = 0;
1506
1507 // Sanity check stream_list
1508 if (streamList == NULL) {
1509 LOGE("NULL stream configuration");
1510 return BAD_VALUE;
1511 }
1512 if (streamList->streams == NULL) {
1513 LOGE("NULL stream list");
1514 return BAD_VALUE;
1515 }
1516
1517 if (streamList->num_streams < 1) {
1518 LOGE("Bad number of streams requested: %d",
1519 streamList->num_streams);
1520 return BAD_VALUE;
1521 }
1522
1523 if (streamList->num_streams >= MAX_NUM_STREAMS) {
1524 LOGE("Maximum number of streams %d exceeded: %d",
1525 MAX_NUM_STREAMS, streamList->num_streams);
1526 return BAD_VALUE;
1527 }
1528
1529 mOpMode = streamList->operation_mode;
1530 LOGD("mOpMode: %d", mOpMode);
1531
1532 /* first invalidate all the steams in the mStreamList
1533 * if they appear again, they will be validated */
1534 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1535 it != mStreamInfo.end(); it++) {
1536 QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1537 if (channel) {
1538 channel->stop();
1539 }
1540 (*it)->status = INVALID;
1541 }
1542
1543 if (mRawDumpChannel) {
1544 mRawDumpChannel->stop();
1545 delete mRawDumpChannel;
1546 mRawDumpChannel = NULL;
1547 }
1548
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001549 if (mHdrPlusRawSrcChannel) {
1550 mHdrPlusRawSrcChannel->stop();
1551 delete mHdrPlusRawSrcChannel;
1552 mHdrPlusRawSrcChannel = NULL;
1553 }
1554
Thierry Strudel3d639192016-09-09 11:52:26 -07001555 if (mSupportChannel)
1556 mSupportChannel->stop();
1557
1558 if (mAnalysisChannel) {
1559 mAnalysisChannel->stop();
1560 }
1561 if (mMetadataChannel) {
1562 /* If content of mStreamInfo is not 0, there is metadata stream */
1563 mMetadataChannel->stop();
1564 }
1565 if (mChannelHandle) {
1566 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
1567 mChannelHandle);
1568 LOGD("stopping channel %d", mChannelHandle);
1569 }
1570
1571 pthread_mutex_lock(&mMutex);
1572
1573 // Check state
1574 switch (mState) {
1575 case INITIALIZED:
1576 case CONFIGURED:
1577 case STARTED:
1578 /* valid state */
1579 break;
1580 default:
1581 LOGE("Invalid state %d", mState);
1582 pthread_mutex_unlock(&mMutex);
1583 return -ENODEV;
1584 }
1585
1586 /* Check whether we have video stream */
1587 m_bIs4KVideo = false;
1588 m_bIsVideo = false;
1589 m_bEisSupportedSize = false;
1590 m_bTnrEnabled = false;
Mansoor Aftab93a66e52017-01-26 14:58:25 -08001591 m_bVideoHdrEnabled = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001592 bool isZsl = false;
Emilian Peev7650c122017-01-19 08:24:33 -08001593 bool depthPresent = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001594 uint32_t videoWidth = 0U;
1595 uint32_t videoHeight = 0U;
1596 size_t rawStreamCnt = 0;
1597 size_t stallStreamCnt = 0;
1598 size_t processedStreamCnt = 0;
1599 // Number of streams on ISP encoder path
1600 size_t numStreamsOnEncoder = 0;
1601 size_t numYuv888OnEncoder = 0;
1602 bool bYuv888OverrideJpeg = false;
1603 cam_dimension_t largeYuv888Size = {0, 0};
1604 cam_dimension_t maxViewfinderSize = {0, 0};
1605 bool bJpegExceeds4K = false;
1606 bool bJpegOnEncoder = false;
1607 bool bUseCommonFeatureMask = false;
1608 cam_feature_mask_t commonFeatureMask = 0;
1609 bool bSmallJpegSize = false;
1610 uint32_t width_ratio;
1611 uint32_t height_ratio;
1612 maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1613 camera3_stream_t *inputStream = NULL;
1614 bool isJpeg = false;
1615 cam_dimension_t jpegSize = {0, 0};
Thierry Strudel9ec39c62016-12-28 11:30:05 -08001616 cam_dimension_t previewSize = {0, 0};
Thierry Strudel3d639192016-09-09 11:52:26 -07001617
1618 cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
1619
1620 /*EIS configuration*/
Thierry Strudel3d639192016-09-09 11:52:26 -07001621 bool oisSupported = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001622 uint8_t eis_prop_set;
1623 uint32_t maxEisWidth = 0;
1624 uint32_t maxEisHeight = 0;
1625
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001626 // Initialize all instant AEC related variables
1627 mInstantAEC = false;
1628 mResetInstantAEC = false;
1629 mInstantAECSettledFrameNumber = 0;
1630 mAecSkipDisplayFrameBound = 0;
1631 mInstantAecFrameIdxCount = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08001632 mCurrFeatureState = 0;
1633 mStreamConfig = true;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001634
Thierry Strudel3d639192016-09-09 11:52:26 -07001635 memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1636
1637 size_t count = IS_TYPE_MAX;
1638 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1639 for (size_t i = 0; i < count; i++) {
1640 if ((gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001641 (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
1642 m_bEisSupported = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07001643 break;
1644 }
1645 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001646 count = CAM_OPT_STAB_MAX;
1647 count = MIN(gCamCapability[mCameraId]->optical_stab_modes_count, count);
1648 for (size_t i = 0; i < count; i++) {
1649 if (gCamCapability[mCameraId]->optical_stab_modes[i] == CAM_OPT_STAB_ON) {
1650 oisSupported = true;
1651 break;
1652 }
1653 }
1654
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001655 if (m_bEisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001656 maxEisWidth = MAX_EIS_WIDTH;
1657 maxEisHeight = MAX_EIS_HEIGHT;
1658 }
1659
1660 /* EIS setprop control */
1661 char eis_prop[PROPERTY_VALUE_MAX];
1662 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001663 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07001664 eis_prop_set = (uint8_t)atoi(eis_prop);
1665
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001666 m_bEisEnable = eis_prop_set && (!oisSupported && m_bEisSupported) &&
Thierry Strudel3d639192016-09-09 11:52:26 -07001667 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
1668
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001669 LOGD("m_bEisEnable: %d, eis_prop_set: %d, m_bEisSupported: %d, oisSupported:%d ",
1670 m_bEisEnable, eis_prop_set, m_bEisSupported, oisSupported);
1671
Thierry Strudel3d639192016-09-09 11:52:26 -07001672 /* stream configurations */
1673 for (size_t i = 0; i < streamList->num_streams; i++) {
1674 camera3_stream_t *newStream = streamList->streams[i];
1675 LOGI("stream[%d] type = %d, format = %d, width = %d, "
1676 "height = %d, rotation = %d, usage = 0x%x",
1677 i, newStream->stream_type, newStream->format,
1678 newStream->width, newStream->height, newStream->rotation,
1679 newStream->usage);
1680 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1681 newStream->stream_type == CAMERA3_STREAM_INPUT){
1682 isZsl = true;
1683 }
1684 if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1685 inputStream = newStream;
1686 }
1687
Emilian Peev7650c122017-01-19 08:24:33 -08001688 if ((newStream->format == HAL_PIXEL_FORMAT_BLOB) &&
1689 (newStream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001690 isJpeg = true;
1691 jpegSize.width = newStream->width;
1692 jpegSize.height = newStream->height;
1693 if (newStream->width > VIDEO_4K_WIDTH ||
1694 newStream->height > VIDEO_4K_HEIGHT)
1695 bJpegExceeds4K = true;
1696 }
1697
1698 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1699 (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1700 m_bIsVideo = true;
1701 videoWidth = newStream->width;
1702 videoHeight = newStream->height;
1703 if ((VIDEO_4K_WIDTH <= newStream->width) &&
1704 (VIDEO_4K_HEIGHT <= newStream->height)) {
1705 m_bIs4KVideo = true;
1706 }
1707 m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
1708 (newStream->height <= maxEisHeight);
1709 }
1710 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1711 newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
1712 switch (newStream->format) {
1713 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08001714 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
1715 depthPresent = true;
1716 break;
1717 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001718 stallStreamCnt++;
1719 if (isOnEncoder(maxViewfinderSize, newStream->width,
1720 newStream->height)) {
1721 numStreamsOnEncoder++;
1722 bJpegOnEncoder = true;
1723 }
1724 width_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.width,
1725 newStream->width);
1726 height_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.height,
1727 newStream->height);;
1728 FATAL_IF(gCamCapability[mCameraId]->max_downscale_factor == 0,
1729 "FATAL: max_downscale_factor cannot be zero and so assert");
1730 if ( (width_ratio > gCamCapability[mCameraId]->max_downscale_factor) ||
1731 (height_ratio > gCamCapability[mCameraId]->max_downscale_factor)) {
1732 LOGH("Setting small jpeg size flag to true");
1733 bSmallJpegSize = true;
1734 }
1735 break;
1736 case HAL_PIXEL_FORMAT_RAW10:
1737 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1738 case HAL_PIXEL_FORMAT_RAW16:
1739 rawStreamCnt++;
1740 break;
1741 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1742 processedStreamCnt++;
1743 if (isOnEncoder(maxViewfinderSize, newStream->width,
1744 newStream->height)) {
1745 if (newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL &&
1746 !IS_USAGE_ZSL(newStream->usage)) {
1747 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1748 }
1749 numStreamsOnEncoder++;
1750 }
1751 break;
1752 case HAL_PIXEL_FORMAT_YCbCr_420_888:
1753 processedStreamCnt++;
1754 if (isOnEncoder(maxViewfinderSize, newStream->width,
1755 newStream->height)) {
1756 // If Yuv888 size is not greater than 4K, set feature mask
1757 // to SUPERSET so that it support concurrent request on
1758 // YUV and JPEG.
1759 if (newStream->width <= VIDEO_4K_WIDTH &&
1760 newStream->height <= VIDEO_4K_HEIGHT) {
1761 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1762 }
1763 numStreamsOnEncoder++;
1764 numYuv888OnEncoder++;
1765 largeYuv888Size.width = newStream->width;
1766 largeYuv888Size.height = newStream->height;
1767 }
1768 break;
1769 default:
1770 processedStreamCnt++;
1771 if (isOnEncoder(maxViewfinderSize, newStream->width,
1772 newStream->height)) {
1773 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1774 numStreamsOnEncoder++;
1775 }
1776 break;
1777 }
1778
1779 }
1780 }
1781
1782 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
1783 gCamCapability[mCameraId]->position == CAM_POSITION_FRONT_AUX ||
1784 !m_bIsVideo) {
1785 m_bEisEnable = false;
1786 }
1787
Thierry Strudel54dc9782017-02-15 12:12:10 -08001788 uint8_t forceEnableTnr = 0;
1789 char tnr_prop[PROPERTY_VALUE_MAX];
1790 memset(tnr_prop, 0, sizeof(tnr_prop));
1791 property_get("debug.camera.tnr.forceenable", tnr_prop, "0");
1792 forceEnableTnr = (uint8_t)atoi(tnr_prop);
1793
Thierry Strudel3d639192016-09-09 11:52:26 -07001794 /* Logic to enable/disable TNR based on specific config size/etc.*/
1795 if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
1796 ((videoWidth == 1920 && videoHeight == 1080) ||
1797 (videoWidth == 1280 && videoHeight == 720)) &&
1798 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
1799 m_bTnrEnabled = true;
Thierry Strudel54dc9782017-02-15 12:12:10 -08001800 else if (forceEnableTnr)
1801 m_bTnrEnabled = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07001802
Mansoor Aftab93a66e52017-01-26 14:58:25 -08001803 char videoHdrProp[PROPERTY_VALUE_MAX];
1804 memset(videoHdrProp, 0, sizeof(videoHdrProp));
1805 property_get("persist.camera.hdr.video", videoHdrProp, "0");
1806 uint8_t hdr_mode_prop = (uint8_t)atoi(videoHdrProp);
1807
1808 if (hdr_mode_prop == 1 && m_bIsVideo &&
1809 mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
1810 m_bVideoHdrEnabled = true;
1811 else
1812 m_bVideoHdrEnabled = false;
1813
1814
Thierry Strudel3d639192016-09-09 11:52:26 -07001815 /* Check if num_streams is sane */
1816 if (stallStreamCnt > MAX_STALLING_STREAMS ||
1817 rawStreamCnt > MAX_RAW_STREAMS ||
1818 processedStreamCnt > MAX_PROCESSED_STREAMS) {
1819 LOGE("Invalid stream configu: stall: %d, raw: %d, processed %d",
1820 stallStreamCnt, rawStreamCnt, processedStreamCnt);
1821 pthread_mutex_unlock(&mMutex);
1822 return -EINVAL;
1823 }
1824 /* Check whether we have zsl stream or 4k video case */
Thierry Strudel9ec39c62016-12-28 11:30:05 -08001825 if (isZsl && m_bIs4KVideo) {
1826 LOGE("Currently invalid configuration ZSL & 4K Video!");
Thierry Strudel3d639192016-09-09 11:52:26 -07001827 pthread_mutex_unlock(&mMutex);
1828 return -EINVAL;
1829 }
1830 /* Check if stream sizes are sane */
1831 if (numStreamsOnEncoder > 2) {
1832 LOGE("Number of streams on ISP encoder path exceeds limits of 2");
1833 pthread_mutex_unlock(&mMutex);
1834 return -EINVAL;
1835 } else if (1 < numStreamsOnEncoder){
1836 bUseCommonFeatureMask = true;
1837 LOGH("Multiple streams above max viewfinder size, common mask needed");
1838 }
1839
1840 /* Check if BLOB size is greater than 4k in 4k recording case */
1841 if (m_bIs4KVideo && bJpegExceeds4K) {
1842 LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
1843 pthread_mutex_unlock(&mMutex);
1844 return -EINVAL;
1845 }
1846
Emilian Peev7650c122017-01-19 08:24:33 -08001847 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
1848 depthPresent) {
1849 LOGE("HAL doesn't support depth streams in HFR mode!");
1850 pthread_mutex_unlock(&mMutex);
1851 return -EINVAL;
1852 }
1853
Thierry Strudel3d639192016-09-09 11:52:26 -07001854 // When JPEG and preview streams share VFE output, CPP will not apply CAC2
1855 // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
1856 // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
1857 // is not true. Otherwise testMandatoryOutputCombinations will fail with following
1858 // configurations:
1859 // {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
1860 // {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
1861 // (These two configurations will not have CAC2 enabled even in HQ modes.)
1862 if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
1863 ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
1864 __func__);
1865 pthread_mutex_unlock(&mMutex);
1866 return -EINVAL;
1867 }
1868
1869 // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
1870 // the YUV stream's size is greater or equal to the JPEG size, set common
1871 // postprocess mask to NONE, so that we can take advantage of postproc bypass.
1872 if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
1873 jpegSize.width, jpegSize.height) &&
1874 largeYuv888Size.width > jpegSize.width &&
1875 largeYuv888Size.height > jpegSize.height) {
1876 bYuv888OverrideJpeg = true;
1877 } else if (!isJpeg && numStreamsOnEncoder > 1) {
1878 commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1879 }
1880
1881 LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %llx",
1882 maxViewfinderSize.width, maxViewfinderSize.height, isZsl, bUseCommonFeatureMask,
1883 commonFeatureMask);
1884 LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
1885 numStreamsOnEncoder, processedStreamCnt, stallStreamCnt, bSmallJpegSize);
1886
1887 rc = validateStreamDimensions(streamList);
1888 if (rc == NO_ERROR) {
1889 rc = validateStreamRotations(streamList);
1890 }
1891 if (rc != NO_ERROR) {
1892 LOGE("Invalid stream configuration requested!");
1893 pthread_mutex_unlock(&mMutex);
1894 return rc;
1895 }
1896
1897 camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
1898 for (size_t i = 0; i < streamList->num_streams; i++) {
1899 camera3_stream_t *newStream = streamList->streams[i];
1900 LOGH("newStream type = %d, stream format = %d "
1901 "stream size : %d x %d, stream rotation = %d",
1902 newStream->stream_type, newStream->format,
1903 newStream->width, newStream->height, newStream->rotation);
1904 //if the stream is in the mStreamList validate it
1905 bool stream_exists = false;
1906 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
1907 it != mStreamInfo.end(); it++) {
1908 if ((*it)->stream == newStream) {
1909 QCamera3ProcessingChannel *channel =
1910 (QCamera3ProcessingChannel*)(*it)->stream->priv;
1911 stream_exists = true;
1912 if (channel)
1913 delete channel;
1914 (*it)->status = VALID;
1915 (*it)->stream->priv = NULL;
1916 (*it)->channel = NULL;
1917 }
1918 }
1919 if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
1920 //new stream
1921 stream_info_t* stream_info;
1922 stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
1923 if (!stream_info) {
1924 LOGE("Could not allocate stream info");
1925 rc = -ENOMEM;
1926 pthread_mutex_unlock(&mMutex);
1927 return rc;
1928 }
1929 stream_info->stream = newStream;
1930 stream_info->status = VALID;
1931 stream_info->channel = NULL;
1932 mStreamInfo.push_back(stream_info);
1933 }
1934 /* Covers Opaque ZSL and API1 F/W ZSL */
1935 if (IS_USAGE_ZSL(newStream->usage)
1936 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
1937 if (zslStream != NULL) {
1938 LOGE("Multiple input/reprocess streams requested!");
1939 pthread_mutex_unlock(&mMutex);
1940 return BAD_VALUE;
1941 }
1942 zslStream = newStream;
1943 }
1944 /* Covers YUV reprocess */
1945 if (inputStream != NULL) {
1946 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
1947 && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
1948 && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
1949 && inputStream->width == newStream->width
1950 && inputStream->height == newStream->height) {
1951 if (zslStream != NULL) {
1952 /* This scenario indicates multiple YUV streams with same size
1953 * as input stream have been requested, since zsl stream handle
1954 * is solely use for the purpose of overriding the size of streams
1955 * which share h/w streams we will just make a guess here as to
1956 * which of the stream is a ZSL stream, this will be refactored
1957 * once we make generic logic for streams sharing encoder output
1958 */
1959 LOGH("Warning, Multiple ip/reprocess streams requested!");
1960 }
1961 zslStream = newStream;
1962 }
1963 }
1964 }
1965
1966 /* If a zsl stream is set, we know that we have configured at least one input or
1967 bidirectional stream */
1968 if (NULL != zslStream) {
1969 mInputStreamInfo.dim.width = (int32_t)zslStream->width;
1970 mInputStreamInfo.dim.height = (int32_t)zslStream->height;
1971 mInputStreamInfo.format = zslStream->format;
1972 mInputStreamInfo.usage = zslStream->usage;
1973 LOGD("Input stream configured! %d x %d, format %d, usage %d",
1974 mInputStreamInfo.dim.width,
1975 mInputStreamInfo.dim.height,
1976 mInputStreamInfo.format, mInputStreamInfo.usage);
1977 }
1978
1979 cleanAndSortStreamInfo();
1980 if (mMetadataChannel) {
1981 delete mMetadataChannel;
1982 mMetadataChannel = NULL;
1983 }
1984 if (mSupportChannel) {
1985 delete mSupportChannel;
1986 mSupportChannel = NULL;
1987 }
1988
1989 if (mAnalysisChannel) {
1990 delete mAnalysisChannel;
1991 mAnalysisChannel = NULL;
1992 }
1993
1994 if (mDummyBatchChannel) {
1995 delete mDummyBatchChannel;
1996 mDummyBatchChannel = NULL;
1997 }
1998
Emilian Peev7650c122017-01-19 08:24:33 -08001999 if (mDepthChannel) {
2000 mDepthChannel = NULL;
2001 }
2002
Thierry Strudel3d639192016-09-09 11:52:26 -07002003 //Create metadata channel and initialize it
2004 cam_feature_mask_t metadataFeatureMask = CAM_QCOM_FEATURE_NONE;
2005 setPAAFSupport(metadataFeatureMask, CAM_STREAM_TYPE_METADATA,
2006 gCamCapability[mCameraId]->color_arrangement);
2007 mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
2008 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002009 setBufferErrorStatus, &padding_info, metadataFeatureMask, this);
Thierry Strudel3d639192016-09-09 11:52:26 -07002010 if (mMetadataChannel == NULL) {
2011 LOGE("failed to allocate metadata channel");
2012 rc = -ENOMEM;
2013 pthread_mutex_unlock(&mMutex);
2014 return rc;
2015 }
2016 rc = mMetadataChannel->initialize(IS_TYPE_NONE);
2017 if (rc < 0) {
2018 LOGE("metadata channel initialization failed");
2019 delete mMetadataChannel;
2020 mMetadataChannel = NULL;
2021 pthread_mutex_unlock(&mMutex);
2022 return rc;
2023 }
2024
Thierry Strudel3d639192016-09-09 11:52:26 -07002025 bool isRawStreamRequested = false;
2026 memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
2027 /* Allocate channel objects for the requested streams */
2028 for (size_t i = 0; i < streamList->num_streams; i++) {
2029 camera3_stream_t *newStream = streamList->streams[i];
2030 uint32_t stream_usage = newStream->usage;
2031 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
2032 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
2033 struct camera_info *p_info = NULL;
2034 pthread_mutex_lock(&gCamLock);
2035 p_info = get_cam_info(mCameraId, &mStreamConfigInfo.sync_type);
2036 pthread_mutex_unlock(&gCamLock);
2037 if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
2038 || IS_USAGE_ZSL(newStream->usage)) &&
2039 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
2040 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
2041 if (bUseCommonFeatureMask) {
2042 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2043 commonFeatureMask;
2044 } else {
2045 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2046 CAM_QCOM_FEATURE_NONE;
2047 }
2048
2049 } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
2050 LOGH("Input stream configured, reprocess config");
2051 } else {
2052 //for non zsl streams find out the format
2053 switch (newStream->format) {
2054 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
2055 {
2056 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2057 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2058 /* add additional features to pp feature mask */
2059 addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
2060 mStreamConfigInfo.num_streams);
2061
2062 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
2063 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2064 CAM_STREAM_TYPE_VIDEO;
2065 if (m_bTnrEnabled && m_bTnrVideo) {
2066 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2067 CAM_QCOM_FEATURE_CPP_TNR;
2068 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2069 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2070 ~CAM_QCOM_FEATURE_CDS;
2071 }
2072 } else {
2073 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2074 CAM_STREAM_TYPE_PREVIEW;
2075 if (m_bTnrEnabled && m_bTnrPreview) {
2076 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2077 CAM_QCOM_FEATURE_CPP_TNR;
2078 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2079 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2080 ~CAM_QCOM_FEATURE_CDS;
2081 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002082 if(!m_bSwTnrPreview) {
2083 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2084 ~CAM_QTI_FEATURE_SW_TNR;
2085 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002086 padding_info.width_padding = mSurfaceStridePadding;
2087 padding_info.height_padding = CAM_PAD_TO_2;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002088 previewSize.width = (int32_t)newStream->width;
2089 previewSize.height = (int32_t)newStream->height;
Thierry Strudel3d639192016-09-09 11:52:26 -07002090 }
2091 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
2092 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
2093 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2094 newStream->height;
2095 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2096 newStream->width;
2097 }
2098 }
2099 break;
2100 case HAL_PIXEL_FORMAT_YCbCr_420_888:
2101 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
2102 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2103 if (bUseCommonFeatureMask)
2104 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2105 commonFeatureMask;
2106 else
2107 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2108 CAM_QCOM_FEATURE_NONE;
2109 } else {
2110 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2111 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2112 }
2113 break;
2114 case HAL_PIXEL_FORMAT_BLOB:
2115 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
2116 // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
2117 if ((m_bIs4KVideo && !isZsl) || (bSmallJpegSize && !isZsl)) {
2118 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2119 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2120 } else {
2121 if (bUseCommonFeatureMask &&
2122 isOnEncoder(maxViewfinderSize, newStream->width,
2123 newStream->height)) {
2124 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
2125 } else {
2126 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2127 }
2128 }
2129 if (isZsl) {
2130 if (zslStream) {
2131 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2132 (int32_t)zslStream->width;
2133 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2134 (int32_t)zslStream->height;
2135 } else {
2136 LOGE("Error, No ZSL stream identified");
2137 pthread_mutex_unlock(&mMutex);
2138 return -EINVAL;
2139 }
2140 } else if (m_bIs4KVideo) {
2141 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)videoWidth;
2142 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)videoHeight;
2143 } else if (bYuv888OverrideJpeg) {
2144 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2145 (int32_t)largeYuv888Size.width;
2146 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2147 (int32_t)largeYuv888Size.height;
2148 }
2149 break;
2150 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2151 case HAL_PIXEL_FORMAT_RAW16:
2152 case HAL_PIXEL_FORMAT_RAW10:
2153 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2154 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2155 isRawStreamRequested = true;
2156 break;
2157 default:
2158 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
2159 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2160 break;
2161 }
2162 }
2163
2164 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2165 (cam_stream_type_t) mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2166 gCamCapability[mCameraId]->color_arrangement);
2167
2168 if (newStream->priv == NULL) {
2169 //New stream, construct channel
2170 switch (newStream->stream_type) {
2171 case CAMERA3_STREAM_INPUT:
2172 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
2173 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
2174 break;
2175 case CAMERA3_STREAM_BIDIRECTIONAL:
2176 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
2177 GRALLOC_USAGE_HW_CAMERA_WRITE;
2178 break;
2179 case CAMERA3_STREAM_OUTPUT:
2180 /* For video encoding stream, set read/write rarely
2181 * flag so that they may be set to un-cached */
2182 if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
2183 newStream->usage |=
2184 (GRALLOC_USAGE_SW_READ_RARELY |
2185 GRALLOC_USAGE_SW_WRITE_RARELY |
2186 GRALLOC_USAGE_HW_CAMERA_WRITE);
2187 else if (IS_USAGE_ZSL(newStream->usage))
2188 {
2189 LOGD("ZSL usage flag skipping");
2190 }
2191 else if (newStream == zslStream
2192 || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
2193 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
2194 } else
2195 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
2196 break;
2197 default:
2198 LOGE("Invalid stream_type %d", newStream->stream_type);
2199 break;
2200 }
2201
2202 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
2203 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
2204 QCamera3ProcessingChannel *channel = NULL;
2205 switch (newStream->format) {
2206 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2207 if ((newStream->usage &
2208 private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
2209 (streamList->operation_mode ==
2210 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2211 ) {
2212 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2213 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002214 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002215 this,
2216 newStream,
2217 (cam_stream_type_t)
2218 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2219 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2220 mMetadataChannel,
2221 0); //heap buffers are not required for HFR video channel
2222 if (channel == NULL) {
2223 LOGE("allocation of channel failed");
2224 pthread_mutex_unlock(&mMutex);
2225 return -ENOMEM;
2226 }
2227 //channel->getNumBuffers() will return 0 here so use
2228 //MAX_INFLIGH_HFR_REQUESTS
2229 newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
2230 newStream->priv = channel;
2231 LOGI("num video buffers in HFR mode: %d",
2232 MAX_INFLIGHT_HFR_REQUESTS);
2233 } else {
2234 /* Copy stream contents in HFR preview only case to create
2235 * dummy batch channel so that sensor streaming is in
2236 * HFR mode */
2237 if (!m_bIsVideo && (streamList->operation_mode ==
2238 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
2239 mDummyBatchStream = *newStream;
2240 }
2241 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2242 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002243 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002244 this,
2245 newStream,
2246 (cam_stream_type_t)
2247 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2248 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2249 mMetadataChannel,
2250 MAX_INFLIGHT_REQUESTS);
2251 if (channel == NULL) {
2252 LOGE("allocation of channel failed");
2253 pthread_mutex_unlock(&mMutex);
2254 return -ENOMEM;
2255 }
2256 newStream->max_buffers = channel->getNumBuffers();
2257 newStream->priv = channel;
2258 }
2259 break;
2260 case HAL_PIXEL_FORMAT_YCbCr_420_888: {
2261 channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
2262 mChannelHandle,
2263 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002264 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002265 this,
2266 newStream,
2267 (cam_stream_type_t)
2268 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2269 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2270 mMetadataChannel);
2271 if (channel == NULL) {
2272 LOGE("allocation of YUV channel failed");
2273 pthread_mutex_unlock(&mMutex);
2274 return -ENOMEM;
2275 }
2276 newStream->max_buffers = channel->getNumBuffers();
2277 newStream->priv = channel;
2278 break;
2279 }
2280 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2281 case HAL_PIXEL_FORMAT_RAW16:
2282 case HAL_PIXEL_FORMAT_RAW10:
2283 mRawChannel = new QCamera3RawChannel(
2284 mCameraHandle->camera_handle, mChannelHandle,
2285 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002286 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002287 this, newStream,
2288 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2289 mMetadataChannel,
2290 (newStream->format == HAL_PIXEL_FORMAT_RAW16));
2291 if (mRawChannel == NULL) {
2292 LOGE("allocation of raw channel failed");
2293 pthread_mutex_unlock(&mMutex);
2294 return -ENOMEM;
2295 }
2296 newStream->max_buffers = mRawChannel->getNumBuffers();
2297 newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
2298 break;
2299 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08002300 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
2301 mDepthChannel = new QCamera3DepthChannel(
2302 mCameraHandle->camera_handle, mChannelHandle,
2303 mCameraHandle->ops, NULL, NULL, &padding_info,
2304 0, this, MAX_INFLIGHT_REQUESTS, newStream,
2305 mMetadataChannel);
2306 if (NULL == mDepthChannel) {
2307 LOGE("Allocation of depth channel failed");
2308 pthread_mutex_unlock(&mMutex);
2309 return NO_MEMORY;
2310 }
2311 newStream->priv = mDepthChannel;
2312 newStream->max_buffers = MAX_INFLIGHT_REQUESTS;
2313 } else {
2314 // Max live snapshot inflight buffer is 1. This is to mitigate
2315 // frame drop issues for video snapshot. The more buffers being
2316 // allocated, the more frame drops there are.
2317 mPictureChannel = new QCamera3PicChannel(
2318 mCameraHandle->camera_handle, mChannelHandle,
2319 mCameraHandle->ops, captureResultCb,
2320 setBufferErrorStatus, &padding_info, this, newStream,
2321 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2322 m_bIs4KVideo, isZsl, mMetadataChannel,
2323 (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
2324 if (mPictureChannel == NULL) {
2325 LOGE("allocation of channel failed");
2326 pthread_mutex_unlock(&mMutex);
2327 return -ENOMEM;
2328 }
2329 newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
2330 newStream->max_buffers = mPictureChannel->getNumBuffers();
2331 mPictureChannel->overrideYuvSize(
2332 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
2333 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
Thierry Strudel3d639192016-09-09 11:52:26 -07002334 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002335 break;
2336
2337 default:
2338 LOGE("not a supported format 0x%x", newStream->format);
2339 break;
2340 }
2341 } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
2342 newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
2343 } else {
2344 LOGE("Error, Unknown stream type");
2345 pthread_mutex_unlock(&mMutex);
2346 return -EINVAL;
2347 }
2348
2349 QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
2350 if (channel != NULL && channel->isUBWCEnabled()) {
2351 cam_format_t fmt = channel->getStreamDefaultFormat(
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07002352 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2353 newStream->width, newStream->height);
Thierry Strudel3d639192016-09-09 11:52:26 -07002354 if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
2355 newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
2356 }
2357 }
2358
2359 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2360 it != mStreamInfo.end(); it++) {
2361 if ((*it)->stream == newStream) {
2362 (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
2363 break;
2364 }
2365 }
2366 } else {
2367 // Channel already exists for this stream
2368 // Do nothing for now
2369 }
2370 padding_info = gCamCapability[mCameraId]->padding_info;
2371
Emilian Peev7650c122017-01-19 08:24:33 -08002372 /* Do not add entries for input&depth stream in metastream info
Thierry Strudel3d639192016-09-09 11:52:26 -07002373 * since there is no real stream associated with it
2374 */
Emilian Peev7650c122017-01-19 08:24:33 -08002375 if ((newStream->stream_type != CAMERA3_STREAM_INPUT) &&
2376 (newStream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002377 mStreamConfigInfo.num_streams++;
Emilian Peev7650c122017-01-19 08:24:33 -08002378 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002379 }
2380
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002381 // Create analysis stream all the time, even when h/w support is not available
2382 {
2383 cam_feature_mask_t analysisFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2384 setPAAFSupport(analysisFeatureMask, CAM_STREAM_TYPE_ANALYSIS,
2385 gCamCapability[mCameraId]->color_arrangement);
2386 cam_analysis_info_t analysisInfo;
2387 int32_t ret = NO_ERROR;
2388 ret = mCommon.getAnalysisInfo(
2389 FALSE,
2390 analysisFeatureMask,
2391 &analysisInfo);
2392 if (ret == NO_ERROR) {
2393 cam_dimension_t analysisDim;
2394 analysisDim = mCommon.getMatchingDimension(previewSize,
2395 analysisInfo.analysis_recommended_res);
2396
2397 mAnalysisChannel = new QCamera3SupportChannel(
2398 mCameraHandle->camera_handle,
2399 mChannelHandle,
2400 mCameraHandle->ops,
2401 &analysisInfo.analysis_padding_info,
2402 analysisFeatureMask,
2403 CAM_STREAM_TYPE_ANALYSIS,
2404 &analysisDim,
2405 (analysisInfo.analysis_format
2406 == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
2407 : CAM_FORMAT_YUV_420_NV21),
2408 analysisInfo.hw_analysis_supported,
2409 gCamCapability[mCameraId]->color_arrangement,
2410 this,
2411 0); // force buffer count to 0
2412 } else {
2413 LOGW("getAnalysisInfo failed, ret = %d", ret);
2414 }
2415 if (!mAnalysisChannel) {
2416 LOGW("Analysis channel cannot be created");
2417 }
2418 }
2419
Thierry Strudel3d639192016-09-09 11:52:26 -07002420 //RAW DUMP channel
2421 if (mEnableRawDump && isRawStreamRequested == false){
2422 cam_dimension_t rawDumpSize;
2423 rawDumpSize = getMaxRawSize(mCameraId);
2424 cam_feature_mask_t rawDumpFeatureMask = CAM_QCOM_FEATURE_NONE;
2425 setPAAFSupport(rawDumpFeatureMask,
2426 CAM_STREAM_TYPE_RAW,
2427 gCamCapability[mCameraId]->color_arrangement);
2428 mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
2429 mChannelHandle,
2430 mCameraHandle->ops,
2431 rawDumpSize,
2432 &padding_info,
2433 this, rawDumpFeatureMask);
2434 if (!mRawDumpChannel) {
2435 LOGE("Raw Dump channel cannot be created");
2436 pthread_mutex_unlock(&mMutex);
2437 return -ENOMEM;
2438 }
2439 }
2440
Chien-Yu Chenee335912017-02-09 17:53:20 -08002441 // Initialize HDR+ Raw Source channel if AP is providing RAW input to Easel.
2442 if (mHdrPlusClient != nullptr && mIsApInputUsedForHdrPlus) {
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002443 if (isRawStreamRequested || mRawDumpChannel) {
Chien-Yu Chenee335912017-02-09 17:53:20 -08002444 ALOGE("%s: Enabling HDR+ while RAW output stream is configured is not supported. "
2445 "HDR+ RAW source channel is not created.",
2446 __FUNCTION__);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002447 } else {
2448 cam_dimension_t rawSize = getMaxRawSize(mCameraId);
2449 cam_feature_mask_t hdrPlusRawFeatureMask = CAM_QCOM_FEATURE_NONE;
2450 setPAAFSupport(hdrPlusRawFeatureMask,
2451 CAM_STREAM_TYPE_RAW,
2452 gCamCapability[mCameraId]->color_arrangement);
2453 mHdrPlusRawSrcChannel = new QCamera3HdrPlusRawSrcChannel(mCameraHandle->camera_handle,
2454 mChannelHandle,
2455 mCameraHandle->ops,
2456 rawSize,
2457 &padding_info,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08002458 this, hdrPlusRawFeatureMask,
2459 mHdrPlusClient,
2460 kPbRaw10InputStreamId);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002461 if (!mHdrPlusRawSrcChannel) {
2462 LOGE("HDR+ Raw Source channel cannot be created");
2463 pthread_mutex_unlock(&mMutex);
2464 return -ENOMEM;
2465 }
2466 }
2467 }
2468
Thierry Strudel3d639192016-09-09 11:52:26 -07002469 if (mAnalysisChannel) {
2470 cam_analysis_info_t analysisInfo;
2471 memset(&analysisInfo, 0, sizeof(cam_analysis_info_t));
2472 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2473 CAM_STREAM_TYPE_ANALYSIS;
2474 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2475 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2476 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2477 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2478 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002479 rc = mCommon.getAnalysisInfo(FALSE,
Thierry Strudel3d639192016-09-09 11:52:26 -07002480 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2481 &analysisInfo);
2482 if (rc != NO_ERROR) {
2483 LOGE("getAnalysisInfo failed, ret = %d", rc);
2484 pthread_mutex_unlock(&mMutex);
2485 return rc;
2486 }
2487 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002488 mCommon.getMatchingDimension(previewSize,
2489 analysisInfo.analysis_recommended_res);
Thierry Strudel3d639192016-09-09 11:52:26 -07002490 mStreamConfigInfo.num_streams++;
2491 }
2492
2493 if (isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
2494 cam_analysis_info_t supportInfo;
2495 memset(&supportInfo, 0, sizeof(cam_analysis_info_t));
2496 cam_feature_mask_t callbackFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2497 setPAAFSupport(callbackFeatureMask,
2498 CAM_STREAM_TYPE_CALLBACK,
2499 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002500 int32_t ret = NO_ERROR;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002501 ret = mCommon.getAnalysisInfo(FALSE, callbackFeatureMask, &supportInfo);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002502 if (ret != NO_ERROR) {
2503 /* Ignore the error for Mono camera
2504 * because the PAAF bit mask is only set
2505 * for CAM_STREAM_TYPE_ANALYSIS stream type
2506 */
2507 if (gCamCapability[mCameraId]->color_arrangement != CAM_FILTER_ARRANGEMENT_Y) {
2508 LOGW("getAnalysisInfo failed, ret = %d", ret);
2509 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002510 }
2511 mSupportChannel = new QCamera3SupportChannel(
2512 mCameraHandle->camera_handle,
2513 mChannelHandle,
2514 mCameraHandle->ops,
2515 &gCamCapability[mCameraId]->padding_info,
2516 callbackFeatureMask,
2517 CAM_STREAM_TYPE_CALLBACK,
2518 &QCamera3SupportChannel::kDim,
2519 CAM_FORMAT_YUV_420_NV21,
2520 supportInfo.hw_analysis_supported,
2521 gCamCapability[mCameraId]->color_arrangement,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002522 this, 0);
Thierry Strudel3d639192016-09-09 11:52:26 -07002523 if (!mSupportChannel) {
2524 LOGE("dummy channel cannot be created");
2525 pthread_mutex_unlock(&mMutex);
2526 return -ENOMEM;
2527 }
2528 }
2529
2530 if (mSupportChannel) {
2531 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2532 QCamera3SupportChannel::kDim;
2533 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2534 CAM_STREAM_TYPE_CALLBACK;
2535 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2536 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2537 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2538 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2539 gCamCapability[mCameraId]->color_arrangement);
2540 mStreamConfigInfo.num_streams++;
2541 }
2542
2543 if (mRawDumpChannel) {
2544 cam_dimension_t rawSize;
2545 rawSize = getMaxRawSize(mCameraId);
2546 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2547 rawSize;
2548 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2549 CAM_STREAM_TYPE_RAW;
2550 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2551 CAM_QCOM_FEATURE_NONE;
2552 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2553 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2554 gCamCapability[mCameraId]->color_arrangement);
2555 mStreamConfigInfo.num_streams++;
2556 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002557
2558 if (mHdrPlusRawSrcChannel) {
2559 cam_dimension_t rawSize;
2560 rawSize = getMaxRawSize(mCameraId);
2561 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] = rawSize;
2562 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2563 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2564 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2565 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2566 gCamCapability[mCameraId]->color_arrangement);
2567 mStreamConfigInfo.num_streams++;
2568 }
2569
Thierry Strudel3d639192016-09-09 11:52:26 -07002570 /* In HFR mode, if video stream is not added, create a dummy channel so that
2571 * ISP can create a batch mode even for preview only case. This channel is
2572 * never 'start'ed (no stream-on), it is only 'initialized' */
2573 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2574 !m_bIsVideo) {
2575 cam_feature_mask_t dummyFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2576 setPAAFSupport(dummyFeatureMask,
2577 CAM_STREAM_TYPE_VIDEO,
2578 gCamCapability[mCameraId]->color_arrangement);
2579 mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2580 mChannelHandle,
2581 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002582 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002583 this,
2584 &mDummyBatchStream,
2585 CAM_STREAM_TYPE_VIDEO,
2586 dummyFeatureMask,
2587 mMetadataChannel);
2588 if (NULL == mDummyBatchChannel) {
2589 LOGE("creation of mDummyBatchChannel failed."
2590 "Preview will use non-hfr sensor mode ");
2591 }
2592 }
2593 if (mDummyBatchChannel) {
2594 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2595 mDummyBatchStream.width;
2596 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2597 mDummyBatchStream.height;
2598 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2599 CAM_STREAM_TYPE_VIDEO;
2600 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2601 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2602 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2603 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2604 gCamCapability[mCameraId]->color_arrangement);
2605 mStreamConfigInfo.num_streams++;
2606 }
2607
2608 mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
2609 mStreamConfigInfo.buffer_info.max_buffers =
2610 m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
2611
2612 /* Initialize mPendingRequestInfo and mPendingBuffersMap */
2613 for (pendingRequestIterator i = mPendingRequestsList.begin();
2614 i != mPendingRequestsList.end();) {
2615 i = erasePendingRequest(i);
2616 }
2617 mPendingFrameDropList.clear();
2618 // Initialize/Reset the pending buffers list
2619 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
2620 req.mPendingBufferList.clear();
2621 }
2622 mPendingBuffersMap.mPendingBuffersInRequest.clear();
2623
Thierry Strudel3d639192016-09-09 11:52:26 -07002624 mCurJpegMeta.clear();
2625 //Get min frame duration for this streams configuration
2626 deriveMinFrameDuration();
2627
Chien-Yu Chenee335912017-02-09 17:53:20 -08002628 mFirstPreviewIntentSeen = false;
2629
2630 // Disable HRD+ if it's enabled;
2631 disableHdrPlusModeLocked();
2632
Thierry Strudel3d639192016-09-09 11:52:26 -07002633 // Update state
2634 mState = CONFIGURED;
2635
2636 pthread_mutex_unlock(&mMutex);
2637
2638 return rc;
2639}
2640
2641/*===========================================================================
2642 * FUNCTION : validateCaptureRequest
2643 *
2644 * DESCRIPTION: validate a capture request from camera service
2645 *
2646 * PARAMETERS :
2647 * @request : request from framework to process
2648 *
2649 * RETURN :
2650 *
2651 *==========================================================================*/
2652int QCamera3HardwareInterface::validateCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002653 camera3_capture_request_t *request,
2654 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07002655{
2656 ssize_t idx = 0;
2657 const camera3_stream_buffer_t *b;
2658 CameraMetadata meta;
2659
2660 /* Sanity check the request */
2661 if (request == NULL) {
2662 LOGE("NULL capture request");
2663 return BAD_VALUE;
2664 }
2665
2666 if ((request->settings == NULL) && (mState == CONFIGURED)) {
2667 /*settings cannot be null for the first request*/
2668 return BAD_VALUE;
2669 }
2670
2671 uint32_t frameNumber = request->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002672 if ((request->num_output_buffers < 1 || request->output_buffers == NULL)
2673 && (internallyRequestedStreams.size() == 0)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002674 LOGE("Request %d: No output buffers provided!",
2675 __FUNCTION__, frameNumber);
2676 return BAD_VALUE;
2677 }
2678 if (request->num_output_buffers >= MAX_NUM_STREAMS) {
2679 LOGE("Number of buffers %d equals or is greater than maximum number of streams!",
2680 request->num_output_buffers, MAX_NUM_STREAMS);
2681 return BAD_VALUE;
2682 }
2683 if (request->input_buffer != NULL) {
2684 b = request->input_buffer;
2685 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2686 LOGE("Request %d: Buffer %ld: Status not OK!",
2687 frameNumber, (long)idx);
2688 return BAD_VALUE;
2689 }
2690 if (b->release_fence != -1) {
2691 LOGE("Request %d: Buffer %ld: Has a release fence!",
2692 frameNumber, (long)idx);
2693 return BAD_VALUE;
2694 }
2695 if (b->buffer == NULL) {
2696 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
2697 frameNumber, (long)idx);
2698 return BAD_VALUE;
2699 }
2700 }
2701
2702 // Validate all buffers
2703 b = request->output_buffers;
Thierry Strudel54dc9782017-02-15 12:12:10 -08002704 if (b == NULL) {
2705 return BAD_VALUE;
2706 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002707 while (idx < (ssize_t)request->num_output_buffers) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002708 QCamera3ProcessingChannel *channel =
2709 static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
2710 if (channel == NULL) {
2711 LOGE("Request %d: Buffer %ld: Unconfigured stream!",
2712 frameNumber, (long)idx);
2713 return BAD_VALUE;
2714 }
2715 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2716 LOGE("Request %d: Buffer %ld: Status not OK!",
2717 frameNumber, (long)idx);
2718 return BAD_VALUE;
2719 }
2720 if (b->release_fence != -1) {
2721 LOGE("Request %d: Buffer %ld: Has a release fence!",
2722 frameNumber, (long)idx);
2723 return BAD_VALUE;
2724 }
2725 if (b->buffer == NULL) {
2726 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
2727 frameNumber, (long)idx);
2728 return BAD_VALUE;
2729 }
2730 if (*(b->buffer) == NULL) {
2731 LOGE("Request %d: Buffer %ld: NULL private handle!",
2732 frameNumber, (long)idx);
2733 return BAD_VALUE;
2734 }
2735 idx++;
2736 b = request->output_buffers + idx;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002737 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002738 return NO_ERROR;
2739}
2740
2741/*===========================================================================
2742 * FUNCTION : deriveMinFrameDuration
2743 *
2744 * DESCRIPTION: derive mininum processed, jpeg, and raw frame durations based
2745 * on currently configured streams.
2746 *
2747 * PARAMETERS : NONE
2748 *
2749 * RETURN : NONE
2750 *
2751 *==========================================================================*/
2752void QCamera3HardwareInterface::deriveMinFrameDuration()
2753{
2754 int32_t maxJpegDim, maxProcessedDim, maxRawDim;
2755
2756 maxJpegDim = 0;
2757 maxProcessedDim = 0;
2758 maxRawDim = 0;
2759
2760 // Figure out maximum jpeg, processed, and raw dimensions
2761 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
2762 it != mStreamInfo.end(); it++) {
2763
2764 // Input stream doesn't have valid stream_type
2765 if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
2766 continue;
2767
2768 int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
2769 if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
2770 if (dimension > maxJpegDim)
2771 maxJpegDim = dimension;
2772 } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
2773 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
2774 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
2775 if (dimension > maxRawDim)
2776 maxRawDim = dimension;
2777 } else {
2778 if (dimension > maxProcessedDim)
2779 maxProcessedDim = dimension;
2780 }
2781 }
2782
2783 size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
2784 MAX_SIZES_CNT);
2785
2786 //Assume all jpeg dimensions are in processed dimensions.
2787 if (maxJpegDim > maxProcessedDim)
2788 maxProcessedDim = maxJpegDim;
2789 //Find the smallest raw dimension that is greater or equal to jpeg dimension
2790 if (maxProcessedDim > maxRawDim) {
2791 maxRawDim = INT32_MAX;
2792
2793 for (size_t i = 0; i < count; i++) {
2794 int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
2795 gCamCapability[mCameraId]->raw_dim[i].height;
2796 if (dimension >= maxProcessedDim && dimension < maxRawDim)
2797 maxRawDim = dimension;
2798 }
2799 }
2800
2801 //Find minimum durations for processed, jpeg, and raw
2802 for (size_t i = 0; i < count; i++) {
2803 if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
2804 gCamCapability[mCameraId]->raw_dim[i].height) {
2805 mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
2806 break;
2807 }
2808 }
2809 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
2810 for (size_t i = 0; i < count; i++) {
2811 if (maxProcessedDim ==
2812 gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
2813 gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
2814 mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
2815 mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
2816 break;
2817 }
2818 }
2819}
2820
2821/*===========================================================================
2822 * FUNCTION : getMinFrameDuration
2823 *
2824 * DESCRIPTION: get minimum frame draution based on the current maximum frame durations
2825 * and current request configuration.
2826 *
2827 * PARAMETERS : @request: requset sent by the frameworks
2828 *
2829 * RETURN : min farme duration for a particular request
2830 *
2831 *==========================================================================*/
2832int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
2833{
2834 bool hasJpegStream = false;
2835 bool hasRawStream = false;
2836 for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
2837 const camera3_stream_t *stream = request->output_buffers[i].stream;
2838 if (stream->format == HAL_PIXEL_FORMAT_BLOB)
2839 hasJpegStream = true;
2840 else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
2841 stream->format == HAL_PIXEL_FORMAT_RAW10 ||
2842 stream->format == HAL_PIXEL_FORMAT_RAW16)
2843 hasRawStream = true;
2844 }
2845
2846 if (!hasJpegStream)
2847 return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
2848 else
2849 return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
2850}
2851
2852/*===========================================================================
2853 * FUNCTION : handleBuffersDuringFlushLock
2854 *
2855 * DESCRIPTION: Account for buffers returned from back-end during flush
2856 * This function is executed while mMutex is held by the caller.
2857 *
2858 * PARAMETERS :
2859 * @buffer: image buffer for the callback
2860 *
2861 * RETURN :
2862 *==========================================================================*/
2863void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
2864{
2865 bool buffer_found = false;
2866 for (List<PendingBuffersInRequest>::iterator req =
2867 mPendingBuffersMap.mPendingBuffersInRequest.begin();
2868 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
2869 for (List<PendingBufferInfo>::iterator i =
2870 req->mPendingBufferList.begin();
2871 i != req->mPendingBufferList.end(); i++) {
2872 if (i->buffer == buffer->buffer) {
2873 mPendingBuffersMap.numPendingBufsAtFlush--;
2874 LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
2875 buffer->buffer, req->frame_number,
2876 mPendingBuffersMap.numPendingBufsAtFlush);
2877 buffer_found = true;
2878 break;
2879 }
2880 }
2881 if (buffer_found) {
2882 break;
2883 }
2884 }
2885 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
2886 //signal the flush()
2887 LOGD("All buffers returned to HAL. Continue flush");
2888 pthread_cond_signal(&mBuffersCond);
2889 }
2890}
2891
Thierry Strudel3d639192016-09-09 11:52:26 -07002892/*===========================================================================
2893 * FUNCTION : handleBatchMetadata
2894 *
2895 * DESCRIPTION: Handles metadata buffer callback in batch mode
2896 *
2897 * PARAMETERS : @metadata_buf: metadata buffer
2898 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
2899 * the meta buf in this method
2900 *
2901 * RETURN :
2902 *
2903 *==========================================================================*/
void QCamera3HardwareInterface::handleBatchMetadata(
        mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BATCH_METADATA);

    if (NULL == metadata_buf) {
        LOGE("metadata_buf is NULL");
        return;
    }
    /* In batch mode, the metadata will contain the frame number and timestamp
     * of the last frame in the batch. Eg: a batch containing buffers from
     * request 5,6,7 and 8 will have frame number and timestamp corresponding
     * to 8. multiple process_capture_requests => 1 set_param =>
     * 1 handleBatchMetadata => multiple process_capture_results */
    metadata_buffer_t *metadata =
            (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
    int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
    uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
    uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
    uint32_t frame_number = 0, urgent_frame_number = 0;
    int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
    bool invalid_metadata = false;
    size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
    size_t loopCount = 1;
    // Set by handleMetadataWithLock if it enqueues the meta buffer; in that
    // case ownership transfers and we must not bufDone/free it below.
    bool is_metabuf_queued = false;

    // Pull the batch bookkeeping fields out of the metadata buffer; any
    // missing pointer marks the whole metadata as invalid.
    int32_t *p_frame_number_valid =
            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
    uint32_t *p_frame_number =
            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
    int64_t *p_capture_time =
            POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
    int32_t *p_urgent_frame_number_valid =
            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
    uint32_t *p_urgent_frame_number =
            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);

    if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
            (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
            (NULL == p_urgent_frame_number)) {
        LOGE("Invalid metadata");
        invalid_metadata = true;
    } else {
        frame_number_valid = *p_frame_number_valid;
        last_frame_number = *p_frame_number;
        last_frame_capture_time = *p_capture_time;
        urgent_frame_number_valid = *p_urgent_frame_number_valid;
        last_urgent_frame_number = *p_urgent_frame_number;
    }

    /* In batchmode, when no video buffers are requested, set_parms are sent
     * for every capture_request. The difference between consecutive urgent
     * frame numbers and frame numbers should be used to interpolate the
     * corresponding frame numbers and time stamps */
    pthread_mutex_lock(&mMutex);
    if (urgent_frame_number_valid) {
        // Map the last urgent frame number of the batch back to the first
        // one via mPendingBatchMap; a miss is unrecoverable.
        ssize_t idx = mPendingBatchMap.indexOfKey(last_urgent_frame_number);
        if(idx < 0) {
            LOGE("Invalid urgent frame number received: %d. Irrecoverable error",
                last_urgent_frame_number);
            mState = ERROR;
            pthread_mutex_unlock(&mMutex);
            return;
        }
        first_urgent_frame_number = mPendingBatchMap.valueAt(idx);
        urgentFrameNumDiff = last_urgent_frame_number + 1 -
                first_urgent_frame_number;

        LOGD("urgent_frm: valid: %d frm_num: %d - %d",
                 urgent_frame_number_valid,
                first_urgent_frame_number, last_urgent_frame_number);
    }

    if (frame_number_valid) {
        // Same lookup for the regular frame number; the batch entry is
        // removed from the map only on this (final) metadata path.
        ssize_t idx = mPendingBatchMap.indexOfKey(last_frame_number);
        if(idx < 0) {
            LOGE("Invalid frame number received: %d. Irrecoverable error",
                last_frame_number);
            mState = ERROR;
            pthread_mutex_unlock(&mMutex);
            return;
        }
        first_frame_number = mPendingBatchMap.valueAt(idx);
        frameNumDiff = last_frame_number + 1 -
                first_frame_number;
        mPendingBatchMap.removeItem(last_frame_number);

        LOGD("frm: valid: %d frm_num: %d - %d",
                 frame_number_valid,
                first_frame_number, last_frame_number);

    }
    pthread_mutex_unlock(&mMutex);

    if (urgent_frame_number_valid || frame_number_valid) {
        // Iterate over the larger of the two spans so both urgent and final
        // results are emitted for every request in the batch.
        loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
        // NOTE(review): urgentFrameNumDiff/frameNumDiff are size_t; %d here
        // relies on platform width — consider %zu. Confirm before changing.
        if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
            LOGE("urgentFrameNumDiff: %d urgentFrameNum: %d",
                    urgentFrameNumDiff, last_urgent_frame_number);
        if (frameNumDiff > MAX_HFR_BATCH_SIZE)
            LOGE("frameNumDiff: %d frameNum: %d",
                    frameNumDiff, last_frame_number);
    }

    for (size_t i = 0; i < loopCount; i++) {
        /* handleMetadataWithLock is called even for invalid_metadata for
         * pipeline depth calculation */
        if (!invalid_metadata) {
            /* Infer frame number. Batch metadata contains frame number of the
             * last frame */
            if (urgent_frame_number_valid) {
                if (i < urgentFrameNumDiff) {
                    urgent_frame_number =
                            first_urgent_frame_number + i;
                    LOGD("inferred urgent frame_number: %d",
                            urgent_frame_number);
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                            CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
                } else {
                    /* This is to handle when urgentFrameNumDiff < frameNumDiff */
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                            CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
                }
            }

            /* Infer frame number. Batch metadata contains frame number of the
             * last frame */
            if (frame_number_valid) {
                if (i < frameNumDiff) {
                    frame_number = first_frame_number + i;
                    LOGD("inferred frame_number: %d", frame_number);
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                            CAM_INTF_META_FRAME_NUMBER, frame_number);
                } else {
                    /* This is to handle when urgentFrameNumDiff > frameNumDiff */
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                            CAM_INTF_META_FRAME_NUMBER_VALID, 0);
                }
            }

            if (last_frame_capture_time) {
                // Infer per-frame timestamps: back-compute the first frame's
                // capture time from the last one using the HFR frame period,
                // then step forward by i frame periods.
                first_frame_capture_time = last_frame_capture_time -
                        (((loopCount - 1) * NSEC_PER_SEC) / (double) mHFRVideoFps);
                capture_time =
                        first_frame_capture_time + (i * NSEC_PER_SEC / (double) mHFRVideoFps);
                ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                        CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
                LOGD("batch capture_time: %lld, capture_time: %lld",
                        last_frame_capture_time, capture_time);
            }
        }
        pthread_mutex_lock(&mMutex);
        handleMetadataWithLock(metadata_buf,
                false /* free_and_bufdone_meta_buf */,
                (i == 0) /* first metadata in the batch metadata */,
                &is_metabuf_queued /* if metabuf isqueued or not */);
        pthread_mutex_unlock(&mMutex);
    }

    /* BufDone metadata buffer only if the callee did not take ownership */
    if (free_and_bufdone_meta_buf && !is_metabuf_queued) {
        mMetadataChannel->bufDone(metadata_buf);
        free(metadata_buf);
        metadata_buf = NULL;
    }
}
3071
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003072void QCamera3HardwareInterface::notifyError(uint32_t frameNumber,
3073 camera3_error_msg_code_t errorCode)
3074{
3075 camera3_notify_msg_t notify_msg;
3076 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3077 notify_msg.type = CAMERA3_MSG_ERROR;
3078 notify_msg.message.error.error_code = errorCode;
3079 notify_msg.message.error.error_stream = NULL;
3080 notify_msg.message.error.frame_number = frameNumber;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003081 orchestrateNotify(&notify_msg);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003082
3083 return;
3084}
Thierry Strudel3d639192016-09-09 11:52:26 -07003085/*===========================================================================
3086 * FUNCTION : handleMetadataWithLock
3087 *
3088 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
3089 *
3090 * PARAMETERS : @metadata_buf: metadata buffer
3091 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3092 * the meta buf in this method
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003093 * @firstMetadataInBatch: Boolean to indicate whether this is the
3094 * first metadata in a batch. Valid only for batch mode
Thierry Strudel54dc9782017-02-15 12:12:10 -08003095 * @p_is_metabuf_queued: Pointer to Boolean to check if metadata
3096 * buffer is enqueued or not.
Thierry Strudel3d639192016-09-09 11:52:26 -07003097 *
3098 * RETURN :
3099 *
3100 *==========================================================================*/
3101void QCamera3HardwareInterface::handleMetadataWithLock(
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003102 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
Thierry Strudel54dc9782017-02-15 12:12:10 -08003103 bool firstMetadataInBatch, bool *p_is_metabuf_queued)
Thierry Strudel3d639192016-09-09 11:52:26 -07003104{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003105 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_METADATA_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003106 if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
3107 //during flush do not send metadata from this thread
3108 LOGD("not sending metadata during flush or when mState is error");
3109 if (free_and_bufdone_meta_buf) {
3110 mMetadataChannel->bufDone(metadata_buf);
3111 free(metadata_buf);
3112 }
3113 return;
3114 }
3115
3116 //not in flush
3117 metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3118 int32_t frame_number_valid, urgent_frame_number_valid;
3119 uint32_t frame_number, urgent_frame_number;
3120 int64_t capture_time;
3121 nsecs_t currentSysTime;
3122
3123 int32_t *p_frame_number_valid =
3124 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3125 uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3126 int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3127 int32_t *p_urgent_frame_number_valid =
3128 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3129 uint32_t *p_urgent_frame_number =
3130 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3131 IF_META_AVAILABLE(cam_stream_ID_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
3132 metadata) {
3133 LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
3134 *p_frame_number_valid, *p_frame_number);
3135 }
3136
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003137 camera_metadata_t *resultMetadata = nullptr;
3138
Thierry Strudel3d639192016-09-09 11:52:26 -07003139 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
3140 (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
3141 LOGE("Invalid metadata");
3142 if (free_and_bufdone_meta_buf) {
3143 mMetadataChannel->bufDone(metadata_buf);
3144 free(metadata_buf);
3145 }
3146 goto done_metadata;
3147 }
3148 frame_number_valid = *p_frame_number_valid;
3149 frame_number = *p_frame_number;
3150 capture_time = *p_capture_time;
3151 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3152 urgent_frame_number = *p_urgent_frame_number;
3153 currentSysTime = systemTime(CLOCK_MONOTONIC);
3154
3155 // Detect if buffers from any requests are overdue
3156 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003157 int64_t timeout;
3158 {
3159 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
3160 // If there is a pending HDR+ request, the following requests may be blocked until the
3161 // HDR+ request is done. So allow a longer timeout.
3162 timeout = (mHdrPlusPendingRequests.size() > 0) ?
3163 MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT : MISSING_REQUEST_BUF_TIMEOUT;
3164 }
3165
3166 if ( (currentSysTime - req.timestamp) > s2ns(timeout) ) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003167 for (auto &missed : req.mPendingBufferList) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003168 assert(missed.stream->priv);
3169 if (missed.stream->priv) {
3170 QCamera3Channel *ch = (QCamera3Channel *)(missed.stream->priv);
3171 assert(ch->mStreams[0]);
3172 if (ch->mStreams[0]) {
3173 LOGE("Cancel missing frame = %d, buffer = %p,"
3174 "stream type = %d, stream format = %d",
3175 req.frame_number, missed.buffer,
3176 ch->mStreams[0]->getMyType(), missed.stream->format);
3177 ch->timeoutFrame(req.frame_number);
3178 }
3179 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003180 }
3181 }
3182 }
3183 //Partial result on process_capture_result for timestamp
3184 if (urgent_frame_number_valid) {
3185 LOGD("valid urgent frame_number = %u, capture_time = %lld",
3186 urgent_frame_number, capture_time);
3187
3188 //Recieved an urgent Frame Number, handle it
3189 //using partial results
3190 for (pendingRequestIterator i =
3191 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3192 LOGD("Iterator Frame = %d urgent frame = %d",
3193 i->frame_number, urgent_frame_number);
3194
3195 if ((!i->input_buffer) && (i->frame_number < urgent_frame_number) &&
3196 (i->partial_result_cnt == 0)) {
3197 LOGE("Error: HAL missed urgent metadata for frame number %d",
3198 i->frame_number);
3199 }
3200
3201 if (i->frame_number == urgent_frame_number &&
3202 i->bUrgentReceived == 0) {
3203
3204 camera3_capture_result_t result;
3205 memset(&result, 0, sizeof(camera3_capture_result_t));
3206
3207 i->partial_result_cnt++;
3208 i->bUrgentReceived = 1;
3209 // Extract 3A metadata
3210 result.result =
3211 translateCbUrgentMetadataToResultMetadata(metadata);
3212 // Populate metadata result
3213 result.frame_number = urgent_frame_number;
3214 result.num_output_buffers = 0;
3215 result.output_buffers = NULL;
3216 result.partial_result = i->partial_result_cnt;
3217
Chien-Yu Chenee335912017-02-09 17:53:20 -08003218 if (mHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003219 // Notify HDR+ client about the partial metadata.
3220 mHdrPlusClient->notifyFrameMetadata(result.frame_number, *result.result,
3221 result.partial_result == PARTIAL_RESULT_COUNT);
3222 }
3223
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003224 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07003225 LOGD("urgent frame_number = %u, capture_time = %lld",
3226 result.frame_number, capture_time);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003227 if (mResetInstantAEC && mInstantAECSettledFrameNumber == 0) {
3228 // Instant AEC settled for this frame.
3229 LOGH("instant AEC settled for frame number %d", urgent_frame_number);
3230 mInstantAECSettledFrameNumber = urgent_frame_number;
3231 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003232 free_camera_metadata((camera_metadata_t *)result.result);
3233 break;
3234 }
3235 }
3236 }
3237
3238 if (!frame_number_valid) {
3239 LOGD("Not a valid normal frame number, used as SOF only");
3240 if (free_and_bufdone_meta_buf) {
3241 mMetadataChannel->bufDone(metadata_buf);
3242 free(metadata_buf);
3243 }
3244 goto done_metadata;
3245 }
3246 LOGH("valid frame_number = %u, capture_time = %lld",
3247 frame_number, capture_time);
3248
Emilian Peev7650c122017-01-19 08:24:33 -08003249 if (metadata->is_depth_data_valid) {
3250 handleDepthDataLocked(metadata->depth_data, frame_number);
3251 }
3252
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003253 // Check whether any stream buffer corresponding to this is dropped or not
3254 // If dropped, then send the ERROR_BUFFER for the corresponding stream
3255 // OR check if instant AEC is enabled, then need to drop frames untill AEC is settled.
3256 for (auto & pendingRequest : mPendingRequestsList) {
3257 if (p_cam_frame_drop || (mInstantAEC || pendingRequest.frame_number <
3258 mInstantAECSettledFrameNumber)) {
3259 camera3_notify_msg_t notify_msg = {};
3260 for (auto & buffer : pendingRequest.buffers) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003261 bool dropFrame = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003262 QCamera3ProcessingChannel *channel =
3263 (QCamera3ProcessingChannel *)buffer.stream->priv;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003264 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003265 if (p_cam_frame_drop) {
3266 for (uint32_t k = 0; k < p_cam_frame_drop->num_streams; k++) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003267 if (streamID == p_cam_frame_drop->stream_request[k].streamID) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003268 // Got the stream ID for drop frame.
3269 dropFrame = true;
3270 break;
3271 }
3272 }
3273 } else {
3274 // This is instant AEC case.
3275 // For instant AEC drop the stream untill AEC is settled.
3276 dropFrame = true;
3277 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003278
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003279 if (dropFrame) {
3280 // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
3281 if (p_cam_frame_drop) {
3282 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003283 LOGE("Start of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003284 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003285 } else {
3286 // For instant AEC, inform frame drop and frame number
3287 LOGH("Start of reporting error frame#=%u for instant AEC, streamID=%u, "
3288 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003289 pendingRequest.frame_number, streamID,
3290 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003291 }
3292 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003293 notify_msg.message.error.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003294 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003295 notify_msg.message.error.error_stream = buffer.stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003296 orchestrateNotify(&notify_msg);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003297 if (p_cam_frame_drop) {
3298 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003299 LOGE("End of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003300 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003301 } else {
3302 // For instant AEC, inform frame drop and frame number
3303 LOGH("End of reporting error frame#=%u for instant AEC, streamID=%u, "
3304 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003305 pendingRequest.frame_number, streamID,
3306 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003307 }
3308 PendingFrameDropInfo PendingFrameDrop;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003309 PendingFrameDrop.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003310 PendingFrameDrop.stream_ID = streamID;
3311 // Add the Frame drop info to mPendingFrameDropList
3312 mPendingFrameDropList.push_back(PendingFrameDrop);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003313 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003314 }
3315 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003316 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003317
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003318 for (auto & pendingRequest : mPendingRequestsList) {
3319 // Find the pending request with the frame number.
3320 if (pendingRequest.frame_number == frame_number) {
3321 // Update the sensor timestamp.
3322 pendingRequest.timestamp = capture_time;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003323
Thierry Strudel3d639192016-09-09 11:52:26 -07003324
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003325 /* Set the timestamp in display metadata so that clients aware of
3326 private_handle such as VT can use this un-modified timestamps.
3327 Camera framework is unaware of this timestamp and cannot change this */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003328 updateTimeStampInPendingBuffers(pendingRequest.frame_number, pendingRequest.timestamp);
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003329
Thierry Strudel3d639192016-09-09 11:52:26 -07003330 // Find channel requiring metadata, meaning internal offline postprocess
3331 // is needed.
3332 //TODO: for now, we don't support two streams requiring metadata at the same time.
3333 // (because we are not making copies, and metadata buffer is not reference counted.
3334 bool internalPproc = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003335 for (pendingBufferIterator iter = pendingRequest.buffers.begin();
3336 iter != pendingRequest.buffers.end(); iter++) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003337 if (iter->need_metadata) {
3338 internalPproc = true;
3339 QCamera3ProcessingChannel *channel =
3340 (QCamera3ProcessingChannel *)iter->stream->priv;
3341 channel->queueReprocMetadata(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003342 if(p_is_metabuf_queued != NULL) {
3343 *p_is_metabuf_queued = true;
3344 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003345 break;
3346 }
3347 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003348 for (auto itr = pendingRequest.internalRequestList.begin();
3349 itr != pendingRequest.internalRequestList.end(); itr++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003350 if (itr->need_metadata) {
3351 internalPproc = true;
3352 QCamera3ProcessingChannel *channel =
3353 (QCamera3ProcessingChannel *)itr->stream->priv;
3354 channel->queueReprocMetadata(metadata_buf);
3355 break;
3356 }
3357 }
3358
Thierry Strudel54dc9782017-02-15 12:12:10 -08003359 saveExifParams(metadata);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003360 resultMetadata = translateFromHalMetadata(metadata,
3361 pendingRequest.timestamp, pendingRequest.request_id,
3362 pendingRequest.jpegMetadata, pendingRequest.pipeline_depth,
3363 pendingRequest.capture_intent,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07003364 pendingRequest.hybrid_ae_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08003365 /* DevCamDebug metadata translateFromHalMetadata function call*/
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003366 pendingRequest.DevCamDebug_meta_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08003367 /* DevCamDebug metadata end */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003368 internalPproc, pendingRequest.fwkCacMode,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003369 firstMetadataInBatch);
Thierry Strudel3d639192016-09-09 11:52:26 -07003370
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003371 updateFpsInPreviewBuffer(metadata, pendingRequest.frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003372
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003373 if (pendingRequest.blob_request) {
3374 //Dump tuning metadata if enabled and available
3375 char prop[PROPERTY_VALUE_MAX];
3376 memset(prop, 0, sizeof(prop));
3377 property_get("persist.camera.dumpmetadata", prop, "0");
3378 int32_t enabled = atoi(prop);
3379 if (enabled && metadata->is_tuning_params_valid) {
3380 dumpMetadataToFile(metadata->tuning_params,
3381 mMetaFrameCount,
3382 enabled,
3383 "Snapshot",
3384 frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003385 }
3386 }
3387
3388 if (!internalPproc) {
3389 LOGD("couldn't find need_metadata for this metadata");
3390 // Return metadata buffer
3391 if (free_and_bufdone_meta_buf) {
3392 mMetadataChannel->bufDone(metadata_buf);
3393 free(metadata_buf);
3394 }
3395 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003396
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003397 break;
Thierry Strudel3d639192016-09-09 11:52:26 -07003398 }
3399 }
3400
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003401 // Try to send out shutter callbacks and capture results.
3402 handlePendingResultsWithLock(frame_number, resultMetadata);
3403 return;
3404
Thierry Strudel3d639192016-09-09 11:52:26 -07003405done_metadata:
3406 for (pendingRequestIterator i = mPendingRequestsList.begin();
3407 i != mPendingRequestsList.end() ;i++) {
3408 i->pipeline_depth++;
3409 }
3410 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
3411 unblockRequestIfNecessary();
3412}
3413
3414/*===========================================================================
Emilian Peev7650c122017-01-19 08:24:33 -08003415 * FUNCTION : handleDepthDataWithLock
3416 *
3417 * DESCRIPTION: Handles incoming depth data
3418 *
3419 * PARAMETERS : @depthData : Depth data
3420 * @frameNumber: Frame number of the incoming depth data
3421 *
3422 * RETURN :
3423 *
3424 *==========================================================================*/
3425void QCamera3HardwareInterface::handleDepthDataLocked(
3426 const cam_depth_data_t &depthData, uint32_t frameNumber) {
3427 uint32_t currentFrameNumber;
3428 buffer_handle_t *depthBuffer;
3429
3430 if (nullptr == mDepthChannel) {
3431 LOGE("Depth channel not present!");
3432 return;
3433 }
3434
3435 camera3_stream_buffer_t resultBuffer =
3436 {.acquire_fence = -1,
3437 .release_fence = -1,
3438 .status = CAMERA3_BUFFER_STATUS_OK,
3439 .buffer = nullptr,
3440 .stream = mDepthChannel->getStream()};
3441 camera3_capture_result_t result =
3442 {.result = nullptr,
3443 .num_output_buffers = 1,
3444 .output_buffers = &resultBuffer,
3445 .partial_result = 0,
3446 .frame_number = 0};
3447
3448 do {
3449 depthBuffer = mDepthChannel->getOldestFrame(currentFrameNumber);
3450 if (nullptr == depthBuffer) {
3451 break;
3452 }
3453
3454 result.frame_number = currentFrameNumber;
3455 resultBuffer.buffer = depthBuffer;
3456 if (currentFrameNumber == frameNumber) {
3457 int32_t rc = mDepthChannel->populateDepthData(depthData,
3458 frameNumber);
3459 if (NO_ERROR != rc) {
3460 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3461 } else {
3462 resultBuffer.status = CAMERA3_BUFFER_STATUS_OK;
3463 }
3464 } else if (currentFrameNumber > frameNumber) {
3465 break;
3466 } else {
3467 camera3_notify_msg_t notify_msg = {.type = CAMERA3_MSG_ERROR,
3468 {{currentFrameNumber, mDepthChannel->getStream(),
3469 CAMERA3_MSG_ERROR_BUFFER}}};
3470 orchestrateNotify(&notify_msg);
3471
3472 LOGE("Depth buffer for frame number: %d is missing "
3473 "returning back!", currentFrameNumber);
3474 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3475 }
3476 mDepthChannel->unmapBuffer(currentFrameNumber);
3477
3478 orchestrateResult(&result);
3479 } while (currentFrameNumber < frameNumber);
3480}
3481
3482/*===========================================================================
3483 * FUNCTION : notifyErrorFoPendingDepthData
3484 *
3485 * DESCRIPTION: Returns error for any pending depth buffers
3486 *
3487 * PARAMETERS : depthCh - depth channel that needs to get flushed
3488 *
3489 * RETURN :
3490 *
3491 *==========================================================================*/
3492void QCamera3HardwareInterface::notifyErrorFoPendingDepthData(
3493 QCamera3DepthChannel *depthCh) {
3494 uint32_t currentFrameNumber;
3495 buffer_handle_t *depthBuffer;
3496
3497 if (nullptr == depthCh) {
3498 return;
3499 }
3500
3501 camera3_notify_msg_t notify_msg =
3502 {.type = CAMERA3_MSG_ERROR,
3503 {{0, depthCh->getStream(), CAMERA3_MSG_ERROR_BUFFER}}};
3504 camera3_stream_buffer_t resultBuffer =
3505 {.acquire_fence = -1,
3506 .release_fence = -1,
3507 .buffer = nullptr,
3508 .stream = depthCh->getStream(),
3509 .status = CAMERA3_BUFFER_STATUS_ERROR};
3510 camera3_capture_result_t result =
3511 {.result = nullptr,
3512 .frame_number = 0,
3513 .num_output_buffers = 1,
3514 .partial_result = 0,
3515 .output_buffers = &resultBuffer};
3516
3517 while (nullptr !=
3518 (depthBuffer = depthCh->getOldestFrame(currentFrameNumber))) {
3519 depthCh->unmapBuffer(currentFrameNumber);
3520
3521 notify_msg.message.error.frame_number = currentFrameNumber;
3522 orchestrateNotify(&notify_msg);
3523
3524 resultBuffer.buffer = depthBuffer;
3525 result.frame_number = currentFrameNumber;
3526 orchestrateResult(&result);
3527 };
3528}
3529
3530/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07003531 * FUNCTION : hdrPlusPerfLock
3532 *
3533 * DESCRIPTION: perf lock for HDR+ using custom intent
3534 *
3535 * PARAMETERS : @metadata_buf: Metadata super_buf pointer
3536 *
3537 * RETURN : None
3538 *
3539 *==========================================================================*/
3540void QCamera3HardwareInterface::hdrPlusPerfLock(
3541 mm_camera_super_buf_t *metadata_buf)
3542{
3543 if (NULL == metadata_buf) {
3544 LOGE("metadata_buf is NULL");
3545 return;
3546 }
3547 metadata_buffer_t *metadata =
3548 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3549 int32_t *p_frame_number_valid =
3550 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3551 uint32_t *p_frame_number =
3552 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3553
3554 if (p_frame_number_valid == NULL || p_frame_number == NULL) {
3555 LOGE("%s: Invalid metadata", __func__);
3556 return;
3557 }
3558
3559 //acquire perf lock for 5 sec after the last HDR frame is captured
3560 if ((p_frame_number_valid != NULL) && *p_frame_number_valid) {
3561 if ((p_frame_number != NULL) &&
3562 (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003563 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT, HDR_PLUS_PERF_TIME_OUT);
Thierry Strudel3d639192016-09-09 11:52:26 -07003564 }
3565 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003566}
3567
/*===========================================================================
 * FUNCTION   : handleInputBufferWithLock
 *
 * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
 *              Locates the pending reprocess request for the frame, emits the
 *              shutter notification (using the input settings' sensor
 *              timestamp when present), waits on the input buffer's release
 *              fence, then returns settings + input buffer to the framework
 *              and erases the pending request.
 *
 * PARAMETERS : @frame_number: frame number of the input buffer
 *
 * RETURN     :
 *
 *==========================================================================*/
void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_IN_BUF_LKD);
    // Linear scan for the pending request matching this frame number.
    pendingRequestIterator i = mPendingRequestsList.begin();
    while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
        i++;
    }
    // Only requests that carry an input buffer (reprocess) are handled here.
    if (i != mPendingRequestsList.end() && i->input_buffer) {
        //found the right request
        if (!i->shutter_notified) {
            CameraMetadata settings;
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
            // Fall back to current time if the settings carry no timestamp.
            nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
            if(i->settings) {
                settings = i->settings;
                if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
                    // Reprocess shutter reuses the original capture's
                    // sensor timestamp from the input settings.
                    capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
                } else {
                    LOGE("No timestamp in input settings! Using current one.");
                }
            } else {
                LOGE("Input settings missing!");
            }

            notify_msg.type = CAMERA3_MSG_SHUTTER;
            notify_msg.message.shutter.frame_number = frame_number;
            notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
            orchestrateNotify(&notify_msg);
            i->shutter_notified = true;
            LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
                        i->frame_number, notify_msg.message.shutter.timestamp);
        }

        // Wait for (and close) the input buffer's release fence before
        // returning it to the framework.
        if (i->input_buffer->release_fence != -1) {
           int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
           close(i->input_buffer->release_fence);
           if (rc != OK) {
               LOGE("input buffer sync wait failed %d", rc);
           }
        }

        // For reprocess, the result metadata is the request settings and the
        // full partial-result count is reported in one shot.
        camera3_capture_result result;
        memset(&result, 0, sizeof(camera3_capture_result));
        result.frame_number = frame_number;
        result.result = i->settings;
        result.input_buffer = i->input_buffer;
        result.partial_result = PARTIAL_RESULT_COUNT;

        orchestrateResult(&result);
        LOGD("Input request metadata and input buffer frame_number = %u",
                        i->frame_number);
        // Request fully answered; drop it from the pending list.
        i = erasePendingRequest(i);
    } else {
        LOGE("Could not find input request for frame number %d", frame_number);
    }
}
3635
/*===========================================================================
 * FUNCTION   : handleBufferWithLock
 *
 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
 *              If the frame is no longer in the pending request list the
 *              buffer is returned to the framework immediately (applying any
 *              recorded frame-drop / buffer-error status); otherwise the
 *              buffer is cached on the pending request until its result
 *              metadata arrives. Also manages snapshot/preview perf locks.
 *
 * PARAMETERS : @buffer: image buffer for the callback
 *              @frame_number: frame number of the image buffer
 *
 * RETURN     :
 *
 *==========================================================================*/
void QCamera3HardwareInterface::handleBufferWithLock(
    camera3_stream_buffer_t *buffer, uint32_t frame_number)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BUF_LKD);

    // A completed BLOB (JPEG) buffer ends the take-snapshot perf boost.
    if (buffer->stream->format == HAL_PIXEL_FORMAT_BLOB) {
        mPerfLockMgr.releasePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
    }

    /* Nothing to be done during error state */
    if ((ERROR == mState) || (DEINIT == mState)) {
        return;
    }
    if (mFlushPerf) {
        handleBuffersDuringFlushLock(buffer);
        return;
    }
    //not in flush
    // If the frame number doesn't exist in the pending request list,
    // directly send the buffer to the frameworks, and update pending buffers map
    // Otherwise, book-keep the buffer.
    pendingRequestIterator i = mPendingRequestsList.begin();
    while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
        i++;
    }
    if (i == mPendingRequestsList.end()) {
        // Verify all pending requests frame_numbers are greater
        for (pendingRequestIterator j = mPendingRequestsList.begin();
                j != mPendingRequestsList.end(); j++) {
            if ((j->frame_number < frame_number) && !(j->input_buffer)) {
                LOGW("Error: pending live frame number %d is smaller than %d",
                        j->frame_number, frame_number);
            }
        }
        // Buffer-only result: no metadata, no partial result.
        camera3_capture_result_t result;
        memset(&result, 0, sizeof(camera3_capture_result_t));
        result.result = NULL;
        result.frame_number = frame_number;
        result.num_output_buffers = 1;
        result.partial_result = 0;
        // If this (stream, frame) was recorded as a dropped frame, flag the
        // buffer as error and consume the drop-list entry.
        for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
                m != mPendingFrameDropList.end(); m++) {
            QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
            uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
            if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
                buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
                LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
                        frame_number, streamID);
                m = mPendingFrameDropList.erase(m);
                break;
            }
        }
        // Merge in any error status recorded for this buffer handle.
        buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
        result.output_buffers = buffer;
        LOGH("result frame_number = %d, buffer = %p",
                frame_number, buffer->buffer);

        mPendingBuffersMap.removeBuf(buffer->buffer);

        orchestrateResult(&result);
    } else {
        if (i->input_buffer) {
            // Reprocess request: honor the input buffer's release fence
            // before its output is surfaced.
            if (i->input_buffer->release_fence != -1) {
               int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
               close(i->input_buffer->release_fence);
               if (rc != OK) {
                   LOGE("input buffer sync wait failed %d", rc);
               }
            }
        }

        // Put buffer into the pending request
        for (auto &requestedBuffer : i->buffers) {
            if (requestedBuffer.stream == buffer->stream) {
                if (requestedBuffer.buffer != nullptr) {
                    LOGE("Error: buffer is already set");
                } else {
                    // Heap copy is freed when the result is dispatched in
                    // handlePendingResultsWithLock.
                    requestedBuffer.buffer = (camera3_stream_buffer_t *)malloc(
                        sizeof(camera3_stream_buffer_t));
                    *(requestedBuffer.buffer) = *buffer;
                    LOGH("cache buffer %p at result frame_number %u",
                        buffer->buffer, frame_number);
                }
            }
        }

        if (i->input_buffer) {
            // For a reprocessing request, try to send out shutter callback and result metadata.
            handlePendingResultsWithLock(frame_number, nullptr);
        }
    }

    // First preview buffer out: drop startup perf locks and switch to the
    // steady-state encode power hint.
    if (mPreviewStarted == false) {
        QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
        if ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask()) {
            mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
            mPerfLockMgr.releasePerfLock(PERF_LOCK_OPEN_CAMERA);
            mPreviewStarted = true;

            // Set power hint for preview
            mPerfLockMgr.acquirePerfLock(PERF_LOCK_POWERHINT_ENCODE, 0);
        }
    }
}
3751
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003752void QCamera3HardwareInterface::handlePendingResultsWithLock(uint32_t frameNumber,
3753 const camera_metadata_t *resultMetadata)
3754{
3755 // Find the pending request for this result metadata.
3756 auto requestIter = mPendingRequestsList.begin();
3757 while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != frameNumber) {
3758 requestIter++;
3759 }
3760
3761 if (requestIter == mPendingRequestsList.end()) {
3762 ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, frameNumber);
3763 return;
3764 }
3765
3766 // Update the result metadata
3767 requestIter->resultMetadata = resultMetadata;
3768
3769 // Check what type of request this is.
3770 bool liveRequest = false;
3771 if (requestIter->hdrplus) {
3772 // HDR+ request doesn't have partial results.
3773 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
3774 } else if (requestIter->input_buffer != nullptr) {
3775 // Reprocessing request result is the same as settings.
3776 requestIter->resultMetadata = requestIter->settings;
3777 // Reprocessing request doesn't have partial results.
3778 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
3779 } else {
3780 liveRequest = true;
3781 requestIter->partial_result_cnt++;
3782 mPendingLiveRequest--;
3783
3784 // For a live request, send the metadata to HDR+ client.
Chien-Yu Chenee335912017-02-09 17:53:20 -08003785 if (mHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003786 mHdrPlusClient->notifyFrameMetadata(frameNumber, *resultMetadata,
3787 requestIter->partial_result_cnt == PARTIAL_RESULT_COUNT);
3788 }
3789 }
3790
3791 // The pending requests are ordered by increasing frame numbers. The shutter callback and
3792 // result metadata are ready to be sent if all previous pending requests are ready to be sent.
3793 bool readyToSend = true;
3794
3795 // Iterate through the pending requests to send out shutter callbacks and results that are
3796 // ready. Also if this result metadata belongs to a live request, notify errors for previous
3797 // live requests that don't have result metadata yet.
3798 auto iter = mPendingRequestsList.begin();
3799 while (iter != mPendingRequestsList.end()) {
3800 // Check if current pending request is ready. If it's not ready, the following pending
3801 // requests are also not ready.
3802 if (readyToSend && iter->resultMetadata == nullptr) {
3803 readyToSend = false;
3804 }
3805
3806 bool thisLiveRequest = iter->hdrplus == false && iter->input_buffer == nullptr;
3807
3808 std::vector<camera3_stream_buffer_t> outputBuffers;
3809
3810 camera3_capture_result_t result = {};
3811 result.frame_number = iter->frame_number;
3812 result.result = iter->resultMetadata;
3813 result.partial_result = iter->partial_result_cnt;
3814
3815 // If this pending buffer has result metadata, we may be able to send out shutter callback
3816 // and result metadata.
3817 if (iter->resultMetadata != nullptr) {
3818 if (!readyToSend) {
3819 // If any of the previous pending request is not ready, this pending request is
3820 // also not ready to send in order to keep shutter callbacks and result metadata
3821 // in order.
3822 iter++;
3823 continue;
3824 }
3825
3826 // Invoke shutter callback if not yet.
3827 if (!iter->shutter_notified) {
3828 int64_t timestamp = systemTime(CLOCK_MONOTONIC);
3829
3830 // Find the timestamp in HDR+ result metadata
3831 camera_metadata_ro_entry_t entry;
3832 status_t res = find_camera_metadata_ro_entry(iter->resultMetadata,
3833 ANDROID_SENSOR_TIMESTAMP, &entry);
3834 if (res != OK) {
3835 ALOGE("%s: Cannot find sensor timestamp for frame number %d: %s (%d)",
3836 __FUNCTION__, iter->frame_number, strerror(-res), res);
3837 } else {
3838 timestamp = entry.data.i64[0];
3839 }
3840
3841 camera3_notify_msg_t notify_msg = {};
3842 notify_msg.type = CAMERA3_MSG_SHUTTER;
3843 notify_msg.message.shutter.frame_number = iter->frame_number;
3844 notify_msg.message.shutter.timestamp = timestamp;
3845 orchestrateNotify(&notify_msg);
3846 iter->shutter_notified = true;
3847 }
3848
3849 result.input_buffer = iter->input_buffer;
3850
3851 // Prepare output buffer array
3852 for (auto bufferInfoIter = iter->buffers.begin();
3853 bufferInfoIter != iter->buffers.end(); bufferInfoIter++) {
3854 if (bufferInfoIter->buffer != nullptr) {
3855
3856 QCamera3Channel *channel =
3857 (QCamera3Channel *)bufferInfoIter->buffer->stream->priv;
3858 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
3859
3860 // Check if this buffer is a dropped frame.
3861 auto frameDropIter = mPendingFrameDropList.begin();
3862 while (frameDropIter != mPendingFrameDropList.end()) {
3863 if((frameDropIter->stream_ID == streamID) &&
3864 (frameDropIter->frame_number == frameNumber)) {
3865 bufferInfoIter->buffer->status = CAMERA3_BUFFER_STATUS_ERROR;
3866 LOGE("Stream STATUS_ERROR frame_number=%u, streamID=%u", frameNumber,
3867 streamID);
3868 mPendingFrameDropList.erase(frameDropIter);
3869 break;
3870 } else {
3871 frameDropIter++;
3872 }
3873 }
3874
3875 // Check buffer error status
3876 bufferInfoIter->buffer->status |= mPendingBuffersMap.getBufErrStatus(
3877 bufferInfoIter->buffer->buffer);
3878 mPendingBuffersMap.removeBuf(bufferInfoIter->buffer->buffer);
3879
3880 outputBuffers.push_back(*(bufferInfoIter->buffer));
3881 free(bufferInfoIter->buffer);
3882 bufferInfoIter->buffer = NULL;
3883 }
3884 }
3885
3886 result.output_buffers = outputBuffers.size() > 0 ? &outputBuffers[0] : nullptr;
3887 result.num_output_buffers = outputBuffers.size();
3888 } else if (iter->frame_number < frameNumber && liveRequest && thisLiveRequest) {
3889 // If the result metadata belongs to a live request, notify errors for previous pending
3890 // live requests.
3891 mPendingLiveRequest--;
3892
3893 CameraMetadata dummyMetadata;
3894 dummyMetadata.update(ANDROID_REQUEST_ID, &(iter->request_id), 1);
3895 result.result = dummyMetadata.release();
3896
3897 notifyError(iter->frame_number, CAMERA3_MSG_ERROR_RESULT);
3898 } else {
3899 iter++;
3900 continue;
3901 }
3902
3903 orchestrateResult(&result);
3904
3905 // For reprocessing, result metadata is the same as settings so do not free it here to
3906 // avoid double free.
3907 if (result.result != iter->settings) {
3908 free_camera_metadata((camera_metadata_t *)result.result);
3909 }
3910 iter->resultMetadata = nullptr;
3911 iter = erasePendingRequest(iter);
3912 }
3913
3914 if (liveRequest) {
3915 for (auto &iter : mPendingRequestsList) {
3916 // Increment pipeline depth for the following pending requests.
3917 if (iter.frame_number > frameNumber) {
3918 iter.pipeline_depth++;
3919 }
3920 }
3921 }
3922
3923 unblockRequestIfNecessary();
3924}
3925
/*===========================================================================
 * FUNCTION   : unblockRequestIfNecessary
 *
 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
 *              that mMutex is held when this function is called.
 *
 * PARAMETERS : none
 *
 * RETURN     : none
 *
 *==========================================================================*/
void QCamera3HardwareInterface::unblockRequestIfNecessary()
{
    // Unblock process_capture_request
    // Signal unconditionally; the waiter re-checks its own predicate after
    // waking, so a spurious signal is harmless (standard condvar usage).
    pthread_cond_signal(&mRequestCond);
}
3942
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003943/*===========================================================================
3944 * FUNCTION : isHdrSnapshotRequest
3945 *
3946 * DESCRIPTION: Function to determine if the request is for a HDR snapshot
3947 *
3948 * PARAMETERS : camera3 request structure
3949 *
3950 * RETURN : boolean decision variable
3951 *
3952 *==========================================================================*/
3953bool QCamera3HardwareInterface::isHdrSnapshotRequest(camera3_capture_request *request)
3954{
3955 if (request == NULL) {
3956 LOGE("Invalid request handle");
3957 assert(0);
3958 return false;
3959 }
3960
3961 if (!mForceHdrSnapshot) {
3962 CameraMetadata frame_settings;
3963 frame_settings = request->settings;
3964
3965 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
3966 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
3967 if (metaMode != ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
3968 return false;
3969 }
3970 } else {
3971 return false;
3972 }
3973
3974 if (frame_settings.exists(ANDROID_CONTROL_SCENE_MODE)) {
3975 uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
3976 if (fwk_sceneMode != ANDROID_CONTROL_SCENE_MODE_HDR) {
3977 return false;
3978 }
3979 } else {
3980 return false;
3981 }
3982 }
3983
3984 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
3985 if (request->output_buffers[i].stream->format
3986 == HAL_PIXEL_FORMAT_BLOB) {
3987 return true;
3988 }
3989 }
3990
3991 return false;
3992}
/*===========================================================================
 * FUNCTION   : orchestrateRequest
 *
 * DESCRIPTION: Orchestrates a capture request from camera service. A normal
 *              request is simply re-numbered with an internal frame number
 *              and forwarded. An HDR snapshot request (no input buffer) is
 *              expanded into a fixed bracketing sequence of internal
 *              requests at -EV, 0EV and +EV exposure compensation, with the
 *              framework's own frame number attached to the second
 *              (settling) capture so exactly one result reaches the app.
 *
 * PARAMETERS :
 *   @request : request from framework to process
 *
 * RETURN     : Error status codes
 *
 *==========================================================================*/
int32_t QCamera3HardwareInterface::orchestrateRequest(
        camera3_capture_request_t *request)
{

    // Saved so the request struct can be mutated for the internal captures
    // and restored before returning to the framework.
    uint32_t originalFrameNumber = request->frame_number;
    uint32_t originalOutputCount = request->num_output_buffers;
    const camera_metadata_t *original_settings = request->settings;
    List<InternalRequest> internallyRequestedStreams;
    List<InternalRequest> emptyInternalList;

    if (isHdrSnapshotRequest(request) && request->input_buffer == NULL) {
        LOGD("Framework requested:%d buffers in HDR snapshot", request->num_output_buffers);
        uint32_t internalFrameNumber;
        CameraMetadata modified_meta;


        /* Add Blob channel to list of internally requested streams */
        // Internal captures run metering-only on the snapshot (BLOB) stream;
        // meteringOnly/need_metadata are toggled per capture below.
        for (uint32_t i = 0; i < request->num_output_buffers; i++) {
            if (request->output_buffers[i].stream->format
                    == HAL_PIXEL_FORMAT_BLOB) {
                InternalRequest streamRequested;
                streamRequested.meteringOnly = 1;
                streamRequested.need_metadata = 0;
                streamRequested.stream = request->output_buffers[i].stream;
                internallyRequestedStreams.push_back(streamRequested);
            }
        }
        // Framework output buffers are withheld while the internal bracket
        // frames are captured.
        request->num_output_buffers = 0;
        auto itr = internallyRequestedStreams.begin();

        /* Modify setting to set compensation */
        // Clone the framework settings and apply -EV compensation with AE
        // locked so exposure stays pinned across the bracket.
        modified_meta = request->settings;
        int32_t expCompensation = GB_HDR_HALF_STEP_EV;
        uint8_t aeLock = 1;
        modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
        modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
        // NOTE(review): each release() hands out a buffer that is never
        // free_camera_metadata()'d before modified_settings is reassigned or
        // original_settings is restored — looks like a leak per HDR
        // snapshot; confirm ownership against processCaptureRequest.
        camera_metadata_t *modified_settings = modified_meta.release();
        request->settings = modified_settings;

        /* Capture Settling & -2x frame */
        // NOTE(review): return codes of the intermediate
        // processCaptureRequest calls are ignored throughout this sequence.
        _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
        request->frame_number = internalFrameNumber;
        processCaptureRequest(request, internallyRequestedStreams);

        // This capture carries the framework's buffers and is the one mapped
        // back to the original framework frame number.
        request->num_output_buffers = originalOutputCount;
        _orchestrationDb.allocStoreInternalFrameNumber(originalFrameNumber, internalFrameNumber);
        request->frame_number = internalFrameNumber;
        processCaptureRequest(request, emptyInternalList);
        request->num_output_buffers = 0;

        // Re-acquire the settings and move compensation back to 0EV,
        // keeping AE locked.
        modified_meta = modified_settings;
        expCompensation = 0;
        aeLock = 1;
        modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
        modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
        modified_settings = modified_meta.release();
        request->settings = modified_settings;

        /* Capture Settling & 0X frame */

        // First a metering-only settling frame ...
        itr = internallyRequestedStreams.begin();
        if (itr == internallyRequestedStreams.end()) {
            LOGE("Error Internally Requested Stream list is empty");
            assert(0);
        } else {
            itr->need_metadata = 0;
            itr->meteringOnly = 1;
        }

        _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
        request->frame_number = internalFrameNumber;
        processCaptureRequest(request, internallyRequestedStreams);

        // ... then the actual 0EV capture with metadata requested.
        itr = internallyRequestedStreams.begin();
        if (itr == internallyRequestedStreams.end()) {
            ALOGE("Error Internally Requested Stream list is empty");
            assert(0);
        } else {
            itr->need_metadata = 1;
            itr->meteringOnly = 0;
        }

        _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
        request->frame_number = internalFrameNumber;
        processCaptureRequest(request, internallyRequestedStreams);

        /* Capture 2X frame*/
        // +EV leg: same settle-then-capture pattern as the 0EV leg.
        modified_meta = modified_settings;
        expCompensation = GB_HDR_2X_STEP_EV;
        aeLock = 1;
        modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
        modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
        modified_settings = modified_meta.release();
        request->settings = modified_settings;

        itr = internallyRequestedStreams.begin();
        if (itr == internallyRequestedStreams.end()) {
            ALOGE("Error Internally Requested Stream list is empty");
            assert(0);
        } else {
            itr->need_metadata = 0;
            itr->meteringOnly = 1;
        }
        _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
        request->frame_number = internalFrameNumber;
        processCaptureRequest(request, internallyRequestedStreams);

        itr = internallyRequestedStreams.begin();
        if (itr == internallyRequestedStreams.end()) {
            ALOGE("Error Internally Requested Stream list is empty");
            assert(0);
        } else {
            itr->need_metadata = 1;
            itr->meteringOnly = 0;
        }

        _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
        request->frame_number = internalFrameNumber;
        processCaptureRequest(request, internallyRequestedStreams);


        /* Capture 2X on original streaming config*/
        internallyRequestedStreams.clear();

        /* Restore original settings pointer */
        request->settings = original_settings;
    } else {
        // Normal (non-HDR-bracket) path: translate the framework frame
        // number to an internal one and forward unchanged.
        uint32_t internalFrameNumber;
        _orchestrationDb.allocStoreInternalFrameNumber(request->frame_number, internalFrameNumber);
        request->frame_number = internalFrameNumber;
        return processCaptureRequest(request, internallyRequestedStreams);
    }

    return NO_ERROR;
}
4139
4140/*===========================================================================
4141 * FUNCTION : orchestrateResult
4142 *
4143 * DESCRIPTION: Orchestrates a capture result to camera service
4144 *
4145 * PARAMETERS :
4146 * @request : request from framework to process
4147 *
4148 * RETURN :
4149 *
4150 *==========================================================================*/
4151void QCamera3HardwareInterface::orchestrateResult(
4152 camera3_capture_result_t *result)
4153{
4154 uint32_t frameworkFrameNumber;
4155 int32_t rc = _orchestrationDb.getFrameworkFrameNumber(result->frame_number,
4156 frameworkFrameNumber);
4157 if (rc != NO_ERROR) {
4158 LOGE("Cannot find translated frameworkFrameNumber");
4159 assert(0);
4160 } else {
4161 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004162 LOGD("Internal Request drop the result");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004163 } else {
4164 result->frame_number = frameworkFrameNumber;
4165 mCallbackOps->process_capture_result(mCallbackOps, result);
4166 }
4167 }
4168}
4169
4170/*===========================================================================
4171 * FUNCTION : orchestrateNotify
4172 *
4173 * DESCRIPTION: Orchestrates a notify to camera service
4174 *
4175 * PARAMETERS :
4176 * @request : request from framework to process
4177 *
4178 * RETURN :
4179 *
4180 *==========================================================================*/
4181void QCamera3HardwareInterface::orchestrateNotify(camera3_notify_msg_t *notify_msg)
4182{
4183 uint32_t frameworkFrameNumber;
4184 uint32_t internalFrameNumber = notify_msg->message.shutter.frame_number;
4185 int32_t rc = _orchestrationDb.getFrameworkFrameNumber(internalFrameNumber,
4186 frameworkFrameNumber);
4187 if (rc != NO_ERROR) {
4188 LOGE("Cannot find translated frameworkFrameNumber");
4189 assert(0);
4190 } else {
4191 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004192 LOGD("Internal Request drop the notifyCb");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004193 } else {
4194 notify_msg->message.shutter.frame_number = frameworkFrameNumber;
4195 mCallbackOps->notify(mCallbackOps, notify_msg);
4196 }
4197 }
4198}
4199
4200/*===========================================================================
4201 * FUNCTION : FrameNumberRegistry
4202 *
4203 * DESCRIPTION: Constructor
4204 *
4205 * PARAMETERS :
4206 *
4207 * RETURN :
4208 *
4209 *==========================================================================*/
4210FrameNumberRegistry::FrameNumberRegistry()
4211{
4212 _nextFreeInternalNumber = INTERNAL_FRAME_STARTING_NUMBER;
4213}
4214
4215/*===========================================================================
4216 * FUNCTION : ~FrameNumberRegistry
4217 *
4218 * DESCRIPTION: Destructor
4219 *
4220 * PARAMETERS :
4221 *
4222 * RETURN :
4223 *
4224 *==========================================================================*/
4225FrameNumberRegistry::~FrameNumberRegistry()
4226{
4227}
4228
4229/*===========================================================================
4230 * FUNCTION : PurgeOldEntriesLocked
4231 *
4232 * DESCRIPTION: Maintainance function to trigger LRU cleanup mechanism
4233 *
4234 * PARAMETERS :
4235 *
4236 * RETURN : NONE
4237 *
4238 *==========================================================================*/
4239void FrameNumberRegistry::purgeOldEntriesLocked()
4240{
4241 while (_register.begin() != _register.end()) {
4242 auto itr = _register.begin();
4243 if (itr->first < (_nextFreeInternalNumber - FRAME_REGISTER_LRU_SIZE)) {
4244 _register.erase(itr);
4245 } else {
4246 return;
4247 }
4248 }
4249}
4250
4251/*===========================================================================
4252 * FUNCTION : allocStoreInternalFrameNumber
4253 *
4254 * DESCRIPTION: Method to note down a framework request and associate a new
4255 * internal request number against it
4256 *
4257 * PARAMETERS :
4258 * @fFrameNumber: Identifier given by framework
4259 * @internalFN : Output parameter which will have the newly generated internal
4260 * entry
4261 *
4262 * RETURN : Error code
4263 *
4264 *==========================================================================*/
4265int32_t FrameNumberRegistry::allocStoreInternalFrameNumber(uint32_t frameworkFrameNumber,
4266 uint32_t &internalFrameNumber)
4267{
4268 Mutex::Autolock lock(mRegistryLock);
4269 internalFrameNumber = _nextFreeInternalNumber++;
4270 LOGD("Storing ff#:%d, with internal:%d", frameworkFrameNumber, internalFrameNumber);
4271 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, frameworkFrameNumber));
4272 purgeOldEntriesLocked();
4273 return NO_ERROR;
4274}
4275
4276/*===========================================================================
4277 * FUNCTION : generateStoreInternalFrameNumber
4278 *
4279 * DESCRIPTION: Method to associate a new internal request number independent
4280 * of any associate with framework requests
4281 *
4282 * PARAMETERS :
4283 * @internalFrame#: Output parameter which will have the newly generated internal
4284 *
4285 *
4286 * RETURN : Error code
4287 *
4288 *==========================================================================*/
4289int32_t FrameNumberRegistry::generateStoreInternalFrameNumber(uint32_t &internalFrameNumber)
4290{
4291 Mutex::Autolock lock(mRegistryLock);
4292 internalFrameNumber = _nextFreeInternalNumber++;
4293 LOGD("Generated internal framenumber:%d", internalFrameNumber);
4294 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, EMPTY_FRAMEWORK_FRAME_NUMBER));
4295 purgeOldEntriesLocked();
4296 return NO_ERROR;
4297}
4298
4299/*===========================================================================
4300 * FUNCTION : getFrameworkFrameNumber
4301 *
4302 * DESCRIPTION: Method to query the framework framenumber given an internal #
4303 *
4304 * PARAMETERS :
4305 * @internalFrame#: Internal reference
4306 * @frameworkframenumber: Output parameter holding framework frame entry
4307 *
4308 * RETURN : Error code
4309 *
4310 *==========================================================================*/
4311int32_t FrameNumberRegistry::getFrameworkFrameNumber(uint32_t internalFrameNumber,
4312 uint32_t &frameworkFrameNumber)
4313{
4314 Mutex::Autolock lock(mRegistryLock);
4315 auto itr = _register.find(internalFrameNumber);
4316 if (itr == _register.end()) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004317 LOGE("Cannot find internal#: %d", internalFrameNumber);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004318 return -ENOENT;
4319 }
4320
4321 frameworkFrameNumber = itr->second;
4322 purgeOldEntriesLocked();
4323 return NO_ERROR;
4324}
Thierry Strudel3d639192016-09-09 11:52:26 -07004325
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004326status_t QCamera3HardwareInterface::fillPbStreamConfig(
4327 pbcamera::StreamConfiguration *config, uint32_t pbStreamId, int pbStreamFormat,
4328 QCamera3Channel *channel, uint32_t streamIndex) {
4329 if (config == nullptr) {
4330 LOGE("%s: config is null", __FUNCTION__);
4331 return BAD_VALUE;
4332 }
4333
4334 if (channel == nullptr) {
4335 LOGE("%s: channel is null", __FUNCTION__);
4336 return BAD_VALUE;
4337 }
4338
4339 QCamera3Stream *stream = channel->getStreamByIndex(streamIndex);
4340 if (stream == nullptr) {
4341 LOGE("%s: Failed to get stream %d in channel.", __FUNCTION__, streamIndex);
4342 return NAME_NOT_FOUND;
4343 }
4344
4345 const cam_stream_info_t* streamInfo = stream->getStreamInfo();
4346 if (streamInfo == nullptr) {
4347 LOGE("%s: Failed to get stream info for stream %d in channel.", __FUNCTION__, streamIndex);
4348 return NAME_NOT_FOUND;
4349 }
4350
4351 config->id = pbStreamId;
4352 config->image.width = streamInfo->dim.width;
4353 config->image.height = streamInfo->dim.height;
4354 config->image.padding = 0;
4355 config->image.format = pbStreamFormat;
4356
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004357 uint32_t totalPlaneSize = 0;
4358
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004359 // Fill plane information.
4360 for (uint32_t i = 0; i < streamInfo->buf_planes.plane_info.num_planes; i++) {
4361 pbcamera::PlaneConfiguration plane;
4362 plane.stride = streamInfo->buf_planes.plane_info.mp[i].stride_in_bytes;
4363 plane.scanline = streamInfo->buf_planes.plane_info.mp[i].scanline;
4364 config->image.planes.push_back(plane);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004365
4366 totalPlaneSize += (plane.stride * plane.scanline);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004367 }
4368
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004369 config->image.padding = streamInfo->buf_planes.plane_info.frame_len - totalPlaneSize;
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004370 return OK;
4371}
4372
Thierry Strudel3d639192016-09-09 11:52:26 -07004373/*===========================================================================
4374 * FUNCTION : processCaptureRequest
4375 *
4376 * DESCRIPTION: process a capture request from camera service
4377 *
4378 * PARAMETERS :
4379 * @request : request from framework to process
4380 *
4381 * RETURN :
4382 *
4383 *==========================================================================*/
4384int QCamera3HardwareInterface::processCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004385 camera3_capture_request_t *request,
4386 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07004387{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004388 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PROC_CAP_REQ);
Thierry Strudel3d639192016-09-09 11:52:26 -07004389 int rc = NO_ERROR;
4390 int32_t request_id;
4391 CameraMetadata meta;
Thierry Strudel3d639192016-09-09 11:52:26 -07004392 bool isVidBufRequested = false;
4393 camera3_stream_buffer_t *pInputBuffer = NULL;
Thierry Strudel54dc9782017-02-15 12:12:10 -08004394 char prop[PROPERTY_VALUE_MAX];
Thierry Strudel3d639192016-09-09 11:52:26 -07004395
4396 pthread_mutex_lock(&mMutex);
4397
4398 // Validate current state
4399 switch (mState) {
4400 case CONFIGURED:
4401 case STARTED:
4402 /* valid state */
4403 break;
4404
4405 case ERROR:
4406 pthread_mutex_unlock(&mMutex);
4407 handleCameraDeviceError();
4408 return -ENODEV;
4409
4410 default:
4411 LOGE("Invalid state %d", mState);
4412 pthread_mutex_unlock(&mMutex);
4413 return -ENODEV;
4414 }
4415
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004416 rc = validateCaptureRequest(request, internallyRequestedStreams);
Thierry Strudel3d639192016-09-09 11:52:26 -07004417 if (rc != NO_ERROR) {
4418 LOGE("incoming request is not valid");
4419 pthread_mutex_unlock(&mMutex);
4420 return rc;
4421 }
4422
4423 meta = request->settings;
4424
4425 // For first capture request, send capture intent, and
4426 // stream on all streams
4427 if (mState == CONFIGURED) {
4428 // send an unconfigure to the backend so that the isp
4429 // resources are deallocated
4430 if (!mFirstConfiguration) {
4431 cam_stream_size_info_t stream_config_info;
4432 int32_t hal_version = CAM_HAL_V3;
4433 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
4434 stream_config_info.buffer_info.min_buffers =
4435 MIN_INFLIGHT_REQUESTS;
4436 stream_config_info.buffer_info.max_buffers =
4437 m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
4438 clear_metadata_buffer(mParameters);
4439 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4440 CAM_INTF_PARM_HAL_VERSION, hal_version);
4441 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4442 CAM_INTF_META_STREAM_INFO, stream_config_info);
4443 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4444 mParameters);
4445 if (rc < 0) {
4446 LOGE("set_parms for unconfigure failed");
4447 pthread_mutex_unlock(&mMutex);
4448 return rc;
4449 }
4450 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004451 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07004452 /* get eis information for stream configuration */
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004453 cam_is_type_t isTypeVideo, isTypePreview, is_type=IS_TYPE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07004454 char is_type_value[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004455 property_get("persist.camera.is_type", is_type_value, "4");
4456 isTypeVideo = static_cast<cam_is_type_t>(atoi(is_type_value));
4457 // Make default value for preview IS_TYPE as IS_TYPE_EIS_2_0
4458 property_get("persist.camera.is_type_preview", is_type_value, "4");
4459 isTypePreview = static_cast<cam_is_type_t>(atoi(is_type_value));
4460 LOGD("isTypeVideo: %d isTypePreview: %d", isTypeVideo, isTypePreview);
Thierry Strudel3d639192016-09-09 11:52:26 -07004461
4462 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4463 int32_t hal_version = CAM_HAL_V3;
4464 uint8_t captureIntent =
4465 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4466 mCaptureIntent = captureIntent;
4467 clear_metadata_buffer(mParameters);
4468 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
4469 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
4470 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004471 if (mFirstConfiguration) {
4472 // configure instant AEC
4473 // Instant AEC is a session based parameter and it is needed only
4474 // once per complete session after open camera.
4475 // i.e. This is set only once for the first capture request, after open camera.
4476 setInstantAEC(meta);
4477 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004478 uint8_t fwkVideoStabMode=0;
4479 if (meta.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
4480 fwkVideoStabMode = meta.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
4481 }
4482
4483 // If EIS setprop is enabled & if first capture setting has EIS enabled then only
4484 // turn it on for video/preview
4485 bool setEis = m_bEisEnable && fwkVideoStabMode && m_bEisSupportedSize &&
4486 (isTypeVideo >= IS_TYPE_EIS_2_0);
Thierry Strudel3d639192016-09-09 11:52:26 -07004487 int32_t vsMode;
4488 vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
4489 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
4490 rc = BAD_VALUE;
4491 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004492 LOGD("setEis %d", setEis);
4493 bool eis3Supported = false;
4494 size_t count = IS_TYPE_MAX;
4495 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
4496 for (size_t i = 0; i < count; i++) {
4497 if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0) {
4498 eis3Supported = true;
4499 break;
4500 }
4501 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004502
4503 //IS type will be 0 unless EIS is supported. If EIS is supported
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004504 //it could either be 4 or 5 depending on the stream and video size
Thierry Strudel3d639192016-09-09 11:52:26 -07004505 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4506 if (setEis) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004507 if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_PREVIEW) {
4508 is_type = isTypePreview;
4509 } else if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_VIDEO ) {
4510 if ( (isTypeVideo == IS_TYPE_EIS_3_0) && (eis3Supported == FALSE) ) {
4511 LOGW(" EIS_3.0 is not supported and so setting EIS_2.0");
Thierry Strudel3d639192016-09-09 11:52:26 -07004512 is_type = IS_TYPE_EIS_2_0;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004513 } else {
4514 is_type = isTypeVideo;
Thierry Strudel3d639192016-09-09 11:52:26 -07004515 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004516 } else {
4517 is_type = IS_TYPE_NONE;
4518 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004519 mStreamConfigInfo.is_type[i] = is_type;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004520 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004521 mStreamConfigInfo.is_type[i] = IS_TYPE_NONE;
4522 }
4523 }
4524
4525 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4526 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
4527
Thierry Strudel54dc9782017-02-15 12:12:10 -08004528 //Disable tintless only if the property is set to 0
4529 memset(prop, 0, sizeof(prop));
4530 property_get("persist.camera.tintless.enable", prop, "1");
4531 int32_t tintless_value = atoi(prop);
4532
Thierry Strudel3d639192016-09-09 11:52:26 -07004533 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4534 CAM_INTF_PARM_TINTLESS, tintless_value);
Thierry Strudel54dc9782017-02-15 12:12:10 -08004535
Thierry Strudel3d639192016-09-09 11:52:26 -07004536 //Disable CDS for HFR mode or if DIS/EIS is on.
4537 //CDS is a session parameter in the backend/ISP, so need to be set/reset
4538 //after every configure_stream
4539 if ((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
4540 (m_bIsVideo)) {
4541 int32_t cds = CAM_CDS_MODE_OFF;
4542 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4543 CAM_INTF_PARM_CDS_MODE, cds))
4544 LOGE("Failed to disable CDS for HFR mode");
4545
4546 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004547
4548 if (m_debug_avtimer || meta.exists(QCAMERA3_USE_AV_TIMER)) {
4549 uint8_t* use_av_timer = NULL;
4550
4551 if (m_debug_avtimer){
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004552 LOGI(" Enabling AV timer through setprop");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004553 use_av_timer = &m_debug_avtimer;
4554 }
4555 else{
4556 use_av_timer =
4557 meta.find(QCAMERA3_USE_AV_TIMER).data.u8;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004558 if (use_av_timer) {
4559 LOGI("Enabling AV timer through Metadata: use_av_timer: %d", *use_av_timer);
4560 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004561 }
4562
4563 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
4564 rc = BAD_VALUE;
4565 }
4566 }
4567
Thierry Strudel3d639192016-09-09 11:52:26 -07004568 setMobicat();
4569
4570 /* Set fps and hfr mode while sending meta stream info so that sensor
4571 * can configure appropriate streaming mode */
4572 mHFRVideoFps = DEFAULT_VIDEO_FPS;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004573 mMinInFlightRequests = MIN_INFLIGHT_REQUESTS;
4574 mMaxInFlightRequests = MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004575 if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
4576 rc = setHalFpsRange(meta, mParameters);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004577 if (rc == NO_ERROR) {
4578 int32_t max_fps =
4579 (int32_t) meta.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
Zhijun He21b864a2016-06-24 13:41:19 -07004580 if (max_fps == 60 || mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004581 mMinInFlightRequests = MIN_INFLIGHT_60FPS_REQUESTS;
4582 }
4583 /* For HFR, more buffers are dequeued upfront to improve the performance */
4584 if (mBatchSize) {
4585 mMinInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
4586 mMaxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
4587 }
4588 }
4589 else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004590 LOGE("setHalFpsRange failed");
4591 }
4592 }
4593 if (meta.exists(ANDROID_CONTROL_MODE)) {
4594 uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
4595 rc = extractSceneMode(meta, metaMode, mParameters);
4596 if (rc != NO_ERROR) {
4597 LOGE("extractSceneMode failed");
4598 }
4599 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004600 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07004601
Thierry Strudel04e026f2016-10-10 11:27:36 -07004602 if (meta.exists(QCAMERA3_VIDEO_HDR_MODE)) {
4603 cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
4604 meta.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
4605 rc = setVideoHdrMode(mParameters, vhdr);
4606 if (rc != NO_ERROR) {
4607 LOGE("setVideoHDR is failed");
4608 }
4609 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004610
Thierry Strudel3d639192016-09-09 11:52:26 -07004611 //TODO: validate the arguments, HSV scenemode should have only the
4612 //advertised fps ranges
4613
4614 /*set the capture intent, hal version, tintless, stream info,
4615 *and disenable parameters to the backend*/
4616 LOGD("set_parms META_STREAM_INFO " );
4617 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4618 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x "
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004619 "Format:%d is_type: %d",
Thierry Strudel3d639192016-09-09 11:52:26 -07004620 mStreamConfigInfo.type[i],
4621 mStreamConfigInfo.stream_sizes[i].width,
4622 mStreamConfigInfo.stream_sizes[i].height,
4623 mStreamConfigInfo.postprocess_mask[i],
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004624 mStreamConfigInfo.format[i],
4625 mStreamConfigInfo.is_type[i]);
Thierry Strudel3d639192016-09-09 11:52:26 -07004626 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004627
Thierry Strudel3d639192016-09-09 11:52:26 -07004628 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4629 mParameters);
4630 if (rc < 0) {
4631 LOGE("set_parms failed for hal version, stream info");
4632 }
4633
Chien-Yu Chenee335912017-02-09 17:53:20 -08004634 memset(&mSensorModeInfo, 0, sizeof(mSensorModeInfo));
4635 rc = getSensorModeInfo(mSensorModeInfo);
Thierry Strudel3d639192016-09-09 11:52:26 -07004636 if (rc != NO_ERROR) {
4637 LOGE("Failed to get sensor output size");
4638 pthread_mutex_unlock(&mMutex);
4639 goto error_exit;
4640 }
4641
4642 mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
4643 gCamCapability[mCameraId]->active_array_size.height,
Chien-Yu Chenee335912017-02-09 17:53:20 -08004644 mSensorModeInfo.active_array_size.width,
4645 mSensorModeInfo.active_array_size.height);
Thierry Strudel3d639192016-09-09 11:52:26 -07004646
4647 /* Set batchmode before initializing channel. Since registerBuffer
4648 * internally initializes some of the channels, better set batchmode
4649 * even before first register buffer */
4650 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4651 it != mStreamInfo.end(); it++) {
4652 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
4653 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
4654 && mBatchSize) {
4655 rc = channel->setBatchSize(mBatchSize);
4656 //Disable per frame map unmap for HFR/batchmode case
4657 rc |= channel->setPerFrameMapUnmap(false);
4658 if (NO_ERROR != rc) {
4659 LOGE("Channel init failed %d", rc);
4660 pthread_mutex_unlock(&mMutex);
4661 goto error_exit;
4662 }
4663 }
4664 }
4665
4666 //First initialize all streams
4667 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4668 it != mStreamInfo.end(); it++) {
4669 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
4670 if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
4671 ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004672 setEis) {
4673 for (size_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4674 if ( (1U << mStreamConfigInfo.type[i]) == channel->getStreamTypeMask() ) {
4675 is_type = mStreamConfigInfo.is_type[i];
4676 break;
4677 }
4678 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004679 rc = channel->initialize(is_type);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004680 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004681 rc = channel->initialize(IS_TYPE_NONE);
4682 }
4683 if (NO_ERROR != rc) {
4684 LOGE("Channel initialization failed %d", rc);
4685 pthread_mutex_unlock(&mMutex);
4686 goto error_exit;
4687 }
4688 }
4689
4690 if (mRawDumpChannel) {
4691 rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
4692 if (rc != NO_ERROR) {
4693 LOGE("Error: Raw Dump Channel init failed");
4694 pthread_mutex_unlock(&mMutex);
4695 goto error_exit;
4696 }
4697 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004698 if (mHdrPlusRawSrcChannel) {
4699 rc = mHdrPlusRawSrcChannel->initialize(IS_TYPE_NONE);
4700 if (rc != NO_ERROR) {
4701 LOGE("Error: HDR+ RAW Source Channel init failed");
4702 pthread_mutex_unlock(&mMutex);
4703 goto error_exit;
4704 }
4705 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004706 if (mSupportChannel) {
4707 rc = mSupportChannel->initialize(IS_TYPE_NONE);
4708 if (rc < 0) {
4709 LOGE("Support channel initialization failed");
4710 pthread_mutex_unlock(&mMutex);
4711 goto error_exit;
4712 }
4713 }
4714 if (mAnalysisChannel) {
4715 rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
4716 if (rc < 0) {
4717 LOGE("Analysis channel initialization failed");
4718 pthread_mutex_unlock(&mMutex);
4719 goto error_exit;
4720 }
4721 }
4722 if (mDummyBatchChannel) {
4723 rc = mDummyBatchChannel->setBatchSize(mBatchSize);
4724 if (rc < 0) {
4725 LOGE("mDummyBatchChannel setBatchSize failed");
4726 pthread_mutex_unlock(&mMutex);
4727 goto error_exit;
4728 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004729 rc = mDummyBatchChannel->initialize(IS_TYPE_NONE);
Thierry Strudel3d639192016-09-09 11:52:26 -07004730 if (rc < 0) {
4731 LOGE("mDummyBatchChannel initialization failed");
4732 pthread_mutex_unlock(&mMutex);
4733 goto error_exit;
4734 }
4735 }
4736
4737 // Set bundle info
4738 rc = setBundleInfo();
4739 if (rc < 0) {
4740 LOGE("setBundleInfo failed %d", rc);
4741 pthread_mutex_unlock(&mMutex);
4742 goto error_exit;
4743 }
4744
4745 //update settings from app here
4746 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
4747 mIsDeviceLinked = meta.find(QCAMERA3_DUALCAM_LINK_ENABLE).data.u8[0];
4748 LOGH("Dualcam: setting On=%d id =%d", mIsDeviceLinked, mCameraId);
4749 }
4750 if (meta.exists(QCAMERA3_DUALCAM_LINK_IS_MAIN)) {
4751 mIsMainCamera = meta.find(QCAMERA3_DUALCAM_LINK_IS_MAIN).data.u8[0];
4752 LOGH("Dualcam: Is this main camera = %d id =%d", mIsMainCamera, mCameraId);
4753 }
4754 if (meta.exists(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID)) {
4755 mLinkedCameraId = meta.find(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID).data.u8[0];
4756 LOGH("Dualcam: Linked camera Id %d id =%d", mLinkedCameraId, mCameraId);
4757
4758 if ( (mLinkedCameraId >= MM_CAMERA_MAX_NUM_SENSORS) &&
4759 (mLinkedCameraId != mCameraId) ) {
4760 LOGE("Dualcam: mLinkedCameraId %d is invalid, current cam id = %d",
4761 mLinkedCameraId, mCameraId);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004762 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07004763 goto error_exit;
4764 }
4765 }
4766
4767 // add bundle related cameras
4768 LOGH("%s: Dualcam: id =%d, mIsDeviceLinked=%d", __func__,mCameraId, mIsDeviceLinked);
4769 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004770 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
4771 &m_pDualCamCmdPtr->bundle_info;
4772 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07004773 if (mIsDeviceLinked)
4774 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_ON;
4775 else
4776 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
4777
4778 pthread_mutex_lock(&gCamLock);
4779
4780 if (sessionId[mLinkedCameraId] == 0xDEADBEEF) {
4781 LOGE("Dualcam: Invalid Session Id ");
4782 pthread_mutex_unlock(&gCamLock);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004783 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07004784 goto error_exit;
4785 }
4786
4787 if (mIsMainCamera == 1) {
4788 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
4789 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07004790 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004791 m_pRelCamSyncBuf->cam_role = CAM_ROLE_BAYER;
Thierry Strudel3d639192016-09-09 11:52:26 -07004792 // related session id should be session id of linked session
4793 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
4794 } else {
4795 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
4796 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07004797 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004798 m_pRelCamSyncBuf->cam_role = CAM_ROLE_MONO;
Thierry Strudel3d639192016-09-09 11:52:26 -07004799 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
4800 }
4801 pthread_mutex_unlock(&gCamLock);
4802
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004803 rc = mCameraHandle->ops->set_dual_cam_cmd(
4804 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07004805 if (rc < 0) {
4806 LOGE("Dualcam: link failed");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004807 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07004808 goto error_exit;
4809 }
4810 }
4811
4812 //Then start them.
4813 LOGH("Start META Channel");
4814 rc = mMetadataChannel->start();
4815 if (rc < 0) {
4816 LOGE("META channel start failed");
4817 pthread_mutex_unlock(&mMutex);
4818 goto error_exit;
4819 }
4820
4821 if (mAnalysisChannel) {
4822 rc = mAnalysisChannel->start();
4823 if (rc < 0) {
4824 LOGE("Analysis channel start failed");
4825 mMetadataChannel->stop();
4826 pthread_mutex_unlock(&mMutex);
4827 goto error_exit;
4828 }
4829 }
4830
4831 if (mSupportChannel) {
4832 rc = mSupportChannel->start();
4833 if (rc < 0) {
4834 LOGE("Support channel start failed");
4835 mMetadataChannel->stop();
4836 /* Although support and analysis are mutually exclusive today
4837 adding it in any case for future proofing */
4838 if (mAnalysisChannel) {
4839 mAnalysisChannel->stop();
4840 }
4841 pthread_mutex_unlock(&mMutex);
4842 goto error_exit;
4843 }
4844 }
4845 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4846 it != mStreamInfo.end(); it++) {
4847 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
4848 LOGH("Start Processing Channel mask=%d",
4849 channel->getStreamTypeMask());
4850 rc = channel->start();
4851 if (rc < 0) {
4852 LOGE("channel start failed");
4853 pthread_mutex_unlock(&mMutex);
4854 goto error_exit;
4855 }
4856 }
4857
4858 if (mRawDumpChannel) {
4859 LOGD("Starting raw dump stream");
4860 rc = mRawDumpChannel->start();
4861 if (rc != NO_ERROR) {
4862 LOGE("Error Starting Raw Dump Channel");
4863 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4864 it != mStreamInfo.end(); it++) {
4865 QCamera3Channel *channel =
4866 (QCamera3Channel *)(*it)->stream->priv;
4867 LOGH("Stopping Processing Channel mask=%d",
4868 channel->getStreamTypeMask());
4869 channel->stop();
4870 }
4871 if (mSupportChannel)
4872 mSupportChannel->stop();
4873 if (mAnalysisChannel) {
4874 mAnalysisChannel->stop();
4875 }
4876 mMetadataChannel->stop();
4877 pthread_mutex_unlock(&mMutex);
4878 goto error_exit;
4879 }
4880 }
4881
4882 if (mChannelHandle) {
4883
4884 rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
4885 mChannelHandle);
4886 if (rc != NO_ERROR) {
4887 LOGE("start_channel failed %d", rc);
4888 pthread_mutex_unlock(&mMutex);
4889 goto error_exit;
4890 }
4891 }
4892
4893 goto no_error;
4894error_exit:
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004895 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07004896 return rc;
4897no_error:
Thierry Strudel3d639192016-09-09 11:52:26 -07004898 mWokenUpByDaemon = false;
4899 mPendingLiveRequest = 0;
4900 mFirstConfiguration = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07004901 }
4902
Chien-Yu Chenee335912017-02-09 17:53:20 -08004903 // Enable HDR+ mode for the first PREVIEW_INTENT request.
4904 if (mHdrPlusClient != nullptr && !mFirstPreviewIntentSeen &&
4905 meta.exists(ANDROID_CONTROL_CAPTURE_INTENT) &&
4906 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0] ==
4907 ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW) {
4908 rc = enableHdrPlusModeLocked();
4909 if (rc != OK) {
4910 LOGE("%s: Failed to configure HDR+ streams.", __FUNCTION__);
4911 pthread_mutex_unlock(&mMutex);
4912 return rc;
4913 }
4914
4915 // Start HDR+ RAW source channel if AP provides RAW input buffers.
4916 if (mHdrPlusRawSrcChannel) {
4917 rc = mHdrPlusRawSrcChannel->start();
4918 if (rc != OK) {
4919 LOGE("Error Starting HDR+ RAW Channel");
4920 pthread_mutex_unlock(&mMutex);
4921 return rc;
4922 }
4923 }
4924 mFirstPreviewIntentSeen = true;
4925 }
4926
Thierry Strudel3d639192016-09-09 11:52:26 -07004927 uint32_t frameNumber = request->frame_number;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004928 cam_stream_ID_t streamsArray;
Thierry Strudel3d639192016-09-09 11:52:26 -07004929
4930 if (mFlushPerf) {
4931 //we cannot accept any requests during flush
4932 LOGE("process_capture_request cannot proceed during flush");
4933 pthread_mutex_unlock(&mMutex);
4934 return NO_ERROR; //should return an error
4935 }
4936
4937 if (meta.exists(ANDROID_REQUEST_ID)) {
4938 request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
4939 mCurrentRequestId = request_id;
4940 LOGD("Received request with id: %d", request_id);
4941 } else if (mState == CONFIGURED || mCurrentRequestId == -1){
4942 LOGE("Unable to find request id field, \
4943 & no previous id available");
4944 pthread_mutex_unlock(&mMutex);
4945 return NAME_NOT_FOUND;
4946 } else {
4947 LOGD("Re-using old request id");
4948 request_id = mCurrentRequestId;
4949 }
4950
4951 LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
4952 request->num_output_buffers,
4953 request->input_buffer,
4954 frameNumber);
4955 // Acquire all request buffers first
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004956 streamsArray.num_streams = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07004957 int blob_request = 0;
Emilian Peev7650c122017-01-19 08:24:33 -08004958 bool depthRequestPresent = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07004959 uint32_t snapshotStreamId = 0;
4960 for (size_t i = 0; i < request->num_output_buffers; i++) {
4961 const camera3_stream_buffer_t& output = request->output_buffers[i];
4962 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
4963
Emilian Peev7650c122017-01-19 08:24:33 -08004964 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
4965 (output.stream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004966 //FIXME??:Call function to store local copy of jpeg data for encode params.
Thierry Strudel3d639192016-09-09 11:52:26 -07004967 blob_request = 1;
4968 snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
4969 }
4970
4971 if (output.acquire_fence != -1) {
4972 rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
4973 close(output.acquire_fence);
4974 if (rc != OK) {
4975 LOGE("sync wait failed %d", rc);
4976 pthread_mutex_unlock(&mMutex);
4977 return rc;
4978 }
4979 }
4980
Emilian Peev7650c122017-01-19 08:24:33 -08004981 if (output.stream->data_space == HAL_DATASPACE_DEPTH) {
4982 depthRequestPresent = true;
4983 continue;
4984 }
4985
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004986 streamsArray.stream_request[streamsArray.num_streams++].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07004987 channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel3d639192016-09-09 11:52:26 -07004988
4989 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
4990 isVidBufRequested = true;
4991 }
4992 }
4993
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004994 //FIXME: Add checks to ensure to dups in validateCaptureRequest
4995 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
4996 itr++) {
4997 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
4998 streamsArray.stream_request[streamsArray.num_streams++].streamID =
4999 channel->getStreamID(channel->getStreamTypeMask());
5000
5001 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5002 isVidBufRequested = true;
5003 }
5004 }
5005
Thierry Strudel3d639192016-09-09 11:52:26 -07005006 if (blob_request) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005007 KPI_ATRACE_CAMSCOPE_INT("SNAPSHOT", CAMSCOPE_HAL3_SNAPSHOT, 1);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005008 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
Thierry Strudel3d639192016-09-09 11:52:26 -07005009 }
5010 if (blob_request && mRawDumpChannel) {
5011 LOGD("Trigger Raw based on blob request if Raw dump is enabled");
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005012 streamsArray.stream_request[streamsArray.num_streams].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005013 mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005014 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
Thierry Strudel3d639192016-09-09 11:52:26 -07005015 }
5016
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005017 {
5018 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5019 // Request a RAW buffer if
5020 // 1. mHdrPlusRawSrcChannel is valid.
5021 // 2. frameNumber is multiples of kHdrPlusRawPeriod (in order to limit RAW capture rate.)
5022 // 3. There is no pending HDR+ request.
5023 if (mHdrPlusRawSrcChannel && frameNumber % kHdrPlusRawPeriod == 0 &&
5024 mHdrPlusPendingRequests.size() == 0) {
5025 streamsArray.stream_request[streamsArray.num_streams].streamID =
5026 mHdrPlusRawSrcChannel->getStreamID(mHdrPlusRawSrcChannel->getStreamTypeMask());
5027 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
5028 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005029 }
5030
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005031 //extract capture intent
5032 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
5033 mCaptureIntent =
5034 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
5035 }
5036
5037 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5038 mCacMode =
5039 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5040 }
5041
5042 bool hdrPlusRequest = false;
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005043 HdrPlusPendingRequest pendingHdrPlusRequest = {};
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005044
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -08005045 // If this request has a still capture intent, try to submit an HDR+ request.
Chien-Yu Chenee335912017-02-09 17:53:20 -08005046 if (mHdrPlusClient != nullptr && mHdrPlusModeEnabled &&
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -08005047 mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE) {
5048 hdrPlusRequest = trySubmittingHdrPlusRequest(&pendingHdrPlusRequest, *request, meta);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005049 }
5050
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005051 if (hdrPlusRequest) {
5052 // For a HDR+ request, just set the frame parameters.
5053 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
5054 if (rc < 0) {
5055 LOGE("fail to set frame parameters");
5056 pthread_mutex_unlock(&mMutex);
5057 return rc;
5058 }
5059 } else if(request->input_buffer == NULL) {
Thierry Strudel3d639192016-09-09 11:52:26 -07005060 /* Parse the settings:
5061 * - For every request in NORMAL MODE
5062 * - For every request in HFR mode during preview only case
5063 * - For first request of every batch in HFR mode during video
5064 * recording. In batchmode the same settings except frame number is
5065 * repeated in each request of the batch.
5066 */
5067 if (!mBatchSize ||
5068 (mBatchSize && !isVidBufRequested) ||
5069 (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005070 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
Thierry Strudel3d639192016-09-09 11:52:26 -07005071 if (rc < 0) {
5072 LOGE("fail to set frame parameters");
5073 pthread_mutex_unlock(&mMutex);
5074 return rc;
5075 }
5076 }
5077 /* For batchMode HFR, setFrameParameters is not called for every
5078 * request. But only frame number of the latest request is parsed.
5079 * Keep track of first and last frame numbers in a batch so that
5080 * metadata for the frame numbers of batch can be duplicated in
5081 * handleBatchMetadata */
5082 if (mBatchSize) {
5083 if (!mToBeQueuedVidBufs) {
5084 //start of the batch
5085 mFirstFrameNumberInBatch = request->frame_number;
5086 }
5087 if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5088 CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
5089 LOGE("Failed to set the frame number in the parameters");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005090 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005091 return BAD_VALUE;
5092 }
5093 }
5094 if (mNeedSensorRestart) {
5095 /* Unlock the mutex as restartSensor waits on the channels to be
5096 * stopped, which in turn calls stream callback functions -
5097 * handleBufferWithLock and handleMetadataWithLock */
5098 pthread_mutex_unlock(&mMutex);
5099 rc = dynamicUpdateMetaStreamInfo();
5100 if (rc != NO_ERROR) {
5101 LOGE("Restarting the sensor failed");
5102 return BAD_VALUE;
5103 }
5104 mNeedSensorRestart = false;
5105 pthread_mutex_lock(&mMutex);
5106 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005107 if(mResetInstantAEC) {
5108 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5109 CAM_INTF_PARM_INSTANT_AEC, (uint8_t)CAM_AEC_NORMAL_CONVERGENCE);
5110 mResetInstantAEC = false;
5111 }
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005112 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005113 if (request->input_buffer->acquire_fence != -1) {
5114 rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
5115 close(request->input_buffer->acquire_fence);
5116 if (rc != OK) {
5117 LOGE("input buffer sync wait failed %d", rc);
5118 pthread_mutex_unlock(&mMutex);
5119 return rc;
5120 }
5121 }
5122 }
5123
5124 if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
5125 mLastCustIntentFrmNum = frameNumber;
5126 }
5127 /* Update pending request list and pending buffers map */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005128 PendingRequestInfo pendingRequest = {};
Thierry Strudel3d639192016-09-09 11:52:26 -07005129 pendingRequestIterator latestRequest;
5130 pendingRequest.frame_number = frameNumber;
Emilian Peev7650c122017-01-19 08:24:33 -08005131 pendingRequest.num_buffers = depthRequestPresent ?
5132 (request->num_output_buffers - 1 ) : request->num_output_buffers;
Thierry Strudel3d639192016-09-09 11:52:26 -07005133 pendingRequest.request_id = request_id;
5134 pendingRequest.blob_request = blob_request;
5135 pendingRequest.timestamp = 0;
5136 pendingRequest.bUrgentReceived = 0;
5137 if (request->input_buffer) {
5138 pendingRequest.input_buffer =
5139 (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
5140 *(pendingRequest.input_buffer) = *(request->input_buffer);
5141 pInputBuffer = pendingRequest.input_buffer;
5142 } else {
5143 pendingRequest.input_buffer = NULL;
5144 pInputBuffer = NULL;
5145 }
5146
5147 pendingRequest.pipeline_depth = 0;
5148 pendingRequest.partial_result_cnt = 0;
5149 extractJpegMetadata(mCurJpegMeta, request);
5150 pendingRequest.jpegMetadata = mCurJpegMeta;
5151 pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
5152 pendingRequest.shutter_notified = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005153 pendingRequest.capture_intent = mCaptureIntent;
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07005154 if (meta.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
5155 mHybridAeEnable =
5156 meta.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8[0];
5157 }
5158 pendingRequest.hybrid_ae_enable = mHybridAeEnable;
Samuel Ha68ba5172016-12-15 18:41:12 -08005159 /* DevCamDebug metadata processCaptureRequest */
5160 if (meta.exists(DEVCAMDEBUG_META_ENABLE)) {
5161 mDevCamDebugMetaEnable =
5162 meta.find(DEVCAMDEBUG_META_ENABLE).data.u8[0];
5163 }
5164 pendingRequest.DevCamDebug_meta_enable = mDevCamDebugMetaEnable;
5165 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07005166
5167 //extract CAC info
5168 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5169 mCacMode =
5170 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5171 }
5172 pendingRequest.fwkCacMode = mCacMode;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005173 pendingRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005174
5175 PendingBuffersInRequest bufsForCurRequest;
5176 bufsForCurRequest.frame_number = frameNumber;
5177 // Mark current timestamp for the new request
5178 bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005179 bufsForCurRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005180
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005181 if (hdrPlusRequest) {
5182 // Save settings for this request.
5183 pendingHdrPlusRequest.settings = std::make_shared<metadata_buffer_t>();
5184 memcpy(pendingHdrPlusRequest.settings.get(), mParameters, sizeof(metadata_buffer_t));
5185
5186 // Add to pending HDR+ request queue.
5187 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5188 mHdrPlusPendingRequests.emplace(frameNumber, pendingHdrPlusRequest);
5189
5190 ALOGD("%s: frame number %u is an HDR+ request.", __FUNCTION__, frameNumber);
5191 }
5192
Thierry Strudel3d639192016-09-09 11:52:26 -07005193 for (size_t i = 0; i < request->num_output_buffers; i++) {
Emilian Peev7650c122017-01-19 08:24:33 -08005194 if (request->output_buffers[i].stream->data_space ==
5195 HAL_DATASPACE_DEPTH) {
5196 continue;
5197 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005198 RequestedBufferInfo requestedBuf;
5199 memset(&requestedBuf, 0, sizeof(requestedBuf));
5200 requestedBuf.stream = request->output_buffers[i].stream;
5201 requestedBuf.buffer = NULL;
5202 pendingRequest.buffers.push_back(requestedBuf);
5203
5204 // Add to buffer handle the pending buffers list
5205 PendingBufferInfo bufferInfo;
5206 bufferInfo.buffer = request->output_buffers[i].buffer;
5207 bufferInfo.stream = request->output_buffers[i].stream;
5208 bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
5209 QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
5210 LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
5211 frameNumber, bufferInfo.buffer,
5212 channel->getStreamTypeMask(), bufferInfo.stream->format);
5213 }
5214 // Add this request packet into mPendingBuffersMap
5215 mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
5216 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
5217 mPendingBuffersMap.get_num_overall_buffers());
5218
5219 latestRequest = mPendingRequestsList.insert(
5220 mPendingRequestsList.end(), pendingRequest);
5221 if(mFlush) {
5222 LOGI("mFlush is true");
5223 pthread_mutex_unlock(&mMutex);
5224 return NO_ERROR;
5225 }
5226
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005227 // If this is not an HDR+ request, send the request to metadata and each output buffer's
5228 // channel.
5229 if (!hdrPlusRequest) {
5230 int indexUsed;
5231 // Notify metadata channel we receive a request
5232 mMetadataChannel->request(NULL, frameNumber, indexUsed);
Thierry Strudel3d639192016-09-09 11:52:26 -07005233
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005234 if(request->input_buffer != NULL){
5235 LOGD("Input request, frame_number %d", frameNumber);
5236 rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
5237 if (NO_ERROR != rc) {
5238 LOGE("fail to set reproc parameters");
5239 pthread_mutex_unlock(&mMutex);
5240 return rc;
5241 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005242 }
5243
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005244 // Call request on other streams
5245 uint32_t streams_need_metadata = 0;
5246 pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
5247 for (size_t i = 0; i < request->num_output_buffers; i++) {
5248 const camera3_stream_buffer_t& output = request->output_buffers[i];
5249 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5250
5251 if (channel == NULL) {
5252 LOGW("invalid channel pointer for stream");
5253 continue;
5254 }
5255
5256 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
5257 LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
5258 output.buffer, request->input_buffer, frameNumber);
5259 if(request->input_buffer != NULL){
Thierry Strudel3d639192016-09-09 11:52:26 -07005260 rc = channel->request(output.buffer, frameNumber,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005261 pInputBuffer, &mReprocMeta, indexUsed, false, false);
5262 if (rc < 0) {
5263 LOGE("Fail to request on picture channel");
5264 pthread_mutex_unlock(&mMutex);
5265 return rc;
5266 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005267 } else {
Emilian Peev7650c122017-01-19 08:24:33 -08005268 if (HAL_DATASPACE_DEPTH == output.stream->data_space) {
5269 assert(NULL != mDepthChannel);
5270 assert(mDepthChannel == output.stream->priv);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005271
Emilian Peev7650c122017-01-19 08:24:33 -08005272 rc = mDepthChannel->mapBuffer(output.buffer, request->frame_number);
5273 if (rc < 0) {
5274 LOGE("Fail to map on depth buffer");
5275 pthread_mutex_unlock(&mMutex);
5276 return rc;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005277 }
Emilian Peev7650c122017-01-19 08:24:33 -08005278 } else {
5279 LOGD("snapshot request with buffer %p, frame_number %d",
5280 output.buffer, frameNumber);
5281 if (!request->settings) {
5282 rc = channel->request(output.buffer, frameNumber,
5283 NULL, mPrevParameters, indexUsed);
5284 } else {
5285 rc = channel->request(output.buffer, frameNumber,
5286 NULL, mParameters, indexUsed);
5287 }
5288 if (rc < 0) {
5289 LOGE("Fail to request on picture channel");
5290 pthread_mutex_unlock(&mMutex);
5291 return rc;
5292 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005293
Emilian Peev7650c122017-01-19 08:24:33 -08005294 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5295 uint32_t j = 0;
5296 for (j = 0; j < streamsArray.num_streams; j++) {
5297 if (streamsArray.stream_request[j].streamID == streamId) {
5298 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5299 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5300 else
5301 streamsArray.stream_request[j].buf_index = indexUsed;
5302 break;
5303 }
5304 }
5305 if (j == streamsArray.num_streams) {
5306 LOGE("Did not find matching stream to update index");
5307 assert(0);
5308 }
5309
5310 pendingBufferIter->need_metadata = true;
5311 streams_need_metadata++;
5312 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005313 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005314 } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
5315 bool needMetadata = false;
5316 QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
5317 rc = yuvChannel->request(output.buffer, frameNumber,
5318 pInputBuffer, (pInputBuffer ? &mReprocMeta : mParameters),
5319 needMetadata, indexUsed, false, false);
Thierry Strudel3d639192016-09-09 11:52:26 -07005320 if (rc < 0) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005321 LOGE("Fail to request on YUV channel");
Thierry Strudel3d639192016-09-09 11:52:26 -07005322 pthread_mutex_unlock(&mMutex);
5323 return rc;
5324 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005325
5326 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5327 uint32_t j = 0;
5328 for (j = 0; j < streamsArray.num_streams; j++) {
5329 if (streamsArray.stream_request[j].streamID == streamId) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005330 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5331 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5332 else
5333 streamsArray.stream_request[j].buf_index = indexUsed;
5334 break;
5335 }
5336 }
5337 if (j == streamsArray.num_streams) {
5338 LOGE("Did not find matching stream to update index");
5339 assert(0);
5340 }
5341
5342 pendingBufferIter->need_metadata = needMetadata;
5343 if (needMetadata)
5344 streams_need_metadata += 1;
5345 LOGD("calling YUV channel request, need_metadata is %d",
5346 needMetadata);
5347 } else {
5348 LOGD("request with buffer %p, frame_number %d",
5349 output.buffer, frameNumber);
5350
5351 rc = channel->request(output.buffer, frameNumber, indexUsed);
5352
5353 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5354 uint32_t j = 0;
5355 for (j = 0; j < streamsArray.num_streams; j++) {
5356 if (streamsArray.stream_request[j].streamID == streamId) {
5357 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5358 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5359 else
5360 streamsArray.stream_request[j].buf_index = indexUsed;
5361 break;
5362 }
5363 }
5364 if (j == streamsArray.num_streams) {
5365 LOGE("Did not find matching stream to update index");
5366 assert(0);
5367 }
5368
5369 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5370 && mBatchSize) {
5371 mToBeQueuedVidBufs++;
5372 if (mToBeQueuedVidBufs == mBatchSize) {
5373 channel->queueBatchBuf();
5374 }
5375 }
5376 if (rc < 0) {
5377 LOGE("request failed");
5378 pthread_mutex_unlock(&mMutex);
5379 return rc;
5380 }
5381 }
5382 pendingBufferIter++;
5383 }
5384
5385 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5386 itr++) {
5387 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5388
5389 if (channel == NULL) {
5390 LOGE("invalid channel pointer for stream");
5391 assert(0);
5392 return BAD_VALUE;
5393 }
5394
5395 InternalRequest requestedStream;
5396 requestedStream = (*itr);
5397
5398
5399 if ((*itr).stream->format == HAL_PIXEL_FORMAT_BLOB) {
5400 LOGD("snapshot request internally input buffer %p, frame_number %d",
5401 request->input_buffer, frameNumber);
5402 if(request->input_buffer != NULL){
5403 rc = channel->request(NULL, frameNumber,
5404 pInputBuffer, &mReprocMeta, indexUsed, true,
5405 requestedStream.meteringOnly);
5406 if (rc < 0) {
5407 LOGE("Fail to request on picture channel");
5408 pthread_mutex_unlock(&mMutex);
5409 return rc;
5410 }
5411 } else {
5412 LOGD("snapshot request with frame_number %d", frameNumber);
5413 if (!request->settings) {
5414 rc = channel->request(NULL, frameNumber,
5415 NULL, mPrevParameters, indexUsed, true,
5416 requestedStream.meteringOnly);
5417 } else {
5418 rc = channel->request(NULL, frameNumber,
5419 NULL, mParameters, indexUsed, true, requestedStream.meteringOnly);
5420 }
5421 if (rc < 0) {
5422 LOGE("Fail to request on picture channel");
5423 pthread_mutex_unlock(&mMutex);
5424 return rc;
5425 }
5426
5427 if ((*itr).meteringOnly != 1) {
5428 requestedStream.need_metadata = 1;
5429 streams_need_metadata++;
5430 }
5431 }
5432
5433 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5434 uint32_t j = 0;
5435 for (j = 0; j < streamsArray.num_streams; j++) {
5436 if (streamsArray.stream_request[j].streamID == streamId) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005437 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5438 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5439 else
5440 streamsArray.stream_request[j].buf_index = indexUsed;
5441 break;
5442 }
5443 }
5444 if (j == streamsArray.num_streams) {
5445 LOGE("Did not find matching stream to update index");
5446 assert(0);
5447 }
5448
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005449 } else {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005450 LOGE("Internal requests not supported on this stream type");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005451 assert(0);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005452 return INVALID_OPERATION;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005453 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005454 latestRequest->internalRequestList.push_back(requestedStream);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005455 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005456
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005457 //If 2 streams have need_metadata set to true, fail the request, unless
5458 //we copy/reference count the metadata buffer
5459 if (streams_need_metadata > 1) {
5460 LOGE("not supporting request in which two streams requires"
5461 " 2 HAL metadata for reprocessing");
5462 pthread_mutex_unlock(&mMutex);
5463 return -EINVAL;
5464 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005465
Emilian Peev7650c122017-01-19 08:24:33 -08005466 int32_t pdafEnable = depthRequestPresent ? 1 : 0;
5467 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5468 CAM_INTF_META_PDAF_DATA_ENABLE, pdafEnable)) {
5469 LOGE("%s: Failed to enable PDAF data in parameters!", __func__);
5470 pthread_mutex_unlock(&mMutex);
5471 return BAD_VALUE;
5472 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005473 if (request->input_buffer == NULL) {
5474 /* Set the parameters to backend:
5475 * - For every request in NORMAL MODE
5476 * - For every request in HFR mode during preview only case
5477 * - Once every batch in HFR mode during video recording
5478 */
5479 if (!mBatchSize ||
5480 (mBatchSize && !isVidBufRequested) ||
5481 (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
5482 LOGD("set_parms batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
5483 mBatchSize, isVidBufRequested,
5484 mToBeQueuedVidBufs);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005485
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005486 if(mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize)) {
5487 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5488 uint32_t m = 0;
5489 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5490 if (streamsArray.stream_request[k].streamID ==
5491 mBatchedStreamsArray.stream_request[m].streamID)
5492 break;
5493 }
5494 if (m == mBatchedStreamsArray.num_streams) {
5495 mBatchedStreamsArray.stream_request\
5496 [mBatchedStreamsArray.num_streams].streamID =
5497 streamsArray.stream_request[k].streamID;
5498 mBatchedStreamsArray.stream_request\
5499 [mBatchedStreamsArray.num_streams].buf_index =
5500 streamsArray.stream_request[k].buf_index;
5501 mBatchedStreamsArray.num_streams =
5502 mBatchedStreamsArray.num_streams + 1;
5503 }
5504 }
5505 streamsArray = mBatchedStreamsArray;
5506 }
5507 /* Update stream id of all the requested buffers */
5508 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID,
5509 streamsArray)) {
5510 LOGE("Failed to set stream type mask in the parameters");
5511 return BAD_VALUE;
5512 }
5513
5514 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5515 mParameters);
5516 if (rc < 0) {
5517 LOGE("set_parms failed");
5518 }
5519 /* reset to zero coz, the batch is queued */
5520 mToBeQueuedVidBufs = 0;
5521 mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
5522 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
5523 } else if (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs != mBatchSize)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005524 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5525 uint32_t m = 0;
5526 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5527 if (streamsArray.stream_request[k].streamID ==
5528 mBatchedStreamsArray.stream_request[m].streamID)
5529 break;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005530 }
5531 if (m == mBatchedStreamsArray.num_streams) {
5532 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5533 streamID = streamsArray.stream_request[k].streamID;
5534 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5535 buf_index = streamsArray.stream_request[k].buf_index;
5536 mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
5537 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005538 }
5539 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005540 mPendingLiveRequest++;
Thierry Strudel3d639192016-09-09 11:52:26 -07005541 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005542 }
5543
5544 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
5545
5546 mState = STARTED;
5547 // Added a timed condition wait
5548 struct timespec ts;
5549 uint8_t isValidTimeout = 1;
Shuzhen Wangfb961e52016-11-28 11:48:02 -08005550 rc = clock_gettime(CLOCK_MONOTONIC, &ts);
Thierry Strudel3d639192016-09-09 11:52:26 -07005551 if (rc < 0) {
5552 isValidTimeout = 0;
5553 LOGE("Error reading the real time clock!!");
5554 }
5555 else {
5556 // Make timeout as 5 sec for request to be honored
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005557 int64_t timeout = 5;
5558 {
5559 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5560 // If there is a pending HDR+ request, the following requests may be blocked until the
5561 // HDR+ request is done. So allow a longer timeout.
5562 if (mHdrPlusPendingRequests.size() > 0) {
5563 timeout = MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT;
5564 }
5565 }
5566 ts.tv_sec += timeout;
Thierry Strudel3d639192016-09-09 11:52:26 -07005567 }
5568 //Block on conditional variable
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005569 while ((mPendingLiveRequest >= mMinInFlightRequests) && !pInputBuffer &&
Thierry Strudel3d639192016-09-09 11:52:26 -07005570 (mState != ERROR) && (mState != DEINIT)) {
5571 if (!isValidTimeout) {
5572 LOGD("Blocking on conditional wait");
5573 pthread_cond_wait(&mRequestCond, &mMutex);
5574 }
5575 else {
5576 LOGD("Blocking on timed conditional wait");
5577 rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
5578 if (rc == ETIMEDOUT) {
5579 rc = -ENODEV;
5580 LOGE("Unblocked on timeout!!!!");
5581 break;
5582 }
5583 }
5584 LOGD("Unblocked");
5585 if (mWokenUpByDaemon) {
5586 mWokenUpByDaemon = false;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005587 if (mPendingLiveRequest < mMaxInFlightRequests)
Thierry Strudel3d639192016-09-09 11:52:26 -07005588 break;
5589 }
5590 }
5591 pthread_mutex_unlock(&mMutex);
5592
5593 return rc;
5594}
5595
5596/*===========================================================================
5597 * FUNCTION : dump
5598 *
5599 * DESCRIPTION:
5600 *
5601 * PARAMETERS :
5602 *
5603 *
5604 * RETURN :
5605 *==========================================================================*/
5606void QCamera3HardwareInterface::dump(int fd)
5607{
5608 pthread_mutex_lock(&mMutex);
5609 dprintf(fd, "\n Camera HAL3 information Begin \n");
5610
5611 dprintf(fd, "\nNumber of pending requests: %zu \n",
5612 mPendingRequestsList.size());
5613 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
5614 dprintf(fd, " Frame | Number of Buffers | Req Id: | Blob Req | Input buffer present\n");
5615 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
5616 for(pendingRequestIterator i = mPendingRequestsList.begin();
5617 i != mPendingRequestsList.end(); i++) {
5618 dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
5619 i->frame_number, i->num_buffers, i->request_id, i->blob_request,
5620 i->input_buffer);
5621 }
5622 dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
5623 mPendingBuffersMap.get_num_overall_buffers());
5624 dprintf(fd, "-------+------------------\n");
5625 dprintf(fd, " Frame | Stream type mask \n");
5626 dprintf(fd, "-------+------------------\n");
5627 for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
5628 for(auto &j : req.mPendingBufferList) {
5629 QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
5630 dprintf(fd, " %5d | %11d \n",
5631 req.frame_number, channel->getStreamTypeMask());
5632 }
5633 }
5634 dprintf(fd, "-------+------------------\n");
5635
5636 dprintf(fd, "\nPending frame drop list: %zu\n",
5637 mPendingFrameDropList.size());
5638 dprintf(fd, "-------+-----------\n");
5639 dprintf(fd, " Frame | Stream ID \n");
5640 dprintf(fd, "-------+-----------\n");
5641 for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
5642 i != mPendingFrameDropList.end(); i++) {
5643 dprintf(fd, " %5d | %9d \n",
5644 i->frame_number, i->stream_ID);
5645 }
5646 dprintf(fd, "-------+-----------\n");
5647
5648 dprintf(fd, "\n Camera HAL3 information End \n");
5649
5650 /* use dumpsys media.camera as trigger to send update debug level event */
5651 mUpdateDebugLevel = true;
5652 pthread_mutex_unlock(&mMutex);
5653 return;
5654}
5655
5656/*===========================================================================
5657 * FUNCTION : flush
5658 *
5659 * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
5660 * conditionally restarts channels
5661 *
5662 * PARAMETERS :
5663 * @ restartChannels: re-start all channels
5664 *
5665 *
5666 * RETURN :
5667 * 0 on success
5668 * Error code on failure
5669 *==========================================================================*/
5670int QCamera3HardwareInterface::flush(bool restartChannels)
5671{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08005672 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07005673 int32_t rc = NO_ERROR;
5674
5675 LOGD("Unblocking Process Capture Request");
5676 pthread_mutex_lock(&mMutex);
5677 mFlush = true;
5678 pthread_mutex_unlock(&mMutex);
5679
5680 rc = stopAllChannels();
5681 // unlink of dualcam
5682 if (mIsDeviceLinked) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005683 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
5684 &m_pDualCamCmdPtr->bundle_info;
5685 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005686 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
5687 pthread_mutex_lock(&gCamLock);
5688
5689 if (mIsMainCamera == 1) {
5690 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
5691 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005692 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07005693 // related session id should be session id of linked session
5694 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5695 } else {
5696 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
5697 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005698 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07005699 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5700 }
5701 pthread_mutex_unlock(&gCamLock);
5702
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005703 rc = mCameraHandle->ops->set_dual_cam_cmd(
5704 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07005705 if (rc < 0) {
5706 LOGE("Dualcam: Unlink failed, but still proceed to close");
5707 }
5708 }
5709
5710 if (rc < 0) {
5711 LOGE("stopAllChannels failed");
5712 return rc;
5713 }
5714 if (mChannelHandle) {
5715 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
5716 mChannelHandle);
5717 }
5718
5719 // Reset bundle info
5720 rc = setBundleInfo();
5721 if (rc < 0) {
5722 LOGE("setBundleInfo failed %d", rc);
5723 return rc;
5724 }
5725
5726 // Mutex Lock
5727 pthread_mutex_lock(&mMutex);
5728
5729 // Unblock process_capture_request
5730 mPendingLiveRequest = 0;
5731 pthread_cond_signal(&mRequestCond);
5732
5733 rc = notifyErrorForPendingRequests();
5734 if (rc < 0) {
5735 LOGE("notifyErrorForPendingRequests failed");
5736 pthread_mutex_unlock(&mMutex);
5737 return rc;
5738 }
5739
5740 mFlush = false;
5741
5742 // Start the Streams/Channels
5743 if (restartChannels) {
5744 rc = startAllChannels();
5745 if (rc < 0) {
5746 LOGE("startAllChannels failed");
5747 pthread_mutex_unlock(&mMutex);
5748 return rc;
5749 }
5750 }
5751
5752 if (mChannelHandle) {
5753 mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
5754 mChannelHandle);
5755 if (rc < 0) {
5756 LOGE("start_channel failed");
5757 pthread_mutex_unlock(&mMutex);
5758 return rc;
5759 }
5760 }
5761
5762 pthread_mutex_unlock(&mMutex);
5763
5764 return 0;
5765}
5766
5767/*===========================================================================
5768 * FUNCTION : flushPerf
5769 *
5770 * DESCRIPTION: This is the performance optimization version of flush that does
5771 * not use stream off, rather flushes the system
5772 *
5773 * PARAMETERS :
5774 *
5775 *
5776 * RETURN : 0 : success
5777 * -EINVAL: input is malformed (device is not valid)
5778 * -ENODEV: if the device has encountered a serious error
5779 *==========================================================================*/
int QCamera3HardwareInterface::flushPerf()
{
    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
    int32_t rc = 0;
    struct timespec timeout;
    bool timed_wait = false;

    // Mark the fast-flush in progress and snapshot how many buffers the HAL
    // still owes the framework; we wait below until they all come back.
    pthread_mutex_lock(&mMutex);
    mFlushPerf = true;
    mPendingBuffersMap.numPendingBufsAtFlush =
            mPendingBuffersMap.get_num_overall_buffers();
    LOGD("Calling flush. Wait for %d buffers to return",
            mPendingBuffersMap.numPendingBufsAtFlush);

    /* send the flush event to the backend */
    rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
    if (rc < 0) {
        // Backend flush failed: abandon the fast path and report device error.
        LOGE("Error in flush: IOCTL failure");
        mFlushPerf = false;
        pthread_mutex_unlock(&mMutex);
        return -ENODEV;
    }

    if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
        // Nothing outstanding; flush is trivially complete.
        LOGD("No pending buffers in HAL, return flush");
        mFlushPerf = false;
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    /* wait on a signal that buffers were received */
    // CLOCK_MONOTONIC must match the clock the condvar was configured with;
    // if reading it fails we fall back to an untimed (indefinite) wait.
    rc = clock_gettime(CLOCK_MONOTONIC, &timeout);
    if (rc < 0) {
        LOGE("Error reading the real time clock, cannot use timed wait");
    } else {
        timeout.tv_sec += FLUSH_TIMEOUT;
        timed_wait = true;
    }

    //Block on conditional variable
    // numPendingBufsAtFlush is decremented by the buffer-return path, which
    // signals mBuffersCond; mMutex is released while waiting.
    while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
        LOGD("Waiting on mBuffersCond");
        if (!timed_wait) {
            rc = pthread_cond_wait(&mBuffersCond, &mMutex);
            if (rc != 0) {
                LOGE("pthread_cond_wait failed due to rc = %s",
                        strerror(rc));
                break;
            }
        } else {
            // rc is ETIMEDOUT if FLUSH_TIMEOUT elapses before all buffers
            // are returned; treated as fatal below.
            rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
            if (rc != 0) {
                LOGE("pthread_cond_timedwait failed due to rc = %s",
                        strerror(rc));
                break;
            }
        }
    }
    if (rc != 0) {
        mFlushPerf = false;
        pthread_mutex_unlock(&mMutex);
        return -ENODEV;
    }

    LOGD("Received buffers, now safe to return them");

    //make sure the channels handle flush
    //currently only required for the picture channel to release snapshot resources
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3Channel *channel = (*it)->channel;
        if (channel) {
            rc = channel->flush();
            if (rc) {
                LOGE("Flushing the channels failed with error %d", rc);
                // even though the channel flush failed we need to continue and
                // return the buffers we have to the framework, however the return
                // value will be an error
                // NOTE(review): rc is reassigned by notifyErrorForPendingRequests()
                // below, so the -ENODEV set here may be lost on success of that
                // call — confirm whether channel-flush failures should propagate.
                rc = -ENODEV;
            }
        }
    }

    /* notify the frameworks and send errored results */
    rc = notifyErrorForPendingRequests();
    if (rc < 0) {
        LOGE("notifyErrorForPendingRequests failed");
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    //unblock process_capture_request
    mPendingLiveRequest = 0;
    unblockRequestIfNecessary();

    mFlushPerf = false;
    pthread_mutex_unlock(&mMutex);
    LOGD ("Flush Operation complete. rc = %d", rc);
    return rc;
}
5880
5881/*===========================================================================
5882 * FUNCTION : handleCameraDeviceError
5883 *
5884 * DESCRIPTION: This function calls internal flush and notifies the error to
5885 * framework and updates the state variable.
5886 *
5887 * PARAMETERS : None
5888 *
5889 * RETURN : NO_ERROR on Success
5890 * Error code on failure
5891 *==========================================================================*/
5892int32_t QCamera3HardwareInterface::handleCameraDeviceError()
5893{
5894 int32_t rc = NO_ERROR;
5895
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005896 {
5897 Mutex::Autolock lock(mFlushLock);
5898 pthread_mutex_lock(&mMutex);
5899 if (mState != ERROR) {
5900 //if mState != ERROR, nothing to be done
5901 pthread_mutex_unlock(&mMutex);
5902 return NO_ERROR;
5903 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005904 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005905
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005906 rc = flush(false /* restart channels */);
5907 if (NO_ERROR != rc) {
5908 LOGE("internal flush to handle mState = ERROR failed");
5909 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005910
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005911 pthread_mutex_lock(&mMutex);
5912 mState = DEINIT;
5913 pthread_mutex_unlock(&mMutex);
5914 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005915
5916 camera3_notify_msg_t notify_msg;
5917 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
5918 notify_msg.type = CAMERA3_MSG_ERROR;
5919 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
5920 notify_msg.message.error.error_stream = NULL;
5921 notify_msg.message.error.frame_number = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005922 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07005923
5924 return rc;
5925}
5926
5927/*===========================================================================
5928 * FUNCTION : captureResultCb
5929 *
5930 * DESCRIPTION: Callback handler for all capture result
5931 * (streams, as well as metadata)
5932 *
5933 * PARAMETERS :
5934 * @metadata : metadata information
5935 * @buffer : actual gralloc buffer to be returned to frameworks.
5936 * NULL if metadata.
5937 *
5938 * RETURN : NONE
5939 *==========================================================================*/
5940void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
5941 camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
5942{
5943 if (metadata_buf) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005944 pthread_mutex_lock(&mMutex);
5945 uint8_t batchSize = mBatchSize;
5946 pthread_mutex_unlock(&mMutex);
5947 if (batchSize) {
Thierry Strudel3d639192016-09-09 11:52:26 -07005948 handleBatchMetadata(metadata_buf,
5949 true /* free_and_bufdone_meta_buf */);
5950 } else { /* mBatchSize = 0 */
5951 hdrPlusPerfLock(metadata_buf);
5952 pthread_mutex_lock(&mMutex);
5953 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005954 true /* free_and_bufdone_meta_buf */,
Thierry Strudel54dc9782017-02-15 12:12:10 -08005955 false /* first frame of batch metadata */ ,
5956 NULL);
Thierry Strudel3d639192016-09-09 11:52:26 -07005957 pthread_mutex_unlock(&mMutex);
5958 }
5959 } else if (isInputBuffer) {
5960 pthread_mutex_lock(&mMutex);
5961 handleInputBufferWithLock(frame_number);
5962 pthread_mutex_unlock(&mMutex);
5963 } else {
5964 pthread_mutex_lock(&mMutex);
5965 handleBufferWithLock(buffer, frame_number);
5966 pthread_mutex_unlock(&mMutex);
5967 }
5968 return;
5969}
5970
5971/*===========================================================================
5972 * FUNCTION : getReprocessibleOutputStreamId
5973 *
5974 * DESCRIPTION: Get source output stream id for the input reprocess stream
5975 * based on size and format, which would be the largest
5976 * output stream if an input stream exists.
5977 *
5978 * PARAMETERS :
5979 * @id : return the stream id if found
5980 *
5981 * RETURN : int32_t type of status
5982 * NO_ERROR -- success
5983 * none-zero failure code
5984 *==========================================================================*/
5985int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
5986{
5987 /* check if any output or bidirectional stream with the same size and format
5988 and return that stream */
5989 if ((mInputStreamInfo.dim.width > 0) &&
5990 (mInputStreamInfo.dim.height > 0)) {
5991 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5992 it != mStreamInfo.end(); it++) {
5993
5994 camera3_stream_t *stream = (*it)->stream;
5995 if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
5996 (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
5997 (stream->format == mInputStreamInfo.format)) {
5998 // Usage flag for an input stream and the source output stream
5999 // may be different.
6000 LOGD("Found reprocessible output stream! %p", *it);
6001 LOGD("input stream usage 0x%x, current stream usage 0x%x",
6002 stream->usage, mInputStreamInfo.usage);
6003
6004 QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
6005 if (channel != NULL && channel->mStreams[0]) {
6006 id = channel->mStreams[0]->getMyServerID();
6007 return NO_ERROR;
6008 }
6009 }
6010 }
6011 } else {
6012 LOGD("No input stream, so no reprocessible output stream");
6013 }
6014 return NAME_NOT_FOUND;
6015}
6016
6017/*===========================================================================
6018 * FUNCTION : lookupFwkName
6019 *
6020 * DESCRIPTION: In case the enum is not same in fwk and backend
6021 * make sure the parameter is correctly propogated
6022 *
6023 * PARAMETERS :
6024 * @arr : map between the two enums
6025 * @len : len of the map
6026 * @hal_name : name of the hal_parm to map
6027 *
6028 * RETURN : int type of status
6029 * fwk_name -- success
6030 * none-zero failure code
6031 *==========================================================================*/
6032template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
6033 size_t len, halType hal_name)
6034{
6035
6036 for (size_t i = 0; i < len; i++) {
6037 if (arr[i].hal_name == hal_name) {
6038 return arr[i].fwk_name;
6039 }
6040 }
6041
6042 /* Not able to find matching framework type is not necessarily
6043 * an error case. This happens when mm-camera supports more attributes
6044 * than the frameworks do */
6045 LOGH("Cannot find matching framework type");
6046 return NAME_NOT_FOUND;
6047}
6048
6049/*===========================================================================
6050 * FUNCTION : lookupHalName
6051 *
6052 * DESCRIPTION: In case the enum is not same in fwk and backend
6053 * make sure the parameter is correctly propogated
6054 *
6055 * PARAMETERS :
6056 * @arr : map between the two enums
6057 * @len : len of the map
6058 * @fwk_name : name of the hal_parm to map
6059 *
6060 * RETURN : int32_t type of status
6061 * hal_name -- success
6062 * none-zero failure code
6063 *==========================================================================*/
6064template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
6065 size_t len, fwkType fwk_name)
6066{
6067 for (size_t i = 0; i < len; i++) {
6068 if (arr[i].fwk_name == fwk_name) {
6069 return arr[i].hal_name;
6070 }
6071 }
6072
6073 LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
6074 return NAME_NOT_FOUND;
6075}
6076
6077/*===========================================================================
6078 * FUNCTION : lookupProp
6079 *
6080 * DESCRIPTION: lookup a value by its name
6081 *
6082 * PARAMETERS :
6083 * @arr : map between the two enums
6084 * @len : size of the map
6085 * @name : name to be looked up
6086 *
6087 * RETURN : Value if found
6088 * CAM_CDS_MODE_MAX if not found
6089 *==========================================================================*/
6090template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
6091 size_t len, const char *name)
6092{
6093 if (name) {
6094 for (size_t i = 0; i < len; i++) {
6095 if (!strcmp(arr[i].desc, name)) {
6096 return arr[i].val;
6097 }
6098 }
6099 }
6100 return CAM_CDS_MODE_MAX;
6101}
6102
6103/*===========================================================================
 * FUNCTION : translateFromHalMetadata
 *
 * DESCRIPTION: Translate the metadata buffer received from the HAL backend
 *              into the framework's camera_metadata_t representation
6106 *
6107 * PARAMETERS :
6108 * @metadata : metadata information from callback
6109 * @timestamp: metadata buffer timestamp
6110 * @request_id: request id
6111 * @jpegMetadata: additional jpeg metadata
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006112 * @hybrid_ae_enable: whether hybrid ae is enabled
Samuel Ha68ba5172016-12-15 18:41:12 -08006113 * @DevCamDebug_meta_enable: enable DevCamDebug meta
6114 * // DevCamDebug metadata end
Thierry Strudel3d639192016-09-09 11:52:26 -07006115 * @pprocDone: whether internal offline postprocsesing is done
6116 *
6117 * RETURN : camera_metadata_t*
6118 * metadata in a format specified by fwk
6119 *==========================================================================*/
6120camera_metadata_t*
6121QCamera3HardwareInterface::translateFromHalMetadata(
6122 metadata_buffer_t *metadata,
6123 nsecs_t timestamp,
6124 int32_t request_id,
6125 const CameraMetadata& jpegMetadata,
6126 uint8_t pipeline_depth,
6127 uint8_t capture_intent,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006128 uint8_t hybrid_ae_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08006129 /* DevCamDebug metadata translateFromHalMetadata argument */
6130 uint8_t DevCamDebug_meta_enable,
6131 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07006132 bool pprocDone,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006133 uint8_t fwk_cacMode,
6134 bool firstMetadataInBatch)
Thierry Strudel3d639192016-09-09 11:52:26 -07006135{
6136 CameraMetadata camMetadata;
6137 camera_metadata_t *resultMetadata;
6138
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006139 if (mBatchSize && !firstMetadataInBatch) {
6140 /* In batch mode, use cached metadata from the first metadata
6141 in the batch */
6142 camMetadata.clear();
6143 camMetadata = mCachedMetadata;
6144 }
6145
Thierry Strudel3d639192016-09-09 11:52:26 -07006146 if (jpegMetadata.entryCount())
6147 camMetadata.append(jpegMetadata);
6148
6149 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
6150 camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
6151 camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pipeline_depth, 1);
6152 camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &capture_intent, 1);
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006153 camMetadata.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae_enable, 1);
Samuel Ha68ba5172016-12-15 18:41:12 -08006154 if (mBatchSize == 0) {
6155 // DevCamDebug metadata translateFromHalMetadata. Only update this one for non-HFR mode
6156 camMetadata.update(DEVCAMDEBUG_META_ENABLE, &DevCamDebug_meta_enable, 1);
6157 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006158
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006159 if (mBatchSize && !firstMetadataInBatch) {
6160 /* In batch mode, use cached metadata instead of parsing metadata buffer again */
6161 resultMetadata = camMetadata.release();
6162 return resultMetadata;
6163 }
6164
Samuel Ha68ba5172016-12-15 18:41:12 -08006165 // atrace_begin(ATRACE_TAG_ALWAYS, "DevCamDebugInfo");
6166 // Only update DevCameraDebug metadta conditionally: non-HFR mode and it is enabled.
6167 if (mBatchSize == 0 && DevCamDebug_meta_enable != 0) {
6168 // DevCamDebug metadata translateFromHalMetadata AF
6169 IF_META_AVAILABLE(int32_t, DevCamDebug_af_lens_position,
6170 CAM_INTF_META_DEV_CAM_AF_LENS_POSITION, metadata) {
6171 int32_t fwk_DevCamDebug_af_lens_position = *DevCamDebug_af_lens_position;
6172 camMetadata.update(DEVCAMDEBUG_AF_LENS_POSITION, &fwk_DevCamDebug_af_lens_position, 1);
6173 }
6174 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_confidence,
6175 CAM_INTF_META_DEV_CAM_AF_TOF_CONFIDENCE, metadata) {
6176 int32_t fwk_DevCamDebug_af_tof_confidence = *DevCamDebug_af_tof_confidence;
6177 camMetadata.update(DEVCAMDEBUG_AF_TOF_CONFIDENCE, &fwk_DevCamDebug_af_tof_confidence, 1);
6178 }
6179 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_distance,
6180 CAM_INTF_META_DEV_CAM_AF_TOF_DISTANCE, metadata) {
6181 int32_t fwk_DevCamDebug_af_tof_distance = *DevCamDebug_af_tof_distance;
6182 camMetadata.update(DEVCAMDEBUG_AF_TOF_DISTANCE, &fwk_DevCamDebug_af_tof_distance, 1);
6183 }
6184 IF_META_AVAILABLE(int32_t, DevCamDebug_af_luma,
6185 CAM_INTF_META_DEV_CAM_AF_LUMA, metadata) {
6186 int32_t fwk_DevCamDebug_af_luma = *DevCamDebug_af_luma;
6187 camMetadata.update(DEVCAMDEBUG_AF_LUMA, &fwk_DevCamDebug_af_luma, 1);
6188 }
6189 IF_META_AVAILABLE(int32_t, DevCamDebug_af_haf_state,
6190 CAM_INTF_META_DEV_CAM_AF_HAF_STATE, metadata) {
6191 int32_t fwk_DevCamDebug_af_haf_state = *DevCamDebug_af_haf_state;
6192 camMetadata.update(DEVCAMDEBUG_AF_HAF_STATE, &fwk_DevCamDebug_af_haf_state, 1);
6193 }
6194 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_target_pos,
6195 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_TARGET_POS, metadata) {
6196 int32_t fwk_DevCamDebug_af_monitor_pdaf_target_pos =
6197 *DevCamDebug_af_monitor_pdaf_target_pos;
6198 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
6199 &fwk_DevCamDebug_af_monitor_pdaf_target_pos, 1);
6200 }
6201 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_confidence,
6202 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_CONFIDENCE, metadata) {
6203 int32_t fwk_DevCamDebug_af_monitor_pdaf_confidence =
6204 *DevCamDebug_af_monitor_pdaf_confidence;
6205 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
6206 &fwk_DevCamDebug_af_monitor_pdaf_confidence, 1);
6207 }
6208 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_refocus,
6209 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_REFOCUS, metadata) {
6210 int32_t fwk_DevCamDebug_af_monitor_pdaf_refocus = *DevCamDebug_af_monitor_pdaf_refocus;
6211 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
6212 &fwk_DevCamDebug_af_monitor_pdaf_refocus, 1);
6213 }
6214 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_target_pos,
6215 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_TARGET_POS, metadata) {
6216 int32_t fwk_DevCamDebug_af_monitor_tof_target_pos =
6217 *DevCamDebug_af_monitor_tof_target_pos;
6218 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
6219 &fwk_DevCamDebug_af_monitor_tof_target_pos, 1);
6220 }
6221 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_confidence,
6222 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_CONFIDENCE, metadata) {
6223 int32_t fwk_DevCamDebug_af_monitor_tof_confidence =
6224 *DevCamDebug_af_monitor_tof_confidence;
6225 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
6226 &fwk_DevCamDebug_af_monitor_tof_confidence, 1);
6227 }
6228 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_refocus,
6229 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_REFOCUS, metadata) {
6230 int32_t fwk_DevCamDebug_af_monitor_tof_refocus = *DevCamDebug_af_monitor_tof_refocus;
6231 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
6232 &fwk_DevCamDebug_af_monitor_tof_refocus, 1);
6233 }
6234 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_type_select,
6235 CAM_INTF_META_DEV_CAM_AF_MONITOR_TYPE_SELECT, metadata) {
6236 int32_t fwk_DevCamDebug_af_monitor_type_select = *DevCamDebug_af_monitor_type_select;
6237 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
6238 &fwk_DevCamDebug_af_monitor_type_select, 1);
6239 }
6240 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_refocus,
6241 CAM_INTF_META_DEV_CAM_AF_MONITOR_REFOCUS, metadata) {
6242 int32_t fwk_DevCamDebug_af_monitor_refocus = *DevCamDebug_af_monitor_refocus;
6243 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_REFOCUS,
6244 &fwk_DevCamDebug_af_monitor_refocus, 1);
6245 }
6246 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_target_pos,
6247 CAM_INTF_META_DEV_CAM_AF_MONITOR_TARGET_POS, metadata) {
6248 int32_t fwk_DevCamDebug_af_monitor_target_pos = *DevCamDebug_af_monitor_target_pos;
6249 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
6250 &fwk_DevCamDebug_af_monitor_target_pos, 1);
6251 }
6252 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_target_pos,
6253 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_TARGET_POS, metadata) {
6254 int32_t fwk_DevCamDebug_af_search_pdaf_target_pos =
6255 *DevCamDebug_af_search_pdaf_target_pos;
6256 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
6257 &fwk_DevCamDebug_af_search_pdaf_target_pos, 1);
6258 }
6259 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_next_pos,
6260 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEXT_POS, metadata) {
6261 int32_t fwk_DevCamDebug_af_search_pdaf_next_pos = *DevCamDebug_af_search_pdaf_next_pos;
6262 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
6263 &fwk_DevCamDebug_af_search_pdaf_next_pos, 1);
6264 }
6265 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_near_pos,
6266 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEAR_POS, metadata) {
6267 int32_t fwk_DevCamDebug_af_search_pdaf_near_pos = *DevCamDebug_af_search_pdaf_near_pos;
6268 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
6269 &fwk_DevCamDebug_af_search_pdaf_near_pos, 1);
6270 }
6271 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_far_pos,
6272 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_FAR_POS, metadata) {
6273 int32_t fwk_DevCamDebug_af_search_pdaf_far_pos = *DevCamDebug_af_search_pdaf_far_pos;
6274 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
6275 &fwk_DevCamDebug_af_search_pdaf_far_pos, 1);
6276 }
6277 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_confidence,
6278 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_CONFIDENCE, metadata) {
6279 int32_t fwk_DevCamDebug_af_search_pdaf_confidence = *DevCamDebug_af_search_pdaf_confidence;
6280 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
6281 &fwk_DevCamDebug_af_search_pdaf_confidence, 1);
6282 }
6283 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_target_pos,
6284 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_TARGET_POS, metadata) {
6285 int32_t fwk_DevCamDebug_af_search_tof_target_pos =
6286 *DevCamDebug_af_search_tof_target_pos;
6287 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
6288 &fwk_DevCamDebug_af_search_tof_target_pos, 1);
6289 }
6290 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_next_pos,
6291 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEXT_POS, metadata) {
6292 int32_t fwk_DevCamDebug_af_search_tof_next_pos = *DevCamDebug_af_search_tof_next_pos;
6293 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
6294 &fwk_DevCamDebug_af_search_tof_next_pos, 1);
6295 }
6296 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_near_pos,
6297 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEAR_POS, metadata) {
6298 int32_t fwk_DevCamDebug_af_search_tof_near_pos = *DevCamDebug_af_search_tof_near_pos;
6299 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
6300 &fwk_DevCamDebug_af_search_tof_near_pos, 1);
6301 }
6302 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_far_pos,
6303 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_FAR_POS, metadata) {
6304 int32_t fwk_DevCamDebug_af_search_tof_far_pos = *DevCamDebug_af_search_tof_far_pos;
6305 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
6306 &fwk_DevCamDebug_af_search_tof_far_pos, 1);
6307 }
6308 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_confidence,
6309 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_CONFIDENCE, metadata) {
6310 int32_t fwk_DevCamDebug_af_search_tof_confidence = *DevCamDebug_af_search_tof_confidence;
6311 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
6312 &fwk_DevCamDebug_af_search_tof_confidence, 1);
6313 }
6314 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_type_select,
6315 CAM_INTF_META_DEV_CAM_AF_SEARCH_TYPE_SELECT, metadata) {
6316 int32_t fwk_DevCamDebug_af_search_type_select = *DevCamDebug_af_search_type_select;
6317 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
6318 &fwk_DevCamDebug_af_search_type_select, 1);
6319 }
6320 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_next_pos,
6321 CAM_INTF_META_DEV_CAM_AF_SEARCH_NEXT_POS, metadata) {
6322 int32_t fwk_DevCamDebug_af_search_next_pos = *DevCamDebug_af_search_next_pos;
6323 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
6324 &fwk_DevCamDebug_af_search_next_pos, 1);
6325 }
6326 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_target_pos,
6327 CAM_INTF_META_DEV_CAM_AF_SEARCH_TARGET_POS, metadata) {
6328 int32_t fwk_DevCamDebug_af_search_target_pos = *DevCamDebug_af_search_target_pos;
6329 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
6330 &fwk_DevCamDebug_af_search_target_pos, 1);
6331 }
6332 // DevCamDebug metadata translateFromHalMetadata AEC
6333 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_target_luma,
6334 CAM_INTF_META_DEV_CAM_AEC_TARGET_LUMA, metadata) {
6335 int32_t fwk_DevCamDebug_aec_target_luma = *DevCamDebug_aec_target_luma;
6336 camMetadata.update(DEVCAMDEBUG_AEC_TARGET_LUMA, &fwk_DevCamDebug_aec_target_luma, 1);
6337 }
6338 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_comp_luma,
6339 CAM_INTF_META_DEV_CAM_AEC_COMP_LUMA, metadata) {
6340 int32_t fwk_DevCamDebug_aec_comp_luma = *DevCamDebug_aec_comp_luma;
6341 camMetadata.update(DEVCAMDEBUG_AEC_COMP_LUMA, &fwk_DevCamDebug_aec_comp_luma, 1);
6342 }
6343 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_avg_luma,
6344 CAM_INTF_META_DEV_CAM_AEC_AVG_LUMA, metadata) {
6345 int32_t fwk_DevCamDebug_aec_avg_luma = *DevCamDebug_aec_avg_luma;
6346 camMetadata.update(DEVCAMDEBUG_AEC_AVG_LUMA, &fwk_DevCamDebug_aec_avg_luma, 1);
6347 }
6348 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_cur_luma,
6349 CAM_INTF_META_DEV_CAM_AEC_CUR_LUMA, metadata) {
6350 int32_t fwk_DevCamDebug_aec_cur_luma = *DevCamDebug_aec_cur_luma;
6351 camMetadata.update(DEVCAMDEBUG_AEC_CUR_LUMA, &fwk_DevCamDebug_aec_cur_luma, 1);
6352 }
6353 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_linecount,
6354 CAM_INTF_META_DEV_CAM_AEC_LINECOUNT, metadata) {
6355 int32_t fwk_DevCamDebug_aec_linecount = *DevCamDebug_aec_linecount;
6356 camMetadata.update(DEVCAMDEBUG_AEC_LINECOUNT, &fwk_DevCamDebug_aec_linecount, 1);
6357 }
6358 IF_META_AVAILABLE(float, DevCamDebug_aec_real_gain,
6359 CAM_INTF_META_DEV_CAM_AEC_REAL_GAIN, metadata) {
6360 float fwk_DevCamDebug_aec_real_gain = *DevCamDebug_aec_real_gain;
6361 camMetadata.update(DEVCAMDEBUG_AEC_REAL_GAIN, &fwk_DevCamDebug_aec_real_gain, 1);
6362 }
6363 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_exp_index,
6364 CAM_INTF_META_DEV_CAM_AEC_EXP_INDEX, metadata) {
6365 int32_t fwk_DevCamDebug_aec_exp_index = *DevCamDebug_aec_exp_index;
6366 camMetadata.update(DEVCAMDEBUG_AEC_EXP_INDEX, &fwk_DevCamDebug_aec_exp_index, 1);
6367 }
6368 IF_META_AVAILABLE(float, DevCamDebug_aec_lux_idx,
6369 CAM_INTF_META_DEV_CAM_AEC_LUX_IDX, metadata) {
6370 float fwk_DevCamDebug_aec_lux_idx = *DevCamDebug_aec_lux_idx;
6371 camMetadata.update(DEVCAMDEBUG_AEC_LUX_IDX, &fwk_DevCamDebug_aec_lux_idx, 1);
6372 }
Samuel Ha34229982017-02-17 13:51:11 -08006373 // DevCamDebug metadata translateFromHalMetadata zzHDR
6374 IF_META_AVAILABLE(float, DevCamDebug_aec_l_real_gain,
6375 CAM_INTF_META_DEV_CAM_AEC_L_REAL_GAIN, metadata) {
6376 float fwk_DevCamDebug_aec_l_real_gain = *DevCamDebug_aec_l_real_gain;
6377 camMetadata.update(DEVCAMDEBUG_AEC_L_REAL_GAIN, &fwk_DevCamDebug_aec_l_real_gain, 1);
6378 }
6379 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_l_linecount,
6380 CAM_INTF_META_DEV_CAM_AEC_L_LINECOUNT, metadata) {
6381 float fwk_DevCamDebug_aec_l_linecount = *DevCamDebug_aec_l_linecount;
6382 camMetadata.update(DEVCAMDEBUG_AEC_L_LINECOUNT, &fwk_DevCamDebug_aec_l_linecount, 1);
6383 }
6384 IF_META_AVAILABLE(float, DevCamDebug_aec_s_real_gain,
6385 CAM_INTF_META_DEV_CAM_AEC_S_REAL_GAIN, metadata) {
6386 float fwk_DevCamDebug_aec_s_real_gain = *DevCamDebug_aec_s_real_gain;
6387 camMetadata.update(DEVCAMDEBUG_AEC_S_REAL_GAIN, &fwk_DevCamDebug_aec_s_real_gain, 1);
6388 }
6389 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_s_linecount,
6390 CAM_INTF_META_DEV_CAM_AEC_S_LINECOUNT, metadata) {
6391 float fwk_DevCamDebug_aec_s_linecount = *DevCamDebug_aec_s_linecount;
6392 camMetadata.update(DEVCAMDEBUG_AEC_S_LINECOUNT, &fwk_DevCamDebug_aec_s_linecount, 1);
6393 }
6394 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_sensitivity_ratio,
6395 CAM_INTF_META_DEV_CAM_AEC_HDR_SENSITIVITY_RATIO, metadata) {
6396 float fwk_DevCamDebug_aec_hdr_sensitivity_ratio =
6397 *DevCamDebug_aec_hdr_sensitivity_ratio;
6398 camMetadata.update(DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
6399 &fwk_DevCamDebug_aec_hdr_sensitivity_ratio, 1);
6400 }
6401 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_exp_time_ratio,
6402 CAM_INTF_META_DEV_CAM_AEC_HDR_EXP_TIME_RATIO, metadata) {
6403 float fwk_DevCamDebug_aec_hdr_exp_time_ratio = *DevCamDebug_aec_hdr_exp_time_ratio;
6404 camMetadata.update(DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
6405 &fwk_DevCamDebug_aec_hdr_exp_time_ratio, 1);
6406 }
6407 // DevCamDebug metadata translateFromHalMetadata ADRC
6408 IF_META_AVAILABLE(float, DevCamDebug_aec_total_drc_gain,
6409 CAM_INTF_META_DEV_CAM_AEC_TOTAL_DRC_GAIN, metadata) {
6410 float fwk_DevCamDebug_aec_total_drc_gain = *DevCamDebug_aec_total_drc_gain;
6411 camMetadata.update(DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
6412 &fwk_DevCamDebug_aec_total_drc_gain, 1);
6413 }
6414 IF_META_AVAILABLE(float, DevCamDebug_aec_color_drc_gain,
6415 CAM_INTF_META_DEV_CAM_AEC_COLOR_DRC_GAIN, metadata) {
6416 float fwk_DevCamDebug_aec_color_drc_gain = *DevCamDebug_aec_color_drc_gain;
6417 camMetadata.update(DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
6418 &fwk_DevCamDebug_aec_color_drc_gain, 1);
6419 }
6420 IF_META_AVAILABLE(float, DevCamDebug_aec_gtm_ratio,
6421 CAM_INTF_META_DEV_CAM_AEC_GTM_RATIO, metadata) {
6422 float fwk_DevCamDebug_aec_gtm_ratio = *DevCamDebug_aec_gtm_ratio;
6423 camMetadata.update(DEVCAMDEBUG_AEC_GTM_RATIO, &fwk_DevCamDebug_aec_gtm_ratio, 1);
6424 }
6425 IF_META_AVAILABLE(float, DevCamDebug_aec_ltm_ratio,
6426 CAM_INTF_META_DEV_CAM_AEC_LTM_RATIO, metadata) {
6427 float fwk_DevCamDebug_aec_ltm_ratio = *DevCamDebug_aec_ltm_ratio;
6428 camMetadata.update(DEVCAMDEBUG_AEC_LTM_RATIO, &fwk_DevCamDebug_aec_ltm_ratio, 1);
6429 }
6430 IF_META_AVAILABLE(float, DevCamDebug_aec_la_ratio,
6431 CAM_INTF_META_DEV_CAM_AEC_LA_RATIO, metadata) {
6432 float fwk_DevCamDebug_aec_la_ratio = *DevCamDebug_aec_la_ratio;
6433 camMetadata.update(DEVCAMDEBUG_AEC_LA_RATIO, &fwk_DevCamDebug_aec_la_ratio, 1);
6434 }
6435 IF_META_AVAILABLE(float, DevCamDebug_aec_gamma_ratio,
6436 CAM_INTF_META_DEV_CAM_AEC_GAMMA_RATIO, metadata) {
6437 float fwk_DevCamDebug_aec_gamma_ratio = *DevCamDebug_aec_gamma_ratio;
6438 camMetadata.update(DEVCAMDEBUG_AEC_GAMMA_RATIO, &fwk_DevCamDebug_aec_gamma_ratio, 1);
6439 }
Samuel Ha68ba5172016-12-15 18:41:12 -08006440 // DevCamDebug metadata translateFromHalMetadata AWB
6441 IF_META_AVAILABLE(float, DevCamDebug_awb_r_gain,
6442 CAM_INTF_META_DEV_CAM_AWB_R_GAIN, metadata) {
6443 float fwk_DevCamDebug_awb_r_gain = *DevCamDebug_awb_r_gain;
6444 camMetadata.update(DEVCAMDEBUG_AWB_R_GAIN, &fwk_DevCamDebug_awb_r_gain, 1);
6445 }
6446 IF_META_AVAILABLE(float, DevCamDebug_awb_g_gain,
6447 CAM_INTF_META_DEV_CAM_AWB_G_GAIN, metadata) {
6448 float fwk_DevCamDebug_awb_g_gain = *DevCamDebug_awb_g_gain;
6449 camMetadata.update(DEVCAMDEBUG_AWB_G_GAIN, &fwk_DevCamDebug_awb_g_gain, 1);
6450 }
6451 IF_META_AVAILABLE(float, DevCamDebug_awb_b_gain,
6452 CAM_INTF_META_DEV_CAM_AWB_B_GAIN, metadata) {
6453 float fwk_DevCamDebug_awb_b_gain = *DevCamDebug_awb_b_gain;
6454 camMetadata.update(DEVCAMDEBUG_AWB_B_GAIN, &fwk_DevCamDebug_awb_b_gain, 1);
6455 }
6456 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_cct,
6457 CAM_INTF_META_DEV_CAM_AWB_CCT, metadata) {
6458 int32_t fwk_DevCamDebug_awb_cct = *DevCamDebug_awb_cct;
6459 camMetadata.update(DEVCAMDEBUG_AWB_CCT, &fwk_DevCamDebug_awb_cct, 1);
6460 }
6461 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_decision,
6462 CAM_INTF_META_DEV_CAM_AWB_DECISION, metadata) {
6463 int32_t fwk_DevCamDebug_awb_decision = *DevCamDebug_awb_decision;
6464 camMetadata.update(DEVCAMDEBUG_AWB_DECISION, &fwk_DevCamDebug_awb_decision, 1);
6465 }
6466 }
6467 // atrace_end(ATRACE_TAG_ALWAYS);
6468
Thierry Strudel3d639192016-09-09 11:52:26 -07006469 IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
6470 int64_t fwk_frame_number = *frame_number;
6471 camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
6472 }
6473
6474 IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
6475 int32_t fps_range[2];
6476 fps_range[0] = (int32_t)float_range->min_fps;
6477 fps_range[1] = (int32_t)float_range->max_fps;
6478 camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
6479 fps_range, 2);
6480 LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
6481 fps_range[0], fps_range[1]);
6482 }
6483
6484 IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
6485 camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
6486 }
6487
6488 IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
6489 int val = (uint8_t)lookupFwkName(SCENE_MODES_MAP,
6490 METADATA_MAP_SIZE(SCENE_MODES_MAP),
6491 *sceneMode);
6492 if (NAME_NOT_FOUND != val) {
6493 uint8_t fwkSceneMode = (uint8_t)val;
6494 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
6495 LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
6496 fwkSceneMode);
6497 }
6498 }
6499
6500 IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
6501 uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
6502 camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
6503 }
6504
6505 IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
6506 uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
6507 camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
6508 }
6509
6510 IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
6511 uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
6512 camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
6513 }
6514
6515 IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
6516 CAM_INTF_META_EDGE_MODE, metadata) {
6517 camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
6518 }
6519
6520 IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
6521 uint8_t fwk_flashPower = (uint8_t) *flashPower;
6522 camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
6523 }
6524
6525 IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
6526 camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
6527 }
6528
6529 IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
6530 if (0 <= *flashState) {
6531 uint8_t fwk_flashState = (uint8_t) *flashState;
6532 if (!gCamCapability[mCameraId]->flash_available) {
6533 fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
6534 }
6535 camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
6536 }
6537 }
6538
6539 IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
6540 int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
6541 if (NAME_NOT_FOUND != val) {
6542 uint8_t fwk_flashMode = (uint8_t)val;
6543 camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
6544 }
6545 }
6546
6547 IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
6548 uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
6549 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
6550 }
6551
6552 IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
6553 camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
6554 }
6555
6556 IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
6557 camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
6558 }
6559
6560 IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
6561 camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
6562 }
6563
6564 IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
6565 uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
6566 camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
6567 }
6568
6569 IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
6570 uint8_t fwk_videoStab = (uint8_t) *videoStab;
6571 LOGD("fwk_videoStab = %d", fwk_videoStab);
6572 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
6573 } else {
6574 // Regardless of Video stab supports or not, CTS is expecting the EIS result to be non NULL
6575 // and so hardcoding the Video Stab result to OFF mode.
6576 uint8_t fwkVideoStabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
6577 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006578 LOGD("EIS result default to OFF mode");
Thierry Strudel3d639192016-09-09 11:52:26 -07006579 }
6580
6581 IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
6582 uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
6583 camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
6584 }
6585
6586 IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
6587 camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
6588 }
6589
Thierry Strudel3d639192016-09-09 11:52:26 -07006590 IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
6591 CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006592 float fwk_blackLevelInd[BLACK_LEVEL_PATTERN_CNT];
Thierry Strudel3d639192016-09-09 11:52:26 -07006593
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006594 adjustBlackLevelForCFA(blackLevelAppliedPattern->cam_black_level, fwk_blackLevelInd,
6595 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07006596
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006597 LOGD("applied dynamicblackLevel in RGGB order = %f %f %f %f",
Thierry Strudel3d639192016-09-09 11:52:26 -07006598 blackLevelAppliedPattern->cam_black_level[0],
6599 blackLevelAppliedPattern->cam_black_level[1],
6600 blackLevelAppliedPattern->cam_black_level[2],
6601 blackLevelAppliedPattern->cam_black_level[3]);
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006602 camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd,
6603 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006604
6605#ifndef USE_HAL_3_3
6606 // Update the ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
Zhijun Heb753c672016-06-15 14:50:48 -07006607 // Need convert the internal 12 bit depth to sensor 10 bit sensor raw
6608 // depth space.
6609 fwk_blackLevelInd[0] /= 4.0;
6610 fwk_blackLevelInd[1] /= 4.0;
6611 fwk_blackLevelInd[2] /= 4.0;
6612 fwk_blackLevelInd[3] /= 4.0;
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006613 camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd,
6614 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006615#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07006616 }
6617
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006618#ifndef USE_HAL_3_3
6619 // Fixed whitelevel is used by ISP/Sensor
6620 camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
6621 &gCamCapability[mCameraId]->white_level, 1);
6622#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07006623
6624 IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
6625 CAM_INTF_META_SCALER_CROP_REGION, metadata) {
6626 int32_t scalerCropRegion[4];
6627 scalerCropRegion[0] = hScalerCropRegion->left;
6628 scalerCropRegion[1] = hScalerCropRegion->top;
6629 scalerCropRegion[2] = hScalerCropRegion->width;
6630 scalerCropRegion[3] = hScalerCropRegion->height;
6631
6632 // Adjust crop region from sensor output coordinate system to active
6633 // array coordinate system.
6634 mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
6635 scalerCropRegion[2], scalerCropRegion[3]);
6636
6637 camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
6638 }
6639
6640 IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
6641 LOGD("sensorExpTime = %lld", *sensorExpTime);
6642 camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
6643 }
6644
6645 IF_META_AVAILABLE(int64_t, sensorFameDuration,
6646 CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
6647 LOGD("sensorFameDuration = %lld", *sensorFameDuration);
6648 camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
6649 }
6650
6651 IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
6652 CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
6653 LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
6654 camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
6655 sensorRollingShutterSkew, 1);
6656 }
6657
6658 IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
6659 LOGD("sensorSensitivity = %d", *sensorSensitivity);
6660 camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
6661
6662 //calculate the noise profile based on sensitivity
6663 double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
6664 double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
6665 double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
6666 for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
6667 noise_profile[i] = noise_profile_S;
6668 noise_profile[i+1] = noise_profile_O;
6669 }
6670 LOGD("noise model entry (S, O) is (%f, %f)",
6671 noise_profile_S, noise_profile_O);
6672 camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
6673 (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
6674 }
6675
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006676#ifndef USE_HAL_3_3
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07006677 int32_t fwk_ispSensitivity = 100;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006678 IF_META_AVAILABLE(int32_t, ispSensitivity, CAM_INTF_META_ISP_SENSITIVITY, metadata) {
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07006679 fwk_ispSensitivity = (int32_t) *ispSensitivity;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006680 }
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07006681 IF_META_AVAILABLE(float, postStatsSensitivity, CAM_INTF_META_ISP_POST_STATS_SENSITIVITY, metadata) {
6682 fwk_ispSensitivity = (int32_t) (*postStatsSensitivity * fwk_ispSensitivity);
6683 }
6684 camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &fwk_ispSensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006685#endif
6686
Thierry Strudel3d639192016-09-09 11:52:26 -07006687 IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
6688 uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
6689 camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
6690 }
6691
6692 IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
6693 int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
6694 *faceDetectMode);
6695 if (NAME_NOT_FOUND != val) {
6696 uint8_t fwk_faceDetectMode = (uint8_t)val;
6697 camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
6698
6699 if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
6700 IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
6701 CAM_INTF_META_FACE_DETECTION, metadata) {
6702 uint8_t numFaces = MIN(
6703 faceDetectionInfo->num_faces_detected, MAX_ROI);
6704 int32_t faceIds[MAX_ROI];
6705 uint8_t faceScores[MAX_ROI];
6706 int32_t faceRectangles[MAX_ROI * 4];
6707 int32_t faceLandmarks[MAX_ROI * 6];
6708 size_t j = 0, k = 0;
6709
6710 for (size_t i = 0; i < numFaces; i++) {
6711 faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
6712 // Adjust crop region from sensor output coordinate system to active
6713 // array coordinate system.
6714 cam_rect_t& rect = faceDetectionInfo->faces[i].face_boundary;
6715 mCropRegionMapper.toActiveArray(rect.left, rect.top,
6716 rect.width, rect.height);
6717
6718 convertToRegions(faceDetectionInfo->faces[i].face_boundary,
6719 faceRectangles+j, -1);
6720
6721 j+= 4;
6722 }
6723 if (numFaces <= 0) {
6724 memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
6725 memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
6726 memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
6727 memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
6728 }
6729
6730 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
6731 numFaces);
6732 camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
6733 faceRectangles, numFaces * 4U);
6734 if (fwk_faceDetectMode ==
6735 ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
6736 IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
6737 CAM_INTF_META_FACE_LANDMARK, metadata) {
6738
6739 for (size_t i = 0; i < numFaces; i++) {
6740 // Map the co-ordinate sensor output coordinate system to active
6741 // array coordinate system.
6742 mCropRegionMapper.toActiveArray(
6743 landmarks->face_landmarks[i].left_eye_center.x,
6744 landmarks->face_landmarks[i].left_eye_center.y);
6745 mCropRegionMapper.toActiveArray(
6746 landmarks->face_landmarks[i].right_eye_center.x,
6747 landmarks->face_landmarks[i].right_eye_center.y);
6748 mCropRegionMapper.toActiveArray(
6749 landmarks->face_landmarks[i].mouth_center.x,
6750 landmarks->face_landmarks[i].mouth_center.y);
6751
6752 convertLandmarks(landmarks->face_landmarks[i], faceLandmarks+k);
Thierry Strudel04e026f2016-10-10 11:27:36 -07006753 k+= TOTAL_LANDMARK_INDICES;
6754 }
6755 } else {
6756 for (size_t i = 0; i < numFaces; i++) {
6757 setInvalidLandmarks(faceLandmarks+k);
6758 k+= TOTAL_LANDMARK_INDICES;
Thierry Strudel3d639192016-09-09 11:52:26 -07006759 }
6760 }
6761
6762 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
6763 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
6764 faceLandmarks, numFaces * 6U);
6765 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08006766 IF_META_AVAILABLE(cam_face_blink_data_t, blinks,
6767 CAM_INTF_META_FACE_BLINK, metadata) {
6768 uint8_t detected[MAX_ROI];
6769 uint8_t degree[MAX_ROI * 2];
6770 for (size_t i = 0; i < numFaces; i++) {
6771 detected[i] = blinks->blink[i].blink_detected;
6772 degree[2 * i] = blinks->blink[i].left_blink;
6773 degree[2 * i + 1] = blinks->blink[i].right_blink;
6774 }
6775 camMetadata.update(QCAMERA3_STATS_BLINK_DETECTED,
6776 detected, numFaces);
6777 camMetadata.update(QCAMERA3_STATS_BLINK_DEGREE,
6778 degree, numFaces * 2);
6779 }
6780 IF_META_AVAILABLE(cam_face_smile_data_t, smiles,
6781 CAM_INTF_META_FACE_SMILE, metadata) {
6782 uint8_t degree[MAX_ROI];
6783 uint8_t confidence[MAX_ROI];
6784 for (size_t i = 0; i < numFaces; i++) {
6785 degree[i] = smiles->smile[i].smile_degree;
6786 confidence[i] = smiles->smile[i].smile_confidence;
6787 }
6788 camMetadata.update(QCAMERA3_STATS_SMILE_DEGREE,
6789 degree, numFaces);
6790 camMetadata.update(QCAMERA3_STATS_SMILE_CONFIDENCE,
6791 confidence, numFaces);
6792 }
6793 IF_META_AVAILABLE(cam_face_gaze_data_t, gazes,
6794 CAM_INTF_META_FACE_GAZE, metadata) {
6795 int8_t angle[MAX_ROI];
6796 int32_t direction[MAX_ROI * 3];
6797 int8_t degree[MAX_ROI * 2];
6798 for (size_t i = 0; i < numFaces; i++) {
6799 angle[i] = gazes->gaze[i].gaze_angle;
6800 direction[3 * i] = gazes->gaze[i].updown_dir;
6801 direction[3 * i + 1] = gazes->gaze[i].leftright_dir;
6802 direction[3 * i + 2] = gazes->gaze[i].roll_dir;
6803 degree[2 * i] = gazes->gaze[i].left_right_gaze;
6804 degree[2 * i + 1] = gazes->gaze[i].top_bottom_gaze;
6805 }
6806 camMetadata.update(QCAMERA3_STATS_GAZE_ANGLE,
6807 (uint8_t *)angle, numFaces);
6808 camMetadata.update(QCAMERA3_STATS_GAZE_DIRECTION,
6809 direction, numFaces * 3);
6810 camMetadata.update(QCAMERA3_STATS_GAZE_DEGREE,
6811 (uint8_t *)degree, numFaces * 2);
6812 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006813 }
6814 }
6815 }
6816 }
6817
6818 IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
6819 uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
Thierry Strudel54dc9782017-02-15 12:12:10 -08006820 camMetadata.update(QCAMERA3_HISTOGRAM_MODE, &fwk_histogramMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006821
Thierry Strudel54dc9782017-02-15 12:12:10 -08006822 if (fwk_histogramMode == QCAMERA3_HISTOGRAM_MODE_ON) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006823 IF_META_AVAILABLE(cam_hist_stats_t, stats_data, CAM_INTF_META_HISTOGRAM, metadata) {
6824 // process histogram statistics info
Thierry Strudel54dc9782017-02-15 12:12:10 -08006825 uint32_t hist_buf[4][CAM_HISTOGRAM_STATS_SIZE];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006826 uint32_t hist_size = sizeof(cam_histogram_data_t::hist_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08006827 cam_histogram_data_t rHistData, grHistData, gbHistData, bHistData;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006828 memset(&rHistData, 0, sizeof(rHistData));
Thierry Strudel54dc9782017-02-15 12:12:10 -08006829 memset(&grHistData, 0, sizeof(grHistData));
6830 memset(&gbHistData, 0, sizeof(gbHistData));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006831 memset(&bHistData, 0, sizeof(bHistData));
6832
6833 switch (stats_data->type) {
6834 case CAM_HISTOGRAM_TYPE_BAYER:
6835 switch (stats_data->bayer_stats.data_type) {
6836 case CAM_STATS_CHANNEL_GR:
Thierry Strudel54dc9782017-02-15 12:12:10 -08006837 rHistData = grHistData = gbHistData = bHistData =
6838 stats_data->bayer_stats.gr_stats;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006839 break;
6840 case CAM_STATS_CHANNEL_GB:
Thierry Strudel54dc9782017-02-15 12:12:10 -08006841 rHistData = grHistData = gbHistData = bHistData =
6842 stats_data->bayer_stats.gb_stats;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006843 break;
6844 case CAM_STATS_CHANNEL_B:
Thierry Strudel54dc9782017-02-15 12:12:10 -08006845 rHistData = grHistData = gbHistData = bHistData =
6846 stats_data->bayer_stats.b_stats;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006847 break;
6848 case CAM_STATS_CHANNEL_ALL:
6849 rHistData = stats_data->bayer_stats.r_stats;
Thierry Strudel54dc9782017-02-15 12:12:10 -08006850 gbHistData = stats_data->bayer_stats.gb_stats;
6851 grHistData = stats_data->bayer_stats.gr_stats;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006852 bHistData = stats_data->bayer_stats.b_stats;
6853 break;
6854 case CAM_STATS_CHANNEL_Y:
6855 case CAM_STATS_CHANNEL_R:
6856 default:
Thierry Strudel54dc9782017-02-15 12:12:10 -08006857 rHistData = grHistData = gbHistData = bHistData =
6858 stats_data->bayer_stats.r_stats;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006859 break;
6860 }
6861 break;
6862 case CAM_HISTOGRAM_TYPE_YUV:
Thierry Strudel54dc9782017-02-15 12:12:10 -08006863 rHistData = grHistData = gbHistData = bHistData =
6864 stats_data->yuv_stats;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006865 break;
6866 }
6867
6868 memcpy(hist_buf, rHistData.hist_buf, hist_size);
Thierry Strudel54dc9782017-02-15 12:12:10 -08006869 memcpy(hist_buf[1], gbHistData.hist_buf, hist_size);
6870 memcpy(hist_buf[2], grHistData.hist_buf, hist_size);
6871 memcpy(hist_buf[3], bHistData.hist_buf, hist_size);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006872
Thierry Strudel54dc9782017-02-15 12:12:10 -08006873 camMetadata.update(QCAMERA3_HISTOGRAM_STATS, (int32_t*)hist_buf, hist_size*4);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006874 }
6875 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006876 }
6877
6878 IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
6879 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
6880 uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
6881 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
6882 }
6883
6884 IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
6885 CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
6886 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
6887 CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
6888 }
6889
6890 IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
6891 CAM_INTF_META_LENS_SHADING_MAP, metadata) {
6892 size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
6893 CAM_MAX_SHADING_MAP_HEIGHT);
6894 size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
6895 CAM_MAX_SHADING_MAP_WIDTH);
6896 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
6897 lensShadingMap->lens_shading, 4U * map_width * map_height);
6898 }
6899
6900 IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
6901 uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
6902 camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
6903 }
6904
6905 IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
6906 //Populate CAM_INTF_META_TONEMAP_CURVES
6907 /* ch0 = G, ch 1 = B, ch 2 = R*/
6908 if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
6909 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
6910 tonemap->tonemap_points_cnt,
6911 CAM_MAX_TONEMAP_CURVE_SIZE);
6912 tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
6913 }
6914
6915 camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
6916 &tonemap->curves[0].tonemap_points[0][0],
6917 tonemap->tonemap_points_cnt * 2);
6918
6919 camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
6920 &tonemap->curves[1].tonemap_points[0][0],
6921 tonemap->tonemap_points_cnt * 2);
6922
6923 camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
6924 &tonemap->curves[2].tonemap_points[0][0],
6925 tonemap->tonemap_points_cnt * 2);
6926 }
6927
6928 IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
6929 CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
6930 camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
6931 CC_GAIN_MAX);
6932 }
6933
6934 IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
6935 CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
6936 camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
6937 (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
6938 CC_MATRIX_COLS * CC_MATRIX_ROWS);
6939 }
6940
6941 IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
6942 CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
6943 if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
6944 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
6945 toneCurve->tonemap_points_cnt,
6946 CAM_MAX_TONEMAP_CURVE_SIZE);
6947 toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
6948 }
6949 camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
6950 (float*)toneCurve->curve.tonemap_points,
6951 toneCurve->tonemap_points_cnt * 2);
6952 }
6953
6954 IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
6955 CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
6956 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
6957 predColorCorrectionGains->gains, 4);
6958 }
6959
6960 IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
6961 CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
6962 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
6963 (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
6964 CC_MATRIX_ROWS * CC_MATRIX_COLS);
6965 }
6966
6967 IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
6968 camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
6969 }
6970
6971 IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
6972 uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
6973 camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
6974 }
6975
6976 IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
6977 uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
6978 camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
6979 }
6980
6981 IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
6982 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
6983 *effectMode);
6984 if (NAME_NOT_FOUND != val) {
6985 uint8_t fwk_effectMode = (uint8_t)val;
6986 camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
6987 }
6988 }
6989
6990 IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
6991 CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
6992 int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
6993 METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
6994 if (NAME_NOT_FOUND != fwk_testPatternMode) {
6995 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
6996 }
6997 int32_t fwk_testPatternData[4];
6998 fwk_testPatternData[0] = testPatternData->r;
6999 fwk_testPatternData[3] = testPatternData->b;
7000 switch (gCamCapability[mCameraId]->color_arrangement) {
7001 case CAM_FILTER_ARRANGEMENT_RGGB:
7002 case CAM_FILTER_ARRANGEMENT_GRBG:
7003 fwk_testPatternData[1] = testPatternData->gr;
7004 fwk_testPatternData[2] = testPatternData->gb;
7005 break;
7006 case CAM_FILTER_ARRANGEMENT_GBRG:
7007 case CAM_FILTER_ARRANGEMENT_BGGR:
7008 fwk_testPatternData[2] = testPatternData->gr;
7009 fwk_testPatternData[1] = testPatternData->gb;
7010 break;
7011 default:
7012 LOGE("color arrangement %d is not supported",
7013 gCamCapability[mCameraId]->color_arrangement);
7014 break;
7015 }
7016 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
7017 }
7018
7019 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
7020 camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
7021 }
7022
7023 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
7024 String8 str((const char *)gps_methods);
7025 camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
7026 }
7027
7028 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
7029 camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
7030 }
7031
7032 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
7033 camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
7034 }
7035
7036 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
7037 uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
7038 camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
7039 }
7040
7041 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
7042 uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
7043 camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
7044 }
7045
7046 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
7047 int32_t fwk_thumb_size[2];
7048 fwk_thumb_size[0] = thumb_size->width;
7049 fwk_thumb_size[1] = thumb_size->height;
7050 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
7051 }
7052
7053 IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
7054 camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
7055 privateData,
7056 MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
7057 }
7058
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007059 IF_META_AVAILABLE(int32_t, meteringMode, CAM_INTF_PARM_AEC_ALGO_TYPE, metadata) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08007060 camMetadata.update(QCAMERA3_EXPOSURE_METER,
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007061 meteringMode, 1);
7062 }
7063
Thierry Strudel54dc9782017-02-15 12:12:10 -08007064 IF_META_AVAILABLE(cam_asd_hdr_scene_data_t, hdr_scene_data,
7065 CAM_INTF_META_ASD_HDR_SCENE_DATA, metadata) {
7066 LOGD("hdr_scene_data: %d %f\n",
7067 hdr_scene_data->is_hdr_scene, hdr_scene_data->hdr_confidence);
7068 uint8_t isHdr = hdr_scene_data->is_hdr_scene;
7069 float isHdrConfidence = hdr_scene_data->hdr_confidence;
7070 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE,
7071 &isHdr, 1);
7072 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE,
7073 &isHdrConfidence, 1);
7074 }
7075
7076
7077
Thierry Strudel3d639192016-09-09 11:52:26 -07007078 if (metadata->is_tuning_params_valid) {
7079 uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
7080 uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
7081 metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
7082
7083
7084 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
7085 sizeof(uint32_t));
7086 data += sizeof(uint32_t);
7087
7088 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
7089 sizeof(uint32_t));
7090 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
7091 data += sizeof(uint32_t);
7092
7093 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
7094 sizeof(uint32_t));
7095 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
7096 data += sizeof(uint32_t);
7097
7098 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
7099 sizeof(uint32_t));
7100 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
7101 data += sizeof(uint32_t);
7102
7103 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
7104 sizeof(uint32_t));
7105 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
7106 data += sizeof(uint32_t);
7107
7108 metadata->tuning_params.tuning_mod3_data_size = 0;
7109 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
7110 sizeof(uint32_t));
7111 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
7112 data += sizeof(uint32_t);
7113
7114 size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
7115 TUNING_SENSOR_DATA_MAX);
7116 memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
7117 count);
7118 data += count;
7119
7120 count = MIN(metadata->tuning_params.tuning_vfe_data_size,
7121 TUNING_VFE_DATA_MAX);
7122 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
7123 count);
7124 data += count;
7125
7126 count = MIN(metadata->tuning_params.tuning_cpp_data_size,
7127 TUNING_CPP_DATA_MAX);
7128 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
7129 count);
7130 data += count;
7131
7132 count = MIN(metadata->tuning_params.tuning_cac_data_size,
7133 TUNING_CAC_DATA_MAX);
7134 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
7135 count);
7136 data += count;
7137
7138 camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
7139 (int32_t *)(void *)tuning_meta_data_blob,
7140 (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
7141 }
7142
7143 IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
7144 CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
7145 camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
7146 (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
7147 NEUTRAL_COL_POINTS);
7148 }
7149
7150 IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
7151 uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
7152 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
7153 }
7154
7155 IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
7156 int32_t aeRegions[REGIONS_TUPLE_COUNT];
7157 // Adjust crop region from sensor output coordinate system to active
7158 // array coordinate system.
7159 mCropRegionMapper.toActiveArray(hAeRegions->rect.left, hAeRegions->rect.top,
7160 hAeRegions->rect.width, hAeRegions->rect.height);
7161
7162 convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
7163 camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
7164 REGIONS_TUPLE_COUNT);
7165 LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
7166 aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
7167 hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width,
7168 hAeRegions->rect.height);
7169 }
7170
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007171 IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
7172 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
7173 if (NAME_NOT_FOUND != val) {
7174 uint8_t fwkAfMode = (uint8_t)val;
7175 camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
7176 LOGD("Metadata : ANDROID_CONTROL_AF_MODE %d", val);
7177 } else {
7178 LOGH("Metadata not found : ANDROID_CONTROL_AF_MODE %d",
7179 val);
7180 }
7181 }
7182
Thierry Strudel3d639192016-09-09 11:52:26 -07007183 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
7184 uint8_t fwk_afState = (uint8_t) *afState;
7185 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007186 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
Thierry Strudel3d639192016-09-09 11:52:26 -07007187 }
7188
7189 IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
7190 camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
7191 }
7192
7193 IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
7194 camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
7195 }
7196
7197 IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
7198 uint8_t fwk_lensState = *lensState;
7199 camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
7200 }
7201
7202 IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
7203 /*af regions*/
7204 int32_t afRegions[REGIONS_TUPLE_COUNT];
7205 // Adjust crop region from sensor output coordinate system to active
7206 // array coordinate system.
7207 mCropRegionMapper.toActiveArray(hAfRegions->rect.left, hAfRegions->rect.top,
7208 hAfRegions->rect.width, hAfRegions->rect.height);
7209
7210 convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
7211 camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
7212 REGIONS_TUPLE_COUNT);
7213 LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
7214 afRegions[0], afRegions[1], afRegions[2], afRegions[3],
7215 hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width,
7216 hAfRegions->rect.height);
7217 }
7218
7219 IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007220 uint32_t ab_mode = *hal_ab_mode;
7221 if (ab_mode == CAM_ANTIBANDING_MODE_AUTO_60HZ ||
7222 ab_mode == CAM_ANTIBANDING_MODE_AUTO_50HZ) {
7223 ab_mode = CAM_ANTIBANDING_MODE_AUTO;
7224 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007225 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007226 ab_mode);
Thierry Strudel3d639192016-09-09 11:52:26 -07007227 if (NAME_NOT_FOUND != val) {
7228 uint8_t fwk_ab_mode = (uint8_t)val;
7229 camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
7230 }
7231 }
7232
7233 IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
7234 int val = lookupFwkName(SCENE_MODES_MAP,
7235 METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
7236 if (NAME_NOT_FOUND != val) {
7237 uint8_t fwkBestshotMode = (uint8_t)val;
7238 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
7239 LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
7240 } else {
7241 LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
7242 }
7243 }
7244
7245 IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
7246 uint8_t fwk_mode = (uint8_t) *mode;
7247 camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
7248 }
7249
7250 /* Constant metadata values to be update*/
7251 uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
7252 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
7253
7254 uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
7255 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
7256
7257 int32_t hotPixelMap[2];
7258 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
7259
7260 // CDS
7261 IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
7262 camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
7263 }
7264
Thierry Strudel04e026f2016-10-10 11:27:36 -07007265 IF_META_AVAILABLE(cam_sensor_hdr_type_t, vhdr, CAM_INTF_PARM_SENSOR_HDR, metadata) {
7266 int32_t fwk_hdr;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007267 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007268 if(*vhdr == CAM_SENSOR_HDR_OFF) {
7269 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_OFF;
7270 } else {
7271 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_ON;
7272 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007273
7274 if(fwk_hdr != curr_hdr_state) {
7275 LOGH("PROFILE_META_HDR_TOGGLED value=%d", fwk_hdr);
7276 if(fwk_hdr)
7277 mCurrFeatureState |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7278 else
7279 mCurrFeatureState &= ~CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7280 }
Thierry Strudel04e026f2016-10-10 11:27:36 -07007281 camMetadata.update(QCAMERA3_VIDEO_HDR_MODE, &fwk_hdr, 1);
7282 }
7283
Thierry Strudel54dc9782017-02-15 12:12:10 -08007284 //binning correction
7285 IF_META_AVAILABLE(cam_binning_correction_mode_t, bin_correction,
7286 CAM_INTF_META_BINNING_CORRECTION_MODE, metadata) {
7287 int32_t fwk_bin_mode = (int32_t) *bin_correction;
7288 camMetadata.update(QCAMERA3_BINNING_CORRECTION_MODE, &fwk_bin_mode, 1);
7289 }
7290
Thierry Strudel04e026f2016-10-10 11:27:36 -07007291 IF_META_AVAILABLE(cam_ir_mode_type_t, ir, CAM_INTF_META_IR_MODE, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007292 int32_t fwk_ir = (int32_t) *ir;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007293 int8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR ) != 0);
7294 int8_t is_ir_on = 0;
7295
7296 (fwk_ir > 0) ? (is_ir_on = 1) : (is_ir_on = 0) ;
7297 if(is_ir_on != curr_ir_state) {
7298 LOGH("PROFILE_META_IR_TOGGLED value=%d", fwk_ir);
7299 if(is_ir_on)
7300 mCurrFeatureState |= CAM_QCOM_FEATURE_IR;
7301 else
7302 mCurrFeatureState &= ~CAM_QCOM_FEATURE_IR;
7303 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007304 camMetadata.update(QCAMERA3_IR_MODE, &fwk_ir, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007305 }
7306
Thierry Strudel269c81a2016-10-12 12:13:59 -07007307 // AEC SPEED
7308 IF_META_AVAILABLE(float, aec, CAM_INTF_META_AEC_CONVERGENCE_SPEED, metadata) {
7309 camMetadata.update(QCAMERA3_AEC_CONVERGENCE_SPEED, aec, 1);
7310 }
7311
7312 // AWB SPEED
7313 IF_META_AVAILABLE(float, awb, CAM_INTF_META_AWB_CONVERGENCE_SPEED, metadata) {
7314 camMetadata.update(QCAMERA3_AWB_CONVERGENCE_SPEED, awb, 1);
7315 }
7316
Thierry Strudel3d639192016-09-09 11:52:26 -07007317 // TNR
7318 IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
7319 uint8_t tnr_enable = tnr->denoise_enable;
7320 int32_t tnr_process_type = (int32_t)tnr->process_plates;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007321 int8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0) ;
7322 int8_t is_tnr_on = 0;
7323
7324 (tnr_enable > 0) ? (is_tnr_on = 1) : (is_tnr_on = 0);
7325 if(is_tnr_on != curr_tnr_state) {
7326 LOGH("PROFILE_META_TNR_TOGGLED value=%d", tnr_enable);
7327 if(is_tnr_on)
7328 mCurrFeatureState |= CAM_QTI_FEATURE_SW_TNR;
7329 else
7330 mCurrFeatureState &= ~CAM_QTI_FEATURE_SW_TNR;
7331 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007332
7333 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
7334 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
7335 }
7336
7337 // Reprocess crop data
7338 IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
7339 uint8_t cnt = crop_data->num_of_streams;
7340 if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
7341 // mm-qcamera-daemon only posts crop_data for streams
7342 // not linked to pproc. So no valid crop metadata is not
7343 // necessarily an error case.
7344 LOGD("No valid crop metadata entries");
7345 } else {
7346 uint32_t reproc_stream_id;
7347 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7348 LOGD("No reprocessible stream found, ignore crop data");
7349 } else {
7350 int rc = NO_ERROR;
7351 Vector<int32_t> roi_map;
7352 int32_t *crop = new int32_t[cnt*4];
7353 if (NULL == crop) {
7354 rc = NO_MEMORY;
7355 }
7356 if (NO_ERROR == rc) {
7357 int32_t streams_found = 0;
7358 for (size_t i = 0; i < cnt; i++) {
7359 if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
7360 if (pprocDone) {
7361 // HAL already does internal reprocessing,
7362 // either via reprocessing before JPEG encoding,
7363 // or offline postprocessing for pproc bypass case.
7364 crop[0] = 0;
7365 crop[1] = 0;
7366 crop[2] = mInputStreamInfo.dim.width;
7367 crop[3] = mInputStreamInfo.dim.height;
7368 } else {
7369 crop[0] = crop_data->crop_info[i].crop.left;
7370 crop[1] = crop_data->crop_info[i].crop.top;
7371 crop[2] = crop_data->crop_info[i].crop.width;
7372 crop[3] = crop_data->crop_info[i].crop.height;
7373 }
7374 roi_map.add(crop_data->crop_info[i].roi_map.left);
7375 roi_map.add(crop_data->crop_info[i].roi_map.top);
7376 roi_map.add(crop_data->crop_info[i].roi_map.width);
7377 roi_map.add(crop_data->crop_info[i].roi_map.height);
7378 streams_found++;
7379 LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
7380 crop[0], crop[1], crop[2], crop[3]);
7381 LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
7382 crop_data->crop_info[i].roi_map.left,
7383 crop_data->crop_info[i].roi_map.top,
7384 crop_data->crop_info[i].roi_map.width,
7385 crop_data->crop_info[i].roi_map.height);
7386 break;
7387
7388 }
7389 }
7390 camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
7391 &streams_found, 1);
7392 camMetadata.update(QCAMERA3_CROP_REPROCESS,
7393 crop, (size_t)(streams_found * 4));
7394 if (roi_map.array()) {
7395 camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
7396 roi_map.array(), roi_map.size());
7397 }
7398 }
7399 if (crop) {
7400 delete [] crop;
7401 }
7402 }
7403 }
7404 }
7405
7406 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
7407 // Regardless of CAC supports or not, CTS is expecting the CAC result to be non NULL and
7408 // so hardcoding the CAC result to OFF mode.
7409 uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7410 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
7411 } else {
7412 IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
7413 int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
7414 *cacMode);
7415 if (NAME_NOT_FOUND != val) {
7416 uint8_t resultCacMode = (uint8_t)val;
7417 // check whether CAC result from CB is equal to Framework set CAC mode
7418 // If not equal then set the CAC mode came in corresponding request
7419 if (fwk_cacMode != resultCacMode) {
7420 resultCacMode = fwk_cacMode;
7421 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007422 //Check if CAC is disabled by property
7423 if (m_cacModeDisabled) {
7424 resultCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7425 }
7426
Thierry Strudel3d639192016-09-09 11:52:26 -07007427 LOGD("fwk_cacMode=%d resultCacMode=%d", fwk_cacMode, resultCacMode);
7428 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
7429 } else {
7430 LOGE("Invalid CAC camera parameter: %d", *cacMode);
7431 }
7432 }
7433 }
7434
7435 // Post blob of cam_cds_data through vendor tag.
7436 IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
7437 uint8_t cnt = cdsInfo->num_of_streams;
7438 cam_cds_data_t cdsDataOverride;
7439 memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
7440 cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
7441 cdsDataOverride.num_of_streams = 1;
7442 if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
7443 uint32_t reproc_stream_id;
7444 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7445 LOGD("No reprocessible stream found, ignore cds data");
7446 } else {
7447 for (size_t i = 0; i < cnt; i++) {
7448 if (cdsInfo->cds_info[i].stream_id ==
7449 reproc_stream_id) {
7450 cdsDataOverride.cds_info[0].cds_enable =
7451 cdsInfo->cds_info[i].cds_enable;
7452 break;
7453 }
7454 }
7455 }
7456 } else {
7457 LOGD("Invalid stream count %d in CDS_DATA", cnt);
7458 }
7459 camMetadata.update(QCAMERA3_CDS_INFO,
7460 (uint8_t *)&cdsDataOverride,
7461 sizeof(cam_cds_data_t));
7462 }
7463
7464 // Ldaf calibration data
7465 if (!mLdafCalibExist) {
7466 IF_META_AVAILABLE(uint32_t, ldafCalib,
7467 CAM_INTF_META_LDAF_EXIF, metadata) {
7468 mLdafCalibExist = true;
7469 mLdafCalib[0] = ldafCalib[0];
7470 mLdafCalib[1] = ldafCalib[1];
7471 LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
7472 ldafCalib[0], ldafCalib[1]);
7473 }
7474 }
7475
Thierry Strudel54dc9782017-02-15 12:12:10 -08007476 // EXIF debug data through vendor tag
7477 /*
7478 * Mobicat Mask can assume 3 values:
7479 * 1 refers to Mobicat data,
7480 * 2 refers to Stats Debug and Exif Debug Data
7481 * 3 refers to Mobicat and Stats Debug Data
7482 * We want to make sure that we are sending Exif debug data
7483 * only when Mobicat Mask is 2.
7484 */
7485 if ((mExifParams.debug_params != NULL) && (getMobicatMask() == 2)) {
7486 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB,
7487 (uint8_t *)(void *)mExifParams.debug_params,
7488 sizeof(mm_jpeg_debug_exif_params_t));
7489 }
7490
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007491 // Reprocess and DDM debug data through vendor tag
7492 cam_reprocess_info_t repro_info;
7493 memset(&repro_info, 0, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007494 IF_META_AVAILABLE(cam_stream_crop_info_t, sensorCropInfo,
7495 CAM_INTF_META_SNAP_CROP_INFO_SENSOR, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007496 memcpy(&(repro_info.sensor_crop_info), sensorCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007497 }
7498 IF_META_AVAILABLE(cam_stream_crop_info_t, camifCropInfo,
7499 CAM_INTF_META_SNAP_CROP_INFO_CAMIF, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007500 memcpy(&(repro_info.camif_crop_info), camifCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007501 }
7502 IF_META_AVAILABLE(cam_stream_crop_info_t, ispCropInfo,
7503 CAM_INTF_META_SNAP_CROP_INFO_ISP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007504 memcpy(&(repro_info.isp_crop_info), ispCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007505 }
7506 IF_META_AVAILABLE(cam_stream_crop_info_t, cppCropInfo,
7507 CAM_INTF_META_SNAP_CROP_INFO_CPP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007508 memcpy(&(repro_info.cpp_crop_info), cppCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007509 }
7510 IF_META_AVAILABLE(cam_focal_length_ratio_t, ratio,
7511 CAM_INTF_META_AF_FOCAL_LENGTH_RATIO, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007512 memcpy(&(repro_info.af_focal_length_ratio), ratio, sizeof(cam_focal_length_ratio_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007513 }
7514 IF_META_AVAILABLE(int32_t, flip, CAM_INTF_PARM_FLIP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007515 memcpy(&(repro_info.pipeline_flip), flip, sizeof(int32_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007516 }
7517 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
7518 CAM_INTF_PARM_ROTATION, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007519 memcpy(&(repro_info.rotation_info), rotationInfo, sizeof(cam_rotation_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007520 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007521 IF_META_AVAILABLE(cam_area_t, afRoi, CAM_INTF_META_AF_ROI, metadata) {
7522 memcpy(&(repro_info.af_roi), afRoi, sizeof(cam_area_t));
7523 }
7524 IF_META_AVAILABLE(cam_dyn_img_data_t, dynMask, CAM_INTF_META_IMG_DYN_FEAT, metadata) {
7525 memcpy(&(repro_info.dyn_mask), dynMask, sizeof(cam_dyn_img_data_t));
7526 }
7527 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB,
7528 (uint8_t *)&repro_info, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007529
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007530 // INSTANT AEC MODE
7531 IF_META_AVAILABLE(uint8_t, instant_aec_mode,
7532 CAM_INTF_PARM_INSTANT_AEC, metadata) {
7533 camMetadata.update(QCAMERA3_INSTANT_AEC_MODE, instant_aec_mode, 1);
7534 }
7535
Shuzhen Wange763e802016-03-31 10:24:29 -07007536 // AF scene change
7537 IF_META_AVAILABLE(uint8_t, afSceneChange, CAM_INTF_META_AF_SCENE_CHANGE, metadata) {
7538 camMetadata.update(NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE, afSceneChange, 1);
7539 }
7540
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007541 /* In batch mode, cache the first metadata in the batch */
7542 if (mBatchSize && firstMetadataInBatch) {
7543 mCachedMetadata.clear();
7544 mCachedMetadata = camMetadata;
7545 }
7546
Thierry Strudel3d639192016-09-09 11:52:26 -07007547 resultMetadata = camMetadata.release();
7548 return resultMetadata;
7549}
7550
7551/*===========================================================================
7552 * FUNCTION : saveExifParams
7553 *
7554 * DESCRIPTION:
7555 *
7556 * PARAMETERS :
7557 * @metadata : metadata information from callback
7558 *
7559 * RETURN : none
7560 *
7561 *==========================================================================*/
7562void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
7563{
7564 IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
7565 CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
7566 if (mExifParams.debug_params) {
7567 mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
7568 mExifParams.debug_params->ae_debug_params_valid = TRUE;
7569 }
7570 }
7571 IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
7572 CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
7573 if (mExifParams.debug_params) {
7574 mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
7575 mExifParams.debug_params->awb_debug_params_valid = TRUE;
7576 }
7577 }
7578 IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
7579 CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
7580 if (mExifParams.debug_params) {
7581 mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
7582 mExifParams.debug_params->af_debug_params_valid = TRUE;
7583 }
7584 }
7585 IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
7586 CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
7587 if (mExifParams.debug_params) {
7588 mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
7589 mExifParams.debug_params->asd_debug_params_valid = TRUE;
7590 }
7591 }
7592 IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
7593 CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
7594 if (mExifParams.debug_params) {
7595 mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
7596 mExifParams.debug_params->stats_debug_params_valid = TRUE;
7597 }
7598 }
7599 IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t,bestats_exif_debug_params,
7600 CAM_INTF_META_EXIF_DEBUG_BESTATS, metadata) {
7601 if (mExifParams.debug_params) {
7602 mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
7603 mExifParams.debug_params->bestats_debug_params_valid = TRUE;
7604 }
7605 }
7606 IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
7607 CAM_INTF_META_EXIF_DEBUG_BHIST, metadata) {
7608 if (mExifParams.debug_params) {
7609 mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
7610 mExifParams.debug_params->bhist_debug_params_valid = TRUE;
7611 }
7612 }
7613 IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
7614 CAM_INTF_META_EXIF_DEBUG_3A_TUNING, metadata) {
7615 if (mExifParams.debug_params) {
7616 mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
7617 mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
7618 }
7619 }
7620}
7621
7622/*===========================================================================
7623 * FUNCTION : get3AExifParams
7624 *
7625 * DESCRIPTION:
7626 *
7627 * PARAMETERS : none
7628 *
7629 *
7630 * RETURN : mm_jpeg_exif_params_t
7631 *
7632 *==========================================================================*/
mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
{
    // Returns a copy of the cached 3A EXIF parameters populated by
    // saveExifParams(); caller owns the copy.
    return mExifParams;
}
7637
7638/*===========================================================================
7639 * FUNCTION : translateCbUrgentMetadataToResultMetadata
7640 *
7641 * DESCRIPTION:
7642 *
7643 * PARAMETERS :
7644 * @metadata : metadata information from callback
7645 *
7646 * RETURN : camera_metadata_t*
7647 * metadata in a format specified by fwk
7648 *==========================================================================*/
camera_metadata_t*
QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
                                 (metadata_buffer_t *metadata)
{
    // Builds the "urgent" partial result (3A states and triggers) from HAL
    // metadata. Caller takes ownership of the returned camera_metadata_t.
    CameraMetadata camMetadata;
    camera_metadata_t *resultMetadata;


    // AWB converged/searching state.
    IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
        uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
        camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
        LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
    }

    // AE precapture trigger echo (trigger value + id) for the framework.
    IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
        camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
                &aecTrigger->trigger, 1);
        camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
                &aecTrigger->trigger_id, 1);
        LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
                 aecTrigger->trigger);
        LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
                aecTrigger->trigger_id);
    }

    IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
        uint8_t fwk_ae_state = (uint8_t) *ae_state;
        camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
        LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
    }

    // AF trigger echo (trigger value + id).
    IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
        camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
                &af_trigger->trigger, 1);
        LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
                 af_trigger->trigger);
        camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &af_trigger->trigger_id, 1);
        LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
                af_trigger->trigger_id);
    }

    // Map the HAL white-balance enum to the framework AWB mode.
    IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
        int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
                METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
        if (NAME_NOT_FOUND != val) {
            uint8_t fwkWhiteBalanceMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
            LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
        } else {
            LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
        }
    }

    // ANDROID_CONTROL_AE_MODE has no single HAL counterpart: it is deduced
    // below from AEC mode, LED/flash mode and red-eye reduction, in that
    // priority order (red-eye > auto/on flash > plain AE on/off).
    uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
    uint32_t aeMode = CAM_AE_MODE_MAX;
    int32_t flashMode = CAM_FLASH_MODE_MAX;
    int32_t redeye = -1;
    IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
        aeMode = *pAeMode;
    }
    IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
        flashMode = *pFlashMode;
    }
    IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
        redeye = *pRedeye;
    }

    if (1 == redeye) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
        int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
                flashMode);
        if (NAME_NOT_FOUND != val) {
            fwk_aeMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
        } else {
            LOGE("Unsupported flash mode %d", flashMode);
        }
    } else if (aeMode == CAM_AE_MODE_ON) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else if (aeMode == CAM_AE_MODE_OFF) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else {
        LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
              "flashMode:%d, aeMode:%u!!!",
                 redeye, flashMode, aeMode);
    }
    // While instant AEC is active, count frames until AEC settles or the
    // display-skip bound is hit, then flag the reset for the request path.
    if (mInstantAEC) {
        // Increment frame Idx count untill a bound reached for instant AEC.
        mInstantAecFrameIdxCount++;
        IF_META_AVAILABLE(cam_3a_params_t, ae_params,
                CAM_INTF_META_AEC_INFO, metadata) {
            LOGH("ae_params->settled = %d",ae_params->settled);
            // If AEC settled, or if number of frames reached bound value,
            // should reset instant AEC.
            if (ae_params->settled ||
                    (mInstantAecFrameIdxCount > mAecSkipDisplayFrameBound)) {
                LOGH("AEC settled or Frames reached instantAEC bound, resetting instantAEC");
                mInstantAEC = false;
                mResetInstantAEC = true;
                mInstantAecFrameIdxCount = 0;
            }
        }
    }
    resultMetadata = camMetadata.release();
    return resultMetadata;
}
7759
7760/*===========================================================================
7761 * FUNCTION : dumpMetadataToFile
7762 *
7763 * DESCRIPTION: Dumps tuning metadata to file system
7764 *
7765 * PARAMETERS :
7766 * @meta : tuning metadata
7767 * @dumpFrameCount : current dump frame count
7768 * @enabled : Enable mask
7769 *
7770 *==========================================================================*/
7771void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
7772 uint32_t &dumpFrameCount,
7773 bool enabled,
7774 const char *type,
7775 uint32_t frameNumber)
7776{
7777 //Some sanity checks
7778 if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
7779 LOGE("Tuning sensor data size bigger than expected %d: %d",
7780 meta.tuning_sensor_data_size,
7781 TUNING_SENSOR_DATA_MAX);
7782 return;
7783 }
7784
7785 if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
7786 LOGE("Tuning VFE data size bigger than expected %d: %d",
7787 meta.tuning_vfe_data_size,
7788 TUNING_VFE_DATA_MAX);
7789 return;
7790 }
7791
7792 if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
7793 LOGE("Tuning CPP data size bigger than expected %d: %d",
7794 meta.tuning_cpp_data_size,
7795 TUNING_CPP_DATA_MAX);
7796 return;
7797 }
7798
7799 if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
7800 LOGE("Tuning CAC data size bigger than expected %d: %d",
7801 meta.tuning_cac_data_size,
7802 TUNING_CAC_DATA_MAX);
7803 return;
7804 }
7805 //
7806
7807 if(enabled){
7808 char timeBuf[FILENAME_MAX];
7809 char buf[FILENAME_MAX];
7810 memset(buf, 0, sizeof(buf));
7811 memset(timeBuf, 0, sizeof(timeBuf));
7812 time_t current_time;
7813 struct tm * timeinfo;
7814 time (&current_time);
7815 timeinfo = localtime (&current_time);
7816 if (timeinfo != NULL) {
7817 strftime (timeBuf, sizeof(timeBuf),
7818 QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
7819 }
7820 String8 filePath(timeBuf);
7821 snprintf(buf,
7822 sizeof(buf),
7823 "%dm_%s_%d.bin",
7824 dumpFrameCount,
7825 type,
7826 frameNumber);
7827 filePath.append(buf);
7828 int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
7829 if (file_fd >= 0) {
7830 ssize_t written_len = 0;
7831 meta.tuning_data_version = TUNING_DATA_VERSION;
7832 void *data = (void *)((uint8_t *)&meta.tuning_data_version);
7833 written_len += write(file_fd, data, sizeof(uint32_t));
7834 data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
7835 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
7836 written_len += write(file_fd, data, sizeof(uint32_t));
7837 data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
7838 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
7839 written_len += write(file_fd, data, sizeof(uint32_t));
7840 data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
7841 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
7842 written_len += write(file_fd, data, sizeof(uint32_t));
7843 data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
7844 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
7845 written_len += write(file_fd, data, sizeof(uint32_t));
7846 meta.tuning_mod3_data_size = 0;
7847 data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
7848 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
7849 written_len += write(file_fd, data, sizeof(uint32_t));
7850 size_t total_size = meta.tuning_sensor_data_size;
7851 data = (void *)((uint8_t *)&meta.data);
7852 written_len += write(file_fd, data, total_size);
7853 total_size = meta.tuning_vfe_data_size;
7854 data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
7855 written_len += write(file_fd, data, total_size);
7856 total_size = meta.tuning_cpp_data_size;
7857 data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
7858 written_len += write(file_fd, data, total_size);
7859 total_size = meta.tuning_cac_data_size;
7860 data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
7861 written_len += write(file_fd, data, total_size);
7862 close(file_fd);
7863 }else {
7864 LOGE("fail to open file for metadata dumping");
7865 }
7866 }
7867}
7868
7869/*===========================================================================
7870 * FUNCTION : cleanAndSortStreamInfo
7871 *
7872 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
7873 * and sort them such that raw stream is at the end of the list
7874 * This is a workaround for camera daemon constraint.
7875 *
7876 * PARAMETERS : None
7877 *
7878 *==========================================================================*/
7879void QCamera3HardwareInterface::cleanAndSortStreamInfo()
7880{
7881 List<stream_info_t *> newStreamInfo;
7882
7883 /*clean up invalid streams*/
7884 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
7885 it != mStreamInfo.end();) {
7886 if(((*it)->status) == INVALID){
7887 QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
7888 delete channel;
7889 free(*it);
7890 it = mStreamInfo.erase(it);
7891 } else {
7892 it++;
7893 }
7894 }
7895
7896 // Move preview/video/callback/snapshot streams into newList
7897 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
7898 it != mStreamInfo.end();) {
7899 if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
7900 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
7901 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
7902 newStreamInfo.push_back(*it);
7903 it = mStreamInfo.erase(it);
7904 } else
7905 it++;
7906 }
7907 // Move raw streams into newList
7908 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
7909 it != mStreamInfo.end();) {
7910 newStreamInfo.push_back(*it);
7911 it = mStreamInfo.erase(it);
7912 }
7913
7914 mStreamInfo = newStreamInfo;
7915}
7916
7917/*===========================================================================
7918 * FUNCTION : extractJpegMetadata
7919 *
7920 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
7921 * JPEG metadata is cached in HAL, and return as part of capture
7922 * result when metadata is returned from camera daemon.
7923 *
7924 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
7925 * @request: capture request
7926 *
7927 *==========================================================================*/
7928void QCamera3HardwareInterface::extractJpegMetadata(
7929 CameraMetadata& jpegMetadata,
7930 const camera3_capture_request_t *request)
7931{
7932 CameraMetadata frame_settings;
7933 frame_settings = request->settings;
7934
7935 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
7936 jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
7937 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
7938 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
7939
7940 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
7941 jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
7942 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
7943 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
7944
7945 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
7946 jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
7947 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
7948 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
7949
7950 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
7951 jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
7952 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
7953 frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
7954
7955 if (frame_settings.exists(ANDROID_JPEG_QUALITY))
7956 jpegMetadata.update(ANDROID_JPEG_QUALITY,
7957 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
7958 frame_settings.find(ANDROID_JPEG_QUALITY).count);
7959
7960 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
7961 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
7962 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
7963 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
7964
7965 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
7966 int32_t thumbnail_size[2];
7967 thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
7968 thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
7969 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
7970 int32_t orientation =
7971 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007972 if ((!needJpegExifRotation()) && ((orientation == 90) || (orientation == 270))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07007973 //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
7974 int32_t temp;
7975 temp = thumbnail_size[0];
7976 thumbnail_size[0] = thumbnail_size[1];
7977 thumbnail_size[1] = temp;
7978 }
7979 }
7980 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
7981 thumbnail_size,
7982 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
7983 }
7984
7985}
7986
7987/*===========================================================================
7988 * FUNCTION : convertToRegions
7989 *
7990 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
7991 *
7992 * PARAMETERS :
7993 * @rect : cam_rect_t struct to convert
7994 * @region : int32_t destination array
7995 * @weight : if we are converting from cam_area_t, weight is valid
7996 * else weight = -1
7997 *
7998 *==========================================================================*/
7999void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
8000 int32_t *region, int weight)
8001{
8002 region[0] = rect.left;
8003 region[1] = rect.top;
8004 region[2] = rect.left + rect.width;
8005 region[3] = rect.top + rect.height;
8006 if (weight > -1) {
8007 region[4] = weight;
8008 }
8009}
8010
8011/*===========================================================================
8012 * FUNCTION : convertFromRegions
8013 *
8014 * DESCRIPTION: helper method to convert from array to cam_rect_t
8015 *
8016 * PARAMETERS :
8017 * @rect : cam_rect_t struct to convert
8018 * @region : int32_t destination array
8019 * @weight : if we are converting from cam_area_t, weight is valid
8020 * else weight = -1
8021 *
8022 *==========================================================================*/
8023void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
Chien-Yu Chen92724a82017-01-06 11:50:30 -08008024 const CameraMetadata &frame_settings, uint32_t tag)
Thierry Strudel3d639192016-09-09 11:52:26 -07008025{
Thierry Strudel3d639192016-09-09 11:52:26 -07008026 int32_t x_min = frame_settings.find(tag).data.i32[0];
8027 int32_t y_min = frame_settings.find(tag).data.i32[1];
8028 int32_t x_max = frame_settings.find(tag).data.i32[2];
8029 int32_t y_max = frame_settings.find(tag).data.i32[3];
8030 roi.weight = frame_settings.find(tag).data.i32[4];
8031 roi.rect.left = x_min;
8032 roi.rect.top = y_min;
8033 roi.rect.width = x_max - x_min;
8034 roi.rect.height = y_max - y_min;
8035}
8036
8037/*===========================================================================
8038 * FUNCTION : resetIfNeededROI
8039 *
8040 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
8041 * crop region
8042 *
8043 * PARAMETERS :
8044 * @roi : cam_area_t struct to resize
8045 * @scalerCropRegion : cam_crop_region_t region to compare against
8046 *
8047 *
8048 *==========================================================================*/
8049bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
8050 const cam_crop_region_t* scalerCropRegion)
8051{
8052 int32_t roi_x_max = roi->rect.width + roi->rect.left;
8053 int32_t roi_y_max = roi->rect.height + roi->rect.top;
8054 int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
8055 int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
8056
8057 /* According to spec weight = 0 is used to indicate roi needs to be disabled
8058 * without having this check the calculations below to validate if the roi
8059 * is inside scalar crop region will fail resulting in the roi not being
8060 * reset causing algorithm to continue to use stale roi window
8061 */
8062 if (roi->weight == 0) {
8063 return true;
8064 }
8065
8066 if ((roi_x_max < scalerCropRegion->left) ||
8067 // right edge of roi window is left of scalar crop's left edge
8068 (roi_y_max < scalerCropRegion->top) ||
8069 // bottom edge of roi window is above scalar crop's top edge
8070 (roi->rect.left > crop_x_max) ||
8071 // left edge of roi window is beyond(right) of scalar crop's right edge
8072 (roi->rect.top > crop_y_max)){
8073 // top edge of roi windo is above scalar crop's top edge
8074 return false;
8075 }
8076 if (roi->rect.left < scalerCropRegion->left) {
8077 roi->rect.left = scalerCropRegion->left;
8078 }
8079 if (roi->rect.top < scalerCropRegion->top) {
8080 roi->rect.top = scalerCropRegion->top;
8081 }
8082 if (roi_x_max > crop_x_max) {
8083 roi_x_max = crop_x_max;
8084 }
8085 if (roi_y_max > crop_y_max) {
8086 roi_y_max = crop_y_max;
8087 }
8088 roi->rect.width = roi_x_max - roi->rect.left;
8089 roi->rect.height = roi_y_max - roi->rect.top;
8090 return true;
8091}
8092
8093/*===========================================================================
8094 * FUNCTION : convertLandmarks
8095 *
8096 * DESCRIPTION: helper method to extract the landmarks from face detection info
8097 *
8098 * PARAMETERS :
8099 * @landmark_data : input landmark data to be converted
8100 * @landmarks : int32_t destination array
8101 *
8102 *
8103 *==========================================================================*/
8104void QCamera3HardwareInterface::convertLandmarks(
8105 cam_face_landmarks_info_t landmark_data,
8106 int32_t *landmarks)
8107{
Thierry Strudel04e026f2016-10-10 11:27:36 -07008108 if (landmark_data.is_left_eye_valid) {
8109 landmarks[LEFT_EYE_X] = (int32_t)landmark_data.left_eye_center.x;
8110 landmarks[LEFT_EYE_Y] = (int32_t)landmark_data.left_eye_center.y;
8111 } else {
8112 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8113 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8114 }
8115
8116 if (landmark_data.is_right_eye_valid) {
8117 landmarks[RIGHT_EYE_X] = (int32_t)landmark_data.right_eye_center.x;
8118 landmarks[RIGHT_EYE_Y] = (int32_t)landmark_data.right_eye_center.y;
8119 } else {
8120 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8121 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8122 }
8123
8124 if (landmark_data.is_mouth_valid) {
8125 landmarks[MOUTH_X] = (int32_t)landmark_data.mouth_center.x;
8126 landmarks[MOUTH_Y] = (int32_t)landmark_data.mouth_center.y;
8127 } else {
8128 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8129 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
8130 }
8131}
8132
8133/*===========================================================================
8134 * FUNCTION : setInvalidLandmarks
8135 *
8136 * DESCRIPTION: helper method to set invalid landmarks
8137 *
8138 * PARAMETERS :
8139 * @landmarks : int32_t destination array
8140 *
8141 *
8142 *==========================================================================*/
8143void QCamera3HardwareInterface::setInvalidLandmarks(
8144 int32_t *landmarks)
8145{
8146 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8147 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8148 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8149 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8150 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8151 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
Thierry Strudel3d639192016-09-09 11:52:26 -07008152}
8153
// Shorthand: pointer to the INDEX-th buffer held by a QCamera3 memory object.
#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008155
8156/*===========================================================================
8157 * FUNCTION : getCapabilities
8158 *
8159 * DESCRIPTION: query camera capability from back-end
8160 *
8161 * PARAMETERS :
8162 * @ops : mm-interface ops structure
8163 * @cam_handle : camera handle for which we need capability
8164 *
8165 * RETURN : ptr type of capability structure
8166 * capability for success
8167 * NULL for failure
8168 *==========================================================================*/
cam_capability_t *QCamera3HardwareInterface::getCapabilities(mm_camera_ops_t *ops,
        uint32_t cam_handle)
{
    int rc = NO_ERROR;
    QCamera3HeapMemory *capabilityHeap = NULL;
    cam_capability_t *cap_ptr = NULL;

    if (ops == NULL) {
        LOGE("Invalid arguments");
        return NULL;
    }

    capabilityHeap = new QCamera3HeapMemory(1);
    if (capabilityHeap == NULL) {
        LOGE("creation of capabilityHeap failed");
        return NULL;
    }

    /* Allocate memory for capability buffer */
    rc = capabilityHeap->allocate(sizeof(cam_capability_t));
    if(rc != OK) {
        LOGE("No memory for cappability");
        goto allocate_failed;
    }

    /* Map memory for capability buffer */
    memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));

    // Share the heap buffer with the back-end; query_capability() below
    // fills it in.
    rc = ops->map_buf(cam_handle,
            CAM_MAPPING_BUF_TYPE_CAPABILITY, capabilityHeap->getFd(0),
            sizeof(cam_capability_t), capabilityHeap->getPtr(0));
    if(rc < 0) {
        LOGE("failed to map capability buffer");
        rc = FAILED_TRANSACTION;
        goto map_failed;
    }

    /* Query Capability */
    rc = ops->query_capability(cam_handle);
    if(rc < 0) {
        LOGE("failed to query capability");
        rc = FAILED_TRANSACTION;
        goto query_failed;
    }

    // Copy the result into a private malloc'd struct so the shared buffer
    // can be unmapped and freed before returning. Caller owns the copy.
    cap_ptr = (cam_capability_t *)malloc(sizeof(cam_capability_t));
    if (cap_ptr == NULL) {
        LOGE("out of memory");
        rc = NO_MEMORY;
        goto query_failed;
    }

    memset(cap_ptr, 0, sizeof(cam_capability_t));
    memcpy(cap_ptr, DATA_PTR(capabilityHeap, 0), sizeof(cam_capability_t));

    // Zero out the analysis-window padding offsets in the copy.
    int index;
    for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
        cam_analysis_info_t *p_analysis_info = &cap_ptr->analysis_info[index];
        p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
        p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
    }

    // Cleanup labels unwind in reverse order of setup; the success path also
    // falls through here (rc == NO_ERROR) after the copy has been taken.
query_failed:
    ops->unmap_buf(cam_handle, CAM_MAPPING_BUF_TYPE_CAPABILITY);
map_failed:
    capabilityHeap->deallocate();
allocate_failed:
    delete capabilityHeap;

    if (rc != NO_ERROR) {
        return NULL;
    } else {
        return cap_ptr;
    }
}
8244
Thierry Strudel3d639192016-09-09 11:52:26 -07008245/*===========================================================================
8246 * FUNCTION : initCapabilities
8247 *
8248 * DESCRIPTION: initialize camera capabilities in static data struct
8249 *
8250 * PARAMETERS :
8251 * @cameraId : camera Id
8252 *
8253 * RETURN : int32_t type of status
8254 * NO_ERROR -- success
8255 * none-zero failure code
8256 *==========================================================================*/
8257int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
8258{
8259 int rc = 0;
8260 mm_camera_vtbl_t *cameraHandle = NULL;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008261 uint32_t handle = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07008262
8263 rc = camera_open((uint8_t)cameraId, &cameraHandle);
8264 if (rc) {
8265 LOGE("camera_open failed. rc = %d", rc);
8266 goto open_failed;
8267 }
8268 if (!cameraHandle) {
8269 LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
8270 goto open_failed;
8271 }
8272
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008273 handle = get_main_camera_handle(cameraHandle->camera_handle);
8274 gCamCapability[cameraId] = getCapabilities(cameraHandle->ops, handle);
8275 if (gCamCapability[cameraId] == NULL) {
8276 rc = FAILED_TRANSACTION;
8277 goto failed_op;
Thierry Strudel3d639192016-09-09 11:52:26 -07008278 }
8279
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008280 gCamCapability[cameraId]->camera_index = cameraId;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008281 if (is_dual_camera_by_idx(cameraId)) {
8282 handle = get_aux_camera_handle(cameraHandle->camera_handle);
8283 gCamCapability[cameraId]->aux_cam_cap =
8284 getCapabilities(cameraHandle->ops, handle);
8285 if (gCamCapability[cameraId]->aux_cam_cap == NULL) {
8286 rc = FAILED_TRANSACTION;
8287 free(gCamCapability[cameraId]);
8288 goto failed_op;
8289 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08008290
8291 // Copy the main camera capability to main_cam_cap struct
8292 gCamCapability[cameraId]->main_cam_cap =
8293 (cam_capability_t *)malloc(sizeof(cam_capability_t));
8294 if (gCamCapability[cameraId]->main_cam_cap == NULL) {
8295 LOGE("out of memory");
8296 rc = NO_MEMORY;
8297 goto failed_op;
8298 }
8299 memcpy(gCamCapability[cameraId]->main_cam_cap, gCamCapability[cameraId],
8300 sizeof(cam_capability_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008301 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008302failed_op:
Thierry Strudel3d639192016-09-09 11:52:26 -07008303 cameraHandle->ops->close_camera(cameraHandle->camera_handle);
8304 cameraHandle = NULL;
8305open_failed:
8306 return rc;
8307}
8308
8309/*==========================================================================
8310 * FUNCTION : get3Aversion
8311 *
8312 * DESCRIPTION: get the Q3A S/W version
8313 *
8314 * PARAMETERS :
8315 * @sw_version: Reference of Q3A structure which will hold version info upon
8316 * return
8317 *
8318 * RETURN : None
8319 *
8320 *==========================================================================*/
8321void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
8322{
8323 if(gCamCapability[mCameraId])
8324 sw_version = gCamCapability[mCameraId]->q3a_version;
8325 else
8326 LOGE("Capability structure NULL!");
8327}
8328
8329
8330/*===========================================================================
8331 * FUNCTION : initParameters
8332 *
8333 * DESCRIPTION: initialize camera parameters
8334 *
8335 * PARAMETERS :
8336 *
8337 * RETURN : int32_t type of status
8338 * NO_ERROR -- success
8339 * none-zero failure code
8340 *==========================================================================*/
8341int QCamera3HardwareInterface::initParameters()
8342{
8343 int rc = 0;
8344
8345 //Allocate Set Param Buffer
8346 mParamHeap = new QCamera3HeapMemory(1);
8347 rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
8348 if(rc != OK) {
8349 rc = NO_MEMORY;
8350 LOGE("Failed to allocate SETPARM Heap memory");
8351 delete mParamHeap;
8352 mParamHeap = NULL;
8353 return rc;
8354 }
8355
8356 //Map memory for parameters buffer
8357 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
8358 CAM_MAPPING_BUF_TYPE_PARM_BUF,
8359 mParamHeap->getFd(0),
8360 sizeof(metadata_buffer_t),
8361 (metadata_buffer_t *) DATA_PTR(mParamHeap,0));
8362 if(rc < 0) {
8363 LOGE("failed to map SETPARM buffer");
8364 rc = FAILED_TRANSACTION;
8365 mParamHeap->deallocate();
8366 delete mParamHeap;
8367 mParamHeap = NULL;
8368 return rc;
8369 }
8370
8371 mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
8372
8373 mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
8374 return rc;
8375}
8376
8377/*===========================================================================
8378 * FUNCTION : deinitParameters
8379 *
8380 * DESCRIPTION: de-initialize camera parameters
8381 *
8382 * PARAMETERS :
8383 *
8384 * RETURN : NONE
8385 *==========================================================================*/
8386void QCamera3HardwareInterface::deinitParameters()
8387{
8388 mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
8389 CAM_MAPPING_BUF_TYPE_PARM_BUF);
8390
8391 mParamHeap->deallocate();
8392 delete mParamHeap;
8393 mParamHeap = NULL;
8394
8395 mParameters = NULL;
8396
8397 free(mPrevParameters);
8398 mPrevParameters = NULL;
8399}
8400
8401/*===========================================================================
8402 * FUNCTION : calcMaxJpegSize
8403 *
8404 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
8405 *
8406 * PARAMETERS :
8407 *
8408 * RETURN : max_jpeg_size
8409 *==========================================================================*/
8410size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
8411{
8412 size_t max_jpeg_size = 0;
8413 size_t temp_width, temp_height;
8414 size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
8415 MAX_SIZES_CNT);
8416 for (size_t i = 0; i < count; i++) {
8417 temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
8418 temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
8419 if (temp_width * temp_height > max_jpeg_size ) {
8420 max_jpeg_size = temp_width * temp_height;
8421 }
8422 }
8423 max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
8424 return max_jpeg_size;
8425}
8426
8427/*===========================================================================
8428 * FUNCTION : getMaxRawSize
8429 *
8430 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
8431 *
8432 * PARAMETERS :
8433 *
8434 * RETURN : Largest supported Raw Dimension
8435 *==========================================================================*/
8436cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
8437{
8438 int max_width = 0;
8439 cam_dimension_t maxRawSize;
8440
8441 memset(&maxRawSize, 0, sizeof(cam_dimension_t));
8442 for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
8443 if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
8444 max_width = gCamCapability[camera_id]->raw_dim[i].width;
8445 maxRawSize = gCamCapability[camera_id]->raw_dim[i];
8446 }
8447 }
8448 return maxRawSize;
8449}
8450
8451
8452/*===========================================================================
8453 * FUNCTION : calcMaxJpegDim
8454 *
8455 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
8456 *
8457 * PARAMETERS :
8458 *
8459 * RETURN : max_jpeg_dim
8460 *==========================================================================*/
8461cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
8462{
8463 cam_dimension_t max_jpeg_dim;
8464 cam_dimension_t curr_jpeg_dim;
8465 max_jpeg_dim.width = 0;
8466 max_jpeg_dim.height = 0;
8467 curr_jpeg_dim.width = 0;
8468 curr_jpeg_dim.height = 0;
8469 for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
8470 curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
8471 curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
8472 if (curr_jpeg_dim.width * curr_jpeg_dim.height >
8473 max_jpeg_dim.width * max_jpeg_dim.height ) {
8474 max_jpeg_dim.width = curr_jpeg_dim.width;
8475 max_jpeg_dim.height = curr_jpeg_dim.height;
8476 }
8477 }
8478 return max_jpeg_dim;
8479}
8480
8481/*===========================================================================
8482 * FUNCTION : addStreamConfig
8483 *
8484 * DESCRIPTION: adds the stream configuration to the array
8485 *
8486 * PARAMETERS :
8487 * @available_stream_configs : pointer to stream configuration array
8488 * @scalar_format : scalar format
8489 * @dim : configuration dimension
8490 * @config_type : input or output configuration type
8491 *
8492 * RETURN : NONE
8493 *==========================================================================*/
8494void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
8495 int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
8496{
8497 available_stream_configs.add(scalar_format);
8498 available_stream_configs.add(dim.width);
8499 available_stream_configs.add(dim.height);
8500 available_stream_configs.add(config_type);
8501}
8502
8503/*===========================================================================
 * FUNCTION   : supportBurstCapture
8505 *
8506 * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
8507 *
8508 * PARAMETERS :
8509 * @cameraId : camera Id
8510 *
8511 * RETURN : true if camera supports BURST_CAPTURE
8512 * false otherwise
8513 *==========================================================================*/
8514bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
8515{
8516 const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
8517 const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
8518 const int32_t highResWidth = 3264;
8519 const int32_t highResHeight = 2448;
8520
8521 if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
8522 // Maximum resolution images cannot be captured at >= 10fps
8523 // -> not supporting BURST_CAPTURE
8524 return false;
8525 }
8526
8527 if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
8528 // Maximum resolution images can be captured at >= 20fps
8529 // --> supporting BURST_CAPTURE
8530 return true;
8531 }
8532
8533 // Find the smallest highRes resolution, or largest resolution if there is none
8534 size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
8535 MAX_SIZES_CNT);
8536 size_t highRes = 0;
8537 while ((highRes + 1 < totalCnt) &&
8538 (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
8539 gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
8540 highResWidth * highResHeight)) {
8541 highRes++;
8542 }
8543 if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
8544 return true;
8545 } else {
8546 return false;
8547 }
8548}
8549
8550/*===========================================================================
8551 * FUNCTION : initStaticMetadata
8552 *
8553 * DESCRIPTION: initialize the static metadata
8554 *
8555 * PARAMETERS :
8556 * @cameraId : camera Id
8557 *
8558 * RETURN : int32_t type of status
8559 * 0 -- success
8560 * non-zero failure code
8561 *==========================================================================*/
8562int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
8563{
8564 int rc = 0;
8565 CameraMetadata staticInfo;
8566 size_t count = 0;
8567 bool limitedDevice = false;
8568 char prop[PROPERTY_VALUE_MAX];
8569 bool supportBurst = false;
8570
8571 supportBurst = supportBurstCapture(cameraId);
8572
8573 /* If sensor is YUV sensor (no raw support) or if per-frame control is not
8574 * guaranteed or if min fps of max resolution is less than 20 fps, its
8575 * advertised as limited device*/
8576 limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
8577 (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
8578 (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
8579 !supportBurst;
8580
8581 uint8_t supportedHwLvl = limitedDevice ?
8582 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008583#ifndef USE_HAL_3_3
8584 // LEVEL_3 - This device will support level 3.
8585 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
8586#else
Thierry Strudel3d639192016-09-09 11:52:26 -07008587 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008588#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07008589
8590 staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
8591 &supportedHwLvl, 1);
8592
8593 bool facingBack = false;
8594 if ((gCamCapability[cameraId]->position == CAM_POSITION_BACK) ||
8595 (gCamCapability[cameraId]->position == CAM_POSITION_BACK_AUX)) {
8596 facingBack = true;
8597 }
8598 /*HAL 3 only*/
8599 staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
8600 &gCamCapability[cameraId]->min_focus_distance, 1);
8601
8602 staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
8603 &gCamCapability[cameraId]->hyper_focal_distance, 1);
8604
8605 /*should be using focal lengths but sensor doesn't provide that info now*/
8606 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
8607 &gCamCapability[cameraId]->focal_length,
8608 1);
8609
8610 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
8611 gCamCapability[cameraId]->apertures,
8612 MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
8613
8614 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
8615 gCamCapability[cameraId]->filter_densities,
8616 MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
8617
8618
8619 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
8620 (uint8_t *)gCamCapability[cameraId]->optical_stab_modes,
8621 MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count));
8622
8623 int32_t lens_shading_map_size[] = {
8624 MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
8625 MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
8626 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
8627 lens_shading_map_size,
8628 sizeof(lens_shading_map_size)/sizeof(int32_t));
8629
8630 staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
8631 gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
8632
8633 staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
8634 gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
8635
8636 staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
8637 &gCamCapability[cameraId]->max_frame_duration, 1);
8638
8639 camera_metadata_rational baseGainFactor = {
8640 gCamCapability[cameraId]->base_gain_factor.numerator,
8641 gCamCapability[cameraId]->base_gain_factor.denominator};
8642 staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
8643 &baseGainFactor, 1);
8644
8645 staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
8646 (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
8647
8648 int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
8649 gCamCapability[cameraId]->pixel_array_size.height};
8650 staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
8651 pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
8652
8653 int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
8654 gCamCapability[cameraId]->active_array_size.top,
8655 gCamCapability[cameraId]->active_array_size.width,
8656 gCamCapability[cameraId]->active_array_size.height};
8657 staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
8658 active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
8659
8660 staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
8661 &gCamCapability[cameraId]->white_level, 1);
8662
Shuzhen Wanga5da1022016-07-13 20:18:42 -07008663 int32_t adjusted_bl_per_cfa[BLACK_LEVEL_PATTERN_CNT];
8664 adjustBlackLevelForCFA(gCamCapability[cameraId]->black_level_pattern, adjusted_bl_per_cfa,
8665 gCamCapability[cameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07008666 staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
Shuzhen Wanga5da1022016-07-13 20:18:42 -07008667 adjusted_bl_per_cfa, BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel3d639192016-09-09 11:52:26 -07008668
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008669#ifndef USE_HAL_3_3
8670 bool hasBlackRegions = false;
8671 if (gCamCapability[cameraId]->optical_black_region_count > MAX_OPTICAL_BLACK_REGIONS) {
8672 LOGW("black_region_count: %d is bounded to %d",
8673 gCamCapability[cameraId]->optical_black_region_count, MAX_OPTICAL_BLACK_REGIONS);
8674 gCamCapability[cameraId]->optical_black_region_count = MAX_OPTICAL_BLACK_REGIONS;
8675 }
8676 if (gCamCapability[cameraId]->optical_black_region_count != 0) {
8677 int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
8678 for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i++) {
8679 opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
8680 }
8681 staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
8682 opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
8683 hasBlackRegions = true;
8684 }
8685#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07008686 staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
8687 &gCamCapability[cameraId]->flash_charge_duration, 1);
8688
8689 staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
8690 &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
8691
Shuzhen Wang98d5efb2016-09-07 18:08:22 -07008692 uint8_t timestampSource = (gCamCapability[cameraId]->timestamp_calibrated ?
8693 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME :
8694 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN);
Thierry Strudel3d639192016-09-09 11:52:26 -07008695 staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
8696 &timestampSource, 1);
8697
Thierry Strudel54dc9782017-02-15 12:12:10 -08008698 //update histogram vendor data
8699 staticInfo.update(QCAMERA3_HISTOGRAM_BUCKETS,
Thierry Strudel3d639192016-09-09 11:52:26 -07008700 &gCamCapability[cameraId]->histogram_size, 1);
8701
Thierry Strudel54dc9782017-02-15 12:12:10 -08008702 staticInfo.update(QCAMERA3_HISTOGRAM_MAX_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -07008703 &gCamCapability[cameraId]->max_histogram_count, 1);
8704
8705 int32_t sharpness_map_size[] = {
8706 gCamCapability[cameraId]->sharpness_map_size.width,
8707 gCamCapability[cameraId]->sharpness_map_size.height};
8708
8709 staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
8710 sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
8711
8712 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
8713 &gCamCapability[cameraId]->max_sharpness_map_value, 1);
8714
8715 int32_t scalar_formats[] = {
8716 ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
8717 ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
8718 ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
8719 ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
8720 HAL_PIXEL_FORMAT_RAW10,
8721 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
8722 size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(int32_t);
8723 staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
8724 scalar_formats,
8725 scalar_formats_count);
8726
8727 int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
8728 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
8729 makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
8730 count, MAX_SIZES_CNT, available_processed_sizes);
8731 staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
8732 available_processed_sizes, count * 2);
8733
8734 int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
8735 count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
8736 makeTable(gCamCapability[cameraId]->raw_dim,
8737 count, MAX_SIZES_CNT, available_raw_sizes);
8738 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
8739 available_raw_sizes, count * 2);
8740
8741 int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
8742 count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
8743 makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
8744 count, MAX_SIZES_CNT, available_fps_ranges);
8745 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
8746 available_fps_ranges, count * 2);
8747
8748 camera_metadata_rational exposureCompensationStep = {
8749 gCamCapability[cameraId]->exp_compensation_step.numerator,
8750 gCamCapability[cameraId]->exp_compensation_step.denominator};
8751 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
8752 &exposureCompensationStep, 1);
8753
8754 Vector<uint8_t> availableVstabModes;
8755 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
8756 char eis_prop[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008757 bool eisSupported = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07008758 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008759 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07008760 uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008761 count = IS_TYPE_MAX;
8762 count = MIN(gCamCapability[cameraId]->supported_is_types_cnt, count);
8763 for (size_t i = 0; i < count; i++) {
8764 if ((gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
8765 (gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
8766 eisSupported = true;
8767 break;
8768 }
8769 }
8770 if (facingBack && eis_prop_set && eisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07008771 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
8772 }
8773 staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
8774 availableVstabModes.array(), availableVstabModes.size());
8775
8776 /*HAL 1 and HAL 3 common*/
8777 uint32_t zoomSteps = gCamCapability[cameraId]->zoom_ratio_tbl_cnt;
8778 uint32_t maxZoomStep = gCamCapability[cameraId]->zoom_ratio_tbl[zoomSteps - 1];
8779 uint32_t minZoomStep = 100; //as per HAL1/API1 spec
8780 float maxZoom = maxZoomStep/minZoomStep;
8781 staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
8782 &maxZoom, 1);
8783
8784 uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
8785 staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
8786
8787 int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
8788 if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
8789 max3aRegions[2] = 0; /* AF not supported */
8790 staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
8791 max3aRegions, 3);
8792
8793 /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
8794 memset(prop, 0, sizeof(prop));
8795 property_get("persist.camera.facedetect", prop, "1");
8796 uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
8797 LOGD("Support face detection mode: %d",
8798 supportedFaceDetectMode);
8799
8800 int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
Thierry Strudel04e026f2016-10-10 11:27:36 -07008801 /* support mode should be OFF if max number of face is 0 */
8802 if (maxFaces <= 0) {
8803 supportedFaceDetectMode = 0;
8804 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008805 Vector<uint8_t> availableFaceDetectModes;
8806 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
8807 if (supportedFaceDetectMode == 1) {
8808 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
8809 } else if (supportedFaceDetectMode == 2) {
8810 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
8811 } else if (supportedFaceDetectMode == 3) {
8812 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
8813 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
8814 } else {
8815 maxFaces = 0;
8816 }
8817 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
8818 availableFaceDetectModes.array(),
8819 availableFaceDetectModes.size());
8820 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
8821 (int32_t *)&maxFaces, 1);
Thierry Strudel54dc9782017-02-15 12:12:10 -08008822 uint8_t face_bsgc = gCamCapability[cameraId]->face_bsgc;
8823 staticInfo.update(QCAMERA3_STATS_BSGC_AVAILABLE,
8824 &face_bsgc, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07008825
Emilian Peev7650c122017-01-19 08:24:33 -08008826#ifdef SUPPORT_DEPTH_DATA
8827 //TODO: Update depth size accordingly, currently we use active array
8828 // as reference.
8829 int32_t depthWidth = gCamCapability[cameraId]->active_array_size.width;
8830 int32_t depthHeight = gCamCapability[cameraId]->active_array_size.height;
8831 //As per spec. depth cloud should be sample count / 16
8832 int32_t depthSamplesCount = depthWidth * depthHeight / 16;
8833 assert(0 < depthSamplesCount);
8834 staticInfo.update(ANDROID_DEPTH_MAX_DEPTH_SAMPLES, &depthSamplesCount, 1);
8835
8836 int32_t depthConfigs[] = {HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1,
8837 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT };
8838 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
8839 depthConfigs, sizeof(depthConfigs)/sizeof(depthConfigs[0]));
8840
8841 int64_t depthMinDuration[] = {HAL_PIXEL_FORMAT_BLOB, depthSamplesCount,
8842 1, 1 };
8843 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
8844 depthMinDuration,
8845 sizeof(depthMinDuration) / sizeof(depthMinDuration[0]));
8846
8847 int64_t depthStallDuration[] = {HAL_PIXEL_FORMAT_BLOB, depthSamplesCount,
8848 1, 0 };
8849 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
8850 depthStallDuration,
8851 sizeof(depthStallDuration) / sizeof(depthStallDuration[0]));
8852
8853 uint8_t depthExclusive = ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE_FALSE;
8854 staticInfo.update(ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE, &depthExclusive, 1);
8855#endif
8856
Thierry Strudel3d639192016-09-09 11:52:26 -07008857 int32_t exposureCompensationRange[] = {
8858 gCamCapability[cameraId]->exposure_compensation_min,
8859 gCamCapability[cameraId]->exposure_compensation_max};
8860 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
8861 exposureCompensationRange,
8862 sizeof(exposureCompensationRange)/sizeof(int32_t));
8863
8864 uint8_t lensFacing = (facingBack) ?
8865 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
8866 staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
8867
8868 staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
8869 available_thumbnail_sizes,
8870 sizeof(available_thumbnail_sizes)/sizeof(int32_t));
8871
8872 /*all sizes will be clubbed into this tag*/
8873 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
8874 /*android.scaler.availableStreamConfigurations*/
8875 Vector<int32_t> available_stream_configs;
8876 cam_dimension_t active_array_dim;
8877 active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
8878 active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
8879 /* Add input/output stream configurations for each scalar formats*/
8880 for (size_t j = 0; j < scalar_formats_count; j++) {
8881 switch (scalar_formats[j]) {
8882 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
8883 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
8884 case HAL_PIXEL_FORMAT_RAW10:
8885 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
8886 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
8887 addStreamConfig(available_stream_configs, scalar_formats[j],
8888 gCamCapability[cameraId]->raw_dim[i],
8889 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
8890 }
8891 break;
8892 case HAL_PIXEL_FORMAT_BLOB:
8893 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
8894 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
8895 addStreamConfig(available_stream_configs, scalar_formats[j],
8896 gCamCapability[cameraId]->picture_sizes_tbl[i],
8897 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
8898 }
8899 break;
8900 case HAL_PIXEL_FORMAT_YCbCr_420_888:
8901 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
8902 default:
8903 cam_dimension_t largest_picture_size;
8904 memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
8905 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
8906 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
8907 addStreamConfig(available_stream_configs, scalar_formats[j],
8908 gCamCapability[cameraId]->picture_sizes_tbl[i],
8909 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
8910 /* Book keep largest */
8911 if (gCamCapability[cameraId]->picture_sizes_tbl[i].width
8912 >= largest_picture_size.width &&
8913 gCamCapability[cameraId]->picture_sizes_tbl[i].height
8914 >= largest_picture_size.height)
8915 largest_picture_size = gCamCapability[cameraId]->picture_sizes_tbl[i];
8916 }
8917 /*For below 2 formats we also support i/p streams for reprocessing advertise those*/
8918 if (scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
8919 scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) {
8920 addStreamConfig(available_stream_configs, scalar_formats[j],
8921 largest_picture_size,
8922 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
8923 }
8924 break;
8925 }
8926 }
8927
8928 staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
8929 available_stream_configs.array(), available_stream_configs.size());
8930 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
8931 staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
8932
8933 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
8934 staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
8935
8936 /* android.scaler.availableMinFrameDurations */
8937 Vector<int64_t> available_min_durations;
8938 for (size_t j = 0; j < scalar_formats_count; j++) {
8939 switch (scalar_formats[j]) {
8940 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
8941 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
8942 case HAL_PIXEL_FORMAT_RAW10:
8943 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
8944 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
8945 available_min_durations.add(scalar_formats[j]);
8946 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
8947 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
8948 available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
8949 }
8950 break;
8951 default:
8952 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
8953 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
8954 available_min_durations.add(scalar_formats[j]);
8955 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
8956 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
8957 available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
8958 }
8959 break;
8960 }
8961 }
8962 staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
8963 available_min_durations.array(), available_min_durations.size());
8964
8965 Vector<int32_t> available_hfr_configs;
8966 for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
8967 int32_t fps = 0;
8968 switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
8969 case CAM_HFR_MODE_60FPS:
8970 fps = 60;
8971 break;
8972 case CAM_HFR_MODE_90FPS:
8973 fps = 90;
8974 break;
8975 case CAM_HFR_MODE_120FPS:
8976 fps = 120;
8977 break;
8978 case CAM_HFR_MODE_150FPS:
8979 fps = 150;
8980 break;
8981 case CAM_HFR_MODE_180FPS:
8982 fps = 180;
8983 break;
8984 case CAM_HFR_MODE_210FPS:
8985 fps = 210;
8986 break;
8987 case CAM_HFR_MODE_240FPS:
8988 fps = 240;
8989 break;
8990 case CAM_HFR_MODE_480FPS:
8991 fps = 480;
8992 break;
8993 case CAM_HFR_MODE_OFF:
8994 case CAM_HFR_MODE_MAX:
8995 default:
8996 break;
8997 }
8998
8999 /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
9000 if (fps >= MIN_FPS_FOR_BATCH_MODE) {
9001 /* For each HFR frame rate, need to advertise one variable fps range
9002 * and one fixed fps range per dimension. Eg: for 120 FPS, advertise [30, 120]
9003 * and [120, 120]. While camcorder preview alone is running [30, 120] is
9004 * set by the app. When video recording is started, [120, 120] is
9005 * set. This way sensor configuration does not change when recording
9006 * is started */
9007
9008 /* (width, height, fps_min, fps_max, batch_size_max) */
9009 for (size_t j = 0; j < gCamCapability[cameraId]->hfr_tbl[i].dim_cnt &&
9010 j < MAX_SIZES_CNT; j++) {
9011 available_hfr_configs.add(
9012 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9013 available_hfr_configs.add(
9014 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9015 available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
9016 available_hfr_configs.add(fps);
9017 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9018
9019 /* (width, height, fps_min, fps_max, batch_size_max) */
9020 available_hfr_configs.add(
9021 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9022 available_hfr_configs.add(
9023 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9024 available_hfr_configs.add(fps);
9025 available_hfr_configs.add(fps);
9026 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9027 }
9028 }
9029 }
9030 //Advertise HFR capability only if the property is set
9031 memset(prop, 0, sizeof(prop));
9032 property_get("persist.camera.hal3hfr.enable", prop, "1");
9033 uint8_t hfrEnable = (uint8_t)atoi(prop);
9034
9035 if(hfrEnable && available_hfr_configs.array()) {
9036 staticInfo.update(
9037 ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
9038 available_hfr_configs.array(), available_hfr_configs.size());
9039 }
9040
9041 int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
9042 staticInfo.update(ANDROID_JPEG_MAX_SIZE,
9043 &max_jpeg_size, 1);
9044
9045 uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
9046 size_t size = 0;
9047 count = CAM_EFFECT_MODE_MAX;
9048 count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
9049 for (size_t i = 0; i < count; i++) {
9050 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
9051 gCamCapability[cameraId]->supported_effects[i]);
9052 if (NAME_NOT_FOUND != val) {
9053 avail_effects[size] = (uint8_t)val;
9054 size++;
9055 }
9056 }
9057 staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
9058 avail_effects,
9059 size);
9060
9061 uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
9062 uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
9063 size_t supported_scene_modes_cnt = 0;
9064 count = CAM_SCENE_MODE_MAX;
9065 count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
9066 for (size_t i = 0; i < count; i++) {
9067 if (gCamCapability[cameraId]->supported_scene_modes[i] !=
9068 CAM_SCENE_MODE_OFF) {
9069 int val = lookupFwkName(SCENE_MODES_MAP,
9070 METADATA_MAP_SIZE(SCENE_MODES_MAP),
9071 gCamCapability[cameraId]->supported_scene_modes[i]);
Mansoor Aftab58465fa2017-01-26 15:02:44 -08009072
Thierry Strudel3d639192016-09-09 11:52:26 -07009073 if (NAME_NOT_FOUND != val) {
9074 avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
9075 supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
9076 supported_scene_modes_cnt++;
9077 }
9078 }
9079 }
9080 staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
9081 avail_scene_modes,
9082 supported_scene_modes_cnt);
9083
9084 uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
9085 makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
9086 supported_scene_modes_cnt,
9087 CAM_SCENE_MODE_MAX,
9088 scene_mode_overrides,
9089 supported_indexes,
9090 cameraId);
9091
9092 if (supported_scene_modes_cnt == 0) {
9093 supported_scene_modes_cnt = 1;
9094 avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
9095 }
9096
9097 staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
9098 scene_mode_overrides, supported_scene_modes_cnt * 3);
9099
9100 uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
9101 ANDROID_CONTROL_MODE_AUTO,
9102 ANDROID_CONTROL_MODE_USE_SCENE_MODE};
9103 staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
9104 available_control_modes,
9105 3);
9106
9107 uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
9108 size = 0;
9109 count = CAM_ANTIBANDING_MODE_MAX;
9110 count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
9111 for (size_t i = 0; i < count; i++) {
9112 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
9113 gCamCapability[cameraId]->supported_antibandings[i]);
9114 if (NAME_NOT_FOUND != val) {
9115 avail_antibanding_modes[size] = (uint8_t)val;
9116 size++;
9117 }
9118
9119 }
9120 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
9121 avail_antibanding_modes,
9122 size);
9123
9124 uint8_t avail_abberation_modes[] = {
9125 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
9126 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
9127 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
9128 count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
9129 count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
9130 if (0 == count) {
9131        // If no aberration correction modes are available for a device, advertise only the OFF mode
9132 size = 1;
9133 } else {
9134        // If count is not zero, then at least one of the FAST or HIGH_QUALITY modes is supported.
9135        // So, advertise all 3 modes if at least one mode is supported, as per the
9136        // new M requirement
9137 size = 3;
9138 }
9139 staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
9140 avail_abberation_modes,
9141 size);
9142
9143 uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
9144 size = 0;
9145 count = CAM_FOCUS_MODE_MAX;
9146 count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
9147 for (size_t i = 0; i < count; i++) {
9148 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
9149 gCamCapability[cameraId]->supported_focus_modes[i]);
9150 if (NAME_NOT_FOUND != val) {
9151 avail_af_modes[size] = (uint8_t)val;
9152 size++;
9153 }
9154 }
9155 staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
9156 avail_af_modes,
9157 size);
9158
9159 uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
9160 size = 0;
9161 count = CAM_WB_MODE_MAX;
9162 count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
9163 for (size_t i = 0; i < count; i++) {
9164 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
9165 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
9166 gCamCapability[cameraId]->supported_white_balances[i]);
9167 if (NAME_NOT_FOUND != val) {
9168 avail_awb_modes[size] = (uint8_t)val;
9169 size++;
9170 }
9171 }
9172 staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
9173 avail_awb_modes,
9174 size);
9175
9176 uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
9177 count = CAM_FLASH_FIRING_LEVEL_MAX;
9178 count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
9179 count);
9180 for (size_t i = 0; i < count; i++) {
9181 available_flash_levels[i] =
9182 gCamCapability[cameraId]->supported_firing_levels[i];
9183 }
9184 staticInfo.update(ANDROID_FLASH_FIRING_POWER,
9185 available_flash_levels, count);
9186
9187 uint8_t flashAvailable;
9188 if (gCamCapability[cameraId]->flash_available)
9189 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
9190 else
9191 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
9192 staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
9193 &flashAvailable, 1);
9194
9195 Vector<uint8_t> avail_ae_modes;
9196 count = CAM_AE_MODE_MAX;
9197 count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
9198 for (size_t i = 0; i < count; i++) {
9199 avail_ae_modes.add(gCamCapability[cameraId]->supported_ae_modes[i]);
9200 }
9201 if (flashAvailable) {
9202 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
9203 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
Thierry Strudel54dc9782017-02-15 12:12:10 -08009204 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE);
Thierry Strudel3d639192016-09-09 11:52:26 -07009205 }
9206 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
9207 avail_ae_modes.array(),
9208 avail_ae_modes.size());
9209
9210 int32_t sensitivity_range[2];
9211 sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
9212 sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
9213 staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
9214 sensitivity_range,
9215 sizeof(sensitivity_range) / sizeof(int32_t));
9216
9217 staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
9218 &gCamCapability[cameraId]->max_analog_sensitivity,
9219 1);
9220
9221 int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
9222 staticInfo.update(ANDROID_SENSOR_ORIENTATION,
9223 &sensor_orientation,
9224 1);
9225
9226 int32_t max_output_streams[] = {
9227 MAX_STALLING_STREAMS,
9228 MAX_PROCESSED_STREAMS,
9229 MAX_RAW_STREAMS};
9230 staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
9231 max_output_streams,
9232 sizeof(max_output_streams)/sizeof(max_output_streams[0]));
9233
9234 uint8_t avail_leds = 0;
9235 staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
9236 &avail_leds, 0);
9237
9238 uint8_t focus_dist_calibrated;
9239 int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
9240 gCamCapability[cameraId]->focus_dist_calibrated);
9241 if (NAME_NOT_FOUND != val) {
9242 focus_dist_calibrated = (uint8_t)val;
9243 staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
9244 &focus_dist_calibrated, 1);
9245 }
9246
9247 int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
9248 size = 0;
9249 count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
9250 MAX_TEST_PATTERN_CNT);
9251 for (size_t i = 0; i < count; i++) {
9252 int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
9253 gCamCapability[cameraId]->supported_test_pattern_modes[i]);
9254 if (NAME_NOT_FOUND != testpatternMode) {
9255 avail_testpattern_modes[size] = testpatternMode;
9256 size++;
9257 }
9258 }
9259 staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
9260 avail_testpattern_modes,
9261 size);
9262
9263 uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
9264 staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
9265 &max_pipeline_depth,
9266 1);
9267
9268 int32_t partial_result_count = PARTIAL_RESULT_COUNT;
9269 staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
9270 &partial_result_count,
9271 1);
9272
9273 int32_t max_stall_duration = MAX_REPROCESS_STALL;
9274 staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
9275
9276 Vector<uint8_t> available_capabilities;
9277 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
9278 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
9279 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
9280 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
9281 if (supportBurst) {
9282 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
9283 }
9284 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
9285 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
9286 if (hfrEnable && available_hfr_configs.array()) {
9287 available_capabilities.add(
9288 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
9289 }
9290
9291 if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
9292 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
9293 }
9294 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
9295 available_capabilities.array(),
9296 available_capabilities.size());
9297
9298    //aeLockAvailable is set to true if the capabilities include MANUAL_SENSOR or BURST_CAPTURE.
9299    //Assumption is that all bayer cameras support MANUAL_SENSOR.
9300 uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
9301 ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
9302
9303 staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
9304 &aeLockAvailable, 1);
9305
9306 //awbLockAvailable to be set to true if capabilities has MANUAL_POST_PROCESSING or
9307 //BURST_CAPTURE. Assumption is that all bayer cameras support MANUAL_POST_PROCESSING.
9308 uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
9309 ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
9310
9311 staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
9312 &awbLockAvailable, 1);
9313
9314 int32_t max_input_streams = 1;
9315 staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
9316 &max_input_streams,
9317 1);
9318
9319 /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
9320 int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
9321 HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
9322 HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
9323 HAL_PIXEL_FORMAT_YCbCr_420_888};
9324 staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
9325 io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));
9326
9327 int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
9328 staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
9329 &max_latency,
9330 1);
9331
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009332#ifndef USE_HAL_3_3
9333 int32_t isp_sensitivity_range[2];
9334 isp_sensitivity_range[0] =
9335 gCamCapability[cameraId]->isp_sensitivity_range.min_sensitivity;
9336 isp_sensitivity_range[1] =
9337 gCamCapability[cameraId]->isp_sensitivity_range.max_sensitivity;
9338 staticInfo.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
9339 isp_sensitivity_range,
9340 sizeof(isp_sensitivity_range) / sizeof(isp_sensitivity_range[0]));
9341#endif
9342
Thierry Strudel3d639192016-09-09 11:52:26 -07009343 uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
9344 ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
9345 staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
9346 available_hot_pixel_modes,
9347 sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));
9348
9349 uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
9350 ANDROID_SHADING_MODE_FAST,
9351 ANDROID_SHADING_MODE_HIGH_QUALITY};
9352 staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
9353 available_shading_modes,
9354 3);
9355
9356 uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
9357 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
9358 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
9359 available_lens_shading_map_modes,
9360 2);
9361
9362 uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
9363 ANDROID_EDGE_MODE_FAST,
9364 ANDROID_EDGE_MODE_HIGH_QUALITY,
9365 ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
9366 staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
9367 available_edge_modes,
9368 sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));
9369
9370 uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
9371 ANDROID_NOISE_REDUCTION_MODE_FAST,
9372 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
9373 ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
9374 ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
9375 staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
9376 available_noise_red_modes,
9377 sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));
9378
9379 uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
9380 ANDROID_TONEMAP_MODE_FAST,
9381 ANDROID_TONEMAP_MODE_HIGH_QUALITY};
9382 staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
9383 available_tonemap_modes,
9384 sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));
9385
9386 uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
9387 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
9388 available_hot_pixel_map_modes,
9389 sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
9390
9391 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
9392 gCamCapability[cameraId]->reference_illuminant1);
9393 if (NAME_NOT_FOUND != val) {
9394 uint8_t fwkReferenceIlluminant = (uint8_t)val;
9395 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
9396 }
9397
9398 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
9399 gCamCapability[cameraId]->reference_illuminant2);
9400 if (NAME_NOT_FOUND != val) {
9401 uint8_t fwkReferenceIlluminant = (uint8_t)val;
9402 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
9403 }
9404
9405 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
9406 (void *)gCamCapability[cameraId]->forward_matrix1,
9407 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
9408
9409 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
9410 (void *)gCamCapability[cameraId]->forward_matrix2,
9411 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
9412
9413 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
9414 (void *)gCamCapability[cameraId]->color_transform1,
9415 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
9416
9417 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
9418 (void *)gCamCapability[cameraId]->color_transform2,
9419 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
9420
9421 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
9422 (void *)gCamCapability[cameraId]->calibration_transform1,
9423 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
9424
9425 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
9426 (void *)gCamCapability[cameraId]->calibration_transform2,
9427 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
9428
9429 int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
9430 ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
9431 ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
9432 ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
9433 ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
9434 ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
9435 ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
9436 ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
9437 ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
9438 ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
9439 ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
9440 ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
9441 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
9442 ANDROID_JPEG_GPS_COORDINATES,
9443 ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
9444 ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
9445 ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
9446 ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
9447 ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
9448 ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
9449 ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
9450 ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
9451 ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
9452 ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009453#ifndef USE_HAL_3_3
9454 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
9455#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009456 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -08009457 ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -07009458 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
9459 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07009460 ANDROID_BLACK_LEVEL_LOCK, NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Samuel Ha68ba5172016-12-15 18:41:12 -08009461 /* DevCamDebug metadata request_keys_basic */
9462 DEVCAMDEBUG_META_ENABLE,
9463 /* DevCamDebug metadata end */
9464 };
Thierry Strudel3d639192016-09-09 11:52:26 -07009465
9466 size_t request_keys_cnt =
9467 sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
9468 Vector<int32_t> available_request_keys;
9469 available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
9470 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
9471 available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
9472 }
9473
9474 staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
9475 available_request_keys.array(), available_request_keys.size());
9476
9477 int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
9478 ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
9479 ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
9480 ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
9481 ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
9482 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
9483 ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
9484 ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
9485 ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
9486 ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
9487 ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
9488 ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
9489 ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
9490 ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
9491 ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
9492 ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
9493 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -08009494 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -07009495 ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
9496 ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
9497 ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009498 ANDROID_STATISTICS_FACE_SCORES,
9499#ifndef USE_HAL_3_3
9500 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
9501#endif
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07009502 NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Shuzhen Wange763e802016-03-31 10:24:29 -07009503 NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE,
Samuel Ha68ba5172016-12-15 18:41:12 -08009504 // DevCamDebug metadata result_keys_basic
9505 DEVCAMDEBUG_META_ENABLE,
9506 // DevCamDebug metadata result_keys AF
9507 DEVCAMDEBUG_AF_LENS_POSITION,
9508 DEVCAMDEBUG_AF_TOF_CONFIDENCE,
9509 DEVCAMDEBUG_AF_TOF_DISTANCE,
9510 DEVCAMDEBUG_AF_LUMA,
9511 DEVCAMDEBUG_AF_HAF_STATE,
9512 DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
9513 DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
9514 DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
9515 DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
9516 DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
9517 DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
9518 DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
9519 DEVCAMDEBUG_AF_MONITOR_REFOCUS,
9520 DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
9521 DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
9522 DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
9523 DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
9524 DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
9525 DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
9526 DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
9527 DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
9528 DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
9529 DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
9530 DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
9531 DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
9532 DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
9533 DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
9534 // DevCamDebug metadata result_keys AEC
9535 DEVCAMDEBUG_AEC_TARGET_LUMA,
9536 DEVCAMDEBUG_AEC_COMP_LUMA,
9537 DEVCAMDEBUG_AEC_AVG_LUMA,
9538 DEVCAMDEBUG_AEC_CUR_LUMA,
9539 DEVCAMDEBUG_AEC_LINECOUNT,
9540 DEVCAMDEBUG_AEC_REAL_GAIN,
9541 DEVCAMDEBUG_AEC_EXP_INDEX,
9542 DEVCAMDEBUG_AEC_LUX_IDX,
Samuel Ha34229982017-02-17 13:51:11 -08009543 // DevCamDebug metadata result_keys zzHDR
9544 DEVCAMDEBUG_AEC_L_REAL_GAIN,
9545 DEVCAMDEBUG_AEC_L_LINECOUNT,
9546 DEVCAMDEBUG_AEC_S_REAL_GAIN,
9547 DEVCAMDEBUG_AEC_S_LINECOUNT,
9548 DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
9549 DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
9550 // DevCamDebug metadata result_keys ADRC
9551 DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
9552 DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
9553 DEVCAMDEBUG_AEC_GTM_RATIO,
9554 DEVCAMDEBUG_AEC_LTM_RATIO,
9555 DEVCAMDEBUG_AEC_LA_RATIO,
9556 DEVCAMDEBUG_AEC_GAMMA_RATIO,
Samuel Ha68ba5172016-12-15 18:41:12 -08009557 // DevCamDebug metadata result_keys AWB
9558 DEVCAMDEBUG_AWB_R_GAIN,
9559 DEVCAMDEBUG_AWB_G_GAIN,
9560 DEVCAMDEBUG_AWB_B_GAIN,
9561 DEVCAMDEBUG_AWB_CCT,
9562 DEVCAMDEBUG_AWB_DECISION,
9563 /* DevCamDebug metadata end */
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009564 };
9565
Thierry Strudel3d639192016-09-09 11:52:26 -07009566 size_t result_keys_cnt =
9567 sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
9568
9569 Vector<int32_t> available_result_keys;
9570 available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
9571 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
9572 available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
9573 }
9574 if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
9575 available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
9576 available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
9577 }
9578 if (supportedFaceDetectMode == 1) {
9579 available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
9580 available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
9581 } else if ((supportedFaceDetectMode == 2) ||
9582 (supportedFaceDetectMode == 3)) {
9583 available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
9584 available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
9585 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009586#ifndef USE_HAL_3_3
9587 if (hasBlackRegions) {
9588 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
9589 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL);
9590 }
9591#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009592 staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
9593 available_result_keys.array(), available_result_keys.size());
9594
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009595 int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
Thierry Strudel3d639192016-09-09 11:52:26 -07009596 ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
9597 ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
9598 ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
9599 ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
9600 ANDROID_SCALER_CROPPING_TYPE,
9601 ANDROID_SYNC_MAX_LATENCY,
9602 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
9603 ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
9604 ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
9605 ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
9606 ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
9607 ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
9608 ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
9609 ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
9610 ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
9611 ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
9612 ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
9613 ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
9614 ANDROID_LENS_FACING,
9615 ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
9616 ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
9617 ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
9618 ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
9619 ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
9620 ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
9621 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
9622 /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
9623 ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
9624 ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
9625 ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
9626 ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
9627 ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
9628 ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
9629 ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
9630 ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
9631 ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
9632 ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
9633 ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
9634 ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
Thierry Strudel54dc9782017-02-15 12:12:10 -08009635 ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -07009636 ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
9637 ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
9638 ANDROID_EDGE_AVAILABLE_EDGE_MODES,
9639 ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
9640 ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
9641 ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
9642 ANDROID_TONEMAP_MAX_CURVE_POINTS,
9643 ANDROID_CONTROL_AVAILABLE_MODES,
9644 ANDROID_CONTROL_AE_LOCK_AVAILABLE,
9645 ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
9646 ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
9647 ANDROID_SHADING_AVAILABLE_MODES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009648 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
Emilian Peev7650c122017-01-19 08:24:33 -08009649#ifdef SUPPORT_DEPTH_DATA
9650 ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
9651 ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
9652 ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
9653 ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
9654 ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE,
9655#endif
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009656#ifndef USE_HAL_3_3
9657 ANDROID_SENSOR_OPAQUE_RAW_SIZE,
9658 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
9659#endif
9660 };
9661
9662 Vector<int32_t> available_characteristics_keys;
9663 available_characteristics_keys.appendArray(characteristics_keys_basic,
9664 sizeof(characteristics_keys_basic)/sizeof(int32_t));
9665#ifndef USE_HAL_3_3
9666 if (hasBlackRegions) {
9667 available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
9668 }
9669#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009670 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009671 available_characteristics_keys.array(),
9672 available_characteristics_keys.size());
Thierry Strudel3d639192016-09-09 11:52:26 -07009673
9674 /*available stall durations depend on the hw + sw and will be different for different devices */
9675 /*have to add for raw after implementation*/
9676 int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
9677 size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
9678
9679 Vector<int64_t> available_stall_durations;
9680 for (uint32_t j = 0; j < stall_formats_count; j++) {
9681 if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
9682 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
9683 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9684 available_stall_durations.add(stall_formats[j]);
9685 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
9686 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
9687 available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
9688 }
9689 } else {
9690 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
9691 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9692 available_stall_durations.add(stall_formats[j]);
9693 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
9694 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
9695 available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
9696 }
9697 }
9698 }
9699 staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
9700 available_stall_durations.array(),
9701 available_stall_durations.size());
9702
9703 //QCAMERA3_OPAQUE_RAW
9704 uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
9705 cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
9706 switch (gCamCapability[cameraId]->opaque_raw_fmt) {
9707 case LEGACY_RAW:
9708 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
9709 fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
9710 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
9711 fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
9712 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
9713 fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
9714 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
9715 break;
9716 case MIPI_RAW:
9717 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
9718 fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
9719 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
9720 fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
9721 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
9722 fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
9723 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
9724 break;
9725 default:
9726 LOGE("unknown opaque_raw_format %d",
9727 gCamCapability[cameraId]->opaque_raw_fmt);
9728 break;
9729 }
9730 staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
9731
9732 Vector<int32_t> strides;
9733 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9734 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9735 cam_stream_buf_plane_info_t buf_planes;
9736 strides.add(gCamCapability[cameraId]->raw_dim[i].width);
9737 strides.add(gCamCapability[cameraId]->raw_dim[i].height);
9738 mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
9739 &gCamCapability[cameraId]->padding_info, &buf_planes);
9740 strides.add(buf_planes.plane_info.mp[0].stride);
9741 }
9742 staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
9743 strides.size());
9744
Mansoor Aftab58465fa2017-01-26 15:02:44 -08009745 //TBD: remove the following line once backend advertises zzHDR in feature mask
9746 gCamCapability[cameraId]->qcom_supported_feature_mask |= CAM_QCOM_FEATURE_ZIGZAG_HDR;
Thierry Strudel04e026f2016-10-10 11:27:36 -07009747 //Video HDR default
9748 if ((gCamCapability[cameraId]->qcom_supported_feature_mask) &
9749 (CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR |
Mansoor Aftab58465fa2017-01-26 15:02:44 -08009750 CAM_QCOM_FEATURE_ZIGZAG_HDR | CAM_QCOM_FEATURE_SENSOR_HDR)) {
Thierry Strudel04e026f2016-10-10 11:27:36 -07009751 int32_t vhdr_mode[] = {
9752 QCAMERA3_VIDEO_HDR_MODE_OFF,
9753 QCAMERA3_VIDEO_HDR_MODE_ON};
9754
9755 size_t vhdr_mode_count = sizeof(vhdr_mode) / sizeof(int32_t);
9756 staticInfo.update(QCAMERA3_AVAILABLE_VIDEO_HDR_MODES,
9757 vhdr_mode, vhdr_mode_count);
9758 }
9759
Thierry Strudel3d639192016-09-09 11:52:26 -07009760 staticInfo.update(QCAMERA3_DUALCAM_CALIB_META_DATA_BLOB,
9761 (const uint8_t*)&gCamCapability[cameraId]->related_cam_calibration,
9762 sizeof(gCamCapability[cameraId]->related_cam_calibration));
9763
9764 uint8_t isMonoOnly =
9765 (gCamCapability[cameraId]->color_arrangement == CAM_FILTER_ARRANGEMENT_Y);
9766 staticInfo.update(QCAMERA3_SENSOR_IS_MONO_ONLY,
9767 &isMonoOnly, 1);
9768
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009769#ifndef USE_HAL_3_3
9770 Vector<int32_t> opaque_size;
9771 for (size_t j = 0; j < scalar_formats_count; j++) {
9772 if (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE) {
9773 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9774 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9775 cam_stream_buf_plane_info_t buf_planes;
9776
9777 rc = mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
9778 &gCamCapability[cameraId]->padding_info, &buf_planes);
9779
9780 if (rc == 0) {
9781 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].width);
9782 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].height);
9783 opaque_size.add(buf_planes.plane_info.frame_len);
9784 }else {
9785 LOGE("raw frame calculation failed!");
9786 }
9787 }
9788 }
9789 }
9790
9791 if ((opaque_size.size() > 0) &&
9792 (opaque_size.size() % PER_CONFIGURATION_SIZE_3 == 0))
9793 staticInfo.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, opaque_size.array(), opaque_size.size());
9794 else
9795 LOGW("Warning: ANDROID_SENSOR_OPAQUE_RAW_SIZE is using rough estimation(2 bytes/pixel)");
9796#endif
9797
Thierry Strudel04e026f2016-10-10 11:27:36 -07009798 if (gCamCapability[cameraId]->supported_ir_mode_cnt > 0) {
9799 int32_t avail_ir_modes[CAM_IR_MODE_MAX];
9800 size = 0;
9801 count = CAM_IR_MODE_MAX;
9802 count = MIN(gCamCapability[cameraId]->supported_ir_mode_cnt, count);
9803 for (size_t i = 0; i < count; i++) {
9804 int val = lookupFwkName(IR_MODES_MAP, METADATA_MAP_SIZE(IR_MODES_MAP),
9805 gCamCapability[cameraId]->supported_ir_modes[i]);
9806 if (NAME_NOT_FOUND != val) {
9807 avail_ir_modes[size] = (int32_t)val;
9808 size++;
9809 }
9810 }
9811 staticInfo.update(QCAMERA3_IR_AVAILABLE_MODES,
9812 avail_ir_modes, size);
9813 }
9814
Thierry Strudel295a0ca2016-11-03 18:38:47 -07009815 if (gCamCapability[cameraId]->supported_instant_aec_modes_cnt > 0) {
9816 int32_t available_instant_aec_modes[CAM_AEC_CONVERGENCE_MAX];
9817 size = 0;
9818 count = CAM_AEC_CONVERGENCE_MAX;
9819 count = MIN(gCamCapability[cameraId]->supported_instant_aec_modes_cnt, count);
9820 for (size_t i = 0; i < count; i++) {
9821 int val = lookupFwkName(INSTANT_AEC_MODES_MAP, METADATA_MAP_SIZE(INSTANT_AEC_MODES_MAP),
9822 gCamCapability[cameraId]->supported_instant_aec_modes[i]);
9823 if (NAME_NOT_FOUND != val) {
9824 available_instant_aec_modes[size] = (int32_t)val;
9825 size++;
9826 }
9827 }
9828 staticInfo.update(QCAMERA3_INSTANT_AEC_AVAILABLE_MODES,
9829 available_instant_aec_modes, size);
9830 }
9831
Thierry Strudel54dc9782017-02-15 12:12:10 -08009832 int32_t sharpness_range[] = {
9833 gCamCapability[cameraId]->sharpness_ctrl.min_value,
9834 gCamCapability[cameraId]->sharpness_ctrl.max_value};
9835 staticInfo.update(QCAMERA3_SHARPNESS_RANGE, sharpness_range, 2);
9836
9837 if (gCamCapability[cameraId]->supported_binning_correction_mode_cnt > 0) {
9838 int32_t avail_binning_modes[CAM_BINNING_CORRECTION_MODE_MAX];
9839 size = 0;
9840 count = CAM_BINNING_CORRECTION_MODE_MAX;
9841 count = MIN(gCamCapability[cameraId]->supported_binning_correction_mode_cnt, count);
9842 for (size_t i = 0; i < count; i++) {
9843 int val = lookupFwkName(BINNING_CORRECTION_MODES_MAP,
9844 METADATA_MAP_SIZE(BINNING_CORRECTION_MODES_MAP),
9845 gCamCapability[cameraId]->supported_binning_modes[i]);
9846 if (NAME_NOT_FOUND != val) {
9847 avail_binning_modes[size] = (int32_t)val;
9848 size++;
9849 }
9850 }
9851 staticInfo.update(QCAMERA3_AVAILABLE_BINNING_CORRECTION_MODES,
9852 avail_binning_modes, size);
9853 }
9854
9855 if (gCamCapability[cameraId]->supported_aec_modes_cnt > 0) {
9856 int32_t available_aec_modes[CAM_AEC_MODE_MAX];
9857 size = 0;
9858 count = MIN(gCamCapability[cameraId]->supported_aec_modes_cnt, CAM_AEC_MODE_MAX);
9859 for (size_t i = 0; i < count; i++) {
9860 int32_t val = lookupFwkName(AEC_MODES_MAP, METADATA_MAP_SIZE(AEC_MODES_MAP),
9861 gCamCapability[cameraId]->supported_aec_modes[i]);
9862 if (NAME_NOT_FOUND != val)
9863 available_aec_modes[size++] = val;
9864 }
9865 staticInfo.update(QCAMERA3_EXPOSURE_METER_AVAILABLE_MODES,
9866 available_aec_modes, size);
9867 }
9868
9869 if (gCamCapability[cameraId]->supported_iso_modes_cnt > 0) {
9870 int32_t available_iso_modes[CAM_ISO_MODE_MAX];
9871 size = 0;
9872 count = MIN(gCamCapability[cameraId]->supported_iso_modes_cnt, CAM_ISO_MODE_MAX);
9873 for (size_t i = 0; i < count; i++) {
9874 int32_t val = lookupFwkName(ISO_MODES_MAP, METADATA_MAP_SIZE(ISO_MODES_MAP),
9875 gCamCapability[cameraId]->supported_iso_modes[i]);
9876 if (NAME_NOT_FOUND != val)
9877 available_iso_modes[size++] = val;
9878 }
9879 staticInfo.update(QCAMERA3_ISO_AVAILABLE_MODES,
9880 available_iso_modes, size);
9881 }
9882
9883 int64_t available_exp_time_range[EXPOSURE_TIME_RANGE_CNT];
9884 for (size_t i = 0; i < count; i++)
9885 available_exp_time_range[i] = gCamCapability[cameraId]->exposure_time_range[i];
9886 staticInfo.update(QCAMERA3_EXP_TIME_RANGE,
9887 available_exp_time_range, EXPOSURE_TIME_RANGE_CNT);
9888
9889 int32_t available_saturation_range[4];
9890 available_saturation_range[0] = gCamCapability[cameraId]->saturation_ctrl.min_value;
9891 available_saturation_range[1] = gCamCapability[cameraId]->saturation_ctrl.max_value;
9892 available_saturation_range[2] = gCamCapability[cameraId]->saturation_ctrl.def_value;
9893 available_saturation_range[3] = gCamCapability[cameraId]->saturation_ctrl.step;
9894 staticInfo.update(QCAMERA3_SATURATION_RANGE,
9895 available_saturation_range, 4);
9896
9897 uint8_t is_hdr_values[2];
9898 is_hdr_values[0] = 0;
9899 is_hdr_values[1] = 1;
9900 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_VALUES,
9901 is_hdr_values, 2);
9902
9903 float is_hdr_confidence_range[2];
9904 is_hdr_confidence_range[0] = 0.0;
9905 is_hdr_confidence_range[1] = 1.0;
9906 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE_RANGE,
9907 is_hdr_confidence_range, 2);
9908
Thierry Strudel3d639192016-09-09 11:52:26 -07009909 gStaticMetadata[cameraId] = staticInfo.release();
9910 return rc;
9911}
9912
9913/*===========================================================================
9914 * FUNCTION : makeTable
9915 *
9916 * DESCRIPTION: make a table of sizes
9917 *
9918 * PARAMETERS :
9919 *
9920 *
9921 *==========================================================================*/
9922void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
9923 size_t max_size, int32_t *sizeTable)
9924{
9925 size_t j = 0;
9926 if (size > max_size) {
9927 size = max_size;
9928 }
9929 for (size_t i = 0; i < size; i++) {
9930 sizeTable[j] = dimTable[i].width;
9931 sizeTable[j+1] = dimTable[i].height;
9932 j+=2;
9933 }
9934}
9935
9936/*===========================================================================
9937 * FUNCTION : makeFPSTable
9938 *
9939 * DESCRIPTION: make a table of fps ranges
9940 *
9941 * PARAMETERS :
9942 *
9943 *==========================================================================*/
9944void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
9945 size_t max_size, int32_t *fpsRangesTable)
9946{
9947 size_t j = 0;
9948 if (size > max_size) {
9949 size = max_size;
9950 }
9951 for (size_t i = 0; i < size; i++) {
9952 fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
9953 fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
9954 j+=2;
9955 }
9956}
9957
9958/*===========================================================================
9959 * FUNCTION : makeOverridesList
9960 *
9961 * DESCRIPTION: make a list of scene mode overrides
9962 *
9963 * PARAMETERS :
9964 *
9965 *
9966 *==========================================================================*/
void QCamera3HardwareInterface::makeOverridesList(
        cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
        uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
{
    /*daemon will give a list of overrides for all scene modes.
      However we should send the fwk only the overrides for the scene modes
      supported by the framework*/
    // Output format: 3 bytes per scene mode — [AE mode, AWB mode, AF mode] —
    // so overridesList must hold at least max_size * 3 bytes.
    size_t j = 0;
    // Clamp the number of scene-mode entries to the output buffer capacity.
    if (size > max_size) {
        size = max_size;
    }
    size_t focus_count = CAM_FOCUS_MODE_MAX;
    focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
            focus_count);
    for (size_t i = 0; i < size; i++) {
        bool supt = false;
        // supported_indexes[i] maps the i-th framework-supported scene mode
        // back into the daemon's full overrides table.
        size_t index = supported_indexes[i];
        // AE override: prefer auto-flash whenever the camera has a flash unit.
        overridesList[j] = gCamCapability[camera_id]->flash_available ?
                ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
        // AWB override: translate the HAL awb_mode to its framework value.
        int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
                METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
                overridesTable[index].awb_mode);
        if (NAME_NOT_FOUND != val) {
            // NOTE(review): if the lookup fails, overridesList[j+1] is left
            // with whatever value the caller's buffer held — confirm callers
            // zero-initialize the buffer.
            overridesList[j+1] = (uint8_t)val;
        }
        // AF override: only honor the daemon's af_mode if this camera
        // actually supports that focus mode.
        uint8_t focus_override = overridesTable[index].af_mode;
        for (size_t k = 0; k < focus_count; k++) {
            if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
                supt = true;
                break;
            }
        }
        if (supt) {
            val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
                    focus_override);
            if (NAME_NOT_FOUND != val) {
                overridesList[j+2] = (uint8_t)val;
            }
        } else {
            // Unsupported focus override: report AF off to the framework.
            overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
        }
        j+=3;
    }
}
10011
10012/*===========================================================================
10013 * FUNCTION : filterJpegSizes
10014 *
10015 * DESCRIPTION: Returns the supported jpeg sizes based on the max dimension that
10016 * could be downscaled to
10017 *
10018 * PARAMETERS :
10019 *
10020 * RETURN : length of jpegSizes array
10021 *==========================================================================*/
10022
10023size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
10024 size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
10025 uint8_t downscale_factor)
10026{
10027 if (0 == downscale_factor) {
10028 downscale_factor = 1;
10029 }
10030
10031 int32_t min_width = active_array_size.width / downscale_factor;
10032 int32_t min_height = active_array_size.height / downscale_factor;
10033 size_t jpegSizesCnt = 0;
10034 if (processedSizesCnt > maxCount) {
10035 processedSizesCnt = maxCount;
10036 }
10037 for (size_t i = 0; i < processedSizesCnt; i+=2) {
10038 if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
10039 jpegSizes[jpegSizesCnt] = processedSizes[i];
10040 jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
10041 jpegSizesCnt += 2;
10042 }
10043 }
10044 return jpegSizesCnt;
10045}
10046
10047/*===========================================================================
10048 * FUNCTION : computeNoiseModelEntryS
10049 *
10050 * DESCRIPTION: function to map a given sensitivity to the S noise
10051 * model parameters in the DNG noise model.
10052 *
10053 * PARAMETERS : sens : the sensor sensitivity
10054 *
 * RETURN     : S (sensor amplification) noise
10056 *
10057 *==========================================================================*/
10058double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
10059 double s = gCamCapability[mCameraId]->gradient_S * sens +
10060 gCamCapability[mCameraId]->offset_S;
10061 return ((s < 0.0) ? 0.0 : s);
10062}
10063
10064/*===========================================================================
10065 * FUNCTION : computeNoiseModelEntryO
10066 *
10067 * DESCRIPTION: function to map a given sensitivity to the O noise
10068 * model parameters in the DNG noise model.
10069 *
10070 * PARAMETERS : sens : the sensor sensitivity
10071 *
 * RETURN     : O (sensor readout) noise
10073 *
10074 *==========================================================================*/
10075double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
10076 int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
10077 double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
10078 1.0 : (1.0 * sens / max_analog_sens);
10079 double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
10080 gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
10081 return ((o < 0.0) ? 0.0 : o);
10082}
10083
10084/*===========================================================================
10085 * FUNCTION : getSensorSensitivity
10086 *
10087 * DESCRIPTION: convert iso_mode to an integer value
10088 *
10089 * PARAMETERS : iso_mode : the iso_mode supported by sensor
10090 *
 * RETURN     : sensitivity supported by sensor
10092 *
10093 *==========================================================================*/
10094int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
10095{
10096 int32_t sensitivity;
10097
10098 switch (iso_mode) {
10099 case CAM_ISO_MODE_100:
10100 sensitivity = 100;
10101 break;
10102 case CAM_ISO_MODE_200:
10103 sensitivity = 200;
10104 break;
10105 case CAM_ISO_MODE_400:
10106 sensitivity = 400;
10107 break;
10108 case CAM_ISO_MODE_800:
10109 sensitivity = 800;
10110 break;
10111 case CAM_ISO_MODE_1600:
10112 sensitivity = 1600;
10113 break;
10114 default:
10115 sensitivity = -1;
10116 break;
10117 }
10118 return sensitivity;
10119}
10120
10121/*===========================================================================
10122 * FUNCTION : getCamInfo
10123 *
10124 * DESCRIPTION: query camera capabilities
10125 *
10126 * PARAMETERS :
10127 * @cameraId : camera Id
10128 * @info : camera info struct to be filled in with camera capabilities
10129 *
10130 * RETURN : int type of status
10131 * NO_ERROR -- success
10132 * none-zero failure code
10133 *==========================================================================*/
int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
        struct camera_info *info)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_GET_CAM_INFO);
    int rc = 0;

    // gCamLock guards the lazily-initialized global capability and static
    // metadata caches; every exit path below must release it.
    pthread_mutex_lock(&gCamLock);
    // Query and cache the backend capabilities on first access.
    if (NULL == gCamCapability[cameraId]) {
        rc = initCapabilities(cameraId);
        if (rc < 0) {
            pthread_mutex_unlock(&gCamLock);
            return rc;
        }
    }

    // Build and cache the static metadata blob on first access.
    if (NULL == gStaticMetadata[cameraId]) {
        rc = initStaticMetadata(cameraId);
        if (rc < 0) {
            pthread_mutex_unlock(&gCamLock);
            return rc;
        }
    }

    // Map the HAL sensor position (including aux sensors) onto the two
    // framework-visible facings.
    switch(gCamCapability[cameraId]->position) {
    case CAM_POSITION_BACK:
    case CAM_POSITION_BACK_AUX:
        info->facing = CAMERA_FACING_BACK;
        break;

    case CAM_POSITION_FRONT:
    case CAM_POSITION_FRONT_AUX:
        info->facing = CAMERA_FACING_FRONT;
        break;

    default:
        // Unknown position: report the error but keep filling in the rest
        // of the info struct; rc carries the failure back to the caller.
        LOGE("Unknown position type %d for camera id:%d",
                gCamCapability[cameraId]->position, cameraId);
        rc = -1;
        break;
    }


    info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
// Advertise HAL 3.4 unless the build explicitly pins HAL 3.3.
#ifndef USE_HAL_3_3
    info->device_version = CAMERA_DEVICE_API_VERSION_3_4;
#else
    info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
#endif
    info->static_camera_characteristics = gStaticMetadata[cameraId];

    //For now assume both cameras can operate independently.
    info->conflicting_devices = NULL;
    info->conflicting_devices_length = 0;

    //resource cost is 100 * MIN(1.0, m/M),
    //where m is throughput requirement with maximum stream configuration
    //and M is CPP maximum throughput.
    // Throughput requirement uses the highest advertised fps across all
    // supported fps ranges.
    float max_fps = 0.0;
    for (uint32_t i = 0;
            i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
        if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
            max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
    }
    float ratio = 1.0 * MAX_PROCESSED_STREAMS *
            gCamCapability[cameraId]->active_array_size.width *
            gCamCapability[cameraId]->active_array_size.height * max_fps /
            gCamCapability[cameraId]->max_pixel_bandwidth;
    info->resource_cost = 100 * MIN(1.0, ratio);
    LOGI("camera %d resource cost is %d", cameraId,
            info->resource_cost);

    pthread_mutex_unlock(&gCamLock);
    return rc;
}
10208
10209/*===========================================================================
10210 * FUNCTION : translateCapabilityToMetadata
10211 *
10212 * DESCRIPTION: translate the capability into camera_metadata_t
10213 *
10214 * PARAMETERS : type of the request
10215 *
10216 *
10217 * RETURN : success: camera_metadata_t*
10218 * failure: NULL
10219 *
10220 *==========================================================================*/
10221camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
10222{
10223 if (mDefaultMetadata[type] != NULL) {
10224 return mDefaultMetadata[type];
10225 }
10226 //first time we are handling this request
10227 //fill up the metadata structure using the wrapper class
10228 CameraMetadata settings;
10229 //translate from cam_capability_t to camera_metadata_tag_t
10230 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
10231 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
10232 int32_t defaultRequestID = 0;
10233 settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
10234
10235 /* OIS disable */
10236 char ois_prop[PROPERTY_VALUE_MAX];
10237 memset(ois_prop, 0, sizeof(ois_prop));
10238 property_get("persist.camera.ois.disable", ois_prop, "0");
10239 uint8_t ois_disable = (uint8_t)atoi(ois_prop);
10240
10241 /* Force video to use OIS */
10242 char videoOisProp[PROPERTY_VALUE_MAX];
10243 memset(videoOisProp, 0, sizeof(videoOisProp));
10244 property_get("persist.camera.ois.video", videoOisProp, "1");
10245 uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
Shuzhen Wang19463d72016-03-08 11:09:52 -080010246
10247 // Hybrid AE enable/disable
10248 char hybrid_ae_prop[PROPERTY_VALUE_MAX];
10249 memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
10250 property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
10251 const uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);
10252
Thierry Strudel3d639192016-09-09 11:52:26 -070010253 uint8_t controlIntent = 0;
10254 uint8_t focusMode;
10255 uint8_t vsMode;
10256 uint8_t optStabMode;
10257 uint8_t cacMode;
10258 uint8_t edge_mode;
10259 uint8_t noise_red_mode;
10260 uint8_t tonemap_mode;
10261 bool highQualityModeEntryAvailable = FALSE;
10262 bool fastModeEntryAvailable = FALSE;
10263 vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
10264 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Mansoor Aftabea39eba2017-01-26 14:58:25 -080010265
Thierry Strudel3d639192016-09-09 11:52:26 -070010266 switch (type) {
10267 case CAMERA3_TEMPLATE_PREVIEW:
10268 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
10269 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10270 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10271 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10272 edge_mode = ANDROID_EDGE_MODE_FAST;
10273 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10274 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10275 break;
10276 case CAMERA3_TEMPLATE_STILL_CAPTURE:
10277 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
10278 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10279 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10280 edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
10281 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
10282 tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
10283 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
10284 // Order of priority for default CAC is HIGH Quality -> FAST -> OFF
10285 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
10286 if (gCamCapability[mCameraId]->aberration_modes[i] ==
10287 CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
10288 highQualityModeEntryAvailable = TRUE;
10289 } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
10290 CAM_COLOR_CORRECTION_ABERRATION_FAST) {
10291 fastModeEntryAvailable = TRUE;
10292 }
10293 }
10294 if (highQualityModeEntryAvailable) {
10295 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
10296 } else if (fastModeEntryAvailable) {
10297 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10298 }
10299 break;
10300 case CAMERA3_TEMPLATE_VIDEO_RECORD:
10301 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
10302 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
10303 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070010304 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10305 edge_mode = ANDROID_EDGE_MODE_FAST;
10306 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10307 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10308 if (forceVideoOis)
10309 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10310 break;
10311 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
10312 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
10313 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
10314 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070010315 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10316 edge_mode = ANDROID_EDGE_MODE_FAST;
10317 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10318 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10319 if (forceVideoOis)
10320 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10321 break;
10322 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
10323 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
10324 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10325 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10326 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10327 edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
10328 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
10329 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10330 break;
10331 case CAMERA3_TEMPLATE_MANUAL:
10332 edge_mode = ANDROID_EDGE_MODE_FAST;
10333 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10334 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10335 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10336 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
10337 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
10338 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
10339 break;
10340 default:
10341 edge_mode = ANDROID_EDGE_MODE_FAST;
10342 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10343 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10344 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10345 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
10346 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10347 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
10348 break;
10349 }
Thierry Strudel04e026f2016-10-10 11:27:36 -070010350 // Set CAC to OFF if underlying device doesn't support
10351 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
10352 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
10353 }
Thierry Strudel3d639192016-09-09 11:52:26 -070010354 settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
10355 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
10356 settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
10357 if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
10358 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
10359 }
10360 settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
10361
10362 if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
10363 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
10364 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10365 else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
10366 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
10367 || ois_disable)
10368 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
10369 settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
10370
10371 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
10372 &gCamCapability[mCameraId]->exposure_compensation_default, 1);
10373
10374 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
10375 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
10376
10377 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
10378 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
10379
10380 static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
10381 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
10382
10383 static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
10384 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
10385
10386 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
10387 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
10388
10389 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
10390 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
10391
10392 static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
10393 settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
10394
10395 /*flash*/
10396 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
10397 settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
10398
10399 static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
10400 settings.update(ANDROID_FLASH_FIRING_POWER,
10401 &flashFiringLevel, 1);
10402
10403 /* lens */
10404 float default_aperture = gCamCapability[mCameraId]->apertures[0];
10405 settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
10406
10407 if (gCamCapability[mCameraId]->filter_densities_count) {
10408 float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
10409 settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
10410 gCamCapability[mCameraId]->filter_densities_count);
10411 }
10412
10413 float default_focal_length = gCamCapability[mCameraId]->focal_length;
10414 settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
10415
10416 if (focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
10417 float default_focus_distance = 0;
10418 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &default_focus_distance, 1);
10419 }
10420
10421 static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
10422 settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
10423
10424 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
10425 settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
10426
10427 static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
10428 settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
10429
10430 /* face detection (default to OFF) */
10431 static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
10432 settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
10433
Thierry Strudel54dc9782017-02-15 12:12:10 -080010434 static const uint8_t histogramMode = QCAMERA3_HISTOGRAM_MODE_OFF;
10435 settings.update(QCAMERA3_HISTOGRAM_MODE, &histogramMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070010436
10437 static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
10438 settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
10439
10440 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
10441 settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
10442
10443 static const uint8_t lensShadingMode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
10444 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &lensShadingMode, 1);
10445
10446 static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
10447 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
10448
10449 /* Exposure time(Update the Min Exposure Time)*/
10450 int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
10451 settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
10452
10453 /* frame duration */
10454 static const int64_t default_frame_duration = NSEC_PER_33MSEC;
10455 settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
10456
10457 /* sensitivity */
10458 static const int32_t default_sensitivity = 100;
10459 settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010460#ifndef USE_HAL_3_3
10461 static const int32_t default_isp_sensitivity =
10462 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
10463 settings.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &default_isp_sensitivity, 1);
10464#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070010465
10466 /*edge mode*/
10467 settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
10468
10469 /*noise reduction mode*/
10470 settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
10471
10472 /*color correction mode*/
10473 static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
10474 settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
10475
10476 /*transform matrix mode*/
10477 settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
10478
10479 int32_t scaler_crop_region[4];
10480 scaler_crop_region[0] = 0;
10481 scaler_crop_region[1] = 0;
10482 scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
10483 scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
10484 settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
10485
10486 static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
10487 settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
10488
10489 /*focus distance*/
10490 float focus_distance = 0.0;
10491 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
10492
10493 /*target fps range: use maximum range for picture, and maximum fixed range for video*/
Thierry Strudele80ad7c2016-12-06 10:16:27 -080010494 /* Restrict template max_fps to 30 */
Thierry Strudel3d639192016-09-09 11:52:26 -070010495 float max_range = 0.0;
10496 float max_fixed_fps = 0.0;
10497 int32_t fps_range[2] = {0, 0};
10498 for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
10499 i++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080010500 if (gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps >
10501 TEMPLATE_MAX_PREVIEW_FPS) {
10502 continue;
10503 }
Thierry Strudel3d639192016-09-09 11:52:26 -070010504 float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
10505 gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
10506 if (type == CAMERA3_TEMPLATE_PREVIEW ||
10507 type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
10508 type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
10509 if (range > max_range) {
10510 fps_range[0] =
10511 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
10512 fps_range[1] =
10513 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
10514 max_range = range;
10515 }
10516 } else {
10517 if (range < 0.01 && max_fixed_fps <
10518 gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
10519 fps_range[0] =
10520 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
10521 fps_range[1] =
10522 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
10523 max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
10524 }
10525 }
10526 }
10527 settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
10528
10529 /*precapture trigger*/
10530 uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
10531 settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
10532
10533 /*af trigger*/
10534 uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
10535 settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
10536
10537 /* ae & af regions */
10538 int32_t active_region[] = {
10539 gCamCapability[mCameraId]->active_array_size.left,
10540 gCamCapability[mCameraId]->active_array_size.top,
10541 gCamCapability[mCameraId]->active_array_size.left +
10542 gCamCapability[mCameraId]->active_array_size.width,
10543 gCamCapability[mCameraId]->active_array_size.top +
10544 gCamCapability[mCameraId]->active_array_size.height,
10545 0};
10546 settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
10547 sizeof(active_region) / sizeof(active_region[0]));
10548 settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
10549 sizeof(active_region) / sizeof(active_region[0]));
10550
10551 /* black level lock */
10552 uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
10553 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
10554
10555 /* lens shading map mode */
10556 uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
10557 if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
10558 shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
10559 }
10560 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
10561
10562 //special defaults for manual template
10563 if (type == CAMERA3_TEMPLATE_MANUAL) {
10564 static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
10565 settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
10566
10567 static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
10568 settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
10569
10570 static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
10571 settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
10572
10573 static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
10574 settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
10575
10576 static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
10577 settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
10578
10579 static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
10580 settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
10581 }
10582
10583
10584 /* TNR
10585 * We'll use this location to determine which modes TNR will be set.
10586 * We will enable TNR to be on if either of the Preview/Video stream requires TNR
10587 * This is not to be confused with linking on a per stream basis that decision
10588 * is still on per-session basis and will be handled as part of config stream
10589 */
10590 uint8_t tnr_enable = 0;
10591
10592 if (m_bTnrPreview || m_bTnrVideo) {
10593
10594 switch (type) {
10595 case CAMERA3_TEMPLATE_VIDEO_RECORD:
10596 tnr_enable = 1;
10597 break;
10598
10599 default:
10600 tnr_enable = 0;
10601 break;
10602 }
10603
10604 int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
10605 settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
10606 settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
10607
10608 LOGD("TNR:%d with process plate %d for template:%d",
10609 tnr_enable, tnr_process_type, type);
10610 }
10611
10612 //Update Link tags to default
10613 int32_t sync_type = CAM_TYPE_STANDALONE;
10614 settings.update(QCAMERA3_DUALCAM_LINK_ENABLE, &sync_type, 1);
10615
10616 int32_t is_main = 0; //this doesn't matter as app should overwrite
10617 settings.update(QCAMERA3_DUALCAM_LINK_IS_MAIN, &is_main, 1);
10618
10619 settings.update(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID, &is_main, 1);
10620
10621 /* CDS default */
10622 char prop[PROPERTY_VALUE_MAX];
10623 memset(prop, 0, sizeof(prop));
10624 property_get("persist.camera.CDS", prop, "Auto");
10625 cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
10626 cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
10627 if (CAM_CDS_MODE_MAX == cds_mode) {
10628 cds_mode = CAM_CDS_MODE_AUTO;
10629 }
10630
10631 /* Disabling CDS in templates which have TNR enabled*/
10632 if (tnr_enable)
10633 cds_mode = CAM_CDS_MODE_OFF;
10634
10635 int32_t mode = cds_mode;
10636 settings.update(QCAMERA3_CDS_MODE, &mode, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -070010637
Thierry Strudel269c81a2016-10-12 12:13:59 -070010638 /* Manual Convergence AEC Speed is disabled by default*/
10639 float default_aec_speed = 0;
10640 settings.update(QCAMERA3_AEC_CONVERGENCE_SPEED, &default_aec_speed, 1);
10641
10642 /* Manual Convergence AWB Speed is disabled by default*/
10643 float default_awb_speed = 0;
10644 settings.update(QCAMERA3_AWB_CONVERGENCE_SPEED, &default_awb_speed, 1);
10645
Thierry Strudel295a0ca2016-11-03 18:38:47 -070010646 // Set instant AEC to normal convergence by default
10647 int32_t instant_aec_mode = (int32_t)QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE;
10648 settings.update(QCAMERA3_INSTANT_AEC_MODE, &instant_aec_mode, 1);
10649
Shuzhen Wang19463d72016-03-08 11:09:52 -080010650 /* hybrid ae */
10651 settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);
10652
Thierry Strudel3d639192016-09-09 11:52:26 -070010653 mDefaultMetadata[type] = settings.release();
10654
10655 return mDefaultMetadata[type];
10656}
10657
/*===========================================================================
 * FUNCTION   : setFrameParameters
 *
 * DESCRIPTION: set parameters per frame as requested in the metadata from
 *              framework
 *
 * PARAMETERS :
 *   @request         : request that needs to be serviced
 *   @streamsArray    : Stream ID of all the requested streams
 *   @blob_request    : Whether this request is a blob request or not
 *   @snapshotStreamId: stream id of the snapshot stream, forwarded to
 *                      translateToHalMetadata
 *
 * RETURN     : success: NO_ERROR
 *              failure: BAD_VALUE, or error from translateToHalMetadata
 *==========================================================================*/
int QCamera3HardwareInterface::setFrameParameters(
                    camera3_capture_request_t *request,
                    cam_stream_ID_t streamsArray,
                    int blob_request,
                    uint32_t snapshotStreamId)
{
    /*translate from camera_metadata_t type to parm_type_t*/
    int rc = 0;
    int32_t hal_version = CAM_HAL_V3;

    // Reset the shared per-frame batch and record the HAL version first.
    clear_metadata_buffer(mParameters);
    if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
        LOGE("Failed to set hal version in the parameters");
        return BAD_VALUE;
    }

    /*we need to update the frame number in the parameters*/
    if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
            request->frame_number)) {
        LOGE("Failed to set the frame number in the parameters");
        return BAD_VALUE;
    }

    /* Update stream id of all the requested buffers */
    if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
        LOGE("Failed to set stream type mask in the parameters");
        return BAD_VALUE;
    }

    // One-shot: ask the backend to re-read the debug-level property, then
    // clear the flag so subsequent frames skip this entry.
    if (mUpdateDebugLevel) {
        uint32_t dummyDebugLevel = 0;
        /* The value of dummyDebugLevel is irrelavent. On
         * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, read debug property */
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
                dummyDebugLevel)) {
            LOGE("Failed to set UPDATE_DEBUG_LEVEL");
            return BAD_VALUE;
        }
        mUpdateDebugLevel = false;
    }

    // Settings may legally be NULL (repeating request reuses prior settings);
    // translate only when the framework supplied new ones.
    if(request->settings != NULL){
        rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
        // Snapshot the translated parameters for blob (JPEG) requests —
        // presumably consumed later by the reprocess path via
        // mPrevParameters; TODO confirm against its readers.
        if (blob_request)
            memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
    }

    return rc;
}
10721
/*===========================================================================
 * FUNCTION   : setReprocParameters
 *
 * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
 *              return it.
 *
 * PARAMETERS :
 *   @request         : request that needs to be serviced
 *   @reprocParam     : HAL metadata buffer that receives the translated
 *                      reprocess parameters
 *   @snapshotStreamId: stream id of the snapshot stream, forwarded to
 *                      translateToHalMetadata
 *
 * RETURN     : success: NO_ERROR
 *              failure: BAD_VALUE, or error from translateToHalMetadata
 *==========================================================================*/
int32_t QCamera3HardwareInterface::setReprocParameters(
        camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
        uint32_t snapshotStreamId)
{
    /*translate from camera_metadata_t type to parm_type_t*/
    int rc = 0;

    // Reprocess requests must carry framework settings; there is nothing to
    // translate without them.
    if (NULL == request->settings){
        LOGE("Reprocess settings cannot be NULL");
        return BAD_VALUE;
    }

    if (NULL == reprocParam) {
        LOGE("Invalid reprocessing metadata buffer");
        return BAD_VALUE;
    }
    clear_metadata_buffer(reprocParam);

    /*we need to update the frame number in the parameters*/
    if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
            request->frame_number)) {
        LOGE("Failed to set the frame number in the parameters");
        return BAD_VALUE;
    }

    // Translate the common framework settings first; the reprocess-specific
    // vendor tags below are layered on top of the result.
    rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
    if (rc < 0) {
        LOGE("Failed to translate reproc request");
        return rc;
    }

    /* Crop rectangle + ROI map carried over from the matching output stream
       via vendor tags. */
    CameraMetadata frame_settings;
    frame_settings = request->settings;
    if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
            frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
        int32_t *crop_count =
                frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
        int32_t *crop_data =
                frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
        // NOTE(review): read without an exists() check on
        // QCAMERA3_CROP_ROI_MAP_REPROCESS — presumably this tag always
        // accompanies the two crop tags; verify against the producer.
        int32_t *roi_map =
                frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
        if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
            cam_crop_data_t crop_meta;
            memset(&crop_meta, 0, sizeof(cam_crop_data_t));
            // Only the first crop entry is forwarded to the backend.
            crop_meta.num_of_streams = 1;
            crop_meta.crop_info[0].crop.left = crop_data[0];
            crop_meta.crop_info[0].crop.top = crop_data[1];
            crop_meta.crop_info[0].crop.width = crop_data[2];
            crop_meta.crop_info[0].crop.height = crop_data[3];

            crop_meta.crop_info[0].roi_map.left =
                    roi_map[0];
            crop_meta.crop_info[0].roi_map.top =
                    roi_map[1];
            crop_meta.crop_info[0].roi_map.width =
                    roi_map[2];
            crop_meta.crop_info[0].roi_map.height =
                    roi_map[3];

            if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
                rc = BAD_VALUE;
            }
            LOGD("Found reprocess crop data for stream %p %dx%d, %dx%d",
                    request->input_buffer->stream,
                    crop_meta.crop_info[0].crop.left,
                    crop_meta.crop_info[0].crop.top,
                    crop_meta.crop_info[0].crop.width,
                    crop_meta.crop_info[0].crop.height);
            LOGD("Found reprocess roi map data for stream %p %dx%d, %dx%d",
                    request->input_buffer->stream,
                    crop_meta.crop_info[0].roi_map.left,
                    crop_meta.crop_info[0].roi_map.top,
                    crop_meta.crop_info[0].roi_map.width,
                    crop_meta.crop_info[0].roi_map.height);
        } else {
            LOGE("Invalid reprocess crop count %d!", *crop_count);
        }
    } else {
        LOGE("No crop data from matching output stream");
    }

    /* These settings are not needed for regular requests so handle them specially for
       reprocess requests; information needed for EXIF tags */
    if (frame_settings.exists(ANDROID_FLASH_MODE)) {
        int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
                (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
        if (NAME_NOT_FOUND != val) {
            uint32_t flashMode = (uint32_t)val;
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
                rc = BAD_VALUE;
            }
        } else {
            LOGE("Could not map fwk flash mode %d to correct hal flash mode",
                    frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
        }
    } else {
        LOGH("No flash mode in reprocess settings");
    }

    if (frame_settings.exists(ANDROID_FLASH_STATE)) {
        int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
            rc = BAD_VALUE;
        }
    } else {
        LOGH("No flash state in reprocess settings");
    }

    if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS)) {
        uint8_t *reprocessFlags =
            frame_settings.find(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS).data.u8;
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_REPROCESS_FLAGS,
                *reprocessFlags)) {
                rc = BAD_VALUE;
        }
    }

    // Add exif debug data to internal metadata. Each module's debug blob is
    // forwarded only when its corresponding *_valid flag is set.
    if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB)) {
        mm_jpeg_debug_exif_params_t *debug_params =
                (mm_jpeg_debug_exif_params_t *)frame_settings.find
                (QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB).data.u8;
        // AE
        if (debug_params->ae_debug_params_valid == TRUE) {
            ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AE,
                    debug_params->ae_debug_params);
        }
        // AWB
        if (debug_params->awb_debug_params_valid == TRUE) {
            ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AWB,
                    debug_params->awb_debug_params);
        }
        // AF
        if (debug_params->af_debug_params_valid == TRUE) {
            ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AF,
                    debug_params->af_debug_params);
        }
        // ASD
        if (debug_params->asd_debug_params_valid == TRUE) {
            ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_ASD,
                    debug_params->asd_debug_params);
        }
        // Stats
        if (debug_params->stats_debug_params_valid == TRUE) {
            ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_STATS,
                    debug_params->stats_debug_params);
        }
        // BE Stats
        if (debug_params->bestats_debug_params_valid == TRUE) {
            ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BESTATS,
                    debug_params->bestats_debug_params);
        }
        // BHIST
        if (debug_params->bhist_debug_params_valid == TRUE) {
            ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BHIST,
                    debug_params->bhist_debug_params);
        }
        // 3A Tuning
        if (debug_params->q3a_tuning_debug_params_valid == TRUE) {
            ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_3A_TUNING,
                    debug_params->q3a_tuning_debug_params);
        }
    }

    // Add metadata which reprocess needs
    if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB)) {
        cam_reprocess_info_t *repro_info =
                (cam_reprocess_info_t *)frame_settings.find
                (QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB).data.u8;
        ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_SENSOR,
                repro_info->sensor_crop_info);
        ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CAMIF,
                repro_info->camif_crop_info);
        ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_ISP,
                repro_info->isp_crop_info);
        ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CPP,
                repro_info->cpp_crop_info);
        ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_FOCAL_LENGTH_RATIO,
                repro_info->af_focal_length_ratio);
        ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_FLIP,
                repro_info->pipeline_flip);
        ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_ROI,
                repro_info->af_roi);
        ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_IMG_DYN_FEAT,
                repro_info->dyn_mask);
        /* If there is ANDROID_JPEG_ORIENTATION in frame setting,
           CAM_INTF_PARM_ROTATION metadata then has been added in
           translateToHalMetadata. HAL need to keep this new rotation
           metadata. Otherwise, the old rotation info saved in the vendor tag
           would be used */
        IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
                CAM_INTF_PARM_ROTATION, reprocParam) {
            LOGD("CAM_INTF_PARM_ROTATION metadata is added in translateToHalMetadata");
        } else {
            ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_ROTATION,
                    repro_info->rotation_info);
        }
    }

    /* Add additional JPEG cropping information. App add QCAMERA3_JPEG_ENCODE_CROP_RECT
       to ask for cropping and use ROI for downscale/upscale during HW JPEG encoding.
       roi.width and roi.height would be the final JPEG size.
       For now, HAL only checks this for reprocess request */
    if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ENABLE) &&
            frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_RECT)) {
        uint8_t *enable =
            frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ENABLE).data.u8;
        if (*enable == TRUE) {
            int32_t *crop_data =
                    frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_RECT).data.i32;
            cam_stream_crop_info_t crop_meta;
            memset(&crop_meta, 0, sizeof(cam_stream_crop_info_t));
            crop_meta.stream_id = 0;
            crop_meta.crop.left = crop_data[0];
            crop_meta.crop.top = crop_data[1];
            crop_meta.crop.width = crop_data[2];
            crop_meta.crop.height = crop_data[3];
            // The JPEG crop roi should match cpp output size
            IF_META_AVAILABLE(cam_stream_crop_info_t, cpp_crop,
                    CAM_INTF_META_SNAP_CROP_INFO_CPP, reprocParam) {
                crop_meta.roi_map.left = 0;
                crop_meta.roi_map.top = 0;
                crop_meta.roi_map.width = cpp_crop->crop.width;
                crop_meta.roi_map.height = cpp_crop->crop.height;
            }
            ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_ENCODE_CROP,
                    crop_meta);
            LOGH("Add JPEG encode crop left %d, top %d, width %d, height %d, mCameraId %d",
                    crop_meta.crop.left, crop_meta.crop.top,
                    crop_meta.crop.width, crop_meta.crop.height, mCameraId);
            LOGH("Add JPEG encode crop ROI left %d, top %d, width %d, height %d, mCameraId %d",
                    crop_meta.roi_map.left, crop_meta.roi_map.top,
                    crop_meta.roi_map.width, crop_meta.roi_map.height, mCameraId);

            // Add JPEG scale information
            cam_dimension_t scale_dim;
            memset(&scale_dim, 0, sizeof(cam_dimension_t));
            if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ROI)) {
                int32_t *roi =
                    frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ROI).data.i32;
                scale_dim.width = roi[2];
                scale_dim.height = roi[3];
                ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_SCALE_DIMENSION,
                    scale_dim);
                LOGH("Add JPEG encode scale width %d, height %d, mCameraId %d",
                    scale_dim.width, scale_dim.height, mCameraId);
            }
        }
    }

    return rc;
}
10986
10987/*===========================================================================
10988 * FUNCTION : saveRequestSettings
10989 *
10990 * DESCRIPTION: Add any settings that might have changed to the request settings
10991 * and save the settings to be applied on the frame
10992 *
10993 * PARAMETERS :
10994 * @jpegMetadata : the extracted and/or modified jpeg metadata
10995 * @request : request with initial settings
10996 *
10997 * RETURN :
10998 * camera_metadata_t* : pointer to the saved request settings
10999 *==========================================================================*/
11000camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
11001 const CameraMetadata &jpegMetadata,
11002 camera3_capture_request_t *request)
11003{
11004 camera_metadata_t *resultMetadata;
11005 CameraMetadata camMetadata;
11006 camMetadata = request->settings;
11007
11008 if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
11009 int32_t thumbnail_size[2];
11010 thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
11011 thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
11012 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
11013 jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
11014 }
11015
11016 if (request->input_buffer != NULL) {
11017 uint8_t reprocessFlags = 1;
11018 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
11019 (uint8_t*)&reprocessFlags,
11020 sizeof(reprocessFlags));
11021 }
11022
11023 resultMetadata = camMetadata.release();
11024 return resultMetadata;
11025}
11026
11027/*===========================================================================
11028 * FUNCTION : setHalFpsRange
11029 *
11030 * DESCRIPTION: set FPS range parameter
11031 *
11032 *
11033 * PARAMETERS :
11034 * @settings : Metadata from framework
11035 * @hal_metadata: Metadata buffer
11036 *
11037 *
11038 * RETURN : success: NO_ERROR
11039 * failure:
11040 *==========================================================================*/
11041int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
11042 metadata_buffer_t *hal_metadata)
11043{
11044 int32_t rc = NO_ERROR;
11045 cam_fps_range_t fps_range;
11046 fps_range.min_fps = (float)
11047 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
11048 fps_range.max_fps = (float)
11049 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
11050 fps_range.video_min_fps = fps_range.min_fps;
11051 fps_range.video_max_fps = fps_range.max_fps;
11052
11053 LOGD("aeTargetFpsRange fps: [%f %f]",
11054 fps_range.min_fps, fps_range.max_fps);
11055 /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
11056 * follows:
11057 * ---------------------------------------------------------------|
11058 * Video stream is absent in configure_streams |
11059 * (Camcorder preview before the first video record |
11060 * ---------------------------------------------------------------|
11061 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11062 * | | | vid_min/max_fps|
11063 * ---------------------------------------------------------------|
11064 * NO | [ 30, 240] | 240 | [240, 240] |
11065 * |-------------|-------------|----------------|
11066 * | [240, 240] | 240 | [240, 240] |
11067 * ---------------------------------------------------------------|
11068 * Video stream is present in configure_streams |
11069 * ---------------------------------------------------------------|
11070 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11071 * | | | vid_min/max_fps|
11072 * ---------------------------------------------------------------|
11073 * NO | [ 30, 240] | 240 | [240, 240] |
11074 * (camcorder prev |-------------|-------------|----------------|
11075 * after video rec | [240, 240] | 240 | [240, 240] |
11076 * is stopped) | | | |
11077 * ---------------------------------------------------------------|
11078 * YES | [ 30, 240] | 240 | [240, 240] |
11079 * |-------------|-------------|----------------|
11080 * | [240, 240] | 240 | [240, 240] |
11081 * ---------------------------------------------------------------|
11082 * When Video stream is absent in configure_streams,
11083 * preview fps = sensor_fps / batchsize
11084 * Eg: for 240fps at batchSize 4, preview = 60fps
11085 * for 120fps at batchSize 4, preview = 30fps
11086 *
11087 * When video stream is present in configure_streams, preview fps is as per
11088 * the ratio of preview buffers to video buffers requested in process
11089 * capture request
11090 */
11091 mBatchSize = 0;
11092 if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
11093 fps_range.min_fps = fps_range.video_max_fps;
11094 fps_range.video_min_fps = fps_range.video_max_fps;
11095 int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
11096 fps_range.max_fps);
11097 if (NAME_NOT_FOUND != val) {
11098 cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
11099 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
11100 return BAD_VALUE;
11101 }
11102
11103 if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
11104 /* If batchmode is currently in progress and the fps changes,
11105 * set the flag to restart the sensor */
11106 if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
11107 (mHFRVideoFps != fps_range.max_fps)) {
11108 mNeedSensorRestart = true;
11109 }
11110 mHFRVideoFps = fps_range.max_fps;
11111 mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
11112 if (mBatchSize > MAX_HFR_BATCH_SIZE) {
11113 mBatchSize = MAX_HFR_BATCH_SIZE;
11114 }
11115 }
11116 LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);
11117
11118 }
11119 } else {
11120 /* HFR mode is session param in backend/ISP. This should be reset when
11121 * in non-HFR mode */
11122 cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
11123 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
11124 return BAD_VALUE;
11125 }
11126 }
11127 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
11128 return BAD_VALUE;
11129 }
11130 LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
11131 fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
11132 return rc;
11133}
11134
11135/*===========================================================================
11136 * FUNCTION : translateToHalMetadata
11137 *
11138 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
11139 *
11140 *
11141 * PARAMETERS :
11142 * @request : request sent from framework
11143 *
11144 *
11145 * RETURN : success: NO_ERROR
11146 * failure:
11147 *==========================================================================*/
11148int QCamera3HardwareInterface::translateToHalMetadata
11149 (const camera3_capture_request_t *request,
11150 metadata_buffer_t *hal_metadata,
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011151 uint32_t snapshotStreamId) {
11152 if (request == nullptr || hal_metadata == nullptr) {
11153 return BAD_VALUE;
11154 }
11155
11156 int64_t minFrameDuration = getMinFrameDuration(request);
11157
11158 return translateFwkMetadataToHalMetadata(request->settings, hal_metadata, snapshotStreamId,
11159 minFrameDuration);
11160}
11161
11162int QCamera3HardwareInterface::translateFwkMetadataToHalMetadata(
11163 const camera_metadata_t *frameworkMetadata, metadata_buffer_t *hal_metadata,
11164 uint32_t snapshotStreamId, int64_t minFrameDuration) {
11165
Thierry Strudel3d639192016-09-09 11:52:26 -070011166 int rc = 0;
11167 CameraMetadata frame_settings;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011168 frame_settings = frameworkMetadata;
Thierry Strudel3d639192016-09-09 11:52:26 -070011169
11170 /* Do not change the order of the following list unless you know what you are
11171 * doing.
11172 * The order is laid out in such a way that parameters in the front of the table
11173 * may be used to override the parameters later in the table. Examples are:
11174 * 1. META_MODE should precede AEC/AWB/AF MODE
11175 * 2. AEC MODE should preced EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
11176 * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
11177 * 4. Any mode should precede it's corresponding settings
11178 */
11179 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
11180 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
11181 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
11182 rc = BAD_VALUE;
11183 }
11184 rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
11185 if (rc != NO_ERROR) {
11186 LOGE("extractSceneMode failed");
11187 }
11188 }
11189
11190 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
11191 uint8_t fwk_aeMode =
11192 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
11193 uint8_t aeMode;
11194 int32_t redeye;
11195
11196 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
11197 aeMode = CAM_AE_MODE_OFF;
11198 } else {
11199 aeMode = CAM_AE_MODE_ON;
11200 }
11201 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
11202 redeye = 1;
11203 } else {
11204 redeye = 0;
11205 }
11206
11207 int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
11208 fwk_aeMode);
11209 if (NAME_NOT_FOUND != val) {
11210 int32_t flashMode = (int32_t)val;
11211 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
11212 }
11213
11214 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
11215 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
11216 rc = BAD_VALUE;
11217 }
11218 }
11219
11220 if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
11221 uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
11222 int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
11223 fwk_whiteLevel);
11224 if (NAME_NOT_FOUND != val) {
11225 uint8_t whiteLevel = (uint8_t)val;
11226 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
11227 rc = BAD_VALUE;
11228 }
11229 }
11230 }
11231
11232 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
11233 uint8_t fwk_cacMode =
11234 frame_settings.find(
11235 ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
11236 int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
11237 fwk_cacMode);
11238 if (NAME_NOT_FOUND != val) {
11239 cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
11240 bool entryAvailable = FALSE;
11241 // Check whether Frameworks set CAC mode is supported in device or not
11242 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
11243 if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
11244 entryAvailable = TRUE;
11245 break;
11246 }
11247 }
11248 LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
11249 // If entry not found then set the device supported mode instead of frameworks mode i.e,
11250 // Only HW ISP CAC + NO SW CAC : Advertise all 3 with High doing same as fast by ISP
11251 // NO HW ISP CAC + Only SW CAC : Advertise all 3 with Fast doing the same as OFF
11252 if (entryAvailable == FALSE) {
11253 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
11254 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11255 } else {
11256 if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
11257 // High is not supported and so set the FAST as spec say's underlying
11258 // device implementation can be the same for both modes.
11259 cacMode = CAM_COLOR_CORRECTION_ABERRATION_FAST;
11260 } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
11261 // Fast is not supported and so we cannot set HIGH or FAST but choose OFF
11262 // in order to avoid the fps drop due to high quality
11263 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11264 } else {
11265 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11266 }
11267 }
11268 }
11269 LOGD("Final cacMode is %d", cacMode);
11270 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
11271 rc = BAD_VALUE;
11272 }
11273 } else {
11274 LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
11275 }
11276 }
11277
    // android.control.afMode -> HAL focus mode (via FOCUS_MODES_MAP lookup).
    if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
        uint8_t fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
        int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
                fwk_focusMode);
        if (NAME_NOT_FOUND != val) {
            uint8_t focusMode = (uint8_t)val;
            LOGD("set focus mode %d", focusMode);
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
                rc = BAD_VALUE;
            }
        }
    }

    // android.lens.focusDistance is passed through unchanged.
    if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) {
        float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
                focalDistance)) {
            rc = BAD_VALUE;
        }
    }

    // android.control.aeAntibandingMode: the generic AUTO mode is refined to a
    // 50Hz/60Hz-specific auto mode based on the m60HzZone flag.
    if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
        uint8_t fwk_antibandingMode =
                frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
        int val = lookupHalName(ANTIBANDING_MODES_MAP,
                METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
        if (NAME_NOT_FOUND != val) {
            uint32_t hal_antibandingMode = (uint32_t)val;
            if (hal_antibandingMode == CAM_ANTIBANDING_MODE_AUTO) {
                if (m60HzZone) {
                    hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_60HZ;
                } else {
                    hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_50HZ;
                }
            }
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
                    hal_antibandingMode)) {
                rc = BAD_VALUE;
            }
        }
    }

    // android.control.aeExposureCompensation, clamped to the advertised
    // [min, max] range from the camera capability.
    if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
        int32_t expCompensation = frame_settings.find(
                ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
        if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
            expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
        if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
            expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
        LOGD("Setting compensation:%d", expCompensation);
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
                expCompensation)) {
            rc = BAD_VALUE;
        }
    }

    // android.control.aeLock -> AEC lock passthrough.
    if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
        uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
            rc = BAD_VALUE;
        }
    }
    // android.control.aeTargetFpsRange is translated by a dedicated helper.
    if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
        rc = setHalFpsRange(frame_settings, hal_metadata);
        if (rc != NO_ERROR) {
            LOGE("setHalFpsRange failed");
        }
    }
11346
    // android.control.awbLock -> AWB lock passthrough.
    if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
        uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
            rc = BAD_VALUE;
        }
    }

    // android.control.effectMode -> HAL effect (via EFFECT_MODES_MAP lookup).
    if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
        uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
        int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
                fwk_effectMode);
        if (NAME_NOT_FOUND != val) {
            uint8_t effectMode = (uint8_t)val;
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
                rc = BAD_VALUE;
            }
        }
    }

    // android.colorCorrection.mode passthrough.
    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
        uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
                colorCorrectMode)) {
            rc = BAD_VALUE;
        }
    }

    // android.colorCorrection.gains: CC_GAIN_MAX per-channel gains copied as-is.
    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
        cam_color_correct_gains_t colorCorrectGains;
        for (size_t i = 0; i < CC_GAIN_MAX; i++) {
            colorCorrectGains.gains[i] =
                    frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
        }
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
                colorCorrectGains)) {
            rc = BAD_VALUE;
        }
    }

    // android.colorCorrection.transform: rational CC_MATRIX_ROWS x
    // CC_MATRIX_COLS matrix copied row-major into the HAL matrix layout.
    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
        cam_color_correct_matrix_t colorCorrectTransform;
        cam_rational_type_t transform_elem;
        size_t num = 0;
        for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
            for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
                transform_elem.numerator =
                        frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
                transform_elem.denominator =
                        frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
                colorCorrectTransform.transform_matrix[i][j] = transform_elem;
                num++;
            }
        }
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
                colorCorrectTransform)) {
            rc = BAD_VALUE;
        }
    }
11405
    // AE precapture trigger: forwarded only when both the trigger and its id
    // are present in the request; default is IDLE with an invalid (-1) id.
    cam_trigger_t aecTrigger;
    aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
    aecTrigger.trigger_id = -1;
    if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
        frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
        aecTrigger.trigger =
            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
        aecTrigger.trigger_id =
            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
                aecTrigger)) {
            rc = BAD_VALUE;
        }
        LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
                aecTrigger.trigger, aecTrigger.trigger_id);
    }

    /*af_trigger must come with a trigger id*/
    if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
            frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
        cam_trigger_t af_trigger;
        af_trigger.trigger =
            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
        af_trigger.trigger_id =
            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
            rc = BAD_VALUE;
        }
        LOGD("AfTrigger: %d AfTriggerID: %d",
                af_trigger.trigger, af_trigger.trigger_id);
    }

    // android.demosaic.mode (u8 entry widened to int32 for the HAL).
    if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
        int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
            rc = BAD_VALUE;
        }
    }
    // android.edge.mode plus optional vendor sharpness strength. OFF forces
    // sharpness 0; otherwise the capability default is used unless the vendor
    // tag supplies a value inside the advertised [min, max] range.
    if (frame_settings.exists(ANDROID_EDGE_MODE)) {
        cam_edge_application_t edge_application;
        edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];

        if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
            edge_application.sharpness = 0;
        } else {
            edge_application.sharpness =
                    gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
            if (frame_settings.exists(QCAMERA3_SHARPNESS_STRENGTH)) {
                int32_t sharpness =
                        frame_settings.find(QCAMERA3_SHARPNESS_STRENGTH).data.i32[0];
                if (sharpness >= gCamCapability[mCameraId]->sharpness_ctrl.min_value &&
                        sharpness <= gCamCapability[mCameraId]->sharpness_ctrl.max_value) {
                    LOGD("Setting edge mode sharpness %d", sharpness);
                    edge_application.sharpness = sharpness;
                }
            }
        }
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
            rc = BAD_VALUE;
        }
    }
11467
    // android.flash.mode is honored only when AE mode does not already control
    // the flash (AE modes above ON, e.g. ON_AUTO_FLASH, own the flash).
    if (frame_settings.exists(ANDROID_FLASH_MODE)) {
        int32_t respectFlashMode = 1;
        if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
            uint8_t fwk_aeMode =
                frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
            if (fwk_aeMode > ANDROID_CONTROL_AE_MODE_ON) {
                respectFlashMode = 0;
                LOGH("AE Mode controls flash, ignore android.flash.mode");
            }
        }
        if (respectFlashMode) {
            int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
                    (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
            LOGH("flash mode after mapping %d", val);
            // To check: CAM_INTF_META_FLASH_MODE usage
            if (NAME_NOT_FOUND != val) {
                uint8_t flashMode = (uint8_t)val;
                if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
                    rc = BAD_VALUE;
                }
            }
        }
    }

    // android.flash.firingPower passthrough.
    if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
        uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
            rc = BAD_VALUE;
        }
    }

    // android.flash.firingTime passthrough.
    if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
        int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
                flashFiringTime)) {
            rc = BAD_VALUE;
        }
    }

    // android.hotPixel.mode passthrough.
    if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
        uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
                hotPixelMode)) {
            rc = BAD_VALUE;
        }
    }

    // android.lens.aperture passthrough.
    if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
        float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
                lensAperture)) {
            rc = BAD_VALUE;
        }
    }

    // android.lens.filterDensity passthrough.
    if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
        float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
                filterDensity)) {
            rc = BAD_VALUE;
        }
    }

    // android.lens.focalLength passthrough.
    if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
        float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
                focalLength)) {
            rc = BAD_VALUE;
        }
    }

    // android.lens.opticalStabilizationMode passthrough.
    if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
        uint8_t optStabMode =
                frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
                optStabMode)) {
            rc = BAD_VALUE;
        }
    }
11547
    // android.control.videoStabilizationMode. Note: written to mParameters
    // (session-wide batch), unlike most settings here which go to hal_metadata.
    if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
        uint8_t videoStabMode =
                frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
        LOGD("videoStabMode from APP = %d", videoStabMode);
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_VIDEO_STAB_MODE,
                videoStabMode)) {
            rc = BAD_VALUE;
        }
    }


    // android.noiseReduction.mode passthrough.
    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
        uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
                noiseRedMode)) {
            rc = BAD_VALUE;
        }
    }

    // android.reprocess.effectiveExposureFactor passthrough.
    if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
        float reprocessEffectiveExposureFactor =
            frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
                reprocessEffectiveExposureFactor)) {
            rc = BAD_VALUE;
        }
    }

    // android.scaler.cropRegion: remapped from active-array to sensor-output
    // coordinates; scalerCropSet is remembered so the AE/AF ROI handling
    // further below can validate regions against this crop.
    cam_crop_region_t scalerCropRegion;
    bool scalerCropSet = false;
    if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
        scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
        scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
        scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
        scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];

        // Map coordinate system from active array to sensor output.
        mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
                scalerCropRegion.width, scalerCropRegion.height);

        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
                scalerCropRegion)) {
            rc = BAD_VALUE;
        }
        scalerCropSet = true;
    }
11594
    // android.sensor.exposureTime passthrough (nanoseconds).
    if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
        int64_t sensorExpTime =
                frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
        LOGD("setting sensorExpTime %lld", sensorExpTime);
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
                sensorExpTime)) {
            rc = BAD_VALUE;
        }
    }

    // android.sensor.frameDuration, clamped to
    // [minFrameDuration, capability max_frame_duration].
    if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
        int64_t sensorFrameDuration =
                frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
        sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
        if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
            sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
        LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
                sensorFrameDuration)) {
            rc = BAD_VALUE;
        }
    }

    // android.sensor.sensitivity, clamped to the advertised ISO range.
    if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
        int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
        if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
            sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
        if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
            sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
        LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
                sensorSensitivity)) {
            rc = BAD_VALUE;
        }
    }

#ifndef USE_HAL_3_3
    // android.control.postRawSensitivityBoost (not available on HAL 3.3
    // builds), clamped to the ISP sensitivity range from the capability.
    if (frame_settings.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
        int32_t ispSensitivity =
            frame_settings.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST).data.i32[0];
        if (ispSensitivity <
            gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity) {
            ispSensitivity =
                gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
            LOGD("clamp ispSensitivity to %d", ispSensitivity);
        }
        if (ispSensitivity >
            gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity) {
            ispSensitivity =
                gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity;
            LOGD("clamp ispSensitivity to %d", ispSensitivity);
        }
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_ISP_SENSITIVITY,
                ispSensitivity)) {
            rc = BAD_VALUE;
        }
    }
#endif
11653
    // android.shading.mode passthrough.
    if (frame_settings.exists(ANDROID_SHADING_MODE)) {
        uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
            rc = BAD_VALUE;
        }
    }

    // android.statistics.faceDetectMode (via FACEDETECT_MODES_MAP lookup).
    if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
        uint8_t fwk_facedetectMode =
                frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];

        int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
                fwk_facedetectMode);

        if (NAME_NOT_FOUND != val) {
            uint8_t facedetectMode = (uint8_t)val;
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
                    facedetectMode)) {
                rc = BAD_VALUE;
            }
        }
    }

    // Vendor histogram mode passthrough.
    if (frame_settings.exists(QCAMERA3_HISTOGRAM_MODE)) {
        uint8_t histogramMode =
                frame_settings.find(QCAMERA3_HISTOGRAM_MODE).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
                histogramMode)) {
            rc = BAD_VALUE;
        }
    }

    // android.statistics.sharpnessMapMode passthrough.
    if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
        uint8_t sharpnessMapMode =
                frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
                sharpnessMapMode)) {
            rc = BAD_VALUE;
        }
    }

    // android.tonemap.mode passthrough.
    if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
        uint8_t tonemapMode =
                frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
            rc = BAD_VALUE;
        }
    }
    /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
    /*All tonemap channels will have the same number of points*/
    // Each framework curve is a flat (Pin, Pout) float array, so the point
    // count is entry count / 2; it is clamped to CAM_MAX_TONEMAP_CURVE_SIZE.
    if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
        frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
        frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
        cam_rgb_tonemap_curves tonemapCurves;
        tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
        if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
            LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
                    tonemapCurves.tonemap_points_cnt,
                    CAM_MAX_TONEMAP_CURVE_SIZE);
            tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
        }

        /* ch0 = G*/
        size_t point = 0;
        cam_tonemap_curve_t tonemapCurveGreen;
        for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
            for (size_t j = 0; j < 2; j++) {
                tonemapCurveGreen.tonemap_points[i][j] =
                        frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
                point++;
            }
        }
        tonemapCurves.curves[0] = tonemapCurveGreen;

        /* ch 1 = B */
        point = 0;
        cam_tonemap_curve_t tonemapCurveBlue;
        for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
            for (size_t j = 0; j < 2; j++) {
                tonemapCurveBlue.tonemap_points[i][j] =
                        frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
                point++;
            }
        }
        tonemapCurves.curves[1] = tonemapCurveBlue;

        /* ch 2 = R */
        point = 0;
        cam_tonemap_curve_t tonemapCurveRed;
        for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
            for (size_t j = 0; j < 2; j++) {
                tonemapCurveRed.tonemap_points[i][j] =
                        frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
                point++;
            }
        }
        tonemapCurves.curves[2] = tonemapCurveRed;

        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
                tonemapCurves)) {
            rc = BAD_VALUE;
        }
    }
11757
    // android.control.captureIntent passthrough.
    if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
        uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
                captureIntent)) {
            rc = BAD_VALUE;
        }
    }

    // android.blackLevel.lock passthrough.
    if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
        uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
                blackLevelLock)) {
            rc = BAD_VALUE;
        }
    }

    // android.statistics.lensShadingMapMode passthrough.
    if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
        uint8_t lensShadingMapMode =
                frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
                lensShadingMapMode)) {
            rc = BAD_VALUE;
        }
    }

    // android.control.aeRegions: converted to a cam_area_t, mapped to sensor
    // coordinates, and (when a crop region was set above) skipped if
    // resetIfNeededROI decides it falls outside the crop.
    if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
        cam_area_t roi;
        bool reset = true;
        convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AE_REGIONS);

        // Map coordinate system from active array to sensor output.
        mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
                roi.rect.height);

        if (scalerCropSet) {
            reset = resetIfNeededROI(&roi, &scalerCropRegion);
        }
        if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
            rc = BAD_VALUE;
        }
    }

    // android.control.afRegions: same treatment as the AE regions above.
    if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
        cam_area_t roi;
        bool reset = true;
        convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AF_REGIONS);

        // Map coordinate system from active array to sensor output.
        mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
                roi.rect.height);

        if (scalerCropSet) {
            reset = resetIfNeededROI(&roi, &scalerCropRegion);
        }
        if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
            rc = BAD_VALUE;
        }
    }

    // CDS for non-HFR non-video mode
    if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
            !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
        int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
        if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
            LOGE("Invalid CDS mode %d!", *fwk_cds);
        } else {
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
                    CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
                rc = BAD_VALUE;
            }
        }
    }
11830
    // Video HDR
    // Resolved from the vendor tag, then force-enabled when the session-level
    // m_bVideoHdrEnabled flag is set; setVideoHdrMode is always called so a
    // change back to OFF also reaches the backend.
    cam_video_hdr_mode_t vhdr = CAM_VIDEO_HDR_MODE_OFF;
    if (frame_settings.exists(QCAMERA3_VIDEO_HDR_MODE)) {
        vhdr = (cam_video_hdr_mode_t) frame_settings.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
    }
    if (m_bVideoHdrEnabled)
        vhdr = CAM_VIDEO_HDR_MODE_ON;

    int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);

    // Profiling log only when the requested mode differs from the current
    // feature state.
    if(vhdr != curr_hdr_state)
        LOGH("PROFILE_SET_HDR_MODE %d" ,vhdr);

    rc = setVideoHdrMode(mParameters, vhdr);
    if (rc != NO_ERROR) {
        LOGE("setVideoHDR is failed");
    }

    //IR
    // Vendor IR mode: range-checked against CAM_IR_MODE_MAX; a profiling log
    // is emitted when the on/off state changes.
    if(frame_settings.exists(QCAMERA3_IR_MODE)) {
        cam_ir_mode_type_t fwk_ir = (cam_ir_mode_type_t)
                frame_settings.find(QCAMERA3_IR_MODE).data.i32[0];
        uint8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR) != 0);
        uint8_t isIRon = 0;

        (fwk_ir >0) ? (isIRon = 1) : (isIRon = 0) ;
        if ((CAM_IR_MODE_MAX <= fwk_ir) || (0 > fwk_ir)) {
            LOGE("Invalid IR mode %d!", fwk_ir);
        } else {
            if(isIRon != curr_ir_state )
                LOGH("PROFILE_SET_IR_MODE %d" ,isIRon);

            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
                    CAM_INTF_META_IR_MODE, fwk_ir)) {
                rc = BAD_VALUE;
            }
        }
    }

    //Binning Correction Mode
    // Vendor binning-correction mode, range-checked before batching.
    if(frame_settings.exists(QCAMERA3_BINNING_CORRECTION_MODE)) {
        cam_binning_correction_mode_t fwk_binning_correction = (cam_binning_correction_mode_t)
                frame_settings.find(QCAMERA3_BINNING_CORRECTION_MODE).data.i32[0];
        if ((CAM_BINNING_CORRECTION_MODE_MAX <= fwk_binning_correction)
                || (0 > fwk_binning_correction)) {
            LOGE("Invalid binning correction mode %d!", fwk_binning_correction);
        } else {
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
                    CAM_INTF_META_BINNING_CORRECTION_MODE, fwk_binning_correction)) {
                rc = BAD_VALUE;
            }
        }
    }

    // Vendor AEC convergence speed; negative values are rejected.
    if (frame_settings.exists(QCAMERA3_AEC_CONVERGENCE_SPEED)) {
        float aec_speed;
        aec_speed = frame_settings.find(QCAMERA3_AEC_CONVERGENCE_SPEED).data.f[0];
        LOGD("AEC Speed :%f", aec_speed);
        if ( aec_speed < 0 ) {
            LOGE("Invalid AEC mode %f!", aec_speed);
        } else {
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_CONVERGENCE_SPEED,
                    aec_speed)) {
                rc = BAD_VALUE;
            }
        }
    }

    // Vendor AWB convergence speed; negative values are rejected.
    if (frame_settings.exists(QCAMERA3_AWB_CONVERGENCE_SPEED)) {
        float awb_speed;
        awb_speed = frame_settings.find(QCAMERA3_AWB_CONVERGENCE_SPEED).data.f[0];
        LOGD("AWB Speed :%f", awb_speed);
        if ( awb_speed < 0 ) {
            LOGE("Invalid AWB mode %f!", awb_speed);
        } else {
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AWB_CONVERGENCE_SPEED,
                    awb_speed)) {
                rc = BAD_VALUE;
            }
        }
    }
11912
    // TNR
    // Temporal noise reduction: both vendor tags must be present. Written to
    // the session-wide mParameters batch, not the per-request hal_metadata.
    if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
        frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
        uint8_t b_TnrRequested = 0;
        uint8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0);
        cam_denoise_param_t tnr;
        tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
        tnr.process_plates =
            (cam_denoise_process_type_t)frame_settings.find(
            QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
        b_TnrRequested = tnr.denoise_enable;

        // Profiling log only on a state change.
        if(b_TnrRequested != curr_tnr_state)
            LOGH("PROFILE_SET_TNR_MODE %d" ,b_TnrRequested);

        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
            rc = BAD_VALUE;
        }
    }

    // Vendor exposure metering mode -> AEC algorithm type.
    if (frame_settings.exists(QCAMERA3_EXPOSURE_METER)) {
        int32_t* exposure_metering_mode =
                frame_settings.find(QCAMERA3_EXPOSURE_METER).data.i32;
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_ALGO_TYPE,
                *exposure_metering_mode)) {
            rc = BAD_VALUE;
        }
    }

    // android.sensor.testPatternMode. For SOLID_COLOR the per-channel data
    // (R, Gr, Gb, B in framework order) is reordered to match the sensor's
    // Bayer color arrangement.
    if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
        int32_t fwk_testPatternMode =
                frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
        int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
                METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);

        if (NAME_NOT_FOUND != testPatternMode) {
            cam_test_pattern_data_t testPatternData;
            memset(&testPatternData, 0, sizeof(testPatternData));
            testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
            if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
                    frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
                int32_t *fwk_testPatternData =
                        frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
                testPatternData.r = fwk_testPatternData[0];
                testPatternData.b = fwk_testPatternData[3];
                switch (gCamCapability[mCameraId]->color_arrangement) {
                    case CAM_FILTER_ARRANGEMENT_RGGB:
                    case CAM_FILTER_ARRANGEMENT_GRBG:
                        testPatternData.gr = fwk_testPatternData[1];
                        testPatternData.gb = fwk_testPatternData[2];
                        break;
                    case CAM_FILTER_ARRANGEMENT_GBRG:
                    case CAM_FILTER_ARRANGEMENT_BGGR:
                        testPatternData.gr = fwk_testPatternData[2];
                        testPatternData.gb = fwk_testPatternData[1];
                        break;
                    default:
                        LOGE("color arrangement %d is not supported",
                                gCamCapability[mCameraId]->color_arrangement);
                        break;
                }
            }
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
                    testPatternData)) {
                rc = BAD_VALUE;
            }
        } else {
            LOGE("Invalid framework sensor test pattern mode %d",
                    fwk_testPatternMode);
        }
    }
11984
    // android.jpeg.gpsCoordinates: variable-length double array; a short copy
    // count from the batch macro indicates the entry was rejected.
    if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
        size_t count = 0;
        camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
        ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
                gps_coords.data.d, gps_coords.count, count);
        if (gps_coords.count != count) {
            rc = BAD_VALUE;
        }
    }

    // android.jpeg.gpsProcessingMethod: copied into a fixed-size, NUL-padded
    // local buffer before being batched as a full-size array.
    if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
        char gps_methods[GPS_PROCESSING_METHOD_SIZE];
        size_t count = 0;
        const char *gps_methods_src = (const char *)
                frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
        memset(gps_methods, '\0', sizeof(gps_methods));
        strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
        ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
                gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
        if (GPS_PROCESSING_METHOD_SIZE != count) {
            rc = BAD_VALUE;
        }
    }

    // android.jpeg.gpsTimestamp passthrough.
    if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
        int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
                gps_timestamp)) {
            rc = BAD_VALUE;
        }
    }
12016
12017 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
12018 int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
12019 cam_rotation_info_t rotation_info;
12020 if (orientation == 0) {
12021 rotation_info.rotation = ROTATE_0;
12022 } else if (orientation == 90) {
12023 rotation_info.rotation = ROTATE_90;
12024 } else if (orientation == 180) {
12025 rotation_info.rotation = ROTATE_180;
12026 } else if (orientation == 270) {
12027 rotation_info.rotation = ROTATE_270;
12028 }
Shuzhen Wang6ec8eac2016-07-28 23:09:23 -070012029 rotation_info.device_rotation = ROTATE_0;
Thierry Strudel3d639192016-09-09 11:52:26 -070012030 rotation_info.streamId = snapshotStreamId;
12031 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
12032 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
12033 rc = BAD_VALUE;
12034 }
12035 }
12036
12037 if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
12038 uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
12039 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
12040 rc = BAD_VALUE;
12041 }
12042 }
12043
12044 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
12045 uint32_t thumb_quality = (uint32_t)
12046 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
12047 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
12048 thumb_quality)) {
12049 rc = BAD_VALUE;
12050 }
12051 }
12052
12053 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
12054 cam_dimension_t dim;
12055 dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
12056 dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
12057 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
12058 rc = BAD_VALUE;
12059 }
12060 }
12061
12062 // Internal metadata
12063 if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
12064 size_t count = 0;
12065 camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
12066 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
12067 privatedata.data.i32, privatedata.count, count);
12068 if (privatedata.count != count) {
12069 rc = BAD_VALUE;
12070 }
12071 }
12072
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012073 // ISO/Exposure Priority
12074 if (frame_settings.exists(QCAMERA3_USE_ISO_EXP_PRIORITY) &&
12075 frame_settings.exists(QCAMERA3_SELECT_PRIORITY)) {
12076 cam_priority_mode_t mode =
12077 (cam_priority_mode_t)frame_settings.find(QCAMERA3_SELECT_PRIORITY).data.i32[0];
12078 if((CAM_ISO_PRIORITY == mode) || (CAM_EXP_PRIORITY == mode)) {
12079 cam_intf_parm_manual_3a_t use_iso_exp_pty;
12080 use_iso_exp_pty.previewOnly = FALSE;
12081 uint64_t* ptr = (uint64_t*)frame_settings.find(QCAMERA3_USE_ISO_EXP_PRIORITY).data.i64;
12082 use_iso_exp_pty.value = *ptr;
12083
12084 if(CAM_ISO_PRIORITY == mode) {
12085 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ISO,
12086 use_iso_exp_pty)) {
12087 rc = BAD_VALUE;
12088 }
12089 }
12090 else {
12091 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_TIME,
12092 use_iso_exp_pty)) {
12093 rc = BAD_VALUE;
12094 }
12095 }
Thierry Strudel54dc9782017-02-15 12:12:10 -080012096
12097 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 1)) {
12098 rc = BAD_VALUE;
12099 }
12100 }
12101 } else {
12102 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 0)) {
12103 rc = BAD_VALUE;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012104 }
12105 }
12106
12107 // Saturation
12108 if (frame_settings.exists(QCAMERA3_USE_SATURATION)) {
12109 int32_t* use_saturation =
12110 frame_settings.find(QCAMERA3_USE_SATURATION).data.i32;
12111 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_SATURATION, *use_saturation)) {
12112 rc = BAD_VALUE;
12113 }
12114 }
12115
Thierry Strudel3d639192016-09-09 11:52:26 -070012116 // EV step
12117 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
12118 gCamCapability[mCameraId]->exp_compensation_step)) {
12119 rc = BAD_VALUE;
12120 }
12121
12122 // CDS info
12123 if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
12124 cam_cds_data_t *cdsData = (cam_cds_data_t *)
12125 frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
12126
12127 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12128 CAM_INTF_META_CDS_DATA, *cdsData)) {
12129 rc = BAD_VALUE;
12130 }
12131 }
12132
Shuzhen Wang19463d72016-03-08 11:09:52 -080012133 // Hybrid AE
12134 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
12135 uint8_t *hybrid_ae = (uint8_t *)
12136 frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;
12137
12138 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12139 CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
12140 rc = BAD_VALUE;
12141 }
12142 }
12143
Thierry Strudel3d639192016-09-09 11:52:26 -070012144 return rc;
12145}
12146
12147/*===========================================================================
12148 * FUNCTION : captureResultCb
12149 *
12150 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
12151 *
12152 * PARAMETERS :
12153 * @frame : frame information from mm-camera-interface
12154 * @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
12155 * @userdata: userdata
12156 *
12157 * RETURN : NONE
12158 *==========================================================================*/
12159void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
12160 camera3_stream_buffer_t *buffer,
12161 uint32_t frame_number, bool isInputBuffer, void *userdata)
12162{
12163 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
12164 if (hw == NULL) {
12165 LOGE("Invalid hw %p", hw);
12166 return;
12167 }
12168
12169 hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
12170 return;
12171}
12172
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012173/*===========================================================================
12174 * FUNCTION : setBufferErrorStatus
12175 *
12176 * DESCRIPTION: Callback handler for channels to report any buffer errors
12177 *
12178 * PARAMETERS :
12179 * @ch : Channel on which buffer error is reported from
12180 * @frame_number : frame number on which buffer error is reported on
12181 * @buffer_status : buffer error status
12182 * @userdata: userdata
12183 *
12184 * RETURN : NONE
12185 *==========================================================================*/
12186void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
12187 uint32_t frame_number, camera3_buffer_status_t err, void *userdata)
12188{
12189 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
12190 if (hw == NULL) {
12191 LOGE("Invalid hw %p", hw);
12192 return;
12193 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012194
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012195 hw->setBufferErrorStatus(ch, frame_number, err);
12196 return;
12197}
12198
12199void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
12200 uint32_t frameNumber, camera3_buffer_status_t err)
12201{
12202 LOGD("channel: %p, frame# %d, buf err: %d", ch, frameNumber, err);
12203 pthread_mutex_lock(&mMutex);
12204
12205 for (auto& req : mPendingBuffersMap.mPendingBuffersInRequest) {
12206 if (req.frame_number != frameNumber)
12207 continue;
12208 for (auto& k : req.mPendingBufferList) {
12209 if(k.stream->priv == ch) {
12210 k.bufStatus = CAMERA3_BUFFER_STATUS_ERROR;
12211 }
12212 }
12213 }
12214
12215 pthread_mutex_unlock(&mMutex);
12216 return;
12217}
Thierry Strudel3d639192016-09-09 11:52:26 -070012218/*===========================================================================
12219 * FUNCTION : initialize
12220 *
12221 * DESCRIPTION: Pass framework callback pointers to HAL
12222 *
12223 * PARAMETERS :
12224 *
12225 *
12226 * RETURN : Success : 0
12227 * Failure: -ENODEV
12228 *==========================================================================*/
12229
12230int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
12231 const camera3_callback_ops_t *callback_ops)
12232{
12233 LOGD("E");
12234 QCamera3HardwareInterface *hw =
12235 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12236 if (!hw) {
12237 LOGE("NULL camera device");
12238 return -ENODEV;
12239 }
12240
12241 int rc = hw->initialize(callback_ops);
12242 LOGD("X");
12243 return rc;
12244}
12245
12246/*===========================================================================
12247 * FUNCTION : configure_streams
12248 *
12249 * DESCRIPTION:
12250 *
12251 * PARAMETERS :
12252 *
12253 *
12254 * RETURN : Success: 0
12255 * Failure: -EINVAL (if stream configuration is invalid)
12256 * -ENODEV (fatal error)
12257 *==========================================================================*/
12258
12259int QCamera3HardwareInterface::configure_streams(
12260 const struct camera3_device *device,
12261 camera3_stream_configuration_t *stream_list)
12262{
12263 LOGD("E");
12264 QCamera3HardwareInterface *hw =
12265 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12266 if (!hw) {
12267 LOGE("NULL camera device");
12268 return -ENODEV;
12269 }
12270 int rc = hw->configureStreams(stream_list);
12271 LOGD("X");
12272 return rc;
12273}
12274
12275/*===========================================================================
12276 * FUNCTION : construct_default_request_settings
12277 *
12278 * DESCRIPTION: Configure a settings buffer to meet the required use case
12279 *
12280 * PARAMETERS :
12281 *
12282 *
12283 * RETURN : Success: Return valid metadata
12284 * Failure: Return NULL
12285 *==========================================================================*/
12286const camera_metadata_t* QCamera3HardwareInterface::
12287 construct_default_request_settings(const struct camera3_device *device,
12288 int type)
12289{
12290
12291 LOGD("E");
12292 camera_metadata_t* fwk_metadata = NULL;
12293 QCamera3HardwareInterface *hw =
12294 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12295 if (!hw) {
12296 LOGE("NULL camera device");
12297 return NULL;
12298 }
12299
12300 fwk_metadata = hw->translateCapabilityToMetadata(type);
12301
12302 LOGD("X");
12303 return fwk_metadata;
12304}
12305
12306/*===========================================================================
12307 * FUNCTION : process_capture_request
12308 *
12309 * DESCRIPTION:
12310 *
12311 * PARAMETERS :
12312 *
12313 *
12314 * RETURN :
12315 *==========================================================================*/
12316int QCamera3HardwareInterface::process_capture_request(
12317 const struct camera3_device *device,
12318 camera3_capture_request_t *request)
12319{
12320 LOGD("E");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012321 CAMSCOPE_UPDATE_FLAGS(CAMSCOPE_SECTION_HAL, kpi_camscope_flags);
Thierry Strudel3d639192016-09-09 11:52:26 -070012322 QCamera3HardwareInterface *hw =
12323 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12324 if (!hw) {
12325 LOGE("NULL camera device");
12326 return -EINVAL;
12327 }
12328
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012329 int rc = hw->orchestrateRequest(request);
Thierry Strudel3d639192016-09-09 11:52:26 -070012330 LOGD("X");
12331 return rc;
12332}
12333
12334/*===========================================================================
12335 * FUNCTION : dump
12336 *
12337 * DESCRIPTION:
12338 *
12339 * PARAMETERS :
12340 *
12341 *
12342 * RETURN :
12343 *==========================================================================*/
12344
12345void QCamera3HardwareInterface::dump(
12346 const struct camera3_device *device, int fd)
12347{
12348 /* Log level property is read when "adb shell dumpsys media.camera" is
12349 called so that the log level can be controlled without restarting
12350 the media server */
12351 getLogLevel();
12352
12353 LOGD("E");
12354 QCamera3HardwareInterface *hw =
12355 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12356 if (!hw) {
12357 LOGE("NULL camera device");
12358 return;
12359 }
12360
12361 hw->dump(fd);
12362 LOGD("X");
12363 return;
12364}
12365
12366/*===========================================================================
12367 * FUNCTION : flush
12368 *
12369 * DESCRIPTION:
12370 *
12371 * PARAMETERS :
12372 *
12373 *
12374 * RETURN :
12375 *==========================================================================*/
12376
12377int QCamera3HardwareInterface::flush(
12378 const struct camera3_device *device)
12379{
12380 int rc;
12381 LOGD("E");
12382 QCamera3HardwareInterface *hw =
12383 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12384 if (!hw) {
12385 LOGE("NULL camera device");
12386 return -EINVAL;
12387 }
12388
12389 pthread_mutex_lock(&hw->mMutex);
12390 // Validate current state
12391 switch (hw->mState) {
12392 case STARTED:
12393 /* valid state */
12394 break;
12395
12396 case ERROR:
12397 pthread_mutex_unlock(&hw->mMutex);
12398 hw->handleCameraDeviceError();
12399 return -ENODEV;
12400
12401 default:
12402 LOGI("Flush returned during state %d", hw->mState);
12403 pthread_mutex_unlock(&hw->mMutex);
12404 return 0;
12405 }
12406 pthread_mutex_unlock(&hw->mMutex);
12407
12408 rc = hw->flush(true /* restart channels */ );
12409 LOGD("X");
12410 return rc;
12411}
12412
12413/*===========================================================================
12414 * FUNCTION : close_camera_device
12415 *
12416 * DESCRIPTION:
12417 *
12418 * PARAMETERS :
12419 *
12420 *
12421 * RETURN :
12422 *==========================================================================*/
12423int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
12424{
12425 int ret = NO_ERROR;
12426 QCamera3HardwareInterface *hw =
12427 reinterpret_cast<QCamera3HardwareInterface *>(
12428 reinterpret_cast<camera3_device_t *>(device)->priv);
12429 if (!hw) {
12430 LOGE("NULL camera device");
12431 return BAD_VALUE;
12432 }
12433
12434 LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
12435 delete hw;
12436 LOGI("[KPI Perf]: X");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012437 CAMSCOPE_DESTROY(CAMSCOPE_SECTION_HAL);
Thierry Strudel3d639192016-09-09 11:52:26 -070012438 return ret;
12439}
12440
12441/*===========================================================================
12442 * FUNCTION : getWaveletDenoiseProcessPlate
12443 *
12444 * DESCRIPTION: query wavelet denoise process plate
12445 *
12446 * PARAMETERS : None
12447 *
12448 * RETURN     : WNR process plate value
12449 *==========================================================================*/
12450cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
12451{
12452 char prop[PROPERTY_VALUE_MAX];
12453 memset(prop, 0, sizeof(prop));
12454 property_get("persist.denoise.process.plates", prop, "0");
12455 int processPlate = atoi(prop);
12456 switch(processPlate) {
12457 case 0:
12458 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
12459 case 1:
12460 return CAM_WAVELET_DENOISE_CBCR_ONLY;
12461 case 2:
12462 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
12463 case 3:
12464 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
12465 default:
12466 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
12467 }
12468}
12469
12470
12471/*===========================================================================
12472 * FUNCTION : getTemporalDenoiseProcessPlate
12473 *
12474 * DESCRIPTION: query temporal denoise process plate
12475 *
12476 * PARAMETERS : None
12477 *
12478 * RETURN     : TNR process plate value
12479 *==========================================================================*/
12480cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
12481{
12482 char prop[PROPERTY_VALUE_MAX];
12483 memset(prop, 0, sizeof(prop));
12484 property_get("persist.tnr.process.plates", prop, "0");
12485 int processPlate = atoi(prop);
12486 switch(processPlate) {
12487 case 0:
12488 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
12489 case 1:
12490 return CAM_WAVELET_DENOISE_CBCR_ONLY;
12491 case 2:
12492 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
12493 case 3:
12494 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
12495 default:
12496 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
12497 }
12498}
12499
12500
12501/*===========================================================================
12502 * FUNCTION : extractSceneMode
12503 *
12504 * DESCRIPTION: Extract scene mode from frameworks set metadata
12505 *
12506 * PARAMETERS :
12507 * @frame_settings: CameraMetadata reference
12508 * @metaMode: ANDROID_CONTROL_MODE
12509 * @hal_metadata: hal metadata structure
12510 *
12511 * RETURN : None
12512 *==========================================================================*/
12513int32_t QCamera3HardwareInterface::extractSceneMode(
12514 const CameraMetadata &frame_settings, uint8_t metaMode,
12515 metadata_buffer_t *hal_metadata)
12516{
12517 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080012518 uint8_t sceneMode = CAM_SCENE_MODE_OFF;
12519
12520 if (ANDROID_CONTROL_MODE_OFF_KEEP_STATE == metaMode) {
12521 LOGD("Ignoring control mode OFF_KEEP_STATE");
12522 return NO_ERROR;
12523 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012524
12525 if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
12526 camera_metadata_ro_entry entry =
12527 frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
12528 if (0 == entry.count)
12529 return rc;
12530
12531 uint8_t fwk_sceneMode = entry.data.u8[0];
12532
12533 int val = lookupHalName(SCENE_MODES_MAP,
12534 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
12535 fwk_sceneMode);
12536 if (NAME_NOT_FOUND != val) {
Mansoor Aftab58465fa2017-01-26 15:02:44 -080012537 sceneMode = (uint8_t)val;
Thierry Strudel3d639192016-09-09 11:52:26 -070012538 LOGD("sceneMode: %d", sceneMode);
Thierry Strudel3d639192016-09-09 11:52:26 -070012539 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080012540 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012541
Mansoor Aftab58465fa2017-01-26 15:02:44 -080012542 if ((sceneMode == CAM_SCENE_MODE_HDR) || m_bSensorHDREnabled) {
12543 rc = setSensorHDR(hal_metadata, (sceneMode == CAM_SCENE_MODE_HDR));
12544 }
12545
12546 if ((rc == NO_ERROR) && !m_bSensorHDREnabled) {
12547 if (sceneMode == ANDROID_CONTROL_SCENE_MODE_HDR) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012548 cam_hdr_param_t hdr_params;
12549 hdr_params.hdr_enable = 1;
12550 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
12551 hdr_params.hdr_need_1x = false;
12552 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12553 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
12554 rc = BAD_VALUE;
12555 }
12556 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080012557
Thierry Strudel3d639192016-09-09 11:52:26 -070012558 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12559 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
12560 rc = BAD_VALUE;
12561 }
12562 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012563
12564 if (mForceHdrSnapshot) {
12565 cam_hdr_param_t hdr_params;
12566 hdr_params.hdr_enable = 1;
12567 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
12568 hdr_params.hdr_need_1x = false;
12569 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12570 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
12571 rc = BAD_VALUE;
12572 }
12573 }
12574
Thierry Strudel3d639192016-09-09 11:52:26 -070012575 return rc;
12576}
12577
12578/*===========================================================================
Thierry Strudel04e026f2016-10-10 11:27:36 -070012579 * FUNCTION : setVideoHdrMode
12580 *
12581 * DESCRIPTION: Set Video HDR mode from frameworks set metadata
12582 *
12583 * PARAMETERS :
12584 * @hal_metadata: hal metadata structure
12585 * @metaMode: QCAMERA3_VIDEO_HDR_MODE
12586 *
12587 * RETURN : None
12588 *==========================================================================*/
12589int32_t QCamera3HardwareInterface::setVideoHdrMode(
12590 metadata_buffer_t *hal_metadata, cam_video_hdr_mode_t vhdr)
12591{
Mansoor Aftab58465fa2017-01-26 15:02:44 -080012592 if ( (vhdr >= CAM_VIDEO_HDR_MODE_OFF) && (vhdr < CAM_VIDEO_HDR_MODE_MAX)) {
12593 return setSensorHDR(hal_metadata, (vhdr == CAM_VIDEO_HDR_MODE_ON), true);
12594 }
12595
12596 LOGE("Invalid Video HDR mode %d!", vhdr);
12597 return BAD_VALUE;
12598}
12599
12600/*===========================================================================
12601 * FUNCTION : setSensorHDR
12602 *
12603 * DESCRIPTION: Enable/disable sensor HDR.
12604 *
12605 * PARAMETERS :
12606 * @hal_metadata: hal metadata structure
12607 * @enable: boolean whether to enable/disable sensor HDR
12608 *
12609 * RETURN : None
12610 *==========================================================================*/
int32_t QCamera3HardwareInterface::setSensorHDR(
        metadata_buffer_t *hal_metadata, bool enable, bool isVideoHdrEnable)
{
    int32_t rc = NO_ERROR;
    // Default OFF; the property is only consulted when enabling.
    cam_sensor_hdr_type_t sensor_hdr = CAM_SENSOR_HDR_OFF;

    if (enable) {
        // The concrete HDR flavor (in-sensor / zigzag / staggered) is chosen
        // by a persist property, not by the framework request.
        char sensor_hdr_prop[PROPERTY_VALUE_MAX];
        memset(sensor_hdr_prop, 0, sizeof(sensor_hdr_prop));
        #ifdef _LE_CAMERA_
        //Default to staggered HDR for IOT
        property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "3");
        #else
        property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "0");
        #endif
        sensor_hdr = (cam_sensor_hdr_type_t) atoi(sensor_hdr_prop);
    }

    // Validate the requested flavor against the capability feature mask;
    // only advertised modes are forwarded. OFF is always accepted, and an
    // unknown property value fails with BAD_VALUE.
    bool isSupported = false;
    switch (sensor_hdr) {
        case CAM_SENSOR_HDR_IN_SENSOR:
            if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
                    CAM_QCOM_FEATURE_SENSOR_HDR) {
                isSupported = true;
                LOGD("Setting HDR mode In Sensor");
            }
            break;
        case CAM_SENSOR_HDR_ZIGZAG:
            if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
                    CAM_QCOM_FEATURE_ZIGZAG_HDR) {
                isSupported = true;
                LOGD("Setting HDR mode Zigzag");
            }
            break;
        case CAM_SENSOR_HDR_STAGGERED:
            if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
                    CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
                isSupported = true;
                LOGD("Setting HDR mode Staggered");
            }
            break;
        case CAM_SENSOR_HDR_OFF:
            isSupported = true;
            LOGD("Turning off sensor HDR");
            break;
        default:
            LOGE("HDR mode %d not supported", sensor_hdr);
            rc = BAD_VALUE;
            break;
    }

    // Commit the validated mode to the parameter batch. m_bSensorHDREnabled
    // is only refreshed on the scene-mode path; video-HDR callers
    // (isVideoHdrEnable == true) skip the cache update.
    if(isSupported) {
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
                CAM_INTF_PARM_SENSOR_HDR, sensor_hdr)) {
            rc = BAD_VALUE;
        } else {
            if(!isVideoHdrEnable)
                m_bSensorHDREnabled = (sensor_hdr != CAM_SENSOR_HDR_OFF);
        }
    }
    return rc;
}
12673
12674/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070012675 * FUNCTION : needRotationReprocess
12676 *
12677 * DESCRIPTION: if rotation needs to be done by reprocess in pp
12678 *
12679 * PARAMETERS : none
12680 *
12681 * RETURN : true: needed
12682 * false: no need
12683 *==========================================================================*/
12684bool QCamera3HardwareInterface::needRotationReprocess()
12685{
12686 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
12687 // current rotation is not zero, and pp has the capability to process rotation
12688 LOGH("need do reprocess for rotation");
12689 return true;
12690 }
12691
12692 return false;
12693}
12694
12695/*===========================================================================
12696 * FUNCTION : needReprocess
12697 *
12698 * DESCRIPTION: if reprocess in needed
12699 *
12700 * PARAMETERS : none
12701 *
12702 * RETURN : true: needed
12703 * false: no need
12704 *==========================================================================*/
12705bool QCamera3HardwareInterface::needReprocess(cam_feature_mask_t postprocess_mask)
12706{
12707 if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
12708 // TODO: add for ZSL HDR later
12709 // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
12710 if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
12711 LOGH("need do reprocess for ZSL WNR or min PP reprocess");
12712 return true;
12713 } else {
12714 LOGH("already post processed frame");
12715 return false;
12716 }
12717 }
12718 return needRotationReprocess();
12719}
12720
12721/*===========================================================================
12722 * FUNCTION : needJpegExifRotation
12723 *
12724 * DESCRIPTION: if rotation from jpeg is needed
12725 *
12726 * PARAMETERS : none
12727 *
12728 * RETURN : true: needed
12729 * false: no need
12730 *==========================================================================*/
12731bool QCamera3HardwareInterface::needJpegExifRotation()
12732{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012733 /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
Thierry Strudel3d639192016-09-09 11:52:26 -070012734 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
12735 LOGD("Need use Jpeg EXIF Rotation");
12736 return true;
12737 }
12738 return false;
12739}
12740
12741/*===========================================================================
12742 * FUNCTION : addOfflineReprocChannel
12743 *
12744 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
12745 * coming from input channel
12746 *
12747 * PARAMETERS :
12748 * @config : reprocess configuration
12749 * @inputChHandle : pointer to the input (source) channel
12750 *
12751 *
12752 * RETURN : Ptr to the newly created channel obj. NULL if failed.
12753 *==========================================================================*/
QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
        const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
{
    int32_t rc = NO_ERROR;
    QCamera3ReprocessChannel *pChannel = NULL;

    // Create the reprocess channel bound to this HAL's camera/channel handles;
    // results and buffer errors are routed through the static trampolines.
    pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
            mChannelHandle, mCameraHandle->ops, captureResultCb, setBufferErrorStatus,
            config.padding, CAM_QCOM_FEATURE_NONE, this, inputChHandle);
    if (NULL == pChannel) {
        LOGE("no mem for reprocess channel");
        return NULL;
    }

    // Channel owns nothing shared yet, so plain delete is the full cleanup
    // on every failure path below.
    rc = pChannel->initialize(IS_TYPE_NONE);
    if (rc != NO_ERROR) {
        LOGE("init reprocess channel failed, ret = %d", rc);
        delete pChannel;
        return NULL;
    }

    // pp feature config: start from the HAL3 superset, then adjust per
    // capability and request.
    cam_pp_feature_config_t pp_config;
    memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));

    pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
    if (gCamCapability[mCameraId]->qcom_supported_feature_mask
            & CAM_QCOM_FEATURE_DSDN) {
        //Use CPP CDS incase h/w supports it.
        pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
        pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
    }
    // Strip rotation from the mask when the PP block cannot rotate.
    if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
        pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
    }

    // Propagate HDR parameters requested by the caller's reprocess config.
    if (config.hdr_param.hdr_enable) {
        pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
        pp_config.hdr_param = config.hdr_param;
    }

    // Debug override: force multi-frame HDR regardless of the config
    // (overwrites any hdr_param copied above).
    if (mForceHdrSnapshot) {
        pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
        pp_config.hdr_param.hdr_enable = 1;
        pp_config.hdr_param.hdr_need_1x = 0;
        pp_config.hdr_param.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
    }

    rc = pChannel->addReprocStreamsFromSource(pp_config,
            config,
            IS_TYPE_NONE,
            mMetadataChannel);

    if (rc != NO_ERROR) {
        delete pChannel;
        return NULL;
    }
    return pChannel;
}
12813
12814/*===========================================================================
12815 * FUNCTION : getMobicatMask
12816 *
12817 * DESCRIPTION: returns mobicat mask
12818 *
12819 * PARAMETERS : none
12820 *
12821 * RETURN : mobicat mask
12822 *
12823 *==========================================================================*/
uint8_t QCamera3HardwareInterface::getMobicatMask()
{
    // Accessor for the Mobicat enable mask cached by setMobicat().
    return m_MobicatMask;
}
12828
12829/*===========================================================================
12830 * FUNCTION : setMobicat
12831 *
12832 * DESCRIPTION: set Mobicat on/off.
12833 *
12834 * PARAMETERS :
12835 * @params : none
12836 *
12837 * RETURN : int32_t type of status
12838 * NO_ERROR -- success
12839 * none-zero failure code
12840 *==========================================================================*/
12841int32_t QCamera3HardwareInterface::setMobicat()
12842{
12843 char value [PROPERTY_VALUE_MAX];
12844 property_get("persist.camera.mobicat", value, "0");
12845 int32_t ret = NO_ERROR;
12846 uint8_t enableMobi = (uint8_t)atoi(value);
12847
12848 if (enableMobi) {
12849 tune_cmd_t tune_cmd;
12850 tune_cmd.type = SET_RELOAD_CHROMATIX;
12851 tune_cmd.module = MODULE_ALL;
12852 tune_cmd.value = TRUE;
12853 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
12854 CAM_INTF_PARM_SET_VFE_COMMAND,
12855 tune_cmd);
12856
12857 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
12858 CAM_INTF_PARM_SET_PP_COMMAND,
12859 tune_cmd);
12860 }
12861 m_MobicatMask = enableMobi;
12862
12863 return ret;
12864}
12865
12866/*===========================================================================
12867* FUNCTION : getLogLevel
12868*
12869* DESCRIPTION: Reads the log level property into a variable
12870*
12871* PARAMETERS :
12872* None
12873*
12874* RETURN :
12875* None
12876*==========================================================================*/
12877void QCamera3HardwareInterface::getLogLevel()
12878{
12879 char prop[PROPERTY_VALUE_MAX];
12880 uint32_t globalLogLevel = 0;
12881
12882 property_get("persist.camera.hal.debug", prop, "0");
12883 int val = atoi(prop);
12884 if (0 <= val) {
12885 gCamHal3LogLevel = (uint32_t)val;
12886 }
12887
Thierry Strudel9ec39c62016-12-28 11:30:05 -080012888 property_get("persist.camera.kpi.debug", prop, "0");
Thierry Strudel3d639192016-09-09 11:52:26 -070012889 gKpiDebugLevel = atoi(prop);
12890
12891 property_get("persist.camera.global.debug", prop, "0");
12892 val = atoi(prop);
12893 if (0 <= val) {
12894 globalLogLevel = (uint32_t)val;
12895 }
12896
12897 /* Highest log level among hal.logs and global.logs is selected */
12898 if (gCamHal3LogLevel < globalLogLevel)
12899 gCamHal3LogLevel = globalLogLevel;
12900
12901 return;
12902}
12903
12904/*===========================================================================
12905 * FUNCTION : validateStreamRotations
12906 *
12907 * DESCRIPTION: Check if the rotations requested are supported
12908 *
12909 * PARAMETERS :
12910 * @stream_list : streams to be configured
12911 *
12912 * RETURN : NO_ERROR on success
12913 * -EINVAL on failure
12914 *
12915 *==========================================================================*/
12916int QCamera3HardwareInterface::validateStreamRotations(
12917 camera3_stream_configuration_t *streamList)
12918{
12919 int rc = NO_ERROR;
12920
12921 /*
12922 * Loop through all streams requested in configuration
12923 * Check if unsupported rotations have been requested on any of them
12924 */
12925 for (size_t j = 0; j < streamList->num_streams; j++){
12926 camera3_stream_t *newStream = streamList->streams[j];
12927
12928 bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
12929 bool isImplDef = (newStream->format ==
12930 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
12931 bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
12932 isImplDef);
12933
12934 if (isRotated && (!isImplDef || isZsl)) {
12935 LOGE("Error: Unsupported rotation of %d requested for stream"
12936 "type:%d and stream format:%d",
12937 newStream->rotation, newStream->stream_type,
12938 newStream->format);
12939 rc = -EINVAL;
12940 break;
12941 }
12942 }
12943
12944 return rc;
12945}
12946
12947/*===========================================================================
12948* FUNCTION : getFlashInfo
12949*
12950* DESCRIPTION: Retrieve information about whether the device has a flash.
12951*
12952* PARAMETERS :
12953* @cameraId : Camera id to query
12954* @hasFlash : Boolean indicating whether there is a flash device
12955* associated with given camera
12956* @flashNode : If a flash device exists, this will be its device node.
12957*
12958* RETURN :
12959* None
12960*==========================================================================*/
12961void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
12962 bool& hasFlash,
12963 char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
12964{
12965 cam_capability_t* camCapability = gCamCapability[cameraId];
12966 if (NULL == camCapability) {
12967 hasFlash = false;
12968 flashNode[0] = '\0';
12969 } else {
12970 hasFlash = camCapability->flash_available;
12971 strlcpy(flashNode,
12972 (char*)camCapability->flash_dev_name,
12973 QCAMERA_MAX_FILEPATH_LENGTH);
12974 }
12975}
12976
12977/*===========================================================================
12978* FUNCTION : getEepromVersionInfo
12979*
12980* DESCRIPTION: Retrieve version info of the sensor EEPROM data
12981*
12982* PARAMETERS : None
12983*
12984* RETURN : string describing EEPROM version
12985* "\0" if no such info available
12986*==========================================================================*/
12987const char *QCamera3HardwareInterface::getEepromVersionInfo()
12988{
12989 return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
12990}
12991
12992/*===========================================================================
12993* FUNCTION : getLdafCalib
12994*
12995* DESCRIPTION: Retrieve Laser AF calibration data
12996*
12997* PARAMETERS : None
12998*
12999* RETURN : Two uint32_t describing laser AF calibration data
13000* NULL if none is available.
13001*==========================================================================*/
13002const uint32_t *QCamera3HardwareInterface::getLdafCalib()
13003{
13004 if (mLdafCalibExist) {
13005 return &mLdafCalib[0];
13006 } else {
13007 return NULL;
13008 }
13009}
13010
13011/*===========================================================================
13012 * FUNCTION : dynamicUpdateMetaStreamInfo
13013 *
13014 * DESCRIPTION: This function:
13015 * (1) stops all the channels
13016 * (2) returns error on pending requests and buffers
13017 * (3) sends metastream_info in setparams
13018 * (4) starts all channels
13019 * This is useful when sensor has to be restarted to apply any
13020 * settings such as frame rate from a different sensor mode
13021 *
13022 * PARAMETERS : None
13023 *
13024 * RETURN : NO_ERROR on success
13025 * Error codes on failure
13026 *
13027 *==========================================================================*/
int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_DYN_UPDATE_META_STRM_INFO);
    int rc = NO_ERROR;

    LOGD("E");

    // Per the function header: restart sequence is stop -> flush pending
    // -> re-send meta stream info -> start. The order here is critical.
    rc = stopAllChannels();
    if (rc < 0) {
        LOGE("stopAllChannels failed");
        return rc;
    }

    // In-flight requests/buffers cannot survive the restart; return them
    // to the framework as errors before reconfiguring.
    rc = notifyErrorForPendingRequests();
    if (rc < 0) {
        LOGE("notifyErrorForPendingRequests failed");
        return rc;
    }

    // Debug dump of the stream configuration being re-sent.
    for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
        LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x"
                "Format:%d",
                mStreamConfigInfo.type[i],
                mStreamConfigInfo.stream_sizes[i].width,
                mStreamConfigInfo.stream_sizes[i].height,
                mStreamConfigInfo.postprocess_mask[i],
                mStreamConfigInfo.format[i]);
    }

    /* Send meta stream info once again so that ISP can start */
    ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
            CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
    rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
            mParameters);
    if (rc < 0) {
        // Deliberately non-fatal: channels are restarted below regardless,
        // the sensor just keeps its previous mode.
        LOGE("set Metastreaminfo failed. Sensor mode does not change");
    }

    rc = startAllChannels();
    if (rc < 0) {
        LOGE("startAllChannels failed");
        return rc;
    }

    LOGD("X");
    return rc;
}
13075
13076/*===========================================================================
13077 * FUNCTION : stopAllChannels
13078 *
13079 * DESCRIPTION: This function stops (equivalent to stream-off) all channels
13080 *
13081 * PARAMETERS : None
13082 *
13083 * RETURN : NO_ERROR on success
13084 * Error codes on failure
13085 *
13086 *==========================================================================*/
13087int32_t QCamera3HardwareInterface::stopAllChannels()
13088{
13089 int32_t rc = NO_ERROR;
13090
13091 LOGD("Stopping all channels");
13092 // Stop the Streams/Channels
13093 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13094 it != mStreamInfo.end(); it++) {
13095 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13096 if (channel) {
13097 channel->stop();
13098 }
13099 (*it)->status = INVALID;
13100 }
13101
13102 if (mSupportChannel) {
13103 mSupportChannel->stop();
13104 }
13105 if (mAnalysisChannel) {
13106 mAnalysisChannel->stop();
13107 }
13108 if (mRawDumpChannel) {
13109 mRawDumpChannel->stop();
13110 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013111 if (mHdrPlusRawSrcChannel) {
13112 mHdrPlusRawSrcChannel->stop();
13113 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013114 if (mMetadataChannel) {
13115 /* If content of mStreamInfo is not 0, there is metadata stream */
13116 mMetadataChannel->stop();
13117 }
13118
13119 LOGD("All channels stopped");
13120 return rc;
13121}
13122
13123/*===========================================================================
13124 * FUNCTION : startAllChannels
13125 *
13126 * DESCRIPTION: This function starts (equivalent to stream-on) all channels
13127 *
13128 * PARAMETERS : None
13129 *
13130 * RETURN : NO_ERROR on success
13131 * Error codes on failure
13132 *
13133 *==========================================================================*/
13134int32_t QCamera3HardwareInterface::startAllChannels()
13135{
13136 int32_t rc = NO_ERROR;
13137
13138 LOGD("Start all channels ");
13139 // Start the Streams/Channels
13140 if (mMetadataChannel) {
13141 /* If content of mStreamInfo is not 0, there is metadata stream */
13142 rc = mMetadataChannel->start();
13143 if (rc < 0) {
13144 LOGE("META channel start failed");
13145 return rc;
13146 }
13147 }
13148 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13149 it != mStreamInfo.end(); it++) {
13150 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13151 if (channel) {
13152 rc = channel->start();
13153 if (rc < 0) {
13154 LOGE("channel start failed");
13155 return rc;
13156 }
13157 }
13158 }
13159 if (mAnalysisChannel) {
13160 mAnalysisChannel->start();
13161 }
13162 if (mSupportChannel) {
13163 rc = mSupportChannel->start();
13164 if (rc < 0) {
13165 LOGE("Support channel start failed");
13166 return rc;
13167 }
13168 }
13169 if (mRawDumpChannel) {
13170 rc = mRawDumpChannel->start();
13171 if (rc < 0) {
13172 LOGE("RAW dump channel start failed");
13173 return rc;
13174 }
13175 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013176 if (mHdrPlusRawSrcChannel) {
13177 rc = mHdrPlusRawSrcChannel->start();
13178 if (rc < 0) {
13179 LOGE("HDR+ RAW channel start failed");
13180 return rc;
13181 }
13182 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013183
13184 LOGD("All channels started");
13185 return rc;
13186}
13187
13188/*===========================================================================
13189 * FUNCTION : notifyErrorForPendingRequests
13190 *
13191 * DESCRIPTION: This function sends error for all the pending requests/buffers
13192 *
13193 * PARAMETERS : None
13194 *
13195 * RETURN : Error codes
13196 * NO_ERROR on success
13197 *
13198 *==========================================================================*/
int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
{
    int32_t rc = NO_ERROR;
    unsigned int frameNum = 0;
    camera3_capture_result_t result;
    camera3_stream_buffer_t *pStream_Buf = NULL;

    memset(&result, 0, sizeof(camera3_capture_result_t));

    // Find the oldest frame still in the request list. Buffers belonging
    // to frames older than it have already had their metadata delivered,
    // so they only need ERROR_BUFFER; newer ones need ERROR_REQUEST.
    if (mPendingRequestsList.size() > 0) {
        pendingRequestIterator i = mPendingRequestsList.begin();
        frameNum = i->frame_number;
    } else {
        /* There might still be pending buffers even though there are
         no pending requests. Setting the frameNum to MAX so that
         all the buffers with smaller frame numbers are returned */
        frameNum = UINT_MAX;
    }

    LOGH("Oldest frame num on mPendingRequestsList = %u",
      frameNum);

    notifyErrorFoPendingDepthData(mDepthChannel);

    for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
            req != mPendingBuffersMap.mPendingBuffersInRequest.end(); ) {

        if (req->frame_number < frameNum) {
            // Send Error notify to frameworks for each buffer for which
            // metadata buffer is already sent
            LOGH("Sending ERROR BUFFER for frame %d for %d buffer(s)",
                req->frame_number, req->mPendingBufferList.size());

            // NOTE(review): operator new throws rather than returning NULL
            // on ordinary builds, so this check is effectively dead unless
            // a no-throw allocator is in use — confirm before relying on it.
            pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
            if (NULL == pStream_Buf) {
                LOGE("No memory for pending buffers array");
                return NO_MEMORY;
            }
            memset(pStream_Buf, 0,
                sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());
            result.result = NULL;
            result.frame_number = req->frame_number;
            result.num_output_buffers = req->mPendingBufferList.size();
            result.output_buffers = pStream_Buf;

            // One ERROR_BUFFER notify per pending buffer, and mark each
            // output buffer as errored in the capture result.
            size_t index = 0;
            for (auto info = req->mPendingBufferList.begin();
                info != req->mPendingBufferList.end(); ) {

                camera3_notify_msg_t notify_msg;
                memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
                notify_msg.type = CAMERA3_MSG_ERROR;
                notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
                notify_msg.message.error.error_stream = info->stream;
                notify_msg.message.error.frame_number = req->frame_number;
                pStream_Buf[index].acquire_fence = -1;
                pStream_Buf[index].release_fence = -1;
                pStream_Buf[index].buffer = info->buffer;
                pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
                pStream_Buf[index].stream = info->stream;
                orchestrateNotify(&notify_msg);
                index++;
                // Remove buffer from list
                info = req->mPendingBufferList.erase(info);
            }

            // Remove this request from Map
            LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
                req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
            req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);

            orchestrateResult(&result);

            delete [] pStream_Buf;
        } else {

            // Go through the pending requests info and send error request to framework
            // NOTE(review): this branch assumes mPendingRequestsList is
            // non-empty (otherwise frameNum is UINT_MAX and the branch is
            // unreachable for frame numbers below UINT_MAX); i->input_buffer
            // below would be invalid on an empty list — verify.
            pendingRequestIterator i = mPendingRequestsList.begin(); //make sure i is at the beginning

            LOGH("Sending ERROR REQUEST for frame %d", req->frame_number);

            // Send error notify to frameworks
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
            notify_msg.type = CAMERA3_MSG_ERROR;
            notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
            notify_msg.message.error.error_stream = NULL;
            notify_msg.message.error.frame_number = req->frame_number;
            orchestrateNotify(&notify_msg);

            pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
            if (NULL == pStream_Buf) {
                LOGE("No memory for pending buffers array");
                return NO_MEMORY;
            }
            memset(pStream_Buf, 0, sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());

            result.result = NULL;
            result.frame_number = req->frame_number;
            result.input_buffer = i->input_buffer;
            result.num_output_buffers = req->mPendingBufferList.size();
            result.output_buffers = pStream_Buf;

            // ERROR_REQUEST already covers the whole frame, so buffers are
            // only marked errored in the result — no per-buffer notify here.
            size_t index = 0;
            for (auto info = req->mPendingBufferList.begin();
                info != req->mPendingBufferList.end(); ) {
                pStream_Buf[index].acquire_fence = -1;
                pStream_Buf[index].release_fence = -1;
                pStream_Buf[index].buffer = info->buffer;
                pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
                pStream_Buf[index].stream = info->stream;
                index++;
                // Remove buffer from list
                info = req->mPendingBufferList.erase(info);
            }

            // Remove this request from Map
            LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
                req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
            req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);

            orchestrateResult(&result);
            delete [] pStream_Buf;
            i = erasePendingRequest(i);
        }
    }

    /* Reset pending frame Drop list and requests list */
    mPendingFrameDropList.clear();

    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        req.mPendingBufferList.clear();
    }
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    LOGH("Cleared all the pending buffers ");

    return rc;
}
13337
13338bool QCamera3HardwareInterface::isOnEncoder(
13339 const cam_dimension_t max_viewfinder_size,
13340 uint32_t width, uint32_t height)
13341{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013342 return ((width > (uint32_t)max_viewfinder_size.width) ||
13343 (height > (uint32_t)max_viewfinder_size.height) ||
13344 (width > (uint32_t)VIDEO_4K_WIDTH) ||
13345 (height > (uint32_t)VIDEO_4K_HEIGHT));
Thierry Strudel3d639192016-09-09 11:52:26 -070013346}
13347
13348/*===========================================================================
13349 * FUNCTION : setBundleInfo
13350 *
13351 * DESCRIPTION: Set bundle info for all streams that are bundle.
13352 *
13353 * PARAMETERS : None
13354 *
13355 * RETURN : NO_ERROR on success
13356 * Error codes on failure
13357 *==========================================================================*/
13358int32_t QCamera3HardwareInterface::setBundleInfo()
13359{
13360 int32_t rc = NO_ERROR;
13361
13362 if (mChannelHandle) {
13363 cam_bundle_config_t bundleInfo;
13364 memset(&bundleInfo, 0, sizeof(bundleInfo));
13365 rc = mCameraHandle->ops->get_bundle_info(
13366 mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
13367 if (rc != NO_ERROR) {
13368 LOGE("get_bundle_info failed");
13369 return rc;
13370 }
13371 if (mAnalysisChannel) {
13372 mAnalysisChannel->setBundleInfo(bundleInfo);
13373 }
13374 if (mSupportChannel) {
13375 mSupportChannel->setBundleInfo(bundleInfo);
13376 }
13377 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13378 it != mStreamInfo.end(); it++) {
13379 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13380 channel->setBundleInfo(bundleInfo);
13381 }
13382 if (mRawDumpChannel) {
13383 mRawDumpChannel->setBundleInfo(bundleInfo);
13384 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013385 if (mHdrPlusRawSrcChannel) {
13386 mHdrPlusRawSrcChannel->setBundleInfo(bundleInfo);
13387 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013388 }
13389
13390 return rc;
13391}
13392
13393/*===========================================================================
Thierry Strudel295a0ca2016-11-03 18:38:47 -070013394 * FUNCTION : setInstantAEC
13395 *
13396 * DESCRIPTION: Set Instant AEC related params.
13397 *
13398 * PARAMETERS :
13399 * @meta: CameraMetadata reference
13400 *
13401 * RETURN : NO_ERROR on success
13402 * Error codes on failure
13403 *==========================================================================*/
13404int32_t QCamera3HardwareInterface::setInstantAEC(const CameraMetadata &meta)
13405{
13406 int32_t rc = NO_ERROR;
13407 uint8_t val = 0;
13408 char prop[PROPERTY_VALUE_MAX];
13409
13410 // First try to configure instant AEC from framework metadata
13411 if (meta.exists(QCAMERA3_INSTANT_AEC_MODE)) {
13412 val = (uint8_t)meta.find(QCAMERA3_INSTANT_AEC_MODE).data.i32[0];
13413 }
13414
13415 // If framework did not set this value, try to read from set prop.
13416 if (val == 0) {
13417 memset(prop, 0, sizeof(prop));
13418 property_get("persist.camera.instant.aec", prop, "0");
13419 val = (uint8_t)atoi(prop);
13420 }
13421
13422 if ((val >= (uint8_t)CAM_AEC_NORMAL_CONVERGENCE) &&
13423 ( val < (uint8_t)CAM_AEC_CONVERGENCE_MAX)) {
13424 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_INSTANT_AEC, val);
13425 mInstantAEC = val;
13426 mInstantAECSettledFrameNumber = 0;
13427 mInstantAecFrameIdxCount = 0;
13428 LOGH("instantAEC value set %d",val);
13429 if (mInstantAEC) {
13430 memset(prop, 0, sizeof(prop));
13431 property_get("persist.camera.ae.instant.bound", prop, "10");
13432 int32_t aec_frame_skip_cnt = atoi(prop);
13433 if (aec_frame_skip_cnt >= 0) {
13434 mAecSkipDisplayFrameBound = (uint8_t)aec_frame_skip_cnt;
13435 } else {
13436 LOGE("Invalid prop for aec frame bound %d", aec_frame_skip_cnt);
13437 rc = BAD_VALUE;
13438 }
13439 }
13440 } else {
13441 LOGE("Bad instant aec value set %d", val);
13442 rc = BAD_VALUE;
13443 }
13444 return rc;
13445}
13446
13447/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070013448 * FUNCTION : get_num_overall_buffers
13449 *
13450 * DESCRIPTION: Estimate number of pending buffers across all requests.
13451 *
13452 * PARAMETERS : None
13453 *
13454 * RETURN : Number of overall pending buffers
13455 *
13456 *==========================================================================*/
13457uint32_t PendingBuffersMap::get_num_overall_buffers()
13458{
13459 uint32_t sum_buffers = 0;
13460 for (auto &req : mPendingBuffersInRequest) {
13461 sum_buffers += req.mPendingBufferList.size();
13462 }
13463 return sum_buffers;
13464}
13465
13466/*===========================================================================
13467 * FUNCTION : removeBuf
13468 *
13469 * DESCRIPTION: Remove a matching buffer from tracker.
13470 *
13471 * PARAMETERS : @buffer: image buffer for the callback
13472 *
13473 * RETURN : None
13474 *
13475 *==========================================================================*/
void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
{
    bool buffer_found = false;
    // Linear search across all pending requests; the first matching buffer
    // handle is removed and the scan stops.
    for (auto req = mPendingBuffersInRequest.begin();
            req != mPendingBuffersInRequest.end(); req++) {
        for (auto k = req->mPendingBufferList.begin();
                k != req->mPendingBufferList.end(); k++ ) {
            if (k->buffer == buffer) {
                LOGD("Frame %d: Found Frame buffer %p, take it out from mPendingBufferList",
                        req->frame_number, buffer);
                k = req->mPendingBufferList.erase(k);
                if (req->mPendingBufferList.empty()) {
                    // Remove this request from Map
                    // (erase() returns the next element; both loops break
                    // right after, so neither stale iterator is used again.)
                    req = mPendingBuffersInRequest.erase(req);
                }
                buffer_found = true;
                break;
            }
        }
        // Flag-based double break: exit the outer loop once removed.
        if (buffer_found) {
            break;
        }
    }
    LOGD("mPendingBuffersMap.num_overall_buffers = %d",
            get_num_overall_buffers());
}
13502
13503/*===========================================================================
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013504 * FUNCTION : getBufErrStatus
13505 *
13506 * DESCRIPTION: get buffer error status
13507 *
13508 * PARAMETERS : @buffer: buffer handle
13509 *
13510 * RETURN : Error status
13511 *
13512 *==========================================================================*/
13513int32_t PendingBuffersMap::getBufErrStatus(buffer_handle_t *buffer)
13514{
13515 for (auto& req : mPendingBuffersInRequest) {
13516 for (auto& k : req.mPendingBufferList) {
13517 if (k.buffer == buffer)
13518 return k.bufStatus;
13519 }
13520 }
13521 return CAMERA3_BUFFER_STATUS_OK;
13522}
13523
13524/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070013525 * FUNCTION : setPAAFSupport
13526 *
13527 * DESCRIPTION: Set the preview-assisted auto focus support bit in
13528 * feature mask according to stream type and filter
13529 * arrangement
13530 *
13531 * PARAMETERS : @feature_mask: current feature mask, which may be modified
13532 * @stream_type: stream type
13533 * @filter_arrangement: filter arrangement
13534 *
13535 * RETURN : None
13536 *==========================================================================*/
13537void QCamera3HardwareInterface::setPAAFSupport(
13538 cam_feature_mask_t& feature_mask,
13539 cam_stream_type_t stream_type,
13540 cam_color_filter_arrangement_t filter_arrangement)
13541{
13542 LOGD("feature_mask=0x%llx; stream_type=%d, filter_arrangement=%d",
13543 feature_mask, stream_type, filter_arrangement);
13544
13545 switch (filter_arrangement) {
13546 case CAM_FILTER_ARRANGEMENT_RGGB:
13547 case CAM_FILTER_ARRANGEMENT_GRBG:
13548 case CAM_FILTER_ARRANGEMENT_GBRG:
13549 case CAM_FILTER_ARRANGEMENT_BGGR:
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013550 if ((stream_type == CAM_STREAM_TYPE_PREVIEW) ||
13551 (stream_type == CAM_STREAM_TYPE_ANALYSIS) ||
Thierry Strudel3d639192016-09-09 11:52:26 -070013552 (stream_type == CAM_STREAM_TYPE_VIDEO)) {
13553 feature_mask |= CAM_QCOM_FEATURE_PAAF;
13554 }
13555 break;
13556 case CAM_FILTER_ARRANGEMENT_Y:
13557 if (stream_type == CAM_STREAM_TYPE_ANALYSIS) {
13558 feature_mask |= CAM_QCOM_FEATURE_PAAF;
13559 }
13560 break;
13561 default:
13562 break;
13563 }
13564}
13565
13566/*===========================================================================
13567* FUNCTION : getSensorMountAngle
13568*
13569* DESCRIPTION: Retrieve sensor mount angle
13570*
13571* PARAMETERS : None
13572*
13573* RETURN : sensor mount angle in uint32_t
13574*==========================================================================*/
uint32_t QCamera3HardwareInterface::getSensorMountAngle()
{
    // Straight read from the static capability table of the open camera.
    return gCamCapability[mCameraId]->sensor_mount_angle;
}
13579
13580/*===========================================================================
13581* FUNCTION : getRelatedCalibrationData
13582*
13583* DESCRIPTION: Retrieve related system calibration data
13584*
13585* PARAMETERS : None
13586*
13587* RETURN : Pointer of related system calibration data
13588*==========================================================================*/
const cam_related_system_calibration_data_t *QCamera3HardwareInterface::getRelatedCalibrationData()
{
    // Returns a pointer into the static capability table (not a copy);
    // the caller must not outlive gCamCapability.
    return (const cam_related_system_calibration_data_t *)
            &(gCamCapability[mCameraId]->related_cam_calibration);
}
Shuzhen Wangf6890e02016-08-12 14:28:54 -070013594
13595/*===========================================================================
13596 * FUNCTION : is60HzZone
13597 *
13598 * DESCRIPTION: Whether the phone is in zone with 60hz electricity frequency
13599 *
13600 * PARAMETERS : None
13601 *
13602 * RETURN : True if in 60Hz zone, False otherwise
13603 *==========================================================================*/
13604bool QCamera3HardwareInterface::is60HzZone()
13605{
13606 time_t t = time(NULL);
13607 struct tm lt;
13608
13609 struct tm* r = localtime_r(&t, &lt);
13610
13611 if (r == NULL || lt.tm_gmtoff <= -2*60*60 || lt.tm_gmtoff >= 8*60*60)
13612 return true;
13613 else
13614 return false;
13615}
Shuzhen Wanga5da1022016-07-13 20:18:42 -070013616
13617/*===========================================================================
13618 * FUNCTION : adjustBlackLevelForCFA
13619 *
13620 * DESCRIPTION: Adjust the black level pattern in the order of RGGB to the order
13621 * of bayer CFA (Color Filter Array).
13622 *
13623 * PARAMETERS : @input: black level pattern in the order of RGGB
13624 * @output: black level pattern in the order of CFA
13625 * @color_arrangement: CFA color arrangement
13626 *
13627 * RETURN : None
13628 *==========================================================================*/
13629template<typename T>
13630void QCamera3HardwareInterface::adjustBlackLevelForCFA(
13631 T input[BLACK_LEVEL_PATTERN_CNT],
13632 T output[BLACK_LEVEL_PATTERN_CNT],
13633 cam_color_filter_arrangement_t color_arrangement)
13634{
13635 switch (color_arrangement) {
13636 case CAM_FILTER_ARRANGEMENT_GRBG:
13637 output[0] = input[1];
13638 output[1] = input[0];
13639 output[2] = input[3];
13640 output[3] = input[2];
13641 break;
13642 case CAM_FILTER_ARRANGEMENT_GBRG:
13643 output[0] = input[2];
13644 output[1] = input[3];
13645 output[2] = input[0];
13646 output[3] = input[1];
13647 break;
13648 case CAM_FILTER_ARRANGEMENT_BGGR:
13649 output[0] = input[3];
13650 output[1] = input[2];
13651 output[2] = input[1];
13652 output[3] = input[0];
13653 break;
13654 case CAM_FILTER_ARRANGEMENT_RGGB:
13655 output[0] = input[0];
13656 output[1] = input[1];
13657 output[2] = input[2];
13658 output[3] = input[3];
13659 break;
13660 default:
13661 LOGE("Invalid color arrangement to derive dynamic blacklevel");
13662 break;
13663 }
13664}
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013665
/*
 * Copy JPEG- and capture-intent-related settings from an HDR+ request's
 * saved metadata buffer into the framework result metadata.
 *
 * @resultMetadata : framework result metadata, updated in place
 * @settings       : metadata buffer saved with the HDR+ request; logged
 *                   and ignored when null
 */
void QCamera3HardwareInterface::updateHdrPlusResultMetadata(
        CameraMetadata &resultMetadata,
        std::shared_ptr<metadata_buffer_t> settings)
{
    if (settings == nullptr) {
        ALOGE("%s: settings is nullptr.", __FUNCTION__);
        return;
    }

    IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, settings) {
        resultMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
    }

    IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, settings) {
        String8 str((const char *)gps_methods);
        resultMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
    }

    IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, settings) {
        resultMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
    }

    IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, settings) {
        resultMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
    }

    // Quality values are stored as uint32 internally but the framework
    // tags are uint8, hence the narrowing casts below.
    IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, settings) {
        uint8_t fwk_jpeg_quality = static_cast<uint8_t>(*jpeg_quality);
        resultMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
    }

    IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, settings) {
        uint8_t fwk_thumb_quality = static_cast<uint8_t>(*thumb_quality);
        resultMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
    }

    IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, settings) {
        int32_t fwk_thumb_size[2];
        fwk_thumb_size[0] = thumb_size->width;
        fwk_thumb_size[1] = thumb_size->height;
        resultMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
    }

    IF_META_AVAILABLE(uint32_t, intent, CAM_INTF_META_CAPTURE_INTENT, settings) {
        uint8_t fwk_intent = intent[0];
        resultMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &fwk_intent, 1);
    }
}
13714
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080013715bool QCamera3HardwareInterface::trySubmittingHdrPlusRequest(HdrPlusPendingRequest *hdrPlusRequest,
13716 const camera3_capture_request_t &request, const CameraMetadata &metadata)
13717{
13718 if (hdrPlusRequest == nullptr) return false;
13719
13720 // Check noise reduction mode is high quality.
13721 if (!metadata.exists(ANDROID_NOISE_REDUCTION_MODE) ||
13722 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0] !=
13723 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY) {
Chien-Yu Chenee335912017-02-09 17:53:20 -080013724 ALOGD("%s: Not an HDR+ request: ANDROID_NOISE_REDUCTION_MODE is not HQ: %d", __FUNCTION__,
13725 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0]);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080013726 return false;
13727 }
13728
13729 // Check edge mode is high quality.
13730 if (!metadata.exists(ANDROID_EDGE_MODE) ||
13731 metadata.find(ANDROID_EDGE_MODE).data.u8[0] != ANDROID_EDGE_MODE_HIGH_QUALITY) {
13732 ALOGD("%s: Not an HDR+ request: ANDROID_EDGE_MODE is not HQ.", __FUNCTION__);
13733 return false;
13734 }
13735
13736 if (request.num_output_buffers != 1 ||
13737 request.output_buffers[0].stream->format != HAL_PIXEL_FORMAT_BLOB) {
13738 ALOGD("%s: Not an HDR+ request: Only Jpeg output is supported.", __FUNCTION__);
Chien-Yu Chenee335912017-02-09 17:53:20 -080013739 for (uint32_t i = 0; i < request.num_output_buffers; i++) {
13740 ALOGD("%s: output_buffers[%u]: %dx%d format %d", __FUNCTION__, i,
13741 request.output_buffers[0].stream->width,
13742 request.output_buffers[0].stream->height,
13743 request.output_buffers[0].stream->format);
13744 }
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080013745 return false;
13746 }
13747
13748 // Get a YUV buffer from pic channel.
13749 QCamera3PicChannel *picChannel = (QCamera3PicChannel*)request.output_buffers[0].stream->priv;
13750 auto yuvBuffer = std::make_shared<mm_camera_buf_def_t>();
13751 status_t res = picChannel->getYuvBufferForRequest(yuvBuffer.get(), request.frame_number);
13752 if (res != OK) {
13753 ALOGE("%s: Getting an available YUV buffer from pic channel failed: %s (%d)",
13754 __FUNCTION__, strerror(-res), res);
13755 return false;
13756 }
13757
13758 pbcamera::StreamBuffer buffer;
13759 buffer.streamId = kPbYuvOutputStreamId;
13760 buffer.data = yuvBuffer->buffer;
13761 buffer.dataSize = yuvBuffer->frame_len;
13762
13763 pbcamera::CaptureRequest pbRequest;
13764 pbRequest.id = request.frame_number;
13765 pbRequest.outputBuffers.push_back(buffer);
13766
13767 // Submit an HDR+ capture request to HDR+ service.
13768 res = mHdrPlusClient->submitCaptureRequest(&pbRequest);
13769 if (res != OK) {
13770 ALOGE("%s: %d: Submitting a capture request failed: %s (%d)", __FUNCTION__, __LINE__,
13771 strerror(-res), res);
13772 return false;
13773 }
13774
13775 hdrPlusRequest->yuvBuffer = yuvBuffer;
13776 hdrPlusRequest->frameworkOutputBuffers.push_back(request.output_buffers[0]);
13777
13778 return true;
13779}
13780
Chien-Yu Chenee335912017-02-09 17:53:20 -080013781status_t QCamera3HardwareInterface::enableHdrPlusModeLocked()
13782{
13783 if (mHdrPlusClient == nullptr) {
13784 ALOGD("%s: HDR+ client is not created.", __FUNCTION__);
13785 return -ENODEV;
13786 }
13787
13788 // Connect to HDR+ service
13789 status_t res = mHdrPlusClient->connect(this);
13790 if (res != OK) {
13791 LOGE("%s: Failed to connect to HDR+ client: %s (%d).", __FUNCTION__,
13792 strerror(-res), res);
13793 return res;
13794 }
13795
13796 // Set static metadata.
13797 res = mHdrPlusClient->setStaticMetadata(*gStaticMetadata[mCameraId]);
13798 if (res != OK) {
13799 LOGE("%s: Failed set static metadata in HDR+ client: %s (%d).", __FUNCTION__,
13800 strerror(-res), res);
13801 mHdrPlusClient->disconnect();
13802 return res;
13803 }
13804
13805 // Configure stream for HDR+.
13806 res = configureHdrPlusStreamsLocked();
13807 if (res != OK) {
13808 LOGE("%s: Failed to configure HDR+ streams: %s (%d)", __FUNCTION__, strerror(-res), res);
13809 mHdrPlusClient->disconnect();
13810 return res;
13811 }
13812
13813 mHdrPlusModeEnabled = true;
13814 ALOGD("%s: HDR+ mode enabled", __FUNCTION__);
13815
13816 return OK;
13817}
13818
13819void QCamera3HardwareInterface::disableHdrPlusModeLocked()
13820{
13821 // Disconnect from HDR+ service.
13822 if (mHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
13823 mHdrPlusClient->disconnect();
13824 }
13825
13826 mHdrPlusModeEnabled = false;
13827 ALOGD("%s: HDR+ mode disabled", __FUNCTION__);
13828}
13829
13830status_t QCamera3HardwareInterface::configureHdrPlusStreamsLocked()
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080013831{
13832 pbcamera::InputConfiguration inputConfig;
13833 std::vector<pbcamera::StreamConfiguration> outputStreamConfigs;
13834 status_t res = OK;
13835
13836 // Configure HDR+ client streams.
13837 // Get input config.
13838 if (mHdrPlusRawSrcChannel) {
13839 // HDR+ input buffers will be provided by HAL.
13840 res = fillPbStreamConfig(&inputConfig.streamConfig, kPbRaw10InputStreamId,
13841 HAL_PIXEL_FORMAT_RAW10, mHdrPlusRawSrcChannel, /*stream index*/0);
13842 if (res != OK) {
13843 LOGE("%s: Failed to get fill stream config for HDR+ raw src stream: %s (%d)",
13844 __FUNCTION__, strerror(-res), res);
13845 return res;
13846 }
13847
13848 inputConfig.isSensorInput = false;
13849 } else {
13850 // Sensor MIPI will send data to Easel.
13851 inputConfig.isSensorInput = true;
Chien-Yu Chenee335912017-02-09 17:53:20 -080013852 inputConfig.sensorMode.pixelArrayWidth = mSensorModeInfo.pixel_array_size.width;
13853 inputConfig.sensorMode.pixelArrayHeight = mSensorModeInfo.pixel_array_size.height;
13854 inputConfig.sensorMode.activeArrayWidth = mSensorModeInfo.active_array_size.width;
13855 inputConfig.sensorMode.activeArrayHeight = mSensorModeInfo.active_array_size.height;
13856 inputConfig.sensorMode.outputPixelClkHz = mSensorModeInfo.op_pixel_clk;
13857 if (mSensorModeInfo.num_raw_bits != 10) {
13858 ALOGE("%s: Only RAW10 is supported but this sensor mode has %d raw bits.", __FUNCTION__,
13859 mSensorModeInfo.num_raw_bits);
13860 return BAD_VALUE;
13861 }
13862
13863 inputConfig.sensorMode.format = HAL_PIXEL_FORMAT_RAW10;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080013864 }
13865
13866 // Get output configurations.
13867 // Easel may need to output RAW16 buffers if mRawChannel was created.
Chien-Yu Chenee335912017-02-09 17:53:20 -080013868 // TODO: handle RAW16 outputs.
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080013869
13870 // Easel may need to output YUV output buffers if mPictureChannel was created.
13871 pbcamera::StreamConfiguration yuvOutputConfig;
13872 if (mPictureChannel != nullptr) {
13873 res = fillPbStreamConfig(&yuvOutputConfig, kPbYuvOutputStreamId,
13874 HAL_PIXEL_FORMAT_YCrCb_420_SP, mPictureChannel, /*stream index*/0);
13875 if (res != OK) {
13876 LOGE("%s: Failed to get fill stream config for YUV stream: %s (%d)",
13877 __FUNCTION__, strerror(-res), res);
13878
13879 return res;
13880 }
13881
13882 outputStreamConfigs.push_back(yuvOutputConfig);
13883 }
13884
13885 // TODO: consider other channels for YUV output buffers.
13886
13887 res = mHdrPlusClient->configureStreams(inputConfig, outputStreamConfigs);
13888 if (res != OK) {
13889 LOGE("%d: Failed to configure streams with HDR+ client: %s (%d)", __FUNCTION__,
13890 strerror(-res), res);
13891 return res;
13892 }
13893
13894 return OK;
13895}
13896
Chien-Yu Chene687bd02016-12-07 18:30:26 -080013897void QCamera3HardwareInterface::onCaptureResult(pbcamera::CaptureResult *result,
13898 const camera_metadata_t &resultMetadata) {
13899 if (result != nullptr) {
13900 if (result->outputBuffers.size() != 1) {
13901 ALOGE("%s: Number of output buffers (%u) is not supported.", __FUNCTION__,
13902 result->outputBuffers.size());
13903 return;
13904 }
13905
13906 if (result->outputBuffers[0].streamId != kPbYuvOutputStreamId) {
13907 ALOGE("%s: Only YUV output stream is supported. (stream id %d).", __FUNCTION__,
13908 result->outputBuffers[0].streamId);
13909 return;
13910 }
13911
Chien-Yu Chen92724a82017-01-06 11:50:30 -080013912 // Find the pending HDR+ request.
Chien-Yu Chene687bd02016-12-07 18:30:26 -080013913 HdrPlusPendingRequest pendingRequest;
13914 {
13915 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
13916 auto req = mHdrPlusPendingRequests.find(result->requestId);
13917 pendingRequest = req->second;
13918 }
13919
Chien-Yu Chen92724a82017-01-06 11:50:30 -080013920 // Update the result metadata with the settings of the HDR+ still capture request because
13921 // the result metadata belongs to a ZSL buffer.
13922 CameraMetadata metadata;
13923 metadata = &resultMetadata;
13924 updateHdrPlusResultMetadata(metadata, pendingRequest.settings);
13925 camera_metadata_t* updatedResultMetadata = metadata.release();
13926
13927 QCamera3PicChannel *picChannel =
13928 (QCamera3PicChannel*)pendingRequest.frameworkOutputBuffers[0].stream->priv;
13929
Chien-Yu Chene687bd02016-12-07 18:30:26 -080013930 // Check if dumping HDR+ YUV output is enabled.
13931 char prop[PROPERTY_VALUE_MAX];
13932 property_get("persist.camera.hdrplus.dump_yuv", prop, "0");
13933 bool dumpYuvOutput = atoi(prop);
13934
13935 if (dumpYuvOutput) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -080013936 // Dump yuv buffer to a ppm file.
13937 pbcamera::StreamConfiguration outputConfig;
13938 status_t rc = fillPbStreamConfig(&outputConfig, kPbYuvOutputStreamId,
13939 HAL_PIXEL_FORMAT_YCrCb_420_SP, picChannel, /*stream index*/0);
13940 if (rc == OK) {
13941 char buf[FILENAME_MAX] = {};
13942 snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"s_%d_%d_%dx%d.ppm",
13943 result->requestId, result->outputBuffers[0].streamId,
13944 outputConfig.image.width, outputConfig.image.height);
13945
13946 hdrplus_client_utils::writePpm(buf, outputConfig, result->outputBuffers[0]);
13947 } else {
13948 LOGW("%s: Couldn't dump YUV buffer because getting stream config failed: %s (%d).",
13949 __FUNCTION__, strerror(-rc), rc);
13950 }
13951 }
13952
Chien-Yu Chen92724a82017-01-06 11:50:30 -080013953 uint32_t halStreamId = picChannel->getStreamID(picChannel->getStreamTypeMask());
13954 auto halMetadata = std::make_shared<metadata_buffer_t>();
13955 clear_metadata_buffer(halMetadata.get());
13956
13957 // Convert updated result metadata to HAL metadata and return the yuv buffer for Jpeg
13958 // encoding.
13959 status_t res = translateFwkMetadataToHalMetadata(updatedResultMetadata, halMetadata.get(),
13960 halStreamId, /*minFrameDuration*/0);
13961 if (res == OK) {
13962 // Return the buffer to pic channel for encoding.
13963 picChannel->returnYuvBufferAndEncode(pendingRequest.yuvBuffer.get(),
13964 pendingRequest.frameworkOutputBuffers[0].buffer, result->requestId,
13965 halMetadata);
13966 } else {
13967 // Return the buffer without encoding.
13968 // TODO: This should not happen but we may want to report an error buffer to camera
13969 // service.
13970 picChannel->returnYuvBuffer(pendingRequest.yuvBuffer.get());
13971 ALOGE("%s: Translate framework metadata to HAL metadata failed: %s (%d).", __FUNCTION__,
13972 strerror(-res), res);
13973 }
13974
13975 // Send HDR+ metadata to framework.
13976 {
13977 pthread_mutex_lock(&mMutex);
13978
13979 // updatedResultMetadata will be freed in handlePendingResultsWithLock.
13980 handlePendingResultsWithLock(result->requestId, updatedResultMetadata);
13981 pthread_mutex_unlock(&mMutex);
13982 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -080013983
13984 // Remove the HDR+ pending request.
13985 {
13986 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
13987 auto req = mHdrPlusPendingRequests.find(result->requestId);
13988 mHdrPlusPendingRequests.erase(req);
13989 }
13990 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013991}
13992
Chien-Yu Chene687bd02016-12-07 18:30:26 -080013993void QCamera3HardwareInterface::onFailedCaptureResult(pbcamera::CaptureResult *failedResult) {
13994 // TODO: Handle HDR+ capture failures and send the failure to framework.
13995 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
13996 auto pendingRequest = mHdrPlusPendingRequests.find(failedResult->requestId);
13997
13998 // Return the buffer to pic channel.
13999 QCamera3PicChannel *picChannel =
14000 (QCamera3PicChannel*)pendingRequest->second.frameworkOutputBuffers[0].stream->priv;
14001 picChannel->returnYuvBuffer(pendingRequest->second.yuvBuffer.get());
14002
14003 mHdrPlusPendingRequests.erase(pendingRequest);
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014004}
14005
Thierry Strudel3d639192016-09-09 11:52:26 -070014006}; //end namespace qcamera